org.apache.flink.table.descriptors.ConnectorDescriptor Java Examples
The following examples show how to use
org.apache.flink.table.descriptors.ConnectorDescriptor.
You can vote up the examples you find useful or vote down the ones you don't,
and follow the links above each example to the original project or source file. You may also check out the related API usage on the sidebar.
Example #1
Source File: SchemaITest.java From pulsar-flink with Apache License 2.0 | 5 votes |
/**
 * Builds a Pulsar connector descriptor configured as a source for the given topic.
 *
 * @param tableName topic to read from
 * @return a Pulsar source descriptor starting from the earliest message, with a
 *     5s partition-discovery interval
 */
private ConnectorDescriptor getPulsarSourceDescriptor(String tableName) {
    final ConnectorDescriptor sourceDescriptor =
            new Pulsar()
                    .urls(getServiceUrl(), getAdminUrl())
                    .topic(tableName)
                    .startFromEarliest()
                    .property(PulsarOptions.PARTITION_DISCOVERY_INTERVAL_MS_OPTION_KEY, "5000");
    return sourceDescriptor;
}
Example #2
Source File: CatalogTableBuilder.java From flink with Apache License 2.0 | 5 votes |
/**
 * Creates a builder for a catalog table backed by the given connector.
 *
 * @param connectorDescriptor connector describing the external system
 * @param tableSchema schema of the table, must not be null
 */
public CatalogTableBuilder(ConnectorDescriptor connectorDescriptor, TableSchema tableSchema) {
    super(connectorDescriptor);
    this.tableSchema = Preconditions.checkNotNull(tableSchema);
    // Non-generic tables are not supported yet, so the flag is fixed to true.
    this.isGeneric = true;
}
Example #3
Source File: ITestUtil.java From AthenaX with Apache License 2.0 | 5 votes |
/**
 * Builds a {@link KafkaInputExternalCatalogTable} whose connector properties carry
 * the static test schema plus the caller-supplied Kafka properties.
 *
 * @param props extra connector properties (broker address, group id, ...) merged in
 * @return a catalog table backed by a "kafka+json" connector descriptor
 */
static KafkaInputExternalCatalogTable getKafkaExternalCatalogTable(Map<String, String> props) {
    ConnectorDescriptor connector =
            new ConnectorDescriptor("kafka+json", 1, false) {
                @Override
                public void addConnectorProperties(DescriptorProperties properties) {
                    // Register the fixed test schema under the expected property key.
                    properties.putTableSchema("athenax.kafka.topic.schema", KafkaInputExternalCatalogTable.SCHEMA);
                    // Merge in whatever the caller provided.
                    properties.putProperties(props);
                }
            };
    return new KafkaInputExternalCatalogTable(connector);
}
Example #4
Source File: KafkaJsonConnectorITest.java From AthenaX with Apache License 2.0 | 5 votes |
/**
 * Creates a mock {@link ExternalCatalogTable} describing a Kafka+JSON source with a
 * single INT column named "foo".
 *
 * @param topic Kafka topic the table reads from
 * @param brokerAddress bootstrap server address for the Kafka consumer
 * @return an external catalog table with empty format/schema/statistics/metadata parts
 */
private static ExternalCatalogTable mockExternalCatalogTable(String topic, String brokerAddress) {
    // Captured by the anonymous descriptor below; must stay effectively final.
    final TableSchema schema =
            new TableSchema(new String[] {"foo"}, new TypeInformation[] {INT_TYPE_INFO});
    ConnectorDescriptor connector =
            new ConnectorDescriptor("kafka+json", 1, false) {
                @Override
                public void addConnectorProperties(DescriptorProperties properties) {
                    properties.putTableSchema(TOPIC_SCHEMA_KEY, schema);
                    properties.putString(TOPIC_NAME_KEY, topic);
                    // Consumer configuration is namespaced under the Kafka config prefix.
                    properties.putString(KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.GROUP_ID_CONFIG, "foo");
                    properties.putString(KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerAddress);
                    properties.putString(KAFKA_CONFIG_PREFIX + "." + ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
                }
            };
    return new ExternalCatalogTable(connector, Option.empty(), Option.empty(), Option.empty(), Option.empty());
}
Example #5
Source File: MockExternalCatalogTable.java From AthenaX with Apache License 2.0 | 5 votes |
/**
 * Converts this mock table into an {@link ExternalCatalogTable} whose connector
 * properties embed both the table schema and the serialized row data.
 *
 * @return an external catalog table with empty format/schema/statistics/metadata parts
 */
ExternalCatalogTable toExternalCatalogTable() {
    // Captured by the anonymous descriptor below; must stay effectively final.
    final TableSchema resolvedSchema = new TableSchema(schema.getFieldNames(), schema.getFieldTypes());
    ConnectorDescriptor connector =
            new ConnectorDescriptor(CONNECTOR_TYPE, CONNECTOR_VERSION, false) {
                @Override
                public void addConnectorProperties(DescriptorProperties properties) {
                    properties.putTableSchema(TABLE_SCHEMA_CONNECTOR_PROPERTY, resolvedSchema);
                    // Rows travel inside the descriptor as a serialized string property.
                    properties.putString(TABLE_DATA_CONNECTOR_PROPERTY, serializeRows());
                }
            };
    return new ExternalCatalogTable(connector, Option.empty(), Option.empty(), Option.empty(), Option.empty());
}
Example #6
Source File: CatalogTableBuilder.java From flink with Apache License 2.0 | 5 votes |
/**
 * Constructs a catalog-table builder from a connector descriptor and a schema.
 *
 * @param connectorDescriptor descriptor of the external connector
 * @param tableSchema table schema; a {@code NullPointerException} is thrown if null
 */
public CatalogTableBuilder(ConnectorDescriptor connectorDescriptor, TableSchema tableSchema) {
    super(connectorDescriptor);
    this.tableSchema = Preconditions.checkNotNull(tableSchema);
    // Only generic tables are supported for now, hence the hard-coded flag.
    this.isGeneric = true;
}
Example #7
Source File: SchemaITest.java From pulsar-flink with Apache License 2.0 | 5 votes |
/**
 * Builds a Pulsar connector descriptor configured as a sink for the given topic.
 *
 * @param tableName topic to write to
 * @return a Pulsar sink descriptor that flushes on checkpoint and fails on write errors
 */
private ConnectorDescriptor getPulsarSinkDescriptor(String tableName) {
    final ConnectorDescriptor sinkDescriptor =
            new Pulsar()
                    .urls(getServiceUrl(), getAdminUrl())
                    .topic(tableName)
                    .property(PulsarOptions.FLUSH_ON_CHECKPOINT_OPTION_KEY, "true")
                    .property(PulsarOptions.FAIL_ON_WRITE_OPTION_KEY, "true");
    return sinkDescriptor;
}
Example #8
Source File: FlinkPulsarTableITest.java From pulsar-flink with Apache License 2.0 | 5 votes |
/**
 * Builds a Pulsar connector descriptor for the given topic, reading from the
 * earliest message with a 5s partition-discovery interval.
 *
 * @param tableName topic the descriptor points at
 * @return the configured Pulsar descriptor
 */
private ConnectorDescriptor getPulsarDescriptor(String tableName) {
    final ConnectorDescriptor pulsarDescriptor =
            new Pulsar()
                    .urls(getServiceUrl(), getAdminUrl())
                    .topic(tableName)
                    .startFromEarliest()
                    .property(PulsarOptions.PARTITION_DISCOVERY_INTERVAL_MS_OPTION_KEY, "5000");
    return pulsarDescriptor;
}
Example #9
Source File: StreamTableEnvironmentImpl.java From flink with Apache License 2.0 | 4 votes |
/**
 * {@inheritDoc}
 *
 * <p>Delegates to the base implementation and narrows the result to the
 * streaming-specific descriptor type.
 */
@Override
public StreamTableDescriptor connect(ConnectorDescriptor connectorDescriptor) {
    ConnectTableDescriptor descriptor = super.connect(connectorDescriptor);
    return (StreamTableDescriptor) descriptor;
}
Example #10
Source File: TableEnvironmentImpl.java From flink with Apache License 2.0 | 4 votes |
/**
 * {@inheritDoc}
 *
 * <p>Always hands back a streaming table descriptor bound to this environment.
 */
@Override
public ConnectTableDescriptor connect(ConnectorDescriptor connectorDescriptor) {
    final StreamTableDescriptor descriptor = new StreamTableDescriptor(this, connectorDescriptor);
    return descriptor;
}
Example #11
Source File: ITestUtil.java From AthenaX with Apache License 2.0 | 4 votes |
/**
 * Wraps the given connector descriptor as an external catalog table with no
 * format, schema, statistics, or metadata descriptors (all four are empty).
 *
 * @param descriptor connector descriptor describing the Kafka source
 */
KafkaInputExternalCatalogTable(ConnectorDescriptor descriptor) { super(descriptor, Option.empty(), Option.empty(), Option.empty(), Option.empty()); }
Example #12
Source File: FlinkPravegaTableSourceTest.java From flink-connectors with Apache License 2.0 | 4 votes |
/**
 * Test descriptor that simply forwards the connector descriptor to the base class.
 *
 * @param connectorDescriptor connector descriptor describing the external system
 */
public TestTableDescriptor(ConnectorDescriptor connectorDescriptor) { super(connectorDescriptor); }
Example #13
Source File: TableEnvironmentImpl.java From flink with Apache License 2.0 | 4 votes |
/**
 * {@inheritDoc}
 *
 * <p>Creates a streaming table descriptor wired to this environment's
 * registration facility.
 */
@Override
public ConnectTableDescriptor connect(ConnectorDescriptor connectorDescriptor) {
    final StreamTableDescriptor descriptor =
            new StreamTableDescriptor(registration, connectorDescriptor);
    return descriptor;
}
Example #14
Source File: StreamTableEnvironmentImpl.java From flink with Apache License 2.0 | 4 votes |
/**
 * {@inheritDoc}
 *
 * <p>The base class produces the descriptor; this override only refines the
 * static return type for streaming callers.
 */
@Override
public StreamTableDescriptor connect(ConnectorDescriptor connectorDescriptor) {
    final ConnectTableDescriptor base = super.connect(connectorDescriptor);
    return (StreamTableDescriptor) base;
}
Example #15
Source File: TableEnvironment.java From flink with Apache License 2.0 | 2 votes |
/**
 * Creates a table source and/or table sink from a descriptor.
 *
 * <p>Descriptors allow for declaring the communication to external systems in an
 * implementation-agnostic way. The classpath is scanned for suitable table factories that match
 * the desired configuration.
 *
 * <p>The following example shows how to read from a connector using a JSON format and
 * register a table source as "MyTable":
 *
 * <pre>
 * {@code
 *
 * tableEnv
 *   .connect(
 *     new ExternalSystemXYZ()
 *       .version("0.11"))
 *   .withFormat(
 *     new Json()
 *       .jsonSchema("{...}")
 *       .failOnMissingField(false))
 *   .withSchema(
 *     new Schema()
 *       .field("user-name", "VARCHAR").from("u_name")
 *       .field("count", "DECIMAL"))
 *   .registerSource("MyTable");
 * }
 * </pre>
 *
 * @param connectorDescriptor connector descriptor describing the external system
 * @return a descriptor for further configuring and registering the table
 */
ConnectTableDescriptor connect(ConnectorDescriptor connectorDescriptor);
Example #16
Source File: StreamTableEnvironment.java From flink with Apache License 2.0 | 2 votes |
/**
 * Creates a table source and/or table sink from a descriptor.
 *
 * <p>Descriptors allow for declaring the communication to external systems in an
 * implementation-agnostic way. The classpath is scanned for suitable table factories that match
 * the desired configuration.
 *
 * <p>The following example shows how to read from a Kafka connector using a JSON format and
 * registering a table source "MyTable" in append mode:
 *
 * <pre>
 * {@code
 *
 * tableEnv
 *   .connect(
 *     new Kafka()
 *       .version("0.11")
 *       .topic("clicks")
 *       .property("zookeeper.connect", "localhost")
 *       .property("group.id", "click-group")
 *       .startFromEarliest())
 *   .withFormat(
 *     new Json()
 *       .jsonSchema("{...}")
 *       .failOnMissingField(false))
 *   .withSchema(
 *     new Schema()
 *       .field("user-name", "VARCHAR").from("u_name")
 *       .field("count", "DECIMAL")
 *       .field("proc-time", "TIMESTAMP").proctime())
 *   .inAppendMode()
 *   .registerSource("MyTable")
 * }
 * </pre>
 *
 * @param connectorDescriptor connector descriptor describing the external system
 * @return a streaming descriptor for further configuring and registering the table
 */
@Override
StreamTableDescriptor connect(ConnectorDescriptor connectorDescriptor);
Example #17
Source File: BatchTableEnvironment.java From flink with Apache License 2.0 | 2 votes |
/**
 * Creates a table source and/or table sink from a descriptor.
 *
 * <p>Descriptors allow for declaring the communication to external systems in an
 * implementation-agnostic way. The classpath is scanned for suitable table factories that match
 * the desired configuration.
 *
 * <p>The following example shows how to read from a connector using a JSON format and
 * registering a table source as "MyTable":
 *
 * <pre>
 * {@code
 *
 * tableEnv
 *   .connect(
 *     new ExternalSystemXYZ()
 *       .version("0.11"))
 *   .withFormat(
 *     new Json()
 *       .jsonSchema("{...}")
 *       .failOnMissingField(false))
 *   .withSchema(
 *     new Schema()
 *       .field("user-name", "VARCHAR").from("u_name")
 *       .field("count", "DECIMAL"))
 *   .registerSource("MyTable")
 * }
 * </pre>
 *
 * @param connectorDescriptor connector descriptor describing the external system
 * @return a batch descriptor for further configuring and registering the table
 */
@Override
BatchTableDescriptor connect(ConnectorDescriptor connectorDescriptor);
Example #18
Source File: TableEnvironment.java From flink with Apache License 2.0 | 2 votes |
/**
 * Creates a temporary table from a descriptor.
 *
 * <p>Descriptors allow for declaring the communication to external systems in an
 * implementation-agnostic way. The classpath is scanned for suitable table factories that match
 * the desired configuration.
 *
 * <p>The following example shows how to read from a connector using a JSON format and
 * register a temporary table as "MyTable":
 *
 * <pre>
 * {@code
 *
 * tableEnv
 *   .connect(
 *     new ExternalSystemXYZ()
 *       .version("0.11"))
 *   .withFormat(
 *     new Json()
 *       .jsonSchema("{...}")
 *       .failOnMissingField(false))
 *   .withSchema(
 *     new Schema()
 *       .field("user-name", "VARCHAR").from("u_name")
 *       .field("count", "DECIMAL"))
 *   .createTemporaryTable("MyTable");
 * }
 * </pre>
 *
 * @param connectorDescriptor connector descriptor describing the external system
 * @return a descriptor for further configuring and registering the table
 * @deprecated The SQL {@code CREATE TABLE} DDL is richer than this part of the API. This method
 *     might be refactored in the next versions. Please use {@link #executeSql(String) executeSql(ddl)}
 *     to register a table instead.
 */
@Deprecated
ConnectTableDescriptor connect(ConnectorDescriptor connectorDescriptor);
Example #19
Source File: StreamTableEnvironment.java From flink with Apache License 2.0 | 2 votes |
/**
 * Creates a table source and/or table sink from a descriptor.
 *
 * <p>Descriptors allow for declaring the communication to external systems in an
 * implementation-agnostic way. The classpath is scanned for suitable table factories that match
 * the desired configuration.
 *
 * <p>The following example shows how to read from a Kafka connector using a JSON format and
 * registering a table source "MyTable" in append mode:
 *
 * <pre>
 * {@code
 *
 * tableEnv
 *   .connect(
 *     new Kafka()
 *       .version("0.11")
 *       .topic("clicks")
 *       .property("group.id", "click-group")
 *       .startFromEarliest())
 *   .withFormat(
 *     new Json()
 *       .jsonSchema("{...}")
 *       .failOnMissingField(false))
 *   .withSchema(
 *     new Schema()
 *       .field("user-name", "VARCHAR").from("u_name")
 *       .field("count", "DECIMAL")
 *       .field("proc-time", "TIMESTAMP").proctime())
 *   .inAppendMode()
 *   .createTemporaryTable("MyTable")
 * }
 * </pre>
 *
 * @param connectorDescriptor connector descriptor describing the external system
 * @return a streaming descriptor for further configuring and registering the table
 * @deprecated The SQL {@code CREATE TABLE} DDL is richer than this part of the API. This method
 *     might be refactored in the next versions. Please use {@link #executeSql(String) executeSql(ddl)}
 *     to register a table instead.
 */
@Override
@Deprecated
StreamTableDescriptor connect(ConnectorDescriptor connectorDescriptor);
Example #20
Source File: BatchTableEnvironment.java From flink with Apache License 2.0 | 2 votes |
/**
 * Creates a temporary table from a descriptor.
 *
 * <p>Descriptors allow for declaring the communication to external systems in an
 * implementation-agnostic way. The classpath is scanned for suitable table factories that match
 * the desired configuration.
 *
 * <p>The following example shows how to read from a connector using a JSON format and
 * registering a temporary table as "MyTable":
 *
 * <pre>
 * {@code
 *
 * tableEnv
 *   .connect(
 *     new ExternalSystemXYZ()
 *       .version("0.11"))
 *   .withFormat(
 *     new Json()
 *       .jsonSchema("{...}")
 *       .failOnMissingField(false))
 *   .withSchema(
 *     new Schema()
 *       .field("user-name", "VARCHAR").from("u_name")
 *       .field("count", "DECIMAL"))
 *   .createTemporaryTable("MyTable")
 * }
 * </pre>
 *
 * @param connectorDescriptor connector descriptor describing the external system
 * @return a batch descriptor for further configuring and registering the table
 * @deprecated The SQL {@code CREATE TABLE} DDL is richer than this part of the API. This method
 *     might be refactored in the next versions. Please use {@link #executeSql(String) executeSql(ddl)}
 *     to register a table instead.
 */
@Override
@Deprecated
BatchTableDescriptor connect(ConnectorDescriptor connectorDescriptor);