Java Code Examples for org.apache.flink.table.descriptors.DescriptorProperties#getTableSchema()
The following examples show how to use org.apache.flink.table.descriptors.DescriptorProperties#getTableSchema().
The original project and source file are noted above each example.
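As a quick orientation before the examples: getTableSchema(String) reads a TableSchema that has been flattened into string properties under a given key prefix. The sketch below is a minimal, self-contained illustration of that pattern; it is not taken from any of the projects below, and the property keys follow the legacy "schema.#.name" / "schema.#.type" layout, which is a version-dependent assumption (newer Flink versions write "schema.#.data-type" instead).

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;
import org.apache.flink.table.descriptors.Schema;

public class GetTableSchemaSketch {

    public static void main(String[] args) {
        // A hypothetical flat property map, as a catalog or table factory would receive it.
        // Key layout and type strings are assumptions and vary by Flink version.
        Map<String, String> properties = new HashMap<>();
        properties.put("schema.0.name", "user_id");
        properties.put("schema.0.type", "BIGINT");
        properties.put("schema.1.name", "user_name");
        properties.put("schema.1.type", "VARCHAR");

        // true = normalize (lower-case) keys, as several of the examples below do.
        DescriptorProperties descriptorProperties = new DescriptorProperties(true);
        descriptorProperties.putProperties(properties);

        // Extract the TableSchema stored under the "schema" key prefix (Schema.SCHEMA).
        TableSchema schema = descriptorProperties.getTableSchema(Schema.SCHEMA);
        System.out.println(schema);
    }
}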
Example 1
Source File: KafkaTableSourceSinkFactoryBase.java From Flink-CEPplus with Apache License 2.0
@Override
public StreamTableSink<Row> createStreamTableSink(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    final TableSchema schema = descriptorProperties.getTableSchema(SCHEMA());
    final String topic = descriptorProperties.getString(CONNECTOR_TOPIC);
    final Optional<String> proctime = SchemaValidator.deriveProctimeAttribute(descriptorProperties);
    final List<RowtimeAttributeDescriptor> rowtimeAttributeDescriptors =
        SchemaValidator.deriveRowtimeAttributes(descriptorProperties);

    // see also FLINK-9870
    if (proctime.isPresent() || !rowtimeAttributeDescriptors.isEmpty() ||
            checkForCustomFieldMapping(descriptorProperties, schema)) {
        throw new TableException("Time attributes and custom field mappings are not supported yet.");
    }

    return createKafkaTableSink(
        schema,
        topic,
        getKafkaProperties(descriptorProperties),
        getFlinkKafkaPartitioner(descriptorProperties),
        getSerializationSchema(properties));
}
Example 2
Source File: HBaseTableFactory.java From flink with Apache License 2.0
@Override
public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    // create default configuration from current runtime env (`hbase-site.xml` in classpath) first,
    Configuration hbaseClientConf = HBaseConfiguration.create();
    String hbaseZk = descriptorProperties.getString(CONNECTOR_ZK_QUORUM);
    hbaseClientConf.set(HConstants.ZOOKEEPER_QUORUM, hbaseZk);
    descriptorProperties
        .getOptionalString(CONNECTOR_ZK_NODE_PARENT)
        .ifPresent(v -> hbaseClientConf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, v));

    String hTableName = descriptorProperties.getString(CONNECTOR_TABLE_NAME);
    TableSchema tableSchema = descriptorProperties.getTableSchema(SCHEMA);
    HBaseTableSchema hbaseSchema = validateTableSchema(tableSchema);
    return new HBaseTableSource(hbaseClientConf, hTableName, hbaseSchema, null);
}
Example 3
Source File: KafkaTableSourceSinkFactoryBase.java From flink with Apache License 2.0
@Override
public StreamTableSink<Row> createStreamTableSink(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    final TableSchema schema = descriptorProperties.getTableSchema(SCHEMA);
    final String topic = descriptorProperties.getString(CONNECTOR_TOPIC);
    final Optional<String> proctime = SchemaValidator.deriveProctimeAttribute(descriptorProperties);
    final List<RowtimeAttributeDescriptor> rowtimeAttributeDescriptors =
        SchemaValidator.deriveRowtimeAttributes(descriptorProperties);

    // see also FLINK-9870
    if (proctime.isPresent() || !rowtimeAttributeDescriptors.isEmpty() ||
            checkForCustomFieldMapping(descriptorProperties, schema)) {
        throw new TableException("Time attributes and custom field mappings are not supported yet.");
    }

    return createKafkaTableSink(
        schema,
        topic,
        getKafkaProperties(descriptorProperties),
        getFlinkKafkaPartitioner(descriptorProperties),
        getSerializationSchema(properties));
}
Example 4
Source File: DatahubTableFactory.java From alibaba-flink-connectors with Apache License 2.0
@Override
public TableSink<Row> createTableSink(Map<String, String> prop) {
    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(prop);

    new DatahubDescriptorValidator().validate(params);

    TableSchema schema = params.getTableSchema(SCHEMA);

    String project = params.getString(CONNECTOR_PROJECT);
    String topic = params.getString(CONNECTOR_TOPIC);
    String accessId = params.getString(CONNECTOR_ACCESS_ID);
    String accessKey = params.getString(CONNECTOR_ACCESS_KEY);
    String endpoint = params.getString(CONNECTOR_ENDPOINT);

    return new DatahubTableSink(
        project,
        topic,
        accessId,
        accessKey,
        endpoint,
        schema,
        params
    );
}
Example 5
Source File: TestTableSourceFactoryBase.java From flink with Apache License 2.0
@Override
public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) {
    final DescriptorProperties params = new DescriptorProperties(true);
    params.putProperties(properties);
    final Optional<String> proctime = SchemaValidator.deriveProctimeAttribute(params);
    final List<RowtimeAttributeDescriptor> rowtime = SchemaValidator.deriveRowtimeAttributes(params);
    return new TestTableSource(
        params.getTableSchema(SCHEMA),
        properties.get(testProperty),
        proctime.orElse(null),
        rowtime);
}
Example 6
Source File: CatalogTableImpl.java From flink with Apache License 2.0
/**
 * Construct a {@link CatalogTableImpl} from complete properties that contain the table schema.
 */
public static CatalogTableImpl fromProperties(Map<String, String> properties) {
    DescriptorProperties descriptorProperties = new DescriptorProperties();
    descriptorProperties.putProperties(properties);
    TableSchema tableSchema = descriptorProperties.getTableSchema(Schema.SCHEMA);
    List<String> partitionKeys = descriptorProperties.getPartitionKeys();
    return new CatalogTableImpl(
        tableSchema,
        partitionKeys,
        removeRedundant(properties, tableSchema, partitionKeys),
        ""
    );
}
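Example 6 goes in the opposite direction of most factories here: it rebuilds a table from already-flattened properties. The complementary write path is putTableSchema. The round-trip sketch below is illustrative, not from any of these projects; the field names are made up, and it assumes a Flink version with the DataTypes API (roughly 1.9 and later):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;
import org.apache.flink.table.descriptors.Schema;

public class SchemaRoundTripSketch {

    public static void main(String[] args) {
        // Hypothetical schema with illustrative field names.
        TableSchema original = TableSchema.builder()
            .field("id", DataTypes.BIGINT())
            .field("name", DataTypes.STRING())
            .build();

        // Flatten the schema into string properties under the "schema" prefix...
        DescriptorProperties writer = new DescriptorProperties();
        writer.putTableSchema(Schema.SCHEMA, original);

        // ...and restore it from the flat map with getTableSchema.
        DescriptorProperties reader = new DescriptorProperties();
        reader.putProperties(writer.asMap());
        TableSchema restored = reader.getTableSchema(Schema.SCHEMA);

        // Expected to print true on recent Flink 1.x versions; exact DataType
        // equality on much older versions is not guaranteed.
        System.out.println(original.equals(restored));
    }
}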
Example 7
Source File: KuduTableFactory.java From bahir-flink with Apache License 2.0
@Override
public KuduTableSink createTableSink(Map<String, String> properties) {
    DescriptorProperties descriptorProperties = getValidatedProps(properties);
    String tableName = descriptorProperties.getString(KUDU_TABLE);
    TableSchema schema = descriptorProperties.getTableSchema(SCHEMA);
    return createTableSink(tableName, schema, properties);
}
Example 8
Source File: KuduTableFactory.java From bahir-flink with Apache License 2.0
@Override
public KuduTableSource createTableSource(Map<String, String> properties) {
    DescriptorProperties descriptorProperties = getValidatedProps(properties);
    String tableName = descriptorProperties.getString(KUDU_TABLE);
    TableSchema schema = descriptorProperties.getTableSchema(SCHEMA);
    return createTableSource(tableName, schema, properties);
}
Example 9
Source File: RedisTableSink.java From bahir-flink with Apache License 2.0
public RedisTableSink(Map<String, String> properties) {
    this.properties = properties;
    Preconditions.checkNotNull(properties, "properties should not be null");
    redisMapper = RedisHandlerServices
        .findRedisHandler(RedisMapperHandler.class, properties)
        .createRedisMapper(properties);
    flinkJedisConfigBase = RedisHandlerServices
        .findRedisHandler(FlinkJedisConfigHandler.class, properties)
        .createFlinkJedisConfig(properties);
    final DescriptorProperties descriptorProperties = new DescriptorProperties(true);
    descriptorProperties.putProperties(properties);
    tableSchema = descriptorProperties.getTableSchema(SCHEMA);
}
Example 10
Source File: JsonTableSourceFactory.java From AthenaX with Apache License 2.0
@Override
public StreamTableSource<Row> create(Map<String, String> properties) {
    DescriptorProperties params = new DescriptorProperties(true);
    params.putProperties(properties);
    TableSchema schema = params.getTableSchema(TOPIC_SCHEMA_KEY);
    String topic = params.getString(TOPIC_NAME_KEY);
    Properties conf = new Properties();
    conf.putAll(params.getPrefix(KAFKA_CONFIG_PREFIX));
    return new JsonTableSource(topic, conf, schema);
}
Example 11
Source File: MockTableSourceFactory.java From AthenaX with Apache License 2.0
@Override
public TableSource<Row> create(Map<String, String> properties) {
    DescriptorProperties params = new DescriptorProperties(true);
    params.putProperties(properties);
    TableSchema schema = params.getTableSchema(TABLE_SCHEMA_CONNECTOR_PROPERTY);
    List<Row> rows = deserializeRows(params.getString(TABLE_DATA_CONNECTOR_PROPERTY));
    return new MockTableSource(rows, new RowTypeInfo(schema.getTypes(), schema.getColumnNames()));
}
Example 12
Source File: MockTableSinkProvider.java From AthenaX with Apache License 2.0
@Override
public AppendStreamTableSink<Row> getAppendStreamTableSink(ExternalCatalogTable table) throws IOException {
    DescriptorProperties params = new DescriptorProperties(true);
    table.addProperties(params);
    TableSchema tableSchema = params.getTableSchema(MockExternalCatalogTable.TABLE_SCHEMA_CONNECTOR_PROPERTY);
    RowTypeInfo type = new RowTypeInfo(tableSchema.getTypes(), tableSchema.getColumnNames());
    return new MockAppendStreamTableSink(type);
}
Example 13
Source File: CsvTableSinkFactoryBase.java From flink with Apache License 2.0
protected CsvTableSink createTableSink(
        Boolean isStreaming,
        Map<String, String> properties) {

    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(properties);

    // validate
    new FileSystemValidator().validate(params);
    new OldCsvValidator().validate(params);
    new SchemaValidator(isStreaming, false, false).validate(params);

    // build
    TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
    TableSchema tableSchema = params.getTableSchema(SCHEMA);

    if (!formatSchema.equals(tableSchema)) {
        throw new TableException(
            "Encodings that differ from the schema are not supported yet for CsvTableSink.");
    }

    String path = params.getString(CONNECTOR_PATH);
    String fieldDelimiter = params.getOptionalString(FORMAT_FIELD_DELIMITER).orElse(",");

    CsvTableSink csvTableSink = new CsvTableSink(path, fieldDelimiter);

    return (CsvTableSink) csvTableSink.configure(formatSchema.getFieldNames(), formatSchema.getFieldTypes());
}
Example 14
Source File: TableSourceFactoryMock.java From flink with Apache License 2.0
@Override
public TableSource<Row> createTableSource(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = new DescriptorProperties();
    descriptorProperties.putProperties(properties);
    final TableSchema schema = descriptorProperties.getTableSchema(Schema.SCHEMA);
    return new TableSourceMock(schema.toRowDataType(), schema);
}
Example 15
Source File: HBaseTableFactory.java From flink with Apache License 2.0
@Override
public StreamTableSink<Tuple2<Boolean, Row>> createStreamTableSink(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    HBaseOptions.Builder hbaseOptionsBuilder = HBaseOptions.builder();
    hbaseOptionsBuilder.setZkQuorum(descriptorProperties.getString(CONNECTOR_ZK_QUORUM));
    hbaseOptionsBuilder.setTableName(descriptorProperties.getString(CONNECTOR_TABLE_NAME));
    descriptorProperties
        .getOptionalString(CONNECTOR_ZK_NODE_PARENT)
        .ifPresent(hbaseOptionsBuilder::setZkNodeParent);

    TableSchema tableSchema = descriptorProperties.getTableSchema(SCHEMA);
    HBaseTableSchema hbaseSchema = validateTableSchema(tableSchema);

    HBaseWriteOptions.Builder writeBuilder = HBaseWriteOptions.builder();
    descriptorProperties
        .getOptionalInt(CONNECTOR_WRITE_BUFFER_FLUSH_MAX_ROWS)
        .ifPresent(writeBuilder::setBufferFlushMaxRows);
    descriptorProperties
        .getOptionalMemorySize(CONNECTOR_WRITE_BUFFER_FLUSH_MAX_SIZE)
        .ifPresent(v -> writeBuilder.setBufferFlushMaxSizeInBytes(v.getBytes()));
    descriptorProperties
        .getOptionalDuration(CONNECTOR_WRITE_BUFFER_FLUSH_INTERVAL)
        .ifPresent(v -> writeBuilder.setBufferFlushIntervalMillis(v.toMillis()));

    return new HBaseUpsertTableSink(
        hbaseSchema,
        hbaseOptionsBuilder.build(),
        writeBuilder.build()
    );
}
Example 16
Source File: TestTableSourceFactoryBase.java From Flink-CEPplus with Apache License 2.0
@Override
public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) {
    final DescriptorProperties params = new DescriptorProperties(true);
    params.putProperties(properties);
    final Optional<String> proctime = SchemaValidator.deriveProctimeAttribute(params);
    final List<RowtimeAttributeDescriptor> rowtime = SchemaValidator.deriveRowtimeAttributes(params);
    return new TestTableSource(
        params.getTableSchema(SCHEMA()),
        properties.get(testProperty),
        proctime.orElse(null),
        rowtime);
}
Example 17
Source File: CsvTableSourceFactoryBase.java From flink with Apache License 2.0
protected CsvTableSource createTableSource(
        Boolean isStreaming,
        Map<String, String> properties) {

    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(properties);

    // validate
    new FileSystemValidator().validate(params);
    new OldCsvValidator().validate(params);
    new SchemaValidator(isStreaming, false, false).validate(params);

    // build
    CsvTableSource.Builder csvTableSourceBuilder = new CsvTableSource.Builder();

    TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
    TableSchema tableSchema = params.getTableSchema(SCHEMA);

    // the CsvTableSource needs some rework first
    // for now the schema must be equal to the encoding
    if (!formatSchema.equals(tableSchema)) {
        throw new TableException(
            "Encodings that differ from the schema are not supported yet for CsvTableSources.");
    }

    params.getOptionalString(CONNECTOR_PATH).ifPresent(csvTableSourceBuilder::path);
    params.getOptionalString(FORMAT_FIELD_DELIMITER).ifPresent(csvTableSourceBuilder::fieldDelimiter);
    params.getOptionalString(FORMAT_LINE_DELIMITER).ifPresent(csvTableSourceBuilder::lineDelimiter);

    for (int i = 0; i < formatSchema.getFieldCount(); ++i) {
        csvTableSourceBuilder.field(formatSchema.getFieldNames()[i], formatSchema.getFieldTypes()[i]);
    }
    params.getOptionalCharacter(FORMAT_QUOTE_CHARACTER).ifPresent(csvTableSourceBuilder::quoteCharacter);
    params.getOptionalString(FORMAT_COMMENT_PREFIX).ifPresent(csvTableSourceBuilder::commentPrefix);
    params.getOptionalBoolean(FORMAT_IGNORE_FIRST_LINE).ifPresent(flag -> {
        if (flag) {
            csvTableSourceBuilder.ignoreFirstLine();
        }
    });
    params.getOptionalBoolean(FORMAT_IGNORE_PARSE_ERRORS).ifPresent(flag -> {
        if (flag) {
            csvTableSourceBuilder.ignoreParseErrors();
        }
    });

    return csvTableSourceBuilder.build();
}
Example 18
Source File: CsvTableSinkFactoryBase.java From flink with Apache License 2.0
protected CsvTableSink createTableSink(
        Boolean isStreaming,
        Map<String, String> properties) {

    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(properties);

    // validate
    new FileSystemValidator().validate(params);
    new OldCsvValidator().validate(params);
    new SchemaValidator(isStreaming, false, false).validate(params);

    // build
    TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(params.getTableSchema(SCHEMA));

    // if a schema is defined, no matter derive schema is set or not, will use the defined schema
    final boolean hasSchema = params.hasPrefix(FORMAT_FIELDS);
    if (hasSchema) {
        TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
        if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
            throw new TableException(String.format(
                "Encodings that differ from the schema are not supported yet for" +
                    " CsvTableSink, format schema is '%s', but table schema is '%s'.",
                formatSchema,
                tableSchema));
        }
    }

    String path = params.getString(CONNECTOR_PATH);
    String fieldDelimiter = params.getOptionalString(FORMAT_FIELD_DELIMITER).orElse(",");
    Optional<String> writeModeParm = params.getOptionalString(FORMAT_WRITE_MODE);
    FileSystem.WriteMode writeMode =
        (writeModeParm.isPresent()) ? FileSystem.WriteMode.valueOf(writeModeParm.get()) : null;
    int numFiles = params.getOptionalInt(FORMAT_NUM_FILES).orElse(-1);

    // bridge to java.sql.Timestamp/Time/Date
    DataType[] dataTypes = Arrays.stream(tableSchema.getFieldDataTypes())
        .map(dt -> {
            switch (dt.getLogicalType().getTypeRoot()) {
                case TIMESTAMP_WITHOUT_TIME_ZONE:
                    return dt.bridgedTo(Timestamp.class);
                case TIME_WITHOUT_TIME_ZONE:
                    return dt.bridgedTo(Time.class);
                case DATE:
                    return dt.bridgedTo(Date.class);
                default:
                    return dt;
            }
        })
        .toArray(DataType[]::new);

    return new CsvTableSink(
        path,
        fieldDelimiter,
        numFiles,
        writeMode,
        tableSchema.getFieldNames(),
        dataTypes);
}
Example 19
Source File: CsvTableSourceFactoryBase.java From flink with Apache License 2.0
protected CsvTableSource createTableSource(
        Boolean isStreaming,
        Map<String, String> properties) {

    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(properties);

    // validate
    new FileSystemValidator().validate(params);
    new OldCsvValidator().validate(params);
    new SchemaValidator(isStreaming, false, false).validate(params);

    // build
    CsvTableSource.Builder csvTableSourceBuilder = new CsvTableSource.Builder();

    TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(params.getTableSchema(SCHEMA));

    // if a schema is defined, no matter derive schema is set or not, will use the defined schema
    final boolean hasSchema = params.hasPrefix(FORMAT_FIELDS);
    if (hasSchema) {
        TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
        // the CsvTableSource needs some rework first
        // for now the schema must be equal to the encoding
        // Ignore conversion classes in DataType
        if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
            throw new TableException(String.format(
                "Encodings that differ from the schema are not supported yet for" +
                    " CsvTableSource, format schema is '%s', but table schema is '%s'.",
                formatSchema,
                tableSchema));
        }
    }

    params.getOptionalString(CONNECTOR_PATH).ifPresent(csvTableSourceBuilder::path);
    params.getOptionalString(FORMAT_FIELD_DELIMITER).ifPresent(csvTableSourceBuilder::fieldDelimiter);
    params.getOptionalString(FORMAT_LINE_DELIMITER).ifPresent(csvTableSourceBuilder::lineDelimiter);

    for (int i = 0; i < tableSchema.getFieldCount(); ++i) {
        csvTableSourceBuilder.field(tableSchema.getFieldNames()[i], tableSchema.getFieldDataTypes()[i]);
    }
    params.getOptionalCharacter(FORMAT_QUOTE_CHARACTER).ifPresent(csvTableSourceBuilder::quoteCharacter);
    params.getOptionalString(FORMAT_COMMENT_PREFIX).ifPresent(csvTableSourceBuilder::commentPrefix);
    params.getOptionalBoolean(FORMAT_IGNORE_FIRST_LINE).ifPresent(flag -> {
        if (flag) {
            csvTableSourceBuilder.ignoreFirstLine();
        }
    });
    params.getOptionalBoolean(FORMAT_IGNORE_PARSE_ERRORS).ifPresent(flag -> {
        if (flag) {
            csvTableSourceBuilder.ignoreParseErrors();
        }
    });

    return csvTableSourceBuilder.build();
}