Java Code Examples for org.apache.flink.table.descriptors.DescriptorProperties#getString()
The following examples show how to use org.apache.flink.table.descriptors.DescriptorProperties#getString(). Each example is taken from an open source project; the source file, project, and license are noted above it.
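Before the project snippets, here is a minimal self-contained sketch of the method in isolation (the key names are hypothetical, chosen only for illustration): getString(key) returns the value stored under the key and fails validation when the key is absent, while getOptionalString(key) is the non-throwing variant that several examples below use for optional keys.

import java.util.Optional;

import org.apache.flink.table.descriptors.DescriptorProperties;

public class GetStringDemo {

    public static void main(String[] args) {
        DescriptorProperties props = new DescriptorProperties();
        // "connector.topic" is a made-up key used only to illustrate the API.
        props.putString("connector.topic", "orders");

        // getString(key) returns the stored value; a missing key
        // fails with a ValidationException.
        String topic = props.getString("connector.topic");
        System.out.println("topic = " + topic);

        // getOptionalString(key) is the safe variant for optional keys.
        Optional<String> groupId = props.getOptionalString("connector.group.id");
        System.out.println("group.id present: " + groupId.isPresent());
    }
}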
Example 1
Source File: KafkaTableSourceSinkFactoryBase.java From Flink-CEPplus with Apache License 2.0
@Override
public StreamTableSink<Row> createStreamTableSink(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    final TableSchema schema = descriptorProperties.getTableSchema(SCHEMA());
    final String topic = descriptorProperties.getString(CONNECTOR_TOPIC);
    final Optional<String> proctime = SchemaValidator.deriveProctimeAttribute(descriptorProperties);
    final List<RowtimeAttributeDescriptor> rowtimeAttributeDescriptors =
        SchemaValidator.deriveRowtimeAttributes(descriptorProperties);

    // see also FLINK-9870
    if (proctime.isPresent() || !rowtimeAttributeDescriptors.isEmpty() ||
            checkForCustomFieldMapping(descriptorProperties, schema)) {
        throw new TableException("Time attributes and custom field mappings are not supported yet.");
    }

    return createKafkaTableSink(
        schema,
        topic,
        getKafkaProperties(descriptorProperties),
        getFlinkKafkaPartitioner(descriptorProperties),
        getSerializationSchema(properties));
}
Example 2
Source File: KafkaTableSourceSinkFactoryBase.java From flink with Apache License 2.0
@Override
public StreamTableSink<Row> createStreamTableSink(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    final TableSchema schema = descriptorProperties.getTableSchema(SCHEMA);
    final String topic = descriptorProperties.getString(CONNECTOR_TOPIC);
    final Optional<String> proctime = SchemaValidator.deriveProctimeAttribute(descriptorProperties);
    final List<RowtimeAttributeDescriptor> rowtimeAttributeDescriptors =
        SchemaValidator.deriveRowtimeAttributes(descriptorProperties);

    // see also FLINK-9870
    if (proctime.isPresent() || !rowtimeAttributeDescriptors.isEmpty() ||
            checkForCustomFieldMapping(descriptorProperties, schema)) {
        throw new TableException("Time attributes and custom field mappings are not supported yet.");
    }

    return createKafkaTableSink(
        schema,
        topic,
        getKafkaProperties(descriptorProperties),
        getFlinkKafkaPartitioner(descriptorProperties),
        getSerializationSchema(properties));
}
Example 3
Source File: KafkaTableSourceSinkFactoryBase.java From flink with Apache License 2.0
@Override
public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    final String topic = descriptorProperties.getString(CONNECTOR_TOPIC);
    final DeserializationSchema<Row> deserializationSchema = getDeserializationSchema(properties);
    final StartupOptions startupOptions = getStartupOptions(descriptorProperties, topic);

    return createKafkaTableSource(
        descriptorProperties.getTableSchema(SCHEMA),
        SchemaValidator.deriveProctimeAttribute(descriptorProperties),
        SchemaValidator.deriveRowtimeAttributes(descriptorProperties),
        SchemaValidator.deriveFieldMapping(
            descriptorProperties,
            Optional.of(deserializationSchema.getProducedType())),
        topic,
        getKafkaProperties(descriptorProperties),
        deserializationSchema,
        startupOptions.startupMode,
        startupOptions.specificOffsets);
}
Example 4
Source File: DatahubTableFactory.java From alibaba-flink-connectors with Apache License 2.0
@Override
public TableSink<Row> createTableSink(Map<String, String> prop) {
    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(prop);

    new DatahubDescriptorValidator().validate(params);

    TableSchema schema = params.getTableSchema(SCHEMA);

    String project = params.getString(CONNECTOR_PROJECT);
    String topic = params.getString(CONNECTOR_TOPIC);
    String accessId = params.getString(CONNECTOR_ACCESS_ID);
    String accessKey = params.getString(CONNECTOR_ACCESS_KEY);
    String endpoint = params.getString(CONNECTOR_ENDPOINT);

    return new DatahubTableSink(
        project,
        topic,
        accessId,
        accessKey,
        endpoint,
        schema,
        params);
}
Example 5
Source File: PulsarCatalogFactory.java From pulsar-flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
    DescriptorProperties dp = getValidateProperties(properties);
    String defaultDB = dp.getOptionalString(CATALOG_DEFAULT_DATABASE).orElse("public/default");
    String adminUrl = dp.getString(CATALOG_ADMIN_URL);

    return new PulsarCatalog(adminUrl, name, dp.asMap(), defaultDB);
}
Example 6
Source File: TableEntry.java From flink with Apache License 2.0
private static TableEntry create(DescriptorProperties properties) {
    properties.validateString(TABLES_NAME, false, 1);

    properties.validateEnumValues(
        TABLES_TYPE,
        false,
        Arrays.asList(
            TABLES_TYPE_VALUE_SOURCE,
            TABLES_TYPE_VALUE_SOURCE_TABLE,
            TABLES_TYPE_VALUE_SINK,
            TABLES_TYPE_VALUE_SINK_TABLE,
            TABLES_TYPE_VALUE_BOTH,
            TABLES_TYPE_VALUE_SOURCE_SINK_TABLE,
            TABLES_TYPE_VALUE_VIEW,
            TABLES_TYPE_VALUE_TEMPORAL_TABLE));

    final String name = properties.getString(TABLES_NAME);
    final DescriptorProperties cleanedProperties =
        properties.withoutKeys(Arrays.asList(TABLES_NAME, TABLES_TYPE));

    switch (properties.getString(TABLES_TYPE)) {
        case TABLES_TYPE_VALUE_SOURCE:
        case TABLES_TYPE_VALUE_SOURCE_TABLE:
            return new SourceTableEntry(name, cleanedProperties);
        case TABLES_TYPE_VALUE_SINK:
        case TABLES_TYPE_VALUE_SINK_TABLE:
            return new SinkTableEntry(name, cleanedProperties);
        case TABLES_TYPE_VALUE_BOTH:
        case TABLES_TYPE_VALUE_SOURCE_SINK_TABLE:
            return new SourceSinkTableEntry(name, cleanedProperties);
        case TABLES_TYPE_VALUE_VIEW:
            return new ViewEntry(name, cleanedProperties);
        case TABLES_TYPE_VALUE_TEMPORAL_TABLE:
            return new TemporalTableEntry(name, cleanedProperties);
        default:
            throw new SqlClientException("Unexpected table type.");
    }
}
Example 7
Source File: JdbcTableSourceSinkFactory.java From flink with Apache License 2.0
private JdbcOptions getJdbcOptions(DescriptorProperties descriptorProperties) {
    final String url = descriptorProperties.getString(CONNECTOR_URL);
    final JdbcOptions.Builder builder = JdbcOptions.builder()
        .setDBUrl(url)
        .setTableName(descriptorProperties.getString(CONNECTOR_TABLE))
        .setDialect(JdbcDialects.get(url).get());

    descriptorProperties.getOptionalString(CONNECTOR_DRIVER).ifPresent(builder::setDriverName);
    descriptorProperties.getOptionalString(CONNECTOR_USERNAME).ifPresent(builder::setUsername);
    descriptorProperties.getOptionalString(CONNECTOR_PASSWORD).ifPresent(builder::setPassword);

    return builder.build();
}
Example 8
Source File: JsonTableSourceFactory.java From AthenaX with Apache License 2.0
@Override
public StreamTableSource<Row> create(Map<String, String> properties) {
    DescriptorProperties params = new DescriptorProperties(true);
    params.putProperties(properties);

    TableSchema schema = params.getTableSchema(TOPIC_SCHEMA_KEY);
    String topic = params.getString(TOPIC_NAME_KEY);
    Properties conf = new Properties();
    conf.putAll(params.getPrefix(KAFKA_CONFIG_PREFIX));

    return new JsonTableSource(topic, conf, schema);
}
Example 9
Source File: TemporalTableEntry.java From flink with Apache License 2.0
TemporalTableEntry(String name, DescriptorProperties properties) {
    super(name, properties);

    historyTable = properties.getString(TABLES_HISTORY_TABLE);
    primaryKeyFields = properties.getArray(TABLES_PRIMARY_KEY, properties::getString);
    timeAttribute = properties.getString(TABLES_TIME_ATTRIBUTE);
}
Example 10
Source File: AvroRowFormatFactory.java From Flink-CEPplus with Apache License 2.0
@Override
public SerializationSchema<Row> createSerializationSchema(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    // create and configure
    if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) {
        return new AvroRowSerializationSchema(
            descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class));
    } else {
        return new AvroRowSerializationSchema(
            descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA));
    }
}
Example 11
Source File: AvroRowFormatFactory.java From Flink-CEPplus with Apache License 2.0
@Override
public DeserializationSchema<Row> createDeserializationSchema(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    // create and configure
    if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) {
        return new AvroRowDeserializationSchema(
            descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class));
    } else {
        return new AvroRowDeserializationSchema(
            descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA));
    }
}
Example 12
Source File: TemporalTableEntry.java From Flink-CEPplus with Apache License 2.0
TemporalTableEntry(String name, DescriptorProperties properties) {
    super(name, properties);

    historyTable = properties.getString(TABLES_HISTORY_TABLE);
    primaryKeyFields = properties.getArray(TABLES_PRIMARY_KEY, properties::getString);
    timeAttribute = properties.getString(TABLES_TIME_ATTRIBUTE);
}
Example 13
Source File: AvroRowFormatFactory.java From flink with Apache License 2.0
@Override
public SerializationSchema<Row> createSerializationSchema(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    // create and configure
    if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) {
        return new AvroRowSerializationSchema(
            descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class));
    } else {
        return new AvroRowSerializationSchema(
            descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA));
    }
}
Example 14
Source File: KuduTableFactory.java From bahir-flink with Apache License 2.0
@Override
public KuduTableSink createTableSink(Map<String, String> properties) {
    DescriptorProperties descriptorProperties = getValidatedProps(properties);
    String tableName = descriptorProperties.getString(KUDU_TABLE);
    TableSchema schema = descriptorProperties.getTableSchema(SCHEMA);

    return createTableSink(tableName, schema, properties);
}
Example 15
Source File: KuduCatalogFactory.java From bahir-flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

    return new KuduCatalog(name, descriptorProperties.getString(KuduTableFactory.KUDU_MASTERS));
}
Example 16
Source File: ViewEntry.java From flink with Apache License 2.0
ViewEntry(String name, DescriptorProperties properties) {
    super(name, properties);

    query = properties.getString(TABLES_QUERY);
}
Example 17
Source File: CsvTableSinkFactoryBase.java From flink with Apache License 2.0
protected CsvTableSink createTableSink(
        Boolean isStreaming,
        Map<String, String> properties) {

    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(properties);

    // validate
    new FileSystemValidator().validate(params);
    new OldCsvValidator().validate(params);
    new SchemaValidator(isStreaming, false, false).validate(params);

    // build
    TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(params.getTableSchema(SCHEMA));

    // if a schema is defined, no matter whether derive-schema is set or not, use the defined schema
    final boolean hasSchema = params.hasPrefix(FORMAT_FIELDS);
    if (hasSchema) {
        TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
        if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
            throw new TableException(String.format(
                "Encodings that differ from the schema are not supported yet for" +
                    " CsvTableSink, format schema is '%s', but table schema is '%s'.",
                formatSchema,
                tableSchema));
        }
    }

    String path = params.getString(CONNECTOR_PATH);
    String fieldDelimiter = params.getOptionalString(FORMAT_FIELD_DELIMITER).orElse(",");
    Optional<String> writeModeParam = params.getOptionalString(FORMAT_WRITE_MODE);
    FileSystem.WriteMode writeMode =
        writeModeParam.isPresent() ? FileSystem.WriteMode.valueOf(writeModeParam.get()) : null;
    int numFiles = params.getOptionalInt(FORMAT_NUM_FILES).orElse(-1);

    // bridge to java.sql.Timestamp/Time/Date
    DataType[] dataTypes = Arrays.stream(tableSchema.getFieldDataTypes())
        .map(dt -> {
            switch (dt.getLogicalType().getTypeRoot()) {
                case TIMESTAMP_WITHOUT_TIME_ZONE:
                    return dt.bridgedTo(Timestamp.class);
                case TIME_WITHOUT_TIME_ZONE:
                    return dt.bridgedTo(Time.class);
                case DATE:
                    return dt.bridgedTo(Date.class);
                default:
                    return dt;
            }
        })
        .toArray(DataType[]::new);

    return new CsvTableSink(
        path,
        fieldDelimiter,
        numFiles,
        writeMode,
        tableSchema.getFieldNames(),
        dataTypes);
}
Example 18
Source File: ConnectorConfigurations.java From flink-connectors with Apache License 2.0
@SuppressWarnings("unchecked")
private void populateReaderConfig(DescriptorProperties descriptorProperties) {
    uid = descriptorProperties.getOptionalString(CONNECTOR_READER_READER_GROUP_UID);
    rgScope = descriptorProperties.getOptionalString(CONNECTOR_READER_READER_GROUP_SCOPE);
    rgName = descriptorProperties.getOptionalString(CONNECTOR_READER_READER_GROUP_NAME);
    refreshInterval = descriptorProperties.getOptionalLong(CONNECTOR_READER_READER_GROUP_REFRESH_INTERVAL);
    eventReadTimeoutInterval = descriptorProperties.getOptionalLong(CONNECTOR_READER_READER_GROUP_EVENT_READ_TIMEOUT_INTERVAL);
    checkpointInitiateTimeoutInterval = descriptorProperties.getOptionalLong(CONNECTOR_READER_READER_GROUP_CHECKPOINT_INITIATE_TIMEOUT_INTERVAL);

    final Optional<Class<AssignerWithTimeWindows>> assignerClass = descriptorProperties.getOptionalClass(
        CONNECTOR_READER_USER_TIMESTAMP_ASSIGNER, AssignerWithTimeWindows.class);
    if (assignerClass.isPresent()) {
        assignerWithTimeWindows = Optional.of(
            (AssignerWithTimeWindows<Row>) InstantiationUtil.instantiate(assignerClass.get()));
    } else {
        assignerWithTimeWindows = Optional.empty();
    }

    if (!defaultScope.isPresent() && !rgScope.isPresent()) {
        throw new ValidationException("Must supply either " + CONNECTOR_READER_READER_GROUP_SCOPE +
            " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
    }

    final List<Map<String, String>> streamPropsList = descriptorProperties.getVariableIndexedProperties(
        CONNECTOR_READER_STREAM_INFO,
        Arrays.asList(CONNECTOR_READER_STREAM_INFO_STREAM));
    if (streamPropsList.isEmpty()) {
        throw new ValidationException(CONNECTOR_READER_STREAM_INFO + " cannot be empty");
    }

    int index = 0;
    for (Map<String, String> propsMap : streamPropsList) {
        if (!propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_SCOPE) && !defaultScope.isPresent()) {
            throw new ValidationException("Must supply either " + CONNECTOR_READER_STREAM_INFO + "." + index +
                "." + CONNECTOR_READER_STREAM_INFO_SCOPE + " or " + CONNECTOR_CONNECTION_CONFIG_DEFAULT_SCOPE);
        }
        String scopeName = (propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_SCOPE))
            ? descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_SCOPE))
            : defaultScope.get();

        if (!propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_STREAM)) {
            throw new ValidationException(CONNECTOR_READER_STREAM_INFO + "." + index + "." +
                CONNECTOR_READER_STREAM_INFO_STREAM + " cannot be empty");
        }
        String streamName = descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_STREAM));

        String startCut = StreamCut.UNBOUNDED.asText();
        if (propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_START_STREAMCUT)) {
            startCut = descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_START_STREAMCUT));
        }

        String endCut = StreamCut.UNBOUNDED.asText();
        if (propsMap.containsKey(CONNECTOR_READER_STREAM_INFO_END_STREAMCUT)) {
            endCut = descriptorProperties.getString(propsMap.get(CONNECTOR_READER_STREAM_INFO_END_STREAMCUT));
        }

        Stream stream = Stream.of(scopeName, streamName);
        readerStreams.add(new StreamWithBoundaries(stream, StreamCut.from(startCut), StreamCut.from(endCut)));
        index++;
    }
}
Example 19
Source File: CatalogEntry.java From flink with Apache License 2.0
private static CatalogEntry create(DescriptorProperties properties) {
    properties.validateString(CATALOG_NAME, false, 1);

    final String name = properties.getString(CATALOG_NAME);
    final DescriptorProperties cleanedProperties =
        properties.withoutKeys(Collections.singletonList(CATALOG_NAME));

    return new CatalogEntry(name, cleanedProperties);
}