Java Code Examples for org.apache.flink.table.descriptors.DescriptorProperties#asMap()
The following examples show how to use org.apache.flink.table.descriptors.DescriptorProperties#asMap().
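Before looking at the individual examples, here is a minimal, self-contained sketch of the pattern they all share: key/value pairs are collected into a DescriptorProperties instance and then flattened into a plain Map<String, String> via asMap(). The class name AsMapSketch and the property keys and values used below are placeholders chosen for illustration, not keys defined by any particular connector.

import org.apache.flink.table.descriptors.DescriptorProperties;

import java.util.HashMap;
import java.util.Map;

public class AsMapSketch {

    public static void main(String[] args) {
        // Collect raw key/value properties first (placeholder keys, not a real connector).
        Map<String, String> raw = new HashMap<>();
        raw.put("connector.type", "example");
        raw.put("connector.property-version", "1");

        // DescriptorProperties gathers typed and untyped entries;
        // asMap() exposes them as a flat string-to-string map.
        DescriptorProperties descriptorProperties = new DescriptorProperties();
        descriptorProperties.putProperties(raw);
        descriptorProperties.putString("connector.path", "/tmp/example"); // placeholder path

        Map<String, String> flattened = descriptorProperties.asMap();
        flattened.forEach((k, v) -> System.out.println(k + " = " + v));
    }
}

Note that Example 13 below copies the result of asMap() into a fresh HashMap before returning it, which suggests the returned map should be treated as read-only.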
Example 1
Source File: HBaseConnectorITCase.java From flink with Apache License 2.0 | 6 votes |
private static Map<String, String> hbaseTableProperties() {
    Map<String, String> properties = new HashMap<>();
    properties.put(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE_HBASE);
    properties.put(CONNECTOR_VERSION, CONNECTOR_VERSION_VALUE_143);
    properties.put(CONNECTOR_PROPERTY_VERSION, "1");
    properties.put(CONNECTOR_TABLE_NAME, TEST_TABLE_1);
    // get zk quorum from "hbase-site.xml" in classpath
    String hbaseZk = HBaseConfiguration.create().get(HConstants.ZOOKEEPER_QUORUM);
    properties.put(CONNECTOR_ZK_QUORUM, hbaseZk);
    // schema
    String[] columnNames = {FAMILY1, ROWKEY, FAMILY2, FAMILY3};
    TypeInformation<Row> f1 = Types.ROW_NAMED(new String[]{F1COL1}, Types.INT);
    TypeInformation<Row> f2 = Types.ROW_NAMED(new String[]{F2COL1, F2COL2}, Types.STRING, Types.LONG);
    TypeInformation<Row> f3 = Types.ROW_NAMED(new String[]{F3COL1, F3COL2, F3COL3}, Types.DOUBLE, Types.BOOLEAN, Types.STRING);
    TypeInformation[] columnTypes = new TypeInformation[]{f1, Types.INT, f2, f3};

    DescriptorProperties descriptorProperties = new DescriptorProperties(true);
    TableSchema tableSchema = new TableSchema(columnNames, columnTypes);
    descriptorProperties.putTableSchema(SCHEMA, tableSchema);
    descriptorProperties.putProperties(properties);
    return descriptorProperties.asMap();
}
Example 2
Source File: Pravega.java From flink-connectors with Apache License 2.0 | 6 votes |
/**
 * Internal method for connector properties conversion.
 */
@Override
protected Map<String, String> toConnectorProperties() {
    final DescriptorProperties properties = new DescriptorProperties();
    properties.putString(CONNECTOR_VERSION, String.valueOf(CONNECTOR_VERSION_VALUE));

    if (tableSourceReaderBuilder == null && tableSinkWriterBuilder == null) {
        throw new ValidationException("Missing both reader and writer configurations.");
    }

    PravegaConfig pravegaConfig = tableSourceReaderBuilder != null
            ? tableSourceReaderBuilder.getPravegaConfig()
            : tableSinkWriterBuilder.getPravegaConfig();
    populateConnectionConfig(pravegaConfig, properties);

    boolean metrics = tableSourceReaderBuilder != null
            ? tableSourceReaderBuilder.isMetricsEnabled()
            : tableSinkWriterBuilder.isMetricsEnabled();
    properties.putBoolean(CONNECTOR_METRICS, metrics);

    if (tableSourceReaderBuilder != null) {
        populateReaderProperties(properties);
    }
    if (tableSinkWriterBuilder != null) {
        populateWriterProperties(properties);
    }
    return properties.asMap();
}
Example 3
Source File: PulsarCatalogDescriptor.java From pulsar-flink with Apache License 2.0 | 5 votes |
@Override
protected Map<String, String> toCatalogProperties() {
    DescriptorProperties props = new DescriptorProperties();

    if (pulsarVersion != null) {
        props.putString(CATALOG_PULSAR_VERSION, pulsarVersion);
    }

    return props.asMap();
}
Example 4
Source File: PulsarCatalogFactory.java From pulsar-flink with Apache License 2.0 | 5 votes |
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
    DescriptorProperties dp = getValidateProperties(properties);
    String defaultDB = dp.getOptionalString(CATALOG_DEFAULT_DATABASE).orElse("public/default");
    String adminUrl = dp.getString(CATALOG_ADMIN_URL);
    return new PulsarCatalog(adminUrl, name, dp.asMap(), defaultDB);
}
Example 5
Source File: HiveCatalogDescriptor.java From flink with Apache License 2.0 | 5 votes |
@Override
protected Map<String, String> toCatalogProperties() {
    final DescriptorProperties properties = new DescriptorProperties();

    if (hiveSitePath != null) {
        properties.putString(CATALOG_HIVE_CONF_DIR, hiveSitePath);
    }

    if (hiveVersion != null) {
        properties.putString(CATALOG_HIVE_VERSION, hiveVersion);
    }

    return properties.asMap();
}
Example 6
Source File: StreamPlannerFactory.java From flink with Apache License 2.0 | 5 votes |
@Override
public Map<String, String> requiredContext() {
    DescriptorProperties properties = new DescriptorProperties();
    properties.putBoolean(EnvironmentSettings.STREAMING_MODE, true);
    return properties.asMap();
}
Example 7
Source File: HiveModuleDescriptor.java From flink with Apache License 2.0 | 5 votes |
@Override
protected Map<String, String> toModuleProperties() {
    final DescriptorProperties properties = new DescriptorProperties();

    if (hiveVersion != null) {
        properties.putString(MODULE_HIVE_VERSION, hiveVersion);
    }

    return properties.asMap();
}
Example 8
Source File: CatalogTableImpl.java From flink with Apache License 2.0 | 5 votes |
@Override
public Map<String, String> toProperties() {
    DescriptorProperties descriptor = new DescriptorProperties();
    descriptor.putTableSchema(Schema.SCHEMA, getSchema());
    descriptor.putPartitionKeys(getPartitionKeys());

    Map<String, String> properties = new HashMap<>(getProperties());
    properties.remove(CatalogConfig.IS_GENERIC);
    descriptor.putProperties(properties);

    return descriptor.asMap();
}
Example 9
Source File: JsonRowFormatFactoryTest.java From flink with Apache License 2.0 | 5 votes |
private static Map<String, String> toMap(Descriptor... desc) {
    final DescriptorProperties descriptorProperties = new DescriptorProperties();
    for (Descriptor d : desc) {
        descriptorProperties.putProperties(d.toProperties());
    }
    return descriptorProperties.asMap();
}
Example 10
Source File: HiveCatalogDescriptor.java From flink with Apache License 2.0 | 5 votes |
@Override
protected Map<String, String> toCatalogProperties() {
    final DescriptorProperties properties = new DescriptorProperties();

    if (hiveSitePath != null) {
        properties.putString(CATALOG_HIVE_CONF_DIR, hiveSitePath);
    }

    if (hiveVersion != null) {
        properties.putString(CATALOG_HIVE_VERSION, hiveVersion);
    }

    return properties.asMap();
}
Example 11
Source File: JsonRowFormatFactoryTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
private static Map<String, String> toMap(Descriptor... desc) {
    final DescriptorProperties descriptorProperties = new DescriptorProperties();
    for (Descriptor d : desc) {
        descriptorProperties.putProperties(d.toProperties());
    }
    return descriptorProperties.asMap();
}
Example 12
Source File: JsonRowFormatFactoryTest.java From flink with Apache License 2.0 | 5 votes |
private static Map<String, String> toMap(Descriptor... desc) {
    final DescriptorProperties descriptorProperties = new DescriptorProperties();
    for (Descriptor d : desc) {
        descriptorProperties.putProperties(d.toProperties());
    }
    return descriptorProperties.asMap();
}
Example 13
Source File: JdbcTableSourceSinkFactoryTest.java From flink with Apache License 2.0 | 5 votes |
private Map<String, String> getBasicProperties() {
    Map<String, String> properties = new HashMap<>();
    properties.put("connector.type", "jdbc");
    properties.put("connector.property-version", "1");
    properties.put("connector.url", "jdbc:derby:memory:mydb");
    properties.put("connector.table", "mytable");

    DescriptorProperties descriptorProperties = new DescriptorProperties();
    descriptorProperties.putProperties(properties);
    descriptorProperties.putTableSchema("schema", schema);
    return new HashMap<>(descriptorProperties.asMap());
}
Example 14
Source File: TaxiRides.java From flink-training-exercises with Apache License 2.0 | 5 votes |
@Override
protected Map<String, String> toConnectorProperties() {
    DescriptorProperties properties = new DescriptorProperties();

    if (this.path != null) {
        properties.putString(CONNECTOR_PATH, this.path);
    }
    if (this.maxEventDelaySecs != null) {
        properties.putInt(CONNECTOR_MAX_EVENT_DELAY_SECS, this.maxEventDelaySecs);
    }
    if (this.servingSpeedFactor != null) {
        properties.putInt(CONNECTOR_SERVING_SPEED_FACTOR, this.servingSpeedFactor);
    }

    return properties.asMap();
}
Example 15
Source File: TaxiFares.java From flink-training-exercises with Apache License 2.0 | 5 votes |
@Override
protected Map<String, String> toConnectorProperties() {
    DescriptorProperties properties = new DescriptorProperties();

    if (this.path != null) {
        properties.putString(CONNECTOR_PATH, this.path);
    }
    if (this.maxEventDelaySecs != null) {
        properties.putInt(CONNECTOR_MAX_EVENT_DELAY_SECS, this.maxEventDelaySecs);
    }
    if (this.servingSpeedFactor != null) {
        properties.putInt(CONNECTOR_SERVING_SPEED_FACTOR, this.servingSpeedFactor);
    }

    return properties.asMap();
}
Example 16
Source File: BlinkPlannerFactory.java From flink with Apache License 2.0 | 4 votes |
@Override
public Map<String, String> requiredContext() {
    DescriptorProperties properties = new DescriptorProperties();
    return properties.asMap();
}
Example 17
Source File: BlinkPlannerFactory.java From flink with Apache License 2.0 | 4 votes |
@Override
public Map<String, String> requiredContext() {
    DescriptorProperties properties = new DescriptorProperties();
    return properties.asMap();
}
Example 18
Source File: BlinkExecutorFactory.java From flink with Apache License 2.0 | 4 votes |
@Override
public Map<String, String> requiredContext() {
    DescriptorProperties properties = new DescriptorProperties();
    return properties.asMap();
}
Example 19
Source File: StreamExecutorFactory.java From flink with Apache License 2.0 | 4 votes |
@Override
public Map<String, String> requiredContext() {
    DescriptorProperties properties = new DescriptorProperties();
    properties.putBoolean(EnvironmentSettings.STREAMING_MODE, true);
    return properties.asMap();
}
Example 20
Source File: CatalogTableBuilder.java From flink with Apache License 2.0 | 3 votes |
@Override
protected Map<String, String> additionalProperties() {
    DescriptorProperties descriptorProperties = new DescriptorProperties();
    descriptorProperties.putBoolean(CatalogConfig.IS_GENERIC, isGeneric);
    descriptorProperties.putProperties(this.properties);
    return descriptorProperties.asMap();
}