Java Code Examples for org.apache.flink.table.api.Table#getSchema()
The following examples show how to use org.apache.flink.table.api.Table#getSchema().
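Table#getSchema() returns the table's TableSchema (org.apache.flink.table.api.TableSchema), from which the column names and legacy TypeInformation column types can be read. A minimal sketch of the call itself, assuming a legacy-planner table environment tEnv with a registered table named "Orders" (both are placeholders, not taken from the projects below):

// tEnv and the table name "Orders" are assumed for this sketch.
Table orders = tEnv.scan("Orders");
TableSchema schema = orders.getSchema();

String[] fieldNames = schema.getFieldNames();              // column names, in declaration order
TypeInformation<?>[] fieldTypes = schema.getFieldTypes();  // one legacy TypeInformation per column

for (int i = 0; i < fieldNames.length; i++) {
    System.out.println(fieldNames[i] + ": " + fieldTypes[i]);
}

The examples below use exactly these two accessors to drive sinks and to split tables.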
Example 1
Source File: AbstractFlinkClient.java (from the alchemy project, Apache License 2.0)
private void registerSink(Table table, SinkDescriptor sinkDescriptor) throws Exception {
    TableSchema tableSchema = table.getSchema();
    TableSink tableSink = sinkDescriptor.transform(tableSchema);
    table.writeToSink(tableSink);
    LOGGER.info("register sink, name:{}, class:{}", sinkDescriptor.getName(), sinkDescriptor.getClass());
}
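Here the schema is handed to the project's SinkDescriptor so the sink can be built with column names and types that match the table; later Flink versions replaced Table#writeToSink with registering the sink and calling Table#insertInto. SinkDescriptor is specific to the alchemy project, but as a rough stand-in the transform step amounts to something like the following, using Flink's built-in CsvTableSink (path and delimiter are placeholder values):

// A sketch only; CsvTableSink stands in for whatever sink the descriptor actually builds.
TableSchema tableSchema = table.getSchema();
TableSink<?> tableSink = new CsvTableSink("/tmp/out.csv", ",")
        .configure(tableSchema.getFieldNames(), tableSchema.getFieldTypes());
table.writeToSink(tableSink);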
Example 2
Source File: JdbcDB.java (from the Alink project, Apache License 2.0)
@Override
public void sinkBatch(String tableName, Table in, Params parameter, Long sessionId) {
    dropAndCreateTable(this, tableName, in, parameter);

    TableSchema schema = in.getSchema();
    String[] colNames = schema.getFieldNames();

    // Build "INSERT INTO <table> (c0,c1,...) VALUES (?,?,...)" from the schema's column names.
    StringBuilder sbd = new StringBuilder();
    sbd.append("INSERT INTO ").append(tableName).append(" (").append(colNames[0]);
    for (int i = 1; i < colNames.length; i++) {
        sbd.append(",").append(colNames[i]);
    }
    sbd.append(") VALUES (?");
    for (int i = 1; i < colNames.length; i++) {
        sbd.append(",").append("?");
    }
    sbd.append(")");

    JDBCAppendTableSink jdbcAppendTableSink = JDBCAppendTableSink.builder()
        .setUsername(getUserName())
        .setPassword(getPassword())
        .setDrivername(getDriverName())
        .setDBUrl(getDbUrl())
        .setQuery(sbd.toString())
        .setParameterTypes(schema.getFieldTypes())
        .build();

    jdbcAppendTableSink.emitDataSet(
        BatchOperator.fromTable(in).setMLEnvironmentId(sessionId).getDataSet());
}
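For a hypothetical sink table named my_table with columns id, name and score, the two loops above produce the statement INSERT INTO my_table (id,name,score) VALUES (?,?,?), and schema.getFieldTypes() supplies the three JDBC parameter types in the same order, so each row is written positionally.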
Example 3
Source File: TableUtil.java (from the Alink project, Apache License 2.0)
public static Table[] splitTable(Table table) {
    TableSchema schema = table.getSchema();
    final String[] colNames = schema.getFieldNames();

    // The first column must be the table id; the remaining columns are named "t<k>_<name>".
    String idCol = colNames[0];
    if (!idCol.equalsIgnoreCase("table_id")) {
        throw new IllegalArgumentException("The table can't be split.");
    }

    String lastCol = colNames[colNames.length - 1];
    int maxTableId = Integer.valueOf(lastCol.substring(1, lastCol.indexOf('_')));
    int numTables = maxTableId + 1;

    // Count how many columns belong to each sub-table.
    int[] numColsOfEachTable = new int[numTables];
    for (int i = 1; i < colNames.length; i++) {
        int tableId = Integer.valueOf(colNames[i].substring(1, colNames[i].indexOf('_')));
        numColsOfEachTable[tableId]++;
    }

    Table[] splitTables = new Table[numTables];
    int startCol = 1;
    for (int i = 0; i < numTables; i++) {
        if (numColsOfEachTable[i] == 0) {
            continue;
        }
        String[] selectedCols = Arrays.copyOfRange(colNames, startCol, startCol + numColsOfEachTable[i]);
        BatchOperator sub = BatchOperator.fromTable(table)
            .where(String.format("%s=%d", "table_id", i))
            .select(selectedCols);

        // Recover the original column names by stripping the "t<i>_" prefix.
        String prefix = String.format("t%d_", i);
        StringBuilder sbd = new StringBuilder();
        for (int j = 0; j < selectedCols.length; j++) {
            if (j > 0) {
                sbd.append(",");
            }
            sbd.append(selectedCols[j].substring(prefix.length()));
        }
        sub = sub.as(sbd.toString());

        splitTables[i] = sub.getOutputTable();
        startCol += numColsOfEachTable[i];
    }
    return splitTables;
}
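This method relies on a column-naming convention rather than on metadata: the first column read from getSchema() must be table_id, and every other column is named t<k>_<originalName>, where k is the index of the sub-table it belongs to. For example, a combined table with columns table_id, t0_f0, t0_f1, t1_label would be split into one table with columns f0 and f1 (rows where table_id = 0) and one with column label (rows where table_id = 1).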
Example 4
Source File: JdbcDB.java (from the Alink project, Apache License 2.0)
@Override
public void sinkStream(String tableName, Table in, Params parameter, Long sessionId) {
    try {
        if (!this.hasTable(tableName)) {
            this.createTable(tableName, in.getSchema(), parameter);
        }
    } catch (Exception e) {
        throw new RuntimeException("Fail to create table: " + e);
    }

    TableSchema schema = in.getSchema();
    String[] colNames = schema.getFieldNames();

    // Build "INSERT INTO <table> (c0,c1,...) VALUES (?,?,...)" from the table schema.
    StringBuilder sbd = new StringBuilder();
    sbd.append("INSERT INTO ").append(tableName).append(" (").append(colNames[0]);
    for (int i = 1; i < colNames.length; i++) {
        sbd.append(",").append(colNames[i]);
    }
    sbd.append(") VALUES (?");
    for (int i = 1; i < colNames.length; i++) {
        sbd.append(",").append("?");
    }
    sbd.append(")");
    String sql = sbd.toString();
    LOG.info("JdbcDB sink stream to table {}: {}", tableName, sql);

    String[] primaryColNames = parameter.getStringArrayOrDefault("primaryKeys", null);
    if (primaryColNames == null || primaryColNames.length == 0) {
        // No primary keys: append-only JDBC sink, parameter types taken from the schema.
        JDBCAppendTableSink jdbcAppendTableSink = JDBCAppendTableSink.builder()
            .setUsername(getUserName())
            .setPassword(getPassword())
            .setDrivername(getDriverName())
            .setDBUrl(getDbUrl())
            .setQuery(sql)
            .setParameterTypes(schema.getFieldTypes())
            .build();
        StreamTableEnvironment tEnv = MLEnvironmentFactory.get(sessionId).getStreamTableEnvironment();
        jdbcAppendTableSink.emitDataStream(
            tEnv.toAppendStream(in, new RowTypeInfo(in.getSchema().getFieldTypes())));
    } else {
        // Primary keys configured: route through the retract sink instead.
        new TableSourceStreamOp(in)
            .setMLEnvironmentId(sessionId)
            .link(new JdbcRetractSinkStreamOp(this, tableName, primaryColNames));
    }
}
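The schema drives both code paths: the column names build the INSERT statement, and getFieldTypes() supplies both the JDBC parameter types and the RowTypeInfo passed to toAppendStream. Which path is taken depends only on whether primary keys were configured; without them the rows go through the append-only JDBCAppendTableSink, while with them the stream is linked to Alink's JdbcRetractSinkStreamOp, presumably so that updates to existing keys can be retracted and rewritten (the key handling lives in that operator and is not shown here).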