org.apache.flink.table.functions.FunctionContext Java Examples
The following examples show how to use org.apache.flink.table.functions.FunctionContext. A FunctionContext gives a user-defined function access to global runtime information, such as its metric group, distributed-cache files, and global job parameters, and is typically consumed in the function's open() method. Each example lists its source file, project, and license.
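Before the project examples, here is a minimal, self-contained sketch of the FunctionContext API itself. It is illustrative only: the class name, the job-parameter key, and the metric name are invented for this sketch rather than taken from any project below.

import org.apache.flink.metrics.Counter;
import org.apache.flink.table.functions.FunctionContext;
import org.apache.flink.table.functions.ScalarFunction;

// Illustrative sketch: reads a global job parameter and registers a custom metric in open().
public class PrefixFunction extends ScalarFunction {

    private transient String prefix;
    private transient Counter invocations;

    @Override
    public void open(FunctionContext context) throws Exception {
        // Read a global job parameter, falling back to a default value.
        prefix = context.getJobParameter("greeting.prefix", "Hello, ");
        // Register a counter on this function's metric group.
        invocations = context.getMetricGroup().counter("invocations");
    }

    public String eval(String name) {
        invocations.inc();
        return prefix + name;
    }
}

Note that many of the examples below never touch the context argument at all; open(FunctionContext) is simply the hook for one-time initialization before the first call to eval().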
Example #1
Source File: HBaseLookupFunction.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) {
    LOG.info("start open ...");
    org.apache.hadoop.conf.Configuration config = prepareRuntimeConfiguration();
    try {
        // Create the HBase connection once per task; eval() reuses it for every lookup.
        hConnection = ConnectionFactory.createConnection(config);
        table = (HTable) hConnection.getTable(TableName.valueOf(hTableName));
    } catch (TableNotFoundException tnfe) {
        LOG.error("Table '{}' not found ", hTableName, tnfe);
        throw new RuntimeException("HBase table '" + hTableName + "' not found.", tnfe);
    } catch (IOException ioe) {
        LOG.error("Exception while creating connection to HBase.", ioe);
        throw new RuntimeException("Cannot create connection to HBase.", ioe);
    }
    this.readHelper = new HBaseReadWriteHelper(hbaseTableSchema);
    LOG.info("end open.");
}
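For context, this is roughly the eval() that pairs with the open() above: it issues a GET against the table opened there. The helper method names createGet and parseToRow are assumptions based on the HBaseReadWriteHelper used above.

// Sketch of the matching lookup method; helper method names are assumptions.
public void eval(Object rowKey) {
    Get get = readHelper.createGet(rowKey);
    try {
        Result result = table.get(get);
        if (!result.isEmpty()) {
            // Emit the row for the lookup join.
            collect(readHelper.parseToRow(result, rowKey));
        }
    } catch (IOException e) {
        throw new RuntimeException("HBase lookup for key '" + rowKey + "' failed.", e);
    }
}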
Example #2
Source File: HiveGenericUDTF.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    function = hiveFunctionWrapper.createFunction();

    // Bridge Hive's collector to Flink: convert each emitted Hive object to a Row.
    function.setCollector(input -> {
        Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, input, hiveShim);
        HiveGenericUDTF.this.collect(row);
    });

    ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
    returnInspector = function.initialize(argumentInspectors);

    isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);

    // Pre-compute the Flink-to-Hive conversion for every argument position.
    conversions = new HiveObjectConversion[argumentInspectors.length];
    for (int i = 0; i < argumentInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(argumentInspectors[i], argTypes[i].getLogicalType(), hiveShim);
    }

    // If every conversion is the identity, argument conversion can be skipped entirely.
    allIdentityConverter = Arrays.stream(conversions)
        .allMatch(conv -> conv instanceof IdentityConversion);
}
Example #3
Source File: HBaseRowDataLookupFunction.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) {
    LOG.info("start open ...");
    Configuration config = prepareRuntimeConfiguration();
    try {
        hConnection = ConnectionFactory.createConnection(config);
        table = (HTable) hConnection.getTable(TableName.valueOf(hTableName));
    } catch (TableNotFoundException tnfe) {
        LOG.error("Table '{}' not found ", hTableName, tnfe);
        throw new RuntimeException("HBase table '" + hTableName + "' not found.", tnfe);
    } catch (IOException ioe) {
        LOG.error("Exception while creating connection to HBase.", ioe);
        throw new RuntimeException("Cannot create connection to HBase.", ioe);
    }
    this.serde = new HBaseSerde(hbaseTableSchema, nullStringLiteral);
    LOG.info("end open.");
}
Example #4
Source File: CsvTableSource.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    super.open(context);
    TypeInformation<Row> rowType = getResultType();

    RowCsvInputFormat inputFormat = config.createInputFormat();
    FileInputSplit[] inputSplits = inputFormat.createInputSplits(1);
    for (FileInputSplit split : inputSplits) {
        inputFormat.open(split);
        Row row = new Row(rowType.getArity());
        while (true) {
            Row r = inputFormat.nextRecord(row);
            if (r == null) {
                break;
            } else {
                // Index every CSV row by its join key so eval() can serve lookups from memory.
                Object key = getTargetKey(r);
                List<Row> rows = dataMap.computeIfAbsent(key, k -> new ArrayList<>());
                rows.add(Row.copy(r));
            }
        }
        inputFormat.close();
    }
}
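A sketch of how such a lookup function can serve eval() from the dataMap built above. It assumes the key extracted from the lookup arguments matches the key produced by getTargetKey during loading; the real key-extraction logic may differ.

// Sketch only: answer lookups from the in-memory index built in open().
public void eval(Object... values) {
    Object key = getTargetKey(Row.of(values));
    List<Row> matches = dataMap.get(key);
    if (matches != null) {
        matches.forEach(this::collect);
    }
}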
Example #5
Source File: HiveGenericUDTF.java (from flink, Apache License 2.0; a variant of Example #2 without the hiveShim argument)
@Override
public void open(FunctionContext context) throws Exception {
    function = hiveFunctionWrapper.createFunction();
    function.setCollector(input -> {
        Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, input);
        HiveGenericUDTF.this.collect(row);
    });
    ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);
    returnInspector = function.initialize(argumentInspectors);
    isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);
    conversions = new HiveObjectConversion[argumentInspectors.length];
    for (int i = 0; i < argumentInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(argumentInspectors[i], argTypes[i].getLogicalType());
    }
    allIdentityConverter = Arrays.stream(conversions)
        .allMatch(conv -> conv instanceof IdentityConversion);
}
Example #6
Source File: FlinkAllTableFunction.java (from alchemy, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    if (streamTableFunction == null) {
        // Compile the user-supplied Groovy source on first open and instantiate it.
        Class<StreamTableFunction> clazz = GroovyCompiler.compile(code, name);
        this.streamTableFunction = clazz.newInstance();
    }
    super.open(context);
}
Example #7
Source File: FlinkAllScalarFunction.java (from alchemy, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    if (streamScalarFunction == null) {
        Class<StreamScalarFunction> clazz = GroovyCompiler.compile(code, name);
        this.streamScalarFunction = clazz.newInstance();
    }
    super.open(context);
}
Example #8
Source File: JDBCLookupFunction.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    try {
        establishConnection();
        statement = dbConn.prepareStatement(query);
        // A cache size or TTL of -1 disables caching; every lookup then hits the database.
        this.cache = cacheMaxSize == -1 || cacheExpireMs == -1 ? null : CacheBuilder.newBuilder()
            .expireAfterWrite(cacheExpireMs, TimeUnit.MILLISECONDS)
            .maximumSize(cacheMaxSize)
            .build();
    } catch (SQLException sqe) {
        throw new IllegalArgumentException("open() failed.", sqe);
    } catch (ClassNotFoundException cnfe) {
        throw new IllegalArgumentException("JDBC driver class not found.", cnfe);
    }
}
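The cache built in open() pays off in eval(): consult it first and query the database only on a miss. A sketch of that pattern follows; the actual statement execution is elided.

// Sketch of the cache-first lookup pattern that pairs with the open() above.
public void eval(Object... keys) {
    Row keyRow = Row.of(keys);
    if (cache != null) {
        List<Row> cachedRows = cache.getIfPresent(keyRow);
        if (cachedRows != null) {
            // Cache hit: emit rows without touching the database.
            cachedRows.forEach(this::collect);
            return;
        }
    }
    // Cache miss: execute `statement` with the key values, collect each fetched row,
    // then cache.put(keyRow, fetchedRows) so the next lookup is served locally.
}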
Example #9
Source File: FileSystemLookupFunction.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    super.open(context);
    cache = new HashMap<>();
    nextLoadTime = -1;
    // TODO: get ExecutionConfig from context?
    serializer = getResultType().createSerializer(new ExecutionConfig());
}
Example #10
Source File: AnalyzeUseragentFunction.java (from yauaa, Apache License 2.0)
@Override
public void open(FunctionContext context) {
    userAgentAnalyzer = UserAgentAnalyzer
        .newBuilder()
        .withFields(extractedFields)
        .withCache(cacheSize)
        .immediateInitialization()
        .build();
    // If no fields were requested, extract everything the analyzer can produce.
    if (extractedFields.isEmpty()) {
        extractedFields.addAll(userAgentAnalyzer.getAllPossibleFieldNamesSorted());
    }
}
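To show where the analyzer built in open() is used, here is a sketch of a matching eval(). The return shape is an assumption; yauaa's actual table function may expose the fields differently.

// Sketch: parse the user agent and return the requested fields as a map.
public Map<String, String> eval(String userAgentString) {
    UserAgent agent = userAgentAnalyzer.parse(userAgentString);
    Map<String, String> result = new HashMap<>(extractedFields.size());
    for (String fieldName : extractedFields) {
        result.put(fieldName, agent.getValue(fieldName));
    }
    return result;
}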
Example #11
Source File: JdbcRowDataLookupFunction.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    try {
        establishConnectionAndStatement();
        this.cache = cacheMaxSize == -1 || cacheExpireMs == -1 ? null : CacheBuilder.newBuilder()
            .expireAfterWrite(cacheExpireMs, TimeUnit.MILLISECONDS)
            .maximumSize(cacheMaxSize)
            .build();
    } catch (SQLException sqe) {
        throw new IllegalArgumentException("open() failed.", sqe);
    } catch (ClassNotFoundException cnfe) {
        throw new IllegalArgumentException("JDBC driver class not found.", cnfe);
    }
}
Example #12
Source File: JdbcLookupFunction.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    try {
        establishConnectionAndStatement();
        this.cache = cacheMaxSize == -1 || cacheExpireMs == -1 ? null : CacheBuilder.newBuilder()
            .expireAfterWrite(cacheExpireMs, TimeUnit.MILLISECONDS)
            .maximumSize(cacheMaxSize)
            .build();
    } catch (SQLException sqe) {
        throw new IllegalArgumentException("open() failed.", sqe);
    } catch (ClassNotFoundException cnfe) {
        throw new IllegalArgumentException("JDBC driver class not found.", cnfe);
    }
}
Example #13
Source File: JavaUserDefinedScalarFunctions.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    super.open(context);
    this.isOpened = true;
}
Example #14
Source File: JavaUserDefinedScalarFunctions.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) {
    openCalled = true;
}
Example #15
Source File: TestValuesRuntimeFunctions.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    RESOURCE_COUNTER.incrementAndGet();
    isOpenCalled = true;
}
Example #16
Source File: TestValuesRuntimeFunctions.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    RESOURCE_COUNTER.incrementAndGet();
    isOpenCalled = true;
    executor = Executors.newSingleThreadExecutor();
}
Example #17
Source File: HiveScalarFunction.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) {
    openInternal();
    isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);
}
Example #18
Source File: HiveGenericUDAF.java (from flink, Apache License 2.0)
@Override
public void open(FunctionContext context) throws Exception {
    super.open(context);
    init();
}
Example #19
Source File: AthenaXScalarFunction.java (from AthenaX, Apache License 2.0)
/**
 * <p>Setup method for user-defined function. It can be used for initialization work.</p>
 *
 * <p>By default, this method does nothing.</p>
 */
@Override
public void open(FunctionContext context) throws Exception {
    super.open(context);
}
Example #20
Source File: AthenaXTableFunction.java (from AthenaX, Apache License 2.0)
/**
 * <p>Setup method for user-defined table function. It can be used for initialization work.</p>
 *
 * <p>By default, this method does nothing.</p>
 */
@Override
public void open(FunctionContext context) throws Exception {
    super.open(context);
}
Example #21
Source File: AthenaXAggregateFunction.java (from AthenaX, Apache License 2.0)
/**
 * <p>Setup method for user-defined function. It can be used for initialization work.</p>
 *
 * <p>By default, this method does nothing.</p>
 */
@Override
public void open(FunctionContext context) throws Exception {
    super.open(context);
}