org.apache.flink.util.function.BiConsumerWithException Java Examples
The following examples show how to use
org.apache.flink.util.function.BiConsumerWithException.
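Before looking at the project code, a minimal sketch of the interface itself may help: BiConsumerWithException<T, U, E> is a two-argument consumer whose accept method may throw the checked exception E, and its static unchecked(...) helper adapts such a consumer into a plain java.util.function.BiConsumer by rethrowing any thrown exception as an unchecked one (this is how the Dispatcher examples below pass it to CompletableFuture#whenComplete). The writeTo consumer and the output file name in this sketch are made up purely for illustration.

import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.function.BiConsumer;

import org.apache.flink.util.function.BiConsumerWithException;

public class BiConsumerWithExceptionSketch {

    public static void main(String[] args) throws IOException {
        // A two-argument consumer whose accept(...) is declared to throw IOException.
        BiConsumerWithException<OutputStream, byte[], IOException> writeTo =
                (stream, bytes) -> stream.write(bytes);

        // "example.bin" is a hypothetical output file used only for this sketch.
        try (OutputStream out = Files.newOutputStream(Paths.get("example.bin"))) {
            // Direct use: the checked IOException propagates to the caller.
            writeTo.accept(out, new byte[] {1, 2, 3});

            // unchecked(...) wraps the throwing consumer in a plain BiConsumer,
            // rethrowing any checked exception as an unchecked one, so it can be
            // passed to APIs such as CompletableFuture#whenComplete.
            BiConsumer<OutputStream, byte[]> plain = BiConsumerWithException.unchecked(writeTo);
            plain.accept(out, new byte[] {4, 5, 6});
        }
    }
}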
Example #1
Source File: ChannelStateWriteRequest.java From flink with Apache License 2.0 | 6 votes |
static ChannelStateWriteRequest buildWriteRequest(
        long checkpointId,
        String name,
        CloseableIterator<Buffer> iterator,
        BiConsumerWithException<ChannelStateCheckpointWriter, Buffer, Exception> bufferConsumer) {
    return new CheckpointInProgressRequest(
        name,
        checkpointId,
        writer -> {
            while (iterator.hasNext()) {
                Buffer buffer = iterator.next();
                try {
                    checkArgument(buffer.isBuffer());
                } catch (Exception e) {
                    buffer.recycleBuffer();
                    throw e;
                }
                bufferConsumer.accept(writer, buffer);
            }
        },
        throwable -> iterator.close(),
        false);
}
Example #2
Source File: TestingExecutor.java From flink with Apache License 2.0 | 6 votes |
TestingExecutor(
        List<SupplierWithException<TypedResult<List<Tuple2<Boolean, Row>>>, SqlExecutionException>> resultChanges,
        List<SupplierWithException<TypedResult<Integer>, SqlExecutionException>> snapshotResults,
        List<SupplierWithException<List<Row>, SqlExecutionException>> resultPages,
        BiConsumerWithException<String, String, SqlExecutionException> useCatalogConsumer,
        BiConsumerWithException<String, String, SqlExecutionException> useDatabaseConsumer,
        BiFunctionWithException<String, String, TableResult, SqlExecutionException> executeSqlConsumer,
        TriFunctionWithException<String, String, String, Void, SqlExecutionException> setSessionPropertyFunction,
        FunctionWithException<String, Void, SqlExecutionException> resetSessionPropertiesFunction) {
    this.resultChanges = resultChanges;
    this.snapshotResults = snapshotResults;
    this.resultPages = resultPages;
    this.useCatalogConsumer = useCatalogConsumer;
    this.useDatabaseConsumer = useDatabaseConsumer;
    this.executeSqlConsumer = executeSqlConsumer;
    this.setSessionPropertyFunction = setSessionPropertyFunction;
    this.resetSessionPropertiesFunction = resetSessionPropertiesFunction;
    helper = new SqlParserHelper();
    helper.registerTables();
}
Example #3
Source File: LinkedOptionalMapSerializer.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
public static <K, V> void writeOptionalMap(
        DataOutputView out,
        LinkedOptionalMap<K, V> map,
        BiConsumerWithException<DataOutputView, K, IOException> keyWriter,
        BiConsumerWithException<DataOutputView, V, IOException> valueWriter) throws IOException {

    out.writeLong(HEADER);
    out.writeInt(map.size());

    map.forEach(((keyName, key, value) -> {
        out.writeUTF(keyName);

        if (key == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            writeFramed(out, keyWriter, key);
        }

        if (value == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            writeFramed(out, valueWriter, value);
        }
    }));
}
Example #4
Source File: TestInternalTimerService.java From flink with Apache License 2.0 | 5 votes |
@Override
public void forEachProcessingTimeTimer(BiConsumerWithException<N, Long, Exception> consumer) throws Exception {
    for (Timer<K, N> timer : processingTimeTimers) {
        keyContext.setCurrentKey(timer.getKey());
        consumer.accept(timer.getNamespace(), timer.getTimestamp());
    }
}
Example #5
Source File: TestInternalTimerService.java From flink with Apache License 2.0 | 5 votes |
@Override
public void forEachEventTimeTimer(BiConsumerWithException<N, Long, Exception> consumer) throws Exception {
    for (Timer<K, N> timer : watermarkTimers) {
        keyContext.setCurrentKey(timer.getKey());
        consumer.accept(timer.getNamespace(), timer.getTimestamp());
    }
}
Example #6
Source File: InternalTimerServiceImpl.java From flink with Apache License 2.0 | 5 votes |
private void foreachTimer(
        BiConsumerWithException<N, Long, Exception> consumer,
        KeyGroupedInternalPriorityQueue<TimerHeapInternalTimer<K, N>> queue) throws Exception {
    try (final CloseableIterator<TimerHeapInternalTimer<K, N>> iterator = queue.iterator()) {
        while (iterator.hasNext()) {
            final TimerHeapInternalTimer<K, N> timer = iterator.next();
            keyContext.setCurrentKey(timer.getKey());
            consumer.accept(timer.getNamespace(), timer.getTimestamp());
        }
    }
}
Example #7
Source File: TestingJobLeaderService.java From flink with Apache License 2.0 | 5 votes |
TestingJobLeaderService(
        QuadConsumer<String, RpcService, HighAvailabilityServices, JobLeaderListener> startConsumer,
        ThrowingRunnable<? extends Exception> stopRunnable,
        Consumer<JobID> removeJobConsumer,
        BiConsumerWithException<JobID, String, ? extends Exception> addJobConsumer,
        Consumer<JobID> reconnectConsumer,
        Function<JobID, Boolean> containsJobFunction) {
    this.startConsumer = startConsumer;
    this.stopRunnable = stopRunnable;
    this.removeJobConsumer = removeJobConsumer;
    this.addJobConsumer = addJobConsumer;
    this.reconnectConsumer = reconnectConsumer;
    this.containsJobFunction = containsJobFunction;
}
Example #8
Source File: ChannelStateWriterImplTest.java From flink with Apache License 2.0 | 5 votes |
private void runWithSyncWorker(
        BiConsumerWithException<ChannelStateWriter, SyncChannelStateWriteRequestExecutor, Exception> testFn) throws Exception {
    try (
        SyncChannelStateWriteRequestExecutor worker = new SyncChannelStateWriteRequestExecutor();
        ChannelStateWriterImpl writer = new ChannelStateWriterImpl(TASK_NAME, new ConcurrentHashMap<>(), worker, 5)
    ) {
        writer.open();
        testFn.accept(writer, worker);
        worker.processAllRequests();
    }
}
Example #9
Source File: ChannelStateWriteRequestExecutorImplTest.java From flink with Apache License 2.0 | 5 votes |
private void testSubmitFailure(
        BiConsumerWithException<ChannelStateWriteRequestExecutor, ChannelStateWriteRequest, Exception> submitAction) throws Exception {
    TestWriteRequest request = new TestWriteRequest();
    LinkedBlockingDeque<ChannelStateWriteRequest> deque = new LinkedBlockingDeque<>();
    try {
        submitAction.accept(new ChannelStateWriteRequestExecutorImpl(TASK_NAME, NO_OP, deque), request);
    } catch (IllegalStateException e) {
        // expected: executor not started
        return;
    } finally {
        assertTrue(request.cancelled);
        assertTrue(deque.isEmpty());
    }
    throw new RuntimeException("expected exception not thrown");
}
Example #10
Source File: ChannelStateWriteRequestExecutorImplTest.java From flink with Apache License 2.0 | 5 votes |
private void testCloseAfterSubmit(
        BiConsumerWithException<ChannelStateWriteRequestExecutor, ChannelStateWriteRequest, Exception> requestFun) throws Exception {
    WorkerClosingDeque closingDeque = new WorkerClosingDeque();
    ChannelStateWriteRequestExecutorImpl worker = new ChannelStateWriteRequestExecutorImpl(TASK_NAME, NO_OP, closingDeque);
    closingDeque.setWorker(worker);
    TestWriteRequest request = new TestWriteRequest();
    requestFun.accept(worker, request);
    assertTrue(closingDeque.isEmpty());
    assertFalse(request.isCancelled());
}
Example #11
Source File: Dispatcher.java From flink with Apache License 2.0 | 5 votes |
private CompletableFuture<Void> persistAndRunJob(JobGraph jobGraph) throws Exception {
    jobGraphWriter.putJobGraph(jobGraph);

    final CompletableFuture<Void> runJobFuture = runJob(jobGraph);

    return runJobFuture.whenComplete(BiConsumerWithException.unchecked((Object ignored, Throwable throwable) -> {
        if (throwable != null) {
            jobGraphWriter.removeJobGraph(jobGraph.getJobID());
        }
    }));
}
Example #12
Source File: MetadataV3Serializer.java From flink with Apache License 2.0 | 5 votes |
private <T extends StateObject> void serializeCollection(
        StateObjectCollection<T> stateObjectCollection,
        DataOutputStream dos,
        BiConsumerWithException<T, DataOutputStream, IOException> cons) throws IOException {
    if (stateObjectCollection == null) {
        dos.writeInt(0);
    } else {
        dos.writeInt(stateObjectCollection.size());
        for (T stateObject : stateObjectCollection) {
            cons.accept(stateObject, dos);
        }
    }
}
Example #13
Source File: MetadataV2V3SerializerBase.java From flink with Apache License 2.0 | 5 votes |
private static <T extends StateObject> void serializeSingleton(
        StateObjectCollection<T> stateObjectCollection,
        DataOutputStream dos,
        BiConsumerWithException<T, DataOutputStream, IOException> cons) throws IOException {
    final T state = extractSingleton(stateObjectCollection);
    if (state != null) {
        dos.writeInt(1);
        cons.accept(state, dos);
    } else {
        dos.writeInt(0);
    }
}
Example #14
Source File: ChannelStateHandleSerializer.java From flink with Apache License 2.0 | 5 votes |
private static <I> void serializeChannelStateHandle(
        AbstractChannelStateHandle<I> handle,
        DataOutputStream dos,
        BiConsumerWithException<I, DataOutputStream, IOException> infoWriter) throws IOException {
    infoWriter.accept(handle.getInfo(), dos);
    dos.writeInt(handle.getOffsets().size());
    for (long offset : handle.getOffsets()) {
        dos.writeLong(offset);
    }
    dos.writeLong(handle.getStateSize());
    serializeStreamStateHandle(handle.getDelegate(), dos);
}
Example #15
Source File: LinkedOptionalMapSerializer.java From flink with Apache License 2.0 | 5 votes |
private static <T> void writeFramed(
        DataOutputView out,
        BiConsumerWithException<DataOutputView, T, IOException> writer,
        T item) throws IOException {
    DataOutputSerializer frame = new DataOutputSerializer(64);
    writer.accept(frame, item);

    final byte[] buffer = frame.getSharedBuffer();
    final int bufferSize = frame.length();

    out.writeInt(bufferSize);
    out.write(buffer, 0, bufferSize);
}
Example #16
Source File: LinkedOptionalMapSerializer.java From flink with Apache License 2.0 | 5 votes |
public static <K, V> void writeOptionalMap(
        DataOutputView out,
        LinkedOptionalMap<K, V> map,
        BiConsumerWithException<DataOutputView, K, IOException> keyWriter,
        BiConsumerWithException<DataOutputView, V, IOException> valueWriter) throws IOException {

    out.writeLong(HEADER);
    out.writeInt(map.size());

    map.forEach(((keyName, key, value) -> {
        out.writeUTF(keyName);

        if (key == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            writeFramed(out, keyWriter, key);
        }

        if (value == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            writeFramed(out, valueWriter, value);
        }
    }));
}
Example #17
Source File: LinkedOptionalMapSerializer.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
private static <T> void writeFramed(
        DataOutputView out,
        BiConsumerWithException<DataOutputView, T, IOException> writer,
        T item) throws IOException {
    DataOutputSerializer frame = new DataOutputSerializer(64);
    writer.accept(frame, item);

    final byte[] buffer = frame.getSharedBuffer();
    final int bufferSize = frame.length();

    out.writeInt(bufferSize);
    out.write(buffer, 0, bufferSize);
}
Example #18
Source File: Dispatcher.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
private CompletableFuture<Void> persistAndRunJob(JobGraph jobGraph) throws Exception {
    submittedJobGraphStore.putJobGraph(new SubmittedJobGraph(jobGraph));

    final CompletableFuture<Void> runJobFuture = runJob(jobGraph);

    return runJobFuture.whenComplete(BiConsumerWithException.unchecked((Object ignored, Throwable throwable) -> {
        if (throwable != null) {
            submittedJobGraphStore.removeJobGraph(jobGraph.getJobID());
        }
    }));
}
Example #19
Source File: Dispatcher.java From flink with Apache License 2.0 | 5 votes |
private CompletableFuture<Void> persistAndRunJob(JobGraph jobGraph) throws Exception {
    submittedJobGraphStore.putJobGraph(new SubmittedJobGraph(jobGraph));

    final CompletableFuture<Void> runJobFuture = runJob(jobGraph);

    return runJobFuture.whenComplete(BiConsumerWithException.unchecked((Object ignored, Throwable throwable) -> {
        if (throwable != null) {
            submittedJobGraphStore.removeJobGraph(jobGraph.getJobID());
        }
    }));
}
Example #20
Source File: ReductionsTest.java From flink-statefun with Apache License 2.0 | 4 votes |
@Override
public void forEachEventTimeTimer(
        BiConsumerWithException<VoidNamespace, Long, Exception> consumer) throws Exception {
    throw new UnsupportedOperationException();
}
Example #21
Source File: PojoSerializerSnapshotData.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
@SuppressWarnings("unchecked")
static <K> BiConsumerWithException<DataOutputView, K, IOException> noopWriter() {
    return (BiConsumerWithException<DataOutputView, K, IOException>) INSTANCE;
}
Example #22
Source File: InternalTimerServiceImpl.java From flink with Apache License 2.0 | 4 votes |
@Override
public void forEachProcessingTimeTimer(BiConsumerWithException<N, Long, Exception> consumer) throws Exception {
    foreachTimer(consumer, processingTimeTimersQueue);
}
Example #23
Source File: InternalTimerServiceImpl.java From flink with Apache License 2.0 | 4 votes |
@Override
public void forEachEventTimeTimer(BiConsumerWithException<N, Long, Exception> consumer) throws Exception {
    foreachTimer(consumer, eventTimeTimersQueue);
}
Example #24
Source File: TestingJobLeaderService.java From flink with Apache License 2.0 | 4 votes |
public Builder setAddJobConsumer(BiConsumerWithException<JobID, String, ? extends Exception> addJobConsumer) {
    this.addJobConsumer = addJobConsumer;
    return this;
}
Example #25
Source File: PojoSerializerSnapshotData.java From flink with Apache License 2.0 | 4 votes |
@SuppressWarnings("unchecked")
static <K> BiConsumerWithException<DataOutputView, K, IOException> noopWriter() {
    return (BiConsumerWithException<DataOutputView, K, IOException>) INSTANCE;
}