Java Code Examples for org.apache.beam.runners.dataflow.DataflowRunner#hasExperiment()
The following examples show how to use org.apache.beam.runners.dataflow.DataflowRunner#hasExperiment().
Each example lists its original project and source file.
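Before the examples, here is a minimal standalone sketch of the pattern they all share: enable an experiment on the pipeline options, then check it with DataflowRunner.hasExperiment(). The experiment name "my_experiment" is a hypothetical placeholder used only for illustration, not a flag defined by Beam.

import java.util.Arrays;

import org.apache.beam.runners.dataflow.DataflowRunner;
import org.apache.beam.runners.dataflow.options.DataflowPipelineDebugOptions;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

public class HasExperimentSketch {
  public static void main(String[] args) {
    // Experiments live on the options object; any PipelineOptions can be viewed as
    // DataflowPipelineDebugOptions via as().
    PipelineOptions options = PipelineOptionsFactory.create();
    DataflowPipelineDebugOptions debugOptions = options.as(DataflowPipelineDebugOptions.class);

    // "my_experiment" is a hypothetical flag used only for this sketch.
    debugOptions.setExperiments(Arrays.asList("my_experiment"));

    // hasExperiment() is a membership check over the experiments list.
    System.out.println(DataflowRunner.hasExperiment(debugOptions, "my_experiment"));    // true
    System.out.println(DataflowRunner.hasExperiment(debugOptions, "other_experiment")); // false
  }
}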
Example 1
Source File: SourceOperationExecutorFactory.java, from Apache Beam (Apache License 2.0)
public static SourceOperationExecutor create(
    PipelineOptions options,
    SourceOperationRequest request,
    CounterSet counters,
    DataflowExecutionContext<?> executionContext,
    String stageName)
    throws Exception {
  boolean beamFnApi =
      DataflowRunner.hasExperiment(options.as(DataflowPipelineDebugOptions.class), "beam_fn_api");

  Preconditions.checkNotNull(request, "SourceOperationRequest must be non-null");
  Preconditions.checkNotNull(executionContext, "executionContext must be non-null");

  // Disable splitting when fn api is enabled.
  // TODO: Fix this once source splitting is supported.
  if (beamFnApi) {
    return new NoOpSourceOperationExecutor(request);
  } else {
    DataflowOperationContext operationContext =
        executionContext.createOperationContext(
            NameContext.create(
                stageName, request.getOriginalName(), request.getSystemName(), request.getName()));
    return new WorkerCustomSourceOperationExecutor(
        options, request, counters, executionContext, operationContext);
  }
}
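The branch in create() hinges on a single flag check. Below is a minimal sketch of just that check; the option wiring is added here for illustration and is not part of the original example.

PipelineOptions options = PipelineOptionsFactory.create();
options.as(DataflowPipelineDebugOptions.class).setExperiments(Arrays.asList("beam_fn_api"));

// Mirrors the check inside create(): when "beam_fn_api" is present, the factory returns
// the no-op executor and source splitting is skipped.
boolean beamFnApi =
    DataflowRunner.hasExperiment(options.as(DataflowPipelineDebugOptions.class), "beam_fn_api");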
Example 2
Source File: ServerStreamObserverFactory.java, from Apache Beam (Apache License 2.0)
public static ServerStreamObserverFactory fromOptions(PipelineOptions options) {
  DataflowPipelineDebugOptions dataflowOptions = options.as(DataflowPipelineDebugOptions.class);
  if (DataflowRunner.hasExperiment(dataflowOptions, "beam_fn_api_buffered_stream")) {
    int bufferSize = Buffered.DEFAULT_BUFFER_SIZE;
    for (String experiment : dataflowOptions.getExperiments()) {
      if (experiment.startsWith("beam_fn_api_buffered_stream_buffer_size=")) {
        bufferSize =
            Integer.parseInt(
                experiment.substring("beam_fn_api_buffered_stream_buffer_size=".length()));
      }
    }
    return new Buffered(options.as(GcsOptions.class).getExecutorService(), bufferSize);
  }
  return new Direct();
}
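The buffered path above is only taken when the corresponding experiments are present on the options. A minimal sketch of wiring them up before calling fromOptions(); the 1048576-byte buffer size is an illustrative value, not a Beam default.

PipelineOptions options = PipelineOptionsFactory.create();
options
    .as(DataflowPipelineDebugOptions.class)
    .setExperiments(
        Arrays.asList(
            "beam_fn_api_buffered_stream",                       // selects the Buffered variant
            "beam_fn_api_buffered_stream_buffer_size=1048576")); // overrides the default size

ServerStreamObserverFactory factory = ServerStreamObserverFactory.fromOptions(options);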
Example 3
Source File: BatchDataflowWorker.java, from Apache Beam (Apache License 2.0)
protected BatchDataflowWorker(
    @Nullable RunnerApi.Pipeline pipeline,
    SdkHarnessRegistry sdkHarnessRegistry,
    WorkUnitClient workUnitClient,
    DataflowMapTaskExecutorFactory mapTaskExecutorFactory,
    DataflowWorkerHarnessOptions options) {
  this.mapTaskExecutorFactory = mapTaskExecutorFactory;
  this.sdkHarnessRegistry = sdkHarnessRegistry;
  this.workUnitClient = workUnitClient;
  this.options = options;

  this.sideInputDataCache =
      CacheBuilder.newBuilder()
          .maximumWeight(options.getWorkerCacheMb() * MEGABYTES) // weights are in bytes
          .weigher(Weighers.fixedWeightKeys(OVERHEAD_WEIGHT))
          .softValues()
          .concurrencyLevel(CACHE_CONCURRENCY_LEVEL)
          .build();

  this.sideInputWeakReferenceCache =
      CacheBuilder.newBuilder()
          .maximumSize(MAX_LOGICAL_REFERENCES)
          .weakValues()
          .concurrencyLevel(CACHE_CONCURRENCY_LEVEL)
          .build();

  this.memoryMonitor = MemoryMonitor.fromOptions(options);
  this.statusPages =
      WorkerStatusPages.create(
          DEFAULT_STATUS_PORT, this.memoryMonitor, sdkHarnessRegistry::sdkHarnessesAreHealthy);

  if (!DataflowRunner.hasExperiment(options, "disable_debug_capture")) {
    this.debugCaptureManager =
        initializeAndStartDebugCaptureManager(options, statusPages.getDebugCapturePages());
  }

  // TODO: this conditional -> two implementations of common interface, or
  // param/injection
  if (DataflowRunner.hasExperiment(options, "beam_fn_api")) {
    Function<MutableNetwork<Node, Edge>, MutableNetwork<Node, Edge>> transformToRunnerNetwork;
    Function<MutableNetwork<Node, Edge>, Node> sdkFusedStage;
    Function<MutableNetwork<Node, Edge>, MutableNetwork<Node, Edge>> lengthPrefixUnknownCoders =
        LengthPrefixUnknownCoders::forSdkNetwork;
    if (DataflowRunner.hasExperiment(options, "use_executable_stage_bundle_execution")) {
      sdkFusedStage = new CreateExecutableStageNodeFunction(pipeline, idGenerator);
      transformToRunnerNetwork =
          new CreateRegisterFnOperationFunction(
              idGenerator,
              this::createPortNode,
              lengthPrefixUnknownCoders.andThen(sdkFusedStage),
              true);
    } else {
      sdkFusedStage =
          pipeline == null
              ? RegisterNodeFunction.withoutPipeline(
                  idGenerator,
                  sdkHarnessRegistry.beamFnStateApiServiceDescriptor(),
                  sdkHarnessRegistry.beamFnDataApiServiceDescriptor())
              : RegisterNodeFunction.forPipeline(
                  pipeline,
                  idGenerator,
                  sdkHarnessRegistry.beamFnStateApiServiceDescriptor(),
                  sdkHarnessRegistry.beamFnDataApiServiceDescriptor());
      transformToRunnerNetwork =
          new CreateRegisterFnOperationFunction(
              idGenerator,
              this::createPortNode,
              lengthPrefixUnknownCoders.andThen(sdkFusedStage),
              false);
    }
    mapTaskToNetwork =
        mapTaskToBaseNetwork
            .andThen(new ReplacePgbkWithPrecombineFunction())
            .andThen(new DeduceNodeLocationsFunction())
            .andThen(new DeduceFlattenLocationsFunction())
            .andThen(new CloneAmbiguousFlattensFunction())
            .andThen(transformToRunnerNetwork)
            .andThen(LengthPrefixUnknownCoders::andReplaceForRunnerNetwork);
  } else {
    mapTaskToNetwork = mapTaskToBaseNetwork;
  }

  this.memoryMonitorThread = startMemoryMonitorThread(memoryMonitor);

  ExecutionStateSampler.instance().start();
}
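The constructor above checks three experiments ("disable_debug_capture", "beam_fn_api", and "use_executable_stage_bundle_execution") on the same options object. The sketch below shows how those flags could be set; in practice the worker harness receives its options from the Dataflow service, so the manual wiring here is purely illustrative.

DataflowWorkerHarnessOptions workerOptions =
    PipelineOptionsFactory.create().as(DataflowWorkerHarnessOptions.class);
workerOptions.setExperiments(
    Arrays.asList("beam_fn_api", "use_executable_stage_bundle_execution"));

// With these flags the constructor takes the executable-stage branch of the fn-api path,
// and debug capture stays enabled because "disable_debug_capture" is absent.
boolean fnApi = DataflowRunner.hasExperiment(workerOptions, "beam_fn_api");
boolean executableStages =
    DataflowRunner.hasExperiment(workerOptions, "use_executable_stage_bundle_execution");
boolean debugCaptureDisabled =
    DataflowRunner.hasExperiment(workerOptions, "disable_debug_capture"); // false here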
Example 4
Source File: GroupingShuffleReaderFactory.java, from Apache Beam (Apache License 2.0)
/** Returns true if we should inject errors in the shuffle read bytes counter for testing. */
private static boolean shouldUseGroupingShuffleReaderWithFaultyBytesReadCounter(
    PipelineOptions options) {
  return DataflowRunner.hasExperiment(
      options.as(DataflowPipelineDebugOptions.class), "inject_shuffle_read_count_error");
}
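Since the helper above is private, the flag would typically be enabled on the options rather than by calling the method directly. A minimal sketch of that setup (the manual option wiring is for illustration only):

DataflowPipelineDebugOptions debugOptions =
    PipelineOptionsFactory.create().as(DataflowPipelineDebugOptions.class);
debugOptions.setExperiments(Arrays.asList("inject_shuffle_read_count_error"));

// The private helper above would now evaluate to true, selecting the reader variant that
// injects errors into the shuffle read bytes counter.
boolean injectErrors =
    DataflowRunner.hasExperiment(debugOptions, "inject_shuffle_read_count_error");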