Java Code Examples for org.apache.beam.sdk.transforms.Create#of()
The following examples show how to use org.apache.beam.sdk.transforms.Create#of(). All of them come from the Apache Beam project; the source file and license for each example are noted above its code.
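Before the examples, here is a minimal, self-contained sketch of the pattern most of them follow: build a Create.Values transform from in-memory elements, apply it to a TestPipeline, and assert on the resulting PCollection with PAssert. The class and test names in this sketch are illustrative and not taken from the Beam sources.

import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.values.PCollection;
import org.junit.Rule;
import org.junit.Test;

public class CreateOfSketchTest {

  // JUnit-friendly pipeline, the same kind of testPipeline used in the examples below.
  @Rule public final transient TestPipeline testPipeline = TestPipeline.create();

  @Test
  public void createOfTurnsElementsIntoAPCollection() {
    // Create.of(...) builds a bounded PCollection from the given in-memory elements.
    PCollection<String> words = testPipeline.apply(Create.of("apple", "ball", "car"));

    // PAssert verifies the contents when the pipeline runs.
    PAssert.that(words).containsInAnyOrder("apple", "ball", "car");

    testPipeline.run().waitUntilFinish();
  }
}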
Example 1
Source File: TaskTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void groupByKey() {
  Create.Values<String> values = Create.of("apple", "ball", "car", "bear", "cheetah", "ant");

  PCollection<String> numbers = testPipeline.apply(values);

  PCollection<KV<String, Iterable<String>>> results = Task.applyTransform(numbers);

  PAssert.that(results)
      .satisfies(
          containsKvs(
              KV.of("a", ImmutableList.of("apple", "ant")),
              KV.of("b", ImmutableList.of("ball", "bear")),
              KV.of("c", ImmutableList.of("car", "cheetah"))));

  testPipeline.run().waitUntilFinish();
}
Example 2
Source File: TaskTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void branching() {
  Create.Values<Integer> values = Create.of(1, 2, 3, 4, 5);

  PCollection<Integer> numbers = testPipeline.apply(values);

  PCollection<Integer> mult5Results = Task.applyMultiply5Transform(numbers);
  PCollection<Integer> mult10Results = Task.applyMultiply10Transform(numbers);

  PAssert.that(mult5Results).containsInAnyOrder(5, 10, 15, 20, 25);
  PAssert.that(mult10Results).containsInAnyOrder(10, 20, 30, 40, 50);

  testPipeline.run().waitUntilFinish();
}
Example 3
Source File: CacheTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void shouldCacheTest() {
  SparkPipelineOptions options = createOptions();
  options.setCacheDisabled(true);
  Pipeline pipeline = Pipeline.create(options);

  Values<String> valuesTransform = Create.of("foo", "bar");
  PCollection pCollection = mock(PCollection.class);

  JavaSparkContext jsc = SparkContextFactory.getSparkContext(options);
  EvaluationContext ctxt = new EvaluationContext(jsc, pipeline, options);
  ctxt.getCacheCandidates().put(pCollection, 2L);

  assertFalse(ctxt.shouldCache(valuesTransform, pCollection));

  options.setCacheDisabled(false);
  assertTrue(ctxt.shouldCache(valuesTransform, pCollection));

  GroupByKey<String, String> gbkTransform = GroupByKey.create();
  assertFalse(ctxt.shouldCache(gbkTransform, pCollection));
}
Example 4
Source File: TaskTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void parDo() {
  Create.Values<String> lines =
      Create.of(
          "apple orange grape banana apple banana",
          "banana orange banana papaya");

  PCollection<String> linesPColl = testPipeline.apply(lines);

  PCollection<String> results = Task.applyTransform(linesPColl);

  PAssert.that(results)
      .containsInAnyOrder("apple:2", "banana:4", "grape:1", "orange:2", "papaya:1");

  testPipeline.run().waitUntilFinish();
}
Example 5
Source File: SdkComponentsTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void registerTransformNoChildren() throws IOException {
  Create.Values<Integer> create = Create.of(1, 2, 3);
  PCollection<Integer> pt = pipeline.apply(create);
  String userName = "my_transform/my_nesting";

  AppliedPTransform<?, ?, ?> transform =
      AppliedPTransform.of(userName, pipeline.begin().expand(), pt.expand(), create, pipeline);

  String componentName = components.registerPTransform(transform, Collections.emptyList());
  assertThat(componentName, equalTo(userName));
  assertThat(components.getExistingPTransformId(transform), equalTo(componentName));
}
Example 6
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void min() {
  Create.Values<Integer> values = Create.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

  PCollection<Integer> numbers = testPipeline.apply(values);

  PCollection<Integer> results = Task.applyTransform(numbers);

  PAssert.that(results).containsInAnyOrder(1);

  testPipeline.run().waitUntilFinish();
}
Example 7
Source File: SdkComponentsTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void registerTransformEmptyFullName() throws IOException {
  Create.Values<Integer> create = Create.of(1, 2, 3);
  PCollection<Integer> pt = pipeline.apply(create);

  AppliedPTransform<?, ?, ?> transform =
      AppliedPTransform.of("", pipeline.begin().expand(), pt.expand(), create, pipeline);

  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(transform.toString());
  components.getExistingPTransformId(transform);
}
Example 8
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void sum() {
  Create.Values<Integer> values = Create.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

  PCollection<Integer> numbers = testPipeline.apply(values);

  PCollection<Integer> results = Task.applyTransform(numbers);

  PAssert.that(results).containsInAnyOrder(55);

  testPipeline.run().waitUntilFinish();
}
Example 9
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void filter_parDo() {
  Create.Values<Integer> values = Create.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

  PCollection<Integer> numbers = testPipeline.apply(values);

  PCollection<Integer> results = Task.applyTransform(numbers);

  PAssert.that(results).containsInAnyOrder(1, 3, 5, 7, 9);

  testPipeline.run().waitUntilFinish();
}
Example 10
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void filter() {
  Create.Values<Integer> values = Create.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

  PCollection<Integer> numbers = testPipeline.apply(values);

  PCollection<Integer> results = Task.applyTransform(numbers);

  PAssert.that(results).containsInAnyOrder(2, 4, 6, 8, 10);

  testPipeline.run().waitUntilFinish();
}
Example 11
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void compositeTransform() {
  Create.Values<String> values = Create.of("1,2,3,4,5", "6,7,8,9,10");

  PCollection<Integer> results =
      testPipeline
          .apply(values)
          .apply(new ExtractAndMultiplyNumbers());

  PAssert.that(results).containsInAnyOrder(10, 20, 30, 40, 50, 60, 70, 80, 90, 100);

  testPipeline.run().waitUntilFinish();
}
Example 12
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void parDo() {
  Create.Values<Integer> values = Create.of(1, 2, 3, 4, 5);

  PCollection<Integer> numbers = testPipeline.apply(values);

  PCollection<Integer> results = Task.applyTransform(numbers);

  PAssert.that(results).containsInAnyOrder(10, 20, 30, 40, 50);

  testPipeline.run().waitUntilFinish();
}
Example 13
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void flatMapElements() {
  Create.Values<String> values = Create.of("Apache Beam", "Unified Batch and Streaming");

  PCollection<String> numbers = testPipeline.apply(values);

  PCollection<String> results = Task.applyTransform(numbers);

  PAssert.that(results)
      .containsInAnyOrder("Apache", "Beam", "Unified", "Batch", "and", "Streaming");

  testPipeline.run().waitUntilFinish();
}
Example 14
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void parDo_oneToMany() {
  Create.Values<String> values = Create.of("Hello Beam", "It is awesome");

  PCollection<String> numbers = testPipeline.apply(values);

  PCollection<String> results = Task.applyTransform(numbers);

  PAssert.that(results).containsInAnyOrder("Hello", "Beam", "It", "is", "awesome");

  testPipeline.run().waitUntilFinish();
}
Example 15
Source File: StructuredStreamingPipelineStateTest.java From beam with Apache License 2.0 | 5 votes |
private PTransform<PBegin, PCollection<String>> getValues(
    final SparkStructuredStreamingPipelineOptions options) {
  final boolean doNotSyncWithWatermark = false;
  return options.isStreaming()
      ? CreateStream.of(StringUtf8Coder.of(), Duration.millis(1), doNotSyncWithWatermark)
          .nextBatch("one", "two")
      : Create.of("one", "two");
}
Example 16
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void combine_binaryCombineFn() {
  Create.Values<BigInteger> values =
      Create.of(
          BigInteger.valueOf(10),
          BigInteger.valueOf(20),
          BigInteger.valueOf(30),
          BigInteger.valueOf(40),
          BigInteger.valueOf(50));

  PCollection<BigInteger> numbers = testPipeline.apply(values);

  PCollection<BigInteger> results = Task.applyTransform(numbers);

  PAssert.that(results).containsInAnyOrder(BigInteger.valueOf(150));

  testPipeline.run().waitUntilFinish();
}
Example 17
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void combine_binaryCombineFn_lambda() {
  Create.Values<BigInteger> values =
      Create.of(
          BigInteger.valueOf(10),
          BigInteger.valueOf(20),
          BigInteger.valueOf(30),
          BigInteger.valueOf(40),
          BigInteger.valueOf(50));

  PCollection<BigInteger> numbers = testPipeline.apply(values);

  PCollection<BigInteger> results = Task.applyTransform(numbers);

  PAssert.that(results).containsInAnyOrder(BigInteger.valueOf(150));

  testPipeline.run().waitUntilFinish();
}
Example 18
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void combine_simpleFn() {
  Create.Values<Integer> values = Create.of(10, 30, 50, 70, 90);

  PCollection<Integer> numbers = testPipeline.apply(values);

  PCollection<Integer> results = Task.applyTransform(numbers);

  PAssert.that(results).containsInAnyOrder(250);

  testPipeline.run().waitUntilFinish();
}
Example 19
Source File: TaskTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void combine_combineFn() {
  Create.Values<Integer> values = Create.of(10, 20, 50, 70, 90);

  PCollection<Integer> numbers = testPipeline.apply(values);

  PCollection<Double> results = Task.applyTransform(numbers);

  PAssert.that(results).containsInAnyOrder(48.0);

  testPipeline.run().waitUntilFinish();
}
Example 20
Source File: SparkPipelineStateTest.java From beam with Apache License 2.0 | 5 votes |
private PTransform<PBegin, PCollection<String>> getValues(final SparkPipelineOptions options) {
  final boolean doNotSyncWithWatermark = false;
  return options.isStreaming()
      ? CreateStream.of(StringUtf8Coder.of(), Duration.millis(1), doNotSyncWithWatermark)
          .nextBatch("one", "two")
      : Create.of("one", "two");
}
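The calls above let Beam infer a coder from the element type. When that inference is not possible, for example with an empty input, Create.of can be paired with an explicit coder via withCoder. A minimal sketch, assuming the same testPipeline setup as the examples above and imports for java.util.Collections and org.apache.beam.sdk.coders.StringUtf8Coder:

// Sketch only: supplying an explicit coder because Beam cannot infer one
// from an empty Iterable.
PCollection<String> empty =
    testPipeline.apply(
        Create.of(Collections.<String>emptyList())
            .withCoder(StringUtf8Coder.of()));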