org.apache.beam.sdk.testing.TestPipeline Java Examples
The following examples show how to use
org.apache.beam.sdk.testing.TestPipeline.
These examples are extracted from open source projects; the source file and license for each are noted above its code.
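Before the examples, a minimal sketch of the typical usage pattern may help, assuming a plain JUnit 4 test class (the class name and values here are illustrative, not taken from any project below): TestPipeline is declared as a @Rule, transforms are applied to it as to a regular Pipeline, expectations are declared with PAssert, and run() executes the pipeline on the configured test runner.

import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.values.PCollection;
import org.junit.Rule;
import org.junit.Test;

public class WordCountSketchTest {

  // As a rule, TestPipeline picks up the test pipeline options, enforces that
  // the test either runs the pipeline or explicitly abandons it, and cleans up.
  @Rule public final transient TestPipeline pipeline = TestPipeline.create();

  @Test
  public void testCountsElements() {
    PCollection<Long> count =
        pipeline.apply(Create.of("foo", "bar", "baz")).apply(Count.globally());

    // Expectations are declared before run(); they are checked during execution.
    PAssert.thatSingleton(count).isEqualTo(3L);

    pipeline.run().waitUntilFinish();
  }
}

Many of the integration tests below do not use the rule; they call TestPipeline.testingPipelineOptions() directly to build options for a pipeline they construct themselves.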
Example #1
Source File: MongoDBIOIT.java From beam with Apache License 2.0
@BeforeClass
public static void setUp() {
  PipelineOptionsFactory.register(MongoDBPipelineOptions.class);
  options = TestPipeline.testingPipelineOptions().as(MongoDBPipelineOptions.class);
  collection = String.format("test_%s", new Date().getTime());
  bigQueryDataset = options.getBigQueryDataset();
  bigQueryTable = options.getBigQueryTable();
  mongoUrl =
      String.format("mongodb://%s:%s", options.getMongoDBHostName(), options.getMongoDBPort());
  mongoClient = MongoClients.create(mongoUrl);
  settings =
      InfluxDBSettings.builder()
          .withHost(options.getInfluxHost())
          .withDatabase(options.getInfluxDatabase())
          .withMeasurement(options.getInfluxMeasurement())
          .get();
}
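Example #1 registers a custom options interface and then views the shared testing options through it with .as(...). A rough sketch of what such an interface looks like follows; the getters, defaults, and the TestPipelineOptions parent are illustrative assumptions, not the actual MongoDBPipelineOptions:

import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.Description;
import org.apache.beam.sdk.testing.TestPipelineOptions;

/** Hypothetical options interface in the style of the IO integration tests. */
public interface MongoDBPipelineOptions extends TestPipelineOptions {
  @Description("MongoDB host name")
  @Default.String("localhost")
  String getMongoDBHostName();

  void setMongoDBHostName(String value);

  @Description("Port for MongoDB")
  @Default.Integer(27017)
  Integer getMongoDBPort();

  void setMongoDBPort(Integer value);
}

PipelineOptionsFactory.register makes these getters parseable from command-line flags, and .as() projects the same underlying options object onto the interface.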
Example #2
Source File: DataflowPTransformMatchersTest.java From beam with Apache License 2.0
/** Creates a simple pipeline with a {@link Combine.PerKey} with side inputs. */
private static TestPipeline createCombinePerKeyWithSideInputsPipeline() {
  TestPipeline pipeline = TestPipeline.create().enableAbandonedNodeEnforcement(false);
  PCollection<KV<String, Integer>> input =
      pipeline
          .apply(Create.of(KV.of("key", 1)))
          .setCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()));
  PCollection<String> sideInput = pipeline.apply(Create.of("side input"));
  PCollectionView<String> sideInputView = sideInput.apply(View.asSingleton());

  input.apply(
      Combine.<String, Integer, Integer>perKey(new SumCombineFnWithContext())
          .withSideInputs(sideInputView));

  return pipeline;
}
Example #3
Source File: TestDataflowRunnerTest.java From beam with Apache License 2.0
/** A streaming job that terminates with no error messages is a success. */
@Test
public void testRunStreamingJobUsingPAssertThatSucceeds() throws Exception {
  options.setStreaming(true);
  Pipeline p = TestPipeline.create(options);
  PCollection<Integer> pc = p.apply(Create.of(1, 2, 3));
  PAssert.that(pc).containsInAnyOrder(1, 2, 3);

  DataflowPipelineJob mockJob = Mockito.mock(DataflowPipelineJob.class);
  when(mockJob.getState()).thenReturn(State.DONE);
  when(mockJob.waitUntilFinish(any(Duration.class), any(JobMessagesHandler.class)))
      .thenReturn(State.DONE);
  when(mockJob.getProjectId()).thenReturn("test-project");
  when(mockJob.getJobId()).thenReturn("test-job");

  DataflowRunner mockRunner = Mockito.mock(DataflowRunner.class);
  when(mockRunner.run(any(Pipeline.class))).thenReturn(mockJob);

  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(generateMockMetricResponse(true /* success */, true /* tentative */));
  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  runner.run(p, mockRunner);
}
Example #4
Source File: UnboundedEventSourceTest.java From beam with Apache License 2.0
/**
 * Check aggressively checkpointing and resuming a reader gives us exactly the same event stream
 * as reading directly.
 */
@Ignore("TODO(BEAM-5070) Test is flaky. Fix before reenabling.")
@Test
public void resumeFromCheckpoint() throws IOException {
  Random random = new Random(297);
  int n = 47293;
  GeneratorConfig config = makeConfig(n);
  Generator modelGenerator = new Generator(config);

  EventIdChecker checker = new EventIdChecker();
  PipelineOptions options = TestPipeline.testingPipelineOptions();
  UnboundedEventSource source = new UnboundedEventSource(config, 1, 0, false);
  UnboundedReader<Event> reader = source.createReader(options, null);

  while (n > 0) {
    int m = Math.min(459 + random.nextInt(455), n);
    System.out.printf("reading %d...%n", m);
    checker.add(m, reader, modelGenerator);
    n -= m;
    System.out.printf("splitting with %d remaining...%n", n);
    CheckpointMark checkpointMark = reader.getCheckpointMark();
    reader = source.createReader(options, (GeneratorCheckpoint) checkpointMark);
  }

  assertFalse(reader.advance());
}
Example #5
Source File: ElasticsearchIOIT.java From beam with Apache License 2.0
@BeforeClass
public static void beforeClass() throws Exception {
  PipelineOptionsFactory.register(ElasticsearchPipelineOptions.class);
  options = TestPipeline.testingPipelineOptions().as(ElasticsearchPipelineOptions.class);
  readConnectionConfiguration =
      ElasticsearchIOITCommon.getConnectionConfiguration(
          options, ElasticsearchIOITCommon.IndexMode.READ);
  writeConnectionConfiguration =
      ElasticsearchIOITCommon.getConnectionConfiguration(
          options, ElasticsearchIOITCommon.IndexMode.WRITE);
  updateConnectionConfiguration =
      ElasticsearchIOITCommon.getConnectionConfiguration(
          options, ElasticsearchIOITCommon.IndexMode.WRITE_PARTIAL);
  restClient = readConnectionConfiguration.createClient();
  elasticsearchIOTestCommon =
      new ElasticsearchIOTestCommon(readConnectionConfiguration, restClient, true);
}
Example #6
Source File: BigQueryIOWriteTest.java From beam with Apache License 2.0
@Override
public Statement apply(final Statement base, final Description description) {
  // We need to set up the temporary folder, and then set up the TestPipeline based on the
  // chosen folder. Unfortunately, since rule evaluation order is unspecified and unrelated
  // to field order, and is separate from construction, that requires manually creating this
  // TestRule.
  Statement withPipeline =
      new Statement() {
        @Override
        public void evaluate() throws Throwable {
          options = TestPipeline.testingPipelineOptions();
          options.as(BigQueryOptions.class).setProject("project-id");
          options
              .as(BigQueryOptions.class)
              .setTempLocation(testFolder.getRoot().getAbsolutePath());
          p = TestPipeline.fromOptions(options);
          p.apply(base, description).evaluate();
        }
      };
  return testFolder.apply(withPipeline, description);
}
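For contrast, a minimal sketch of the naive wiring that the comment above rules out, assuming JUnit 4 (class and field names are illustrative). Both rule fields are constructed when the test instance is created, before either rule is evaluated, so the pipeline's options cannot depend on the folder's path; RuleChain would order evaluation but not construction, which is why the example builds the TestPipeline inside evaluate():

import org.apache.beam.sdk.testing.TestPipeline;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;

public class NaiveRuleOrderingTest {
  // Constructed together at instance creation: p cannot be configured from
  // testFolder.getRoot(), because the folder does not exist yet.
  @Rule public transient TemporaryFolder testFolder = new TemporaryFolder();
  @Rule public final transient TestPipeline p = TestPipeline.create();
}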
Example #7
Source File: BigQueryToTableIT.java From beam with Apache License 2.0
@BeforeClass
public static void setupTestEnvironment() throws Exception {
  PipelineOptionsFactory.register(BigQueryToTableOptions.class);
  project = TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject();

  // Create one BQ dataset for all test cases.
  BQ_CLIENT.createNewDataset(project, BIG_QUERY_DATASET_ID);

  // Create table and insert data for new type query test cases.
  BQ_CLIENT.createNewTable(
      project,
      BIG_QUERY_DATASET_ID,
      new Table()
          .setSchema(BigQueryToTableIT.NEW_TYPES_QUERY_TABLE_SCHEMA)
          .setTableReference(
              new TableReference()
                  .setTableId(BigQueryToTableIT.NEW_TYPES_QUERY_TABLE_NAME)
                  .setDatasetId(BIG_QUERY_DATASET_ID)
                  .setProjectId(project)));
  BQ_CLIENT.insertDataToTable(
      project,
      BIG_QUERY_DATASET_ID,
      BigQueryToTableIT.NEW_TYPES_QUERY_TABLE_NAME,
      BigQueryToTableIT.NEW_TYPES_QUERY_TABLE_DATA);
}
Example #8
Source File: DirectGraphVisitorTest.java From beam with Apache License 2.0
@Test
public void getViewsReturnsViews() {
  PCollectionView<List<String>> listView =
      p.apply("listCreate", Create.of("foo", "bar"))
          .apply(
              ParDo.of(
                  new DoFn<String, String>() {
                    @ProcessElement
                    public void processElement(DoFn<String, String>.ProcessContext c)
                        throws Exception {
                      c.output(Integer.toString(c.element().length()));
                    }
                  }))
          .apply(View.asList());
  PCollectionView<Object> singletonView =
      p.apply("singletonCreate", Create.<Object>of(1, 2, 3)).apply(View.asSingleton());
  p.replaceAll(
      DirectRunner.fromOptions(TestPipeline.testingPipelineOptions())
          .defaultTransformOverrides());
  p.traverseTopologically(visitor);
  assertThat(
      visitor.getGraph().getViews(), Matchers.containsInAnyOrder(listView, singletonView));
}
Example #9
Source File: PubsubToPubsubTest.java From DataflowTemplates with Apache License 2.0
/** Tests whether only the valid messages flow through when a filter is provided. */
@Test
@Category(NeedsRunner.class)
public void testInputFilterProvided() {
  PubsubToPubsub.Options options =
      TestPipeline.testingPipelineOptions().as(PubsubToPubsub.Options.class);
  PCollection<Long> pc =
      pipeline
          .apply(Create.of(allTestMessages))
          .apply(
              ParDo.of(
                  ExtractAndFilterEventsFn.newBuilder()
                      .withFilterKey(options.getFilterKey())
                      .withFilterValue(options.getFilterValue())
                      .build()))
          .apply(Count.globally());

  PAssert.thatSingleton(pc).isEqualTo(Long.valueOf(goodTestMessages.size()));

  // The filter key/value are ValueProviders, read at execution time, so they
  // can still be set here, after the graph has been constructed.
  options.setFilterKey(ValueProvider.StaticValueProvider.of(FILTER_KEY));
  options.setFilterValue(ValueProvider.StaticValueProvider.of(FILTER_VALUE));

  pipeline.run(options);
}
Example #10
Source File: TestDataflowRunnerTest.java From beam with Apache License 2.0
@Test
public void testCheckingForSuccessSkipsNonTentativeMetrics() throws Exception {
  DataflowPipelineJob job = spy(new DataflowPipelineJob(mockClient, "test-job", options, null));
  Pipeline p = TestPipeline.create(options);
  PCollection<Integer> pc = p.apply(Create.of(1, 2, 3));
  PAssert.that(pc).containsInAnyOrder(1, 2, 3);

  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(
          buildJobMetrics(generateMockMetrics(true /* success */, false /* tentative */)));

  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  runner.updatePAssertCount(p);
  doReturn(State.RUNNING).when(job).getState();
  assertThat(runner.checkForPAssertSuccess(job), equalTo(Optional.<Boolean>absent()));
}
Example #11
Source File: ExportTimestampTest.java From DataflowTemplates with Apache License 2.0
private void compareDbs(String sourceDb, String destDb, TestPipeline comparePipeline) {
  SpannerConfig sourceConfig = spannerServer.getSpannerConfig(sourceDb);
  SpannerConfig copyConfig = spannerServer.getSpannerConfig(destDb);
  PCollection<Long> mismatchCount =
      comparePipeline.apply("Compare", new CompareDatabases(sourceConfig, copyConfig));
  PAssert.that(mismatchCount)
      .satisfies(
          (x) -> {
            assertEquals(Lists.newArrayList(x), Lists.newArrayList(0L));
            return null;
          });
  PipelineResult compareResult = comparePipeline.run();
  compareResult.waitUntilFinish();

  Ddl sourceDdl = readDdl(sourceDb);
  Ddl destinationDdl = readDdl(destDb);

  assertThat(sourceDdl.prettyPrint(), equalToIgnoringWhiteSpace(destinationDdl.prettyPrint()));
}
Example #12
Source File: BigQueryIOStorageReadTest.java From beam with Apache License 2.0
@Override
public Statement apply(Statement base, Description description) {
  // We need to set up the temporary folder, and then set up the TestPipeline based on the
  // chosen folder. Unfortunately, since rule evaluation order is unspecified and unrelated
  // to field order, and is separate from construction, that requires manually creating this
  // TestRule.
  Statement withPipeline =
      new Statement() {
        @Override
        public void evaluate() throws Throwable {
          options = TestPipeline.testingPipelineOptions();
          options.as(BigQueryOptions.class).setProject("project-id");
          options
              .as(BigQueryOptions.class)
              .setTempLocation(testFolder.getRoot().getAbsolutePath());
          p = TestPipeline.fromOptions(options);
          p.apply(base, description).evaluate();
        }
      };
  return testFolder.apply(withPipeline, description);
}
Example #13
Source File: AbstractOperatorTest.java From beam with Apache License 2.0
/**
 * Run all tests with given runner.
 *
 * @param tc the test case to executeSync
 */
@SuppressWarnings("unchecked")
public <T> void execute(TestCase<T> tc) {
  final SingleJvmAccumulatorProvider.Factory accumulatorProvider =
      SingleJvmAccumulatorProvider.Factory.get();
  final PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
  final EuphoriaOptions euphoriaOptions = pipelineOptions.as(EuphoriaOptions.class);
  euphoriaOptions.setAccumulatorProviderFactory(accumulatorProvider);
  final Pipeline pipeline = TestPipeline.create(pipelineOptions);
  pipeline.getCoderRegistry().registerCoderForClass(Object.class, KryoCoder.of(pipelineOptions));
  final PCollection<T> output = tc.getOutput(pipeline);
  tc.validate(output);
  pipeline.run().waitUntilFinish();
  tc.validateAccumulators(accumulatorProvider);
}
Example #14
Source File: ProvidedSparkContextTest.java From beam with Apache License 2.0
private void testWithValidProvidedContext(JavaSparkContext jsc) throws Exception {
  SparkContextOptions options = getSparkContextOptions(jsc);

  Pipeline p = Pipeline.create(options);
  PCollection<String> inputWords = p.apply(Create.of(WORDS).withCoder(StringUtf8Coder.of()));
  PCollection<String> output =
      inputWords
          .apply(new WordCount.CountWords())
          .apply(MapElements.via(new WordCount.FormatAsTextFn()));

  PAssert.that(output).containsInAnyOrder(EXPECTED_COUNT_SET);

  // Run test from pipeline
  PipelineResult result = p.run();

  TestPipeline.verifyPAssertsSucceeded(p, result);
}
Example #15
Source File: HCatalogIOIT.java From beam with Apache License 2.0
@BeforeClass
public static void setup() throws Exception {
  PipelineOptionsFactory.register(HCatalogPipelineOptions.class);
  options = TestPipeline.testingPipelineOptions().as(HCatalogPipelineOptions.class);

  final String metastoreUri =
      String.format(
          "thrift://%s:%s",
          options.getHCatalogMetastoreHostName(), options.getHCatalogMetastorePort());
  configProperties = ImmutableMap.of("hive.metastore.uris", metastoreUri);

  helper =
      new HiveDatabaseTestHelper(
          options.getHCatalogMetastoreHostName(),
          options.getHCatalogHivePort(),
          options.getHCatalogHiveDatabaseName(),
          options.getHCatalogHiveUsername(),
          options.getHCatalogHivePassword());

  try {
    tableName = helper.createHiveTable(testIdentifier);
  } catch (Exception e) {
    helper.closeConnection();
    throw new Exception("Problem with creating table for " + testIdentifier + ": " + e, e);
  }
}
Example #16
Source File: WindowedWordCountIT.java From beam with Apache License 2.0
private WindowedWordCountITOptions defaultOptions() throws Exception {
  WindowedWordCountITOptions options =
      TestPipeline.testingPipelineOptions().as(WindowedWordCountITOptions.class);
  options.setInputFile(DEFAULT_INPUT);
  options.setTestTimeoutSeconds(1200L);

  options.setMinTimestampMillis(0L);
  options.setMaxTimestampMillis(Duration.standardHours(1).getMillis());
  options.setWindowSize(10);

  options.setOutput(
      FileSystems.matchNewResource(options.getTempRoot(), true)
          .resolve(
              String.format(
                  "WindowedWordCountIT.%s-%tFT%<tH:%<tM:%<tS.%<tL+%s",
                  testName.getMethodName(), new Date(), ThreadLocalRandom.current().nextInt()),
              StandardResolveOptions.RESOLVE_DIRECTORY)
          .resolve("output", StandardResolveOptions.RESOLVE_DIRECTORY)
          .resolve("results", StandardResolveOptions.RESOLVE_FILE)
          .toString());
  return options;
}
Example #17
Source File: PCollectionListTest.java From beam with Apache License 2.0
@Test
public void testEquals() {
  Pipeline p = TestPipeline.create();
  PCollection<String> first = p.apply("Meta", Create.of("foo", "bar"));
  PCollection<String> second = p.apply("Pythonic", Create.of("spam, ham"));
  PCollection<String> third = p.apply("Syntactic", Create.of("eggs", "baz"));

  EqualsTester tester = new EqualsTester();
  //    tester.addEqualityGroup(PCollectionList.empty(p), PCollectionList.empty(p));
  //    tester.addEqualityGroup(PCollectionList.of(first).and(second));

  // Constructors should all produce equivalent lists.
  tester.addEqualityGroup(
      PCollectionList.of(first).and(second).and(third),
      PCollectionList.of(first).and(second).and(third),
      //        PCollectionList.<String>empty(p).and(first).and(second).and(third),
      //        PCollectionList.of(ImmutableList.of(first, second, third)),
      //        PCollectionList.of(first).and(ImmutableList.of(second, third)),
      PCollectionList.of(ImmutableList.of(first, second)).and(third));

  // Order is considered.
  tester.addEqualityGroup(PCollectionList.of(first).and(third).and(second));
  tester.addEqualityGroup(PCollectionList.empty(TestPipeline.create()));

  tester.testEquals();
}
Example #18
Source File: TestDataflowRunnerTest.java From beam with Apache License 2.0
/**
 * Tests that when a streaming pipeline terminates and doesn't fail due to {@link PAssert} that
 * the {@link TestPipelineOptions#setOnSuccessMatcher(SerializableMatcher) on success matcher} is
 * invoked.
 */
@Test
public void testStreamingOnSuccessMatcherWhenPipelineSucceeds() throws Exception {
  options.setStreaming(true);
  Pipeline p = TestPipeline.create(options);
  PCollection<Integer> pc = p.apply(Create.of(1, 2, 3));
  PAssert.that(pc).containsInAnyOrder(1, 2, 3);

  final DataflowPipelineJob mockJob = Mockito.mock(DataflowPipelineJob.class);
  when(mockJob.getState()).thenReturn(State.DONE);
  when(mockJob.getProjectId()).thenReturn("test-project");
  when(mockJob.getJobId()).thenReturn("test-job");

  DataflowRunner mockRunner = Mockito.mock(DataflowRunner.class);
  when(mockRunner.run(any(Pipeline.class))).thenReturn(mockJob);

  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  options.as(TestPipelineOptions.class).setOnSuccessMatcher(new TestSuccessMatcher(mockJob, 1));

  when(mockJob.waitUntilFinish(any(Duration.class), any(JobMessagesHandler.class)))
      .thenReturn(State.DONE);

  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(generateMockMetricResponse(true /* success */, true /* tentative */));

  runner.run(p, mockRunner);
}
Example #19
Source File: TextIOReadTest.java From beam with Apache License 2.0
@Test
public void testInitialSplitGzipModeTxt() throws Exception {
  PipelineOptions options = TestPipeline.testingPipelineOptions();
  long desiredBundleSize = 1000;
  File largeTxt = writeToFile(LARGE, tempFolder, "large.txt", UNCOMPRESSED);

  // Sanity check: file is at least 2 bundles long.
  assertThat(largeTxt.length(), greaterThan(2 * desiredBundleSize));

  FileBasedSource<String> source =
      TextIO.read().from(largeTxt.getPath()).withCompression(GZIP).getSource();
  List<? extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options);

  // Exactly 1 split, even though splittable text file, since using GZIP mode.
  assertThat(splits, hasSize(equalTo(1)));
  SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options);
}
Example #20
Source File: DataflowRunnerTest.java From beam with Apache License 2.0
private void testStreamingWriteOverride(PipelineOptions options, int expectedNumShards) {
  TestPipeline p = TestPipeline.fromOptions(options);

  StreamingShardedWriteFactory<Object, Void, Object> factory =
      new StreamingShardedWriteFactory<>(p.getOptions());
  WriteFiles<Object, Void, Object> original = WriteFiles.to(new TestSink(tmpFolder.toString()));
  PCollection<Object> objs = (PCollection) p.apply(Create.empty(VoidCoder.of()));
  AppliedPTransform<PCollection<Object>, WriteFilesResult<Void>, WriteFiles<Object, Void, Object>>
      originalApplication =
          AppliedPTransform.of("writefiles", objs.expand(), Collections.emptyMap(), original, p);

  WriteFiles<Object, Void, Object> replacement =
      (WriteFiles<Object, Void, Object>)
          factory.getReplacementTransform(originalApplication).getTransform();
  assertThat(replacement, not(equalTo((Object) original)));
  assertThat(replacement.getNumShardsProvider().get(), equalTo(expectedNumShards));

  WriteFilesResult<Void> originalResult = objs.apply(original);
  WriteFilesResult<Void> replacementResult = objs.apply(replacement);
  Map<PValue, ReplacementOutput> res =
      factory.mapOutputs(originalResult.expand(), replacementResult);
  assertEquals(1, res.size());
  assertEquals(
      originalResult.getPerDestinationOutputFilenames(),
      res.get(replacementResult.getPerDestinationOutputFilenames()).getOriginal().getValue());
}
Example #21
Source File: TextIOReadTest.java From beam with Apache License 2.0
@Test
public void testInitialSplitAutoModeTxt() throws Exception {
  PipelineOptions options = TestPipeline.testingPipelineOptions();
  long desiredBundleSize = 1000;
  File largeTxt = writeToFile(LARGE, tempFolder, "large.txt", UNCOMPRESSED);

  // Sanity check: file is at least 2 bundles long.
  assertThat(largeTxt.length(), greaterThan(2 * desiredBundleSize));

  FileBasedSource<String> source = TextIO.read().from(largeTxt.getPath()).getSource();
  List<? extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options);

  // At least 2 splits and they are equal to reading the whole file.
  assertThat(splits, hasSize(greaterThan(1)));
  SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options);
}
Example #22
Source File: TestDataflowRunnerTest.java From beam with Apache License 2.0
@Test
public void testBatchOnCreateMatcher() throws Exception {
  Pipeline p = TestPipeline.create(options);
  PCollection<Integer> pc = p.apply(Create.of(1, 2, 3));
  PAssert.that(pc).containsInAnyOrder(1, 2, 3);

  final DataflowPipelineJob mockJob = Mockito.mock(DataflowPipelineJob.class);
  when(mockJob.getState()).thenReturn(State.DONE);
  when(mockJob.getProjectId()).thenReturn("test-project");
  when(mockJob.getJobId()).thenReturn("test-job");

  DataflowRunner mockRunner = Mockito.mock(DataflowRunner.class);
  when(mockRunner.run(any(Pipeline.class))).thenReturn(mockJob);

  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  options.as(TestPipelineOptions.class).setOnCreateMatcher(new TestSuccessMatcher(mockJob, 0));

  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(generateMockMetricResponse(true /* success */, true /* tentative */));
  runner.run(p, mockRunner);
}
Example #23
Source File: KuduIOIT.java From beam with Apache License 2.0
@BeforeClass
public static void setUp() throws KuduException {
  PipelineOptionsFactory.register(KuduPipelineOptions.class);
  options = TestPipeline.testingPipelineOptions().as(KuduPipelineOptions.class);

  // synchronous operations
  client =
      new AsyncKuduClient.AsyncKuduClientBuilder(options.getKuduMasterAddresses())
          .build()
          .syncClient();

  if (client.tableExists(options.getKuduTable())) {
    client.deleteTable(options.getKuduTable());
  }

  kuduTable =
      client.createTable(options.getKuduTable(), KuduTestUtils.SCHEMA, createTableOptions());
}
Example #24
Source File: BigtableWriteIT.java From beam with Apache License 2.0
@Before
public void setup() throws Exception {
  PipelineOptionsFactory.register(BigtableTestOptions.class);
  options = TestPipeline.testingPipelineOptions().as(BigtableTestOptions.class);
  project = options.as(GcpOptions.class).getProject();

  bigtableOptions =
      new Builder()
          .setProjectId(project)
          .setInstanceId(options.getInstanceId())
          .setUserAgent("apache-beam-test")
          .build();

  session =
      new BigtableSession(
          bigtableOptions
              .toBuilder()
              .setCredentialOptions(
                  CredentialOptions.credential(options.as(GcpOptions.class).getGcpCredential()))
              .build());
  tableAdminClient = session.getTableAdminClient();
}
Example #25
Source File: MusicBrainzTransformsTest.java From bigquery-etl-dataflow-sample with Apache License 2.0
@org.junit.Test
public void loadArtistCreditsByKey() {
  TestPipeline p = TestPipeline.create().enableAbandonedNodeEnforcement(false);

  Long[] artistCreditIds = {634509L, 846332L};

  PCollection<String> text =
      p.apply(Create.of(artistCreditLinesOfJson)).setCoder(StringUtf8Coder.of());
  PCollection<KV<Long, MusicBrainzDataObject>> artistCredits =
      MusicBrainzTransforms.loadTableFromText(text, "artist_credit_name", "artist_credit");

  PCollection<Long> artistCreditIdPCollection =
      artistCredits.apply(
          MapElements.into(new TypeDescriptor<Long>() {})
              .via((KV<Long, MusicBrainzDataObject> kv) -> kv.getKey()));

  PAssert.that(artistCreditIdPCollection).containsInAnyOrder(634509L, 846332L);
}
Example #26
Source File: BeamSortRelTest.java From beam with Apache License 2.0
@Test
public void testOrderBy_exception() {
  thrown.expect(UnsupportedOperationException.class);
  thrown.expectMessage("`ORDER BY` is only supported for GlobalWindows");

  String sql =
      "INSERT INTO SUB_ORDER_RAM(order_id, site_id) SELECT "
          + " order_id, COUNT(*) "
          + "FROM ORDER_DETAILS "
          + "GROUP BY order_id, TUMBLE(order_time, INTERVAL '1' HOUR)"
          + "ORDER BY order_id asc limit 11";

  TestPipeline pipeline = TestPipeline.create();
  compilePipeline(sql, pipeline);
}
Example #27
Source File: TestDataflowRunnerTest.java From beam with Apache License 2.0
/**
 * Tests that a tentative {@code true} from metrics indicates that every {@link PAssert} has
 * succeeded.
 */
@Test
public void testCheckingForSuccessWhenPAssertSucceeds() throws Exception {
  DataflowPipelineJob job = spy(new DataflowPipelineJob(mockClient, "test-job", options, null));
  Pipeline p = TestPipeline.create(options);
  PCollection<Integer> pc = p.apply(Create.of(1, 2, 3));
  PAssert.that(pc).containsInAnyOrder(1, 2, 3);

  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(buildJobMetrics(generateMockMetrics(true /* success */, true /* tentative */)));

  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  doReturn(State.DONE).when(job).getState();
  assertThat(runner.checkForPAssertSuccess(job), equalTo(Optional.of(true)));
}
Example #28
Source File: BigtableIOTest.java From beam with Apache License 2.0
@Test
public void testWriteWithoutValidate() {
  final String table = "fooTable";
  BigtableIO.Write write =
      BigtableIO.write()
          .withBigtableOptions(BIGTABLE_OPTIONS)
          .withTableId(table)
          .withBigtableService(service)
          .withoutValidation();

  // validate() will throw if withoutValidation() isn't working
  write.validate(TestPipeline.testingPipelineOptions());
}