Java Code Examples for org.apache.nifi.util.TestRunner#run()
The following examples show how to use org.apache.nifi.util.TestRunner#run().
You can go to the original project or source file by following the links above each example.
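Most of the examples below follow the same shape: create a TestRunner for the processor under test, set its properties, enqueue one or more FlowFiles, call run(), and then assert how the FlowFiles were routed. The sketch below is a minimal illustration of that pattern and is not taken from any of the projects listed; MyProcessor, its MY_PROPERTY descriptor, and its REL_SUCCESS relationship are hypothetical placeholders for whatever processor you are testing, and the final content assertion assumes that processor passes content through unchanged.

import java.nio.charset.StandardCharsets;

import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

public class MyProcessorTest {

    @Test
    public void testBasicRunPattern() {
        // MyProcessor, MY_PROPERTY and REL_SUCCESS are placeholders, not part of the NiFi API.
        final TestRunner runner = TestRunners.newTestRunner(new MyProcessor());
        runner.setProperty(MyProcessor.MY_PROPERTY, "some value");

        // Enqueue a FlowFile and execute a single onTrigger invocation.
        runner.enqueue("hello".getBytes(StandardCharsets.UTF_8));
        runner.run();

        // Verify routing and (assuming pass-through content) the output.
        runner.assertAllFlowFilesTransferred(MyProcessor.REL_SUCCESS, 1);
        final MockFlowFile out = runner.getFlowFilesForRelationship(MyProcessor.REL_SUCCESS).get(0);
        out.assertContentEquals("hello");
    }
}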
Example 1
Source File: TestListenWebSocket.java From localization_nifi with Apache License 2.0
@Test
public void testValidationError() throws Exception {
    final TestRunner runner = TestRunners.newTestRunner(ListenWebSocket.class);
    final WebSocketServerService service = mock(WebSocketServerService.class);

    final String serviceId = "ws-service";
    final String endpointId = "test";
    when(service.getIdentifier()).thenReturn(serviceId);
    runner.addControllerService(serviceId, service);
    runner.enableControllerService(service);

    runner.setProperty(ListenWebSocket.PROP_WEBSOCKET_SERVER_SERVICE, serviceId);
    runner.setProperty(ListenWebSocket.PROP_SERVER_URL_PATH, endpointId);

    try {
        runner.run();
        fail("Should fail with validation error.");
    } catch (AssertionError e) {
        assertTrue(e.toString().contains("'server-url-path' is invalid because Must starts with"));
    }
}
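Example 1 provokes the validation failure by calling run() and catching the resulting AssertionError. TestRunner also exposes assertNotValid(), which fails the test if the current configuration is valid; below is a hedged sketch of the same check written that way, assuming the same setup as above (the comment about the expected reason is paraphrased from the assertion message in the example).

@Test
public void testValidationErrorWithAssertNotValid() throws Exception {
    final TestRunner runner = TestRunners.newTestRunner(ListenWebSocket.class);
    final WebSocketServerService service = mock(WebSocketServerService.class);
    when(service.getIdentifier()).thenReturn("ws-service");
    runner.addControllerService("ws-service", service);
    runner.enableControllerService(service);
    runner.setProperty(ListenWebSocket.PROP_WEBSOCKET_SERVER_SERVICE, "ws-service");
    // "test" lacks the required leading "/", so the configuration should be invalid,
    // matching the "'server-url-path' is invalid because Must starts with" message above.
    runner.setProperty(ListenWebSocket.PROP_SERVER_URL_PATH, "test");
    runner.assertNotValid();
}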
Example 2
Source File: TestPutElasticsearchHttp.java From localization_nifi with Apache License 2.0
@Test @Ignore("Comment this out if you want to run against local or test ES") public void testPutElasticSearchBatch() throws IOException { System.out.println("Starting test " + new Object() { }.getClass().getEnclosingMethod().getName()); final TestRunner runner = TestRunners.newTestRunner(new PutElasticsearchHttp()); runner.setValidateExpressionUsage(false); runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200"); runner.setProperty(PutElasticsearchHttp.INDEX, "doc"); runner.setProperty(PutElasticsearchHttp.BATCH_SIZE, "100"); runner.setProperty(PutElasticsearchHttp.TYPE, "status"); runner.setProperty(PutElasticsearchHttp.ID_ATTRIBUTE, "doc_id"); runner.assertValid(); for (int i = 0; i < 100; i++) { long newId = 28039652140L + i; final String newStrId = Long.toString(newId); runner.enqueue(docExample, new HashMap<String, String>() {{ put("doc_id", newStrId); }}); } runner.run(); runner.assertAllFlowFilesTransferred(PutElasticsearchHttp.REL_SUCCESS, 100); }
Example 3
Source File: TestJSONArrayToMultiline.java From daf-kylo with GNU Affero General Public License v3.0
@Test
public void testJSONArray() throws Exception {
    final TestRunner runner = TestRunners.newTestRunner(new DafJSONArrayToMultiline());
    runner.setValidateExpressionUsage(false);
    runner.assertValid();

    ClassLoader classLoader = getClass().getClassLoader();
    String testJson = "";
    try {
        testJson = IOUtils.toString(classLoader.getResourceAsStream("json/testarray.json"));
    } catch (IOException e) {
        log.error("", e);
    }

    byte[] flowContent = testJson.getBytes();
    runner.enqueue(flowContent);
    runner.run();

    final List<MockFlowFile> successFlowFiles = runner.getFlowFilesForRelationship("success");
    MockFlowFile result = successFlowFiles.get(0);
    result.assertContentEquals(classLoader.getResourceAsStream("json/resultarray.json"));
}
Example 4
Source File: DeleteGCSObjectTest.java From nifi with Apache License 2.0
@Test
public void testFailureOnException() throws Exception {
    reset(storage);
    final TestRunner runner = buildNewRunner(getProcessor());
    addRequiredPropertiesToRunner(runner);
    runner.assertValid();

    runner.enqueue("testdata");

    when(storage.delete(any(BlobId.class))).thenThrow(new StorageException(1, "Test Exception"));

    runner.run();

    runner.assertPenalizeCount(1);
    runner.assertAllFlowFilesTransferred(DeleteGCSObject.REL_FAILURE);
    runner.assertTransferCount(DeleteGCSObject.REL_FAILURE, 1);
}
Example 5
Source File: GetHDFSTest.java From nifi with Apache License 2.0
@Test
public void testDirectoryUsesValidEL() throws IOException {
    GetHDFS proc = new TestableGetHDFS(kerberosProperties);
    TestRunner runner = TestRunners.newTestRunner(proc);
    runner.setProperty(PutHDFS.DIRECTORY, "src/test/resources/${literal('testdata'):substring(0,8)}");
    runner.setProperty(GetHDFS.FILE_FILTER_REGEX, ".*.zip");
    runner.setProperty(GetHDFS.KEEP_SOURCE_FILE, "true");
    runner.setProperty(GetHDFS.COMPRESSION_CODEC, "AUTOMATIC");
    runner.run();

    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(GetHDFS.REL_SUCCESS);
    assertEquals(1, flowFiles.size());

    MockFlowFile flowFile = flowFiles.get(0);
    assertTrue(flowFile.getAttribute(CoreAttributes.FILENAME.key()).equals("13545423550275052.zip"));
    InputStream expected = getClass().getResourceAsStream("/testdata/13545423550275052.zip");
    flowFile.assertContentEquals(expected);

    final List<ProvenanceEventRecord> provenanceEvents = runner.getProvenanceEvents();
    assertEquals(1, provenanceEvents.size());
    final ProvenanceEventRecord receiveEvent = provenanceEvents.get(0);
    assertEquals(ProvenanceEventType.RECEIVE, receiveEvent.getEventType());
    // If it runs with a real HDFS, the protocol will be "hdfs://", but with a local filesystem, just assert the filename.
    assertTrue(receiveEvent.getTransitUri().endsWith("13545423550275052.zip"));
}
Example 6
Source File: TestMergeContent.java From nifi with Apache License 2.0
@Test
public void testDefragmentWithTooManyFragements() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
    runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
    runner.setProperty(MergeContent.MAX_ENTRIES, "3");

    final Map<String, String> attributes = new HashMap<>();
    attributes.put(MergeContent.FRAGMENT_ID_ATTRIBUTE, "1");
    attributes.put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "4");
    attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1");

    runner.enqueue("A Man ".getBytes("UTF-8"), attributes);
    attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2");
    runner.enqueue("A Plan ".getBytes("UTF-8"), attributes);
    attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3");
    runner.enqueue("A Canal ".getBytes("UTF-8"), attributes);
    attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "4");
    runner.enqueue("Panama".getBytes("UTF-8"), attributes);

    runner.run();

    runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
    runner.assertTransferCount(MergeContent.REL_MERGED, 1);

    final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
    assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8"));
}
Example 7
Source File: TestControlRate.java From nifi with Apache License 2.0
@Test
public void testBadAttributeRate() {
    final TestRunner runner = TestRunners.newTestRunner(new ControlRate());
    runner.setProperty(ControlRate.RATE_CONTROL_CRITERIA, ControlRate.ATTRIBUTE_RATE);
    runner.setProperty(ControlRate.RATE_CONTROL_ATTRIBUTE_NAME, "count");
    runner.setProperty(ControlRate.MAX_RATE, "20000");
    runner.setProperty(ControlRate.TIME_PERIOD, "1 sec");

    final Map<String, String> attributeMap = new HashMap<>();
    attributeMap.put("count", "bad string");
    runner.enqueue(new byte[0], attributeMap);
    runner.run();
    runner.assertTransferCount(ControlRate.REL_SUCCESS, 0);
    runner.assertTransferCount(ControlRate.REL_FAILURE, 1);
    runner.assertQueueEmpty();
}
Example 8
Source File: TestPutHive_1_1QL.java From nifi with Apache License 2.0
@Test
public void testFailAtBeginning() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(PutHive_1_1QL.class);
    final File tempDir = folder.getRoot();
    final File dbDir = new File(tempDir, "db");
    final DBCPService service = new MockDBCPService(dbDir.getAbsolutePath());
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);

    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            stmt.executeUpdate(createPersonsAutoId);
        }
    }

    runner.setProperty(PutHive_1_1QL.HIVE_DBCP_SERVICE, "dbcp");
    runner.enqueue("INSERT INTO PERSONS".getBytes()); // intentionally wrong syntax
    runner.enqueue("INSERT INTO PERSONS (NAME, CODE) VALUES ('Tom', 3)".getBytes());
    runner.enqueue("INSERT INTO PERSONS (NAME, CODE) VALUES ('Harry', 44)".getBytes());
    runner.run();

    runner.assertTransferCount(PutHive_1_1QL.REL_FAILURE, 1);
    runner.assertTransferCount(PutHive_1_1QL.REL_SUCCESS, 2);
}
Example 9
Source File: TestRouteText.java From nifi with Apache License 2.0
@Test
public void testInvalidRegex() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new RouteText());
    runner.setProperty(RouteText.MATCH_STRATEGY, RouteText.MATCHES_REGULAR_EXPRESSION);
    runner.setProperty("simple", "[");
    runner.enqueue("start middle end\nnot match".getBytes("UTF-8"));
    try {
        runner.run();
        fail();
    } catch (AssertionError e) {
        // Expect to catch error asserting 'simple' as invalid
    }
}
Example 10
Source File: DeleteDynamoDBTest.java From nifi with Apache License 2.0
@Test
public void testStringHashStringRangeDeleteThrowsRuntimeException() {
    final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
        @Override
        public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {
            throw new RuntimeException("runtimeException");
        }
    };

    deleteDynamoDB = new DeleteDynamoDB() {
        @Override
        protected DynamoDB getDynamoDB() {
            return mockDynamoDB;
        }
    };

    final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);

    deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
    deleteRunner.enqueue(new byte[] {});

    deleteRunner.run(1);

    deleteRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_FAILURE, 1);

    List<MockFlowFile> flowFiles = deleteRunner.getFlowFilesForRelationship(AbstractDynamoDBProcessor.REL_FAILURE);
    for (MockFlowFile flowFile : flowFiles) {
        assertEquals("runtimeException", flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_ERROR_EXCEPTION_MESSAGE));
    }
}
Example 11
Source File: TestPutElasticsearch.java From nifi with Apache License 2.0
/**
 * Tests basic ES functionality against a local or test ES cluster
 */
@Test
@Ignore("Comment this out if you want to run against local or test ES")
public void testPutElasticSearchBasic() {
    System.out.println("Starting test " + new Object() {
    }.getClass().getEnclosingMethod().getName());
    final TestRunner runner = TestRunners.newTestRunner(new PutElasticsearch());

    // Local Cluster - Mac pulled from brew
    runner.setProperty(AbstractElasticsearchTransportClientProcessor.CLUSTER_NAME, "elasticsearch_brew");
    runner.setProperty(AbstractElasticsearchTransportClientProcessor.HOSTS, "127.0.0.1:9300");
    runner.setProperty(AbstractElasticsearchTransportClientProcessor.PING_TIMEOUT, "5s");
    runner.setProperty(AbstractElasticsearchTransportClientProcessor.SAMPLER_INTERVAL, "5s");

    runner.setProperty(PutElasticsearch.INDEX, "doc");
    runner.setProperty(PutElasticsearch.BATCH_SIZE, "1");

    runner.setProperty(PutElasticsearch.TYPE, "status");
    runner.setProperty(PutElasticsearch.ID_ATTRIBUTE, "doc_id");
    runner.assertValid();

    runner.enqueue(docExample, new HashMap<String, String>() {{
        put("doc_id", "28039652140");
    }});
    runner.enqueue(docExample);

    runner.run(1, true, true);

    runner.assertAllFlowFilesTransferred(PutElasticsearch.REL_SUCCESS, 1);
}
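Example 11 uses the three-argument overload runner.run(1, true, true). As I read the NiFi mock framework, the arguments are the number of onTrigger iterations, whether to invoke the @OnUnscheduled/@OnStopped lifecycle methods once the iterations finish, and whether to invoke the @OnScheduled methods before running; the no-argument run() used in most other examples is equivalent to one iteration with both flags true. A short, hedged recap, reusing the runner configured above (check the TestRunner javadoc of your NiFi version before relying on the exact lifecycle behaviour):

runner.run();               // one onTrigger call, @OnScheduled before, @OnUnscheduled/@OnStopped after
runner.run(3);              // three onTrigger calls with the same start/stop lifecycle around the batch
runner.run(1, false, true); // initialize but skip the stop lifecycle, so a later run() continues the same schedule
runner.run(1, true, true);  // the form used in Example 11: one iteration with the full lifecycle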
Example 12
Source File: TestGetFile.java From nifi with Apache License 2.0
@Test
public void testFilePickedUp() throws IOException {
    final File directory = new File("target/test/data/in");
    deleteDirectory(directory);
    assertTrue("Unable to create test data directory " + directory.getAbsolutePath(), directory.exists() || directory.mkdirs());

    final File inFile = new File("src/test/resources/hello.txt");
    final Path inPath = inFile.toPath();
    final File destFile = new File(directory, inFile.getName());
    final Path targetPath = destFile.toPath();
    final Path absTargetPath = targetPath.toAbsolutePath();
    final String absTargetPathStr = absTargetPath.getParent() + "/";
    Files.copy(inPath, targetPath);

    final TestRunner runner = TestRunners.newTestRunner(new GetFile());
    runner.setProperty(GetFile.DIRECTORY, directory.getAbsolutePath());
    runner.run();

    runner.assertAllFlowFilesTransferred(GetFile.REL_SUCCESS, 1);
    final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(GetFile.REL_SUCCESS);
    successFiles.get(0).assertContentEquals("Hello, World!".getBytes("UTF-8"));

    final String path = successFiles.get(0).getAttribute("path");
    assertEquals("/", path);
    final String absolutePath = successFiles.get(0).getAttribute(CoreAttributes.ABSOLUTE_PATH.key());
    assertEquals(absTargetPathStr, absolutePath);
}
Example 13
Source File: TestQueryRecord.java From nifi with Apache License 2.0
@Test
public void testCollectionFunctionsWithoutCastFailure() throws InitializationException {
    final Record record = createHierarchicalArrayRecord();
    final Record record2 = createHierarchicalArrayRecord();
    record2.setValue("height", 30);

    final ArrayListRecordReader recordReader = new ArrayListRecordReader(record.getSchema());
    recordReader.addRecord(record);
    recordReader.addRecord(record2);

    final ArrayListRecordWriter writer = new ArrayListRecordWriter(record.getSchema());

    TestRunner runner = getRunner();
    runner.addControllerService("reader", recordReader);
    runner.enableControllerService(recordReader);
    runner.addControllerService("writer", writer);
    runner.enableControllerService(writer);

    runner.setProperty(QueryRecord.RECORD_READER_FACTORY, "reader");
    runner.setProperty(QueryRecord.RECORD_WRITER_FACTORY, "writer");
    runner.setProperty(REL_NAME,
        "SELECT title, name, sum(height) as height_total " +
            "FROM FLOWFILE " +
            "GROUP BY title, name");

    runner.enqueue(new byte[0]);
    runner.run();

    runner.assertTransferCount(REL_NAME, 1);

    final List<Record> written = writer.getRecordsWritten();
    assertEquals(1, written.size());

    final Record output = written.get(0);
    assertEquals("John Doe", output.getValue("name"));
    assertEquals("Software Engineer", output.getValue("title"));
    assertEquals(BigDecimal.valueOf(90.5D), output.getValue("height_total"));
}
Example 14
Source File: TestSplitText.java From localization_nifi with Apache License 2.0
@Test
public void testMultipleHeaderIndicators() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new SplitText());
    runner.setProperty(SplitText.HEADER_LINE_COUNT, "1");
    runner.setProperty(SplitText.HEADER_MARKER, "Head");
    runner.setProperty(SplitText.LINE_SPLIT_COUNT, "5");
    runner.setProperty(SplitText.REMOVE_TRAILING_NEWLINES, "false");

    runner.enqueue(file);
    runner.run();

    runner.assertTransferCount(SplitText.REL_FAILURE, 0);
    runner.assertTransferCount(SplitText.REL_ORIGINAL, 1);
    runner.getFlowFilesForRelationship(SplitText.REL_ORIGINAL).get(0).assertAttributeEquals("fragment.count", "3");
    runner.assertTransferCount(SplitText.REL_SPLITS, 3);

    final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitText.REL_SPLITS);
    splits.get(0).assertAttributeEquals(SplitText.SPLIT_LINE_COUNT, "5");
    splits.get(0).assertAttributeEquals(SplitText.FRAGMENT_SIZE, "62");
    splits.get(1).assertAttributeEquals(SplitText.SPLIT_LINE_COUNT, "5");
    splits.get(1).assertAttributeEquals(SplitText.FRAGMENT_SIZE, "55");
    splits.get(2).assertAttributeEquals(SplitText.SPLIT_LINE_COUNT, "1");
    splits.get(2).assertAttributeEquals(SplitText.FRAGMENT_SIZE, "23");

    final String fragmentUUID = splits.get(0).getAttribute(SplitText.FRAGMENT_ID);
    for (int i = 0; i < splits.size(); i++) {
        final MockFlowFile split = splits.get(i);
        split.assertAttributeEquals(SplitText.FRAGMENT_INDEX, String.valueOf(i + 1));
        split.assertAttributeEquals(SplitText.FRAGMENT_ID, fragmentUUID);
        split.assertAttributeEquals(SplitText.FRAGMENT_COUNT, String.valueOf(splits.size()));
        split.assertAttributeEquals(SplitText.SEGMENT_ORIGINAL_FILENAME, file.getFileName().toString());
    }
}
Example 15
Source File: TestListenSyslog.java From nifi with Apache License 2.0
@Test
public void testErrorQueue() throws IOException {
    final List<ListenSyslog.RawSyslogEvent> msgs = new ArrayList<>();
    msgs.add(new ListenSyslog.RawSyslogEvent(VALID_MESSAGE.getBytes(), "sender-01"));
    msgs.add(new ListenSyslog.RawSyslogEvent(VALID_MESSAGE.getBytes(), "sender-01"));

    // Add message that will throw a FlowFileAccessException the first time that we attempt to read
    // the contents but will succeed the second time.
    final AtomicInteger getMessageAttempts = new AtomicInteger(0);
    msgs.add(new ListenSyslog.RawSyslogEvent(VALID_MESSAGE.getBytes(), "sender-01") {
        @Override
        public byte[] getData() {
            final int attempts = getMessageAttempts.incrementAndGet();
            if (attempts == 1) {
                throw new FlowFileAccessException("Unit test failure");
            } else {
                return VALID_MESSAGE.getBytes();
            }
        }
    });

    final CannedMessageProcessor proc = new CannedMessageProcessor(msgs);
    final TestRunner runner = TestRunners.newTestRunner(proc);
    runner.setProperty(ListenSyslog.MAX_BATCH_SIZE, "5");
    runner.setProperty(ListenSyslog.PROTOCOL, ListenSyslog.UDP_VALUE.getValue());
    runner.setProperty(ListenSyslog.PORT, "0");
    runner.setProperty(ListenSyslog.PARSE_MESSAGES, "false");

    runner.run();
    assertEquals(1, proc.getErrorQueueSize());
    runner.assertAllFlowFilesTransferred(ListenSyslog.REL_SUCCESS, 1);
    runner.getFlowFilesForRelationship(ListenSyslog.REL_SUCCESS).get(0).assertContentEquals(VALID_MESSAGE + "\n" + VALID_MESSAGE);

    // running again should pull from the error queue
    runner.clearTransferState();
    runner.run();
    runner.assertAllFlowFilesTransferred(ListenSyslog.REL_SUCCESS, 1);
    runner.getFlowFilesForRelationship(ListenSyslog.REL_SUCCESS).get(0).assertContentEquals(VALID_MESSAGE);
}
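Example 15 calls runner.clearTransferState() between the two run() invocations. As I understand the mock framework, this discards the FlowFiles already captured for each relationship (and resets the transfer counts), so the assertions after the second run() only see FlowFiles produced by that run; Example 19 below relies on the same idiom. A minimal, hedged recap, assuming a runner already configured as above:

runner.run();
runner.assertAllFlowFilesTransferred(ListenSyslog.REL_SUCCESS, 1);
runner.clearTransferState(); // forget the FlowFiles captured so far before asserting on the next run
runner.run();
runner.assertAllFlowFilesTransferred(ListenSyslog.REL_SUCCESS, 1);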
Example 16
Source File: TestMergeContent.java From localization_nifi with Apache License 2.0
@Test
public void testSimpleBinaryConcatWaitsForMin() throws IOException, InterruptedException {
    final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
    runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT);
    runner.setProperty(MergeContent.MIN_SIZE, "20 KB");

    createFlowFiles(runner);
    runner.run();

    runner.assertTransferCount(MergeContent.REL_MERGED, 0);
    runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
    runner.assertTransferCount(MergeContent.REL_ORIGINAL, 0);
}
Example 17
Source File: TestJoltTransformJSON.java From localization_nifi with Apache License 2.0
@Test
public void testTransformInputWithDefaultr() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new JoltTransformJSON());
    runner.setValidateExpressionUsage(false);
    final String spec = new String(Files.readAllBytes(Paths.get("src/test/resources/TestJoltTransformJson/defaultrSpec.json")));
    runner.setProperty(JoltTransformJSON.JOLT_SPEC, spec);
    runner.setProperty(JoltTransformJSON.JOLT_TRANSFORM, JoltTransformJSON.DEFAULTR);
    runner.enqueue(JSON_INPUT);
    runner.run();
    runner.assertAllFlowFilesTransferred(JoltTransformJSON.REL_SUCCESS);
    final MockFlowFile transformed = runner.getFlowFilesForRelationship(JoltTransformJSON.REL_SUCCESS).get(0);
    Object transformedJson = JsonUtils.jsonToObject(new ByteArrayInputStream(transformed.toByteArray()));
    Object compareJson = JsonUtils.jsonToObject(Files.newInputStream(Paths.get("src/test/resources/TestJoltTransformJson/defaultrOutput.json")));
    assertTrue(DIFFY.diff(compareJson, transformedJson).isEmpty());
}
Example 18
Source File: TestPutHiveQL.java From localization_nifi with Apache License 2.0
@Test
public void testFailInMiddleWithBadParameterValue() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(PutHiveQL.class);
    final File tempDir = folder.getRoot();
    final File dbDir = new File(tempDir, "db");
    final DBCPService service = new MockDBCPService(dbDir.getAbsolutePath());
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);

    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            stmt.executeUpdate(createPersonsAutoId);
        }
    }

    runner.setProperty(PutHiveQL.HIVE_DBCP_SERVICE, "dbcp");

    final Map<String, String> goodAttributes = new HashMap<>();
    goodAttributes.put("hiveql.args.1.type", String.valueOf(Types.INTEGER));
    goodAttributes.put("hiveql.args.1.value", "84");

    final Map<String, String> badAttributes = new HashMap<>();
    badAttributes.put("hiveql.args.1.type", String.valueOf(Types.INTEGER));
    badAttributes.put("hiveql.args.1.value", "9999");

    final byte[] data = "INSERT INTO PERSONS (NAME, CODE) VALUES ('Mark', ?)".getBytes();
    runner.enqueue(data, goodAttributes);
    runner.enqueue(data, badAttributes);
    runner.enqueue(data, goodAttributes);
    runner.enqueue(data, goodAttributes);
    runner.run();

    runner.assertTransferCount(PutHiveQL.REL_SUCCESS, 3);
    runner.assertTransferCount(PutHiveQL.REL_FAILURE, 1);

    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("Mark", rs.getString(2));
            assertEquals(84, rs.getInt(3));
            assertTrue(rs.next());
            assertTrue(rs.next());
            assertFalse(rs.next());
        }
    }
}
Example 19
Source File: AttributeRollingWindowIT.java From nifi with Apache License 2.0
@Ignore("this test is too unstable in terms of timing on different size/types of testing envs") @Test public void testMicroBatching() throws InterruptedException { assumeFalse(isWindowsEnvironment()); final TestRunner runner = TestRunners.newTestRunner(AttributeRollingWindow.class); runner.setProperty(AttributeRollingWindow.VALUE_TO_TRACK, "${value}"); runner.setProperty(AttributeRollingWindow.SUB_WINDOW_LENGTH, "500 ms"); runner.setProperty(AttributeRollingWindow.TIME_WINDOW, "1 sec"); final Map<String, String> attributes = new HashMap<>(); attributes.put("value", "2"); runner.enqueue("1".getBytes(), attributes); runner.run(1); runner.assertAllFlowFilesTransferred(AttributeRollingWindow.REL_SUCCESS, 1); MockFlowFile flowFile = runner.getFlowFilesForRelationship(AttributeRollingWindow.REL_SUCCESS).get(0); runner.clearTransferState(); flowFile.assertAttributeEquals(ROLLING_WINDOW_VALUE_KEY, "2.0"); flowFile.assertAttributeEquals(ROLLING_WINDOW_COUNT_KEY, "1"); flowFile.assertAttributeEquals(ROLLING_WINDOW_MEAN_KEY, "2.0"); Thread.sleep(200L); runner.enqueue("2".getBytes(), attributes); runner.run(1); runner.assertAllFlowFilesTransferred(AttributeRollingWindow.REL_SUCCESS, 1); flowFile = runner.getFlowFilesForRelationship(AttributeRollingWindow.REL_SUCCESS).get(0); runner.clearTransferState(); flowFile.assertAttributeEquals(ROLLING_WINDOW_VALUE_KEY, "4.0"); flowFile.assertAttributeEquals(ROLLING_WINDOW_COUNT_KEY, "2"); flowFile.assertAttributeEquals(ROLLING_WINDOW_MEAN_KEY, "2.0"); Thread.sleep(300L); runner.enqueue("2".getBytes(), attributes); runner.run(1); runner.assertAllFlowFilesTransferred(AttributeRollingWindow.REL_SUCCESS, 1); flowFile = runner.getFlowFilesForRelationship(AttributeRollingWindow.REL_SUCCESS).get(0); runner.clearTransferState(); flowFile.assertAttributeEquals(ROLLING_WINDOW_VALUE_KEY, "6.0"); flowFile.assertAttributeEquals(ROLLING_WINDOW_COUNT_KEY, "3"); flowFile.assertAttributeEquals(ROLLING_WINDOW_MEAN_KEY, "2.0"); Thread.sleep(200L); runner.enqueue("2".getBytes(), attributes); runner.run(1); runner.assertAllFlowFilesTransferred(AttributeRollingWindow.REL_SUCCESS, 1); flowFile = runner.getFlowFilesForRelationship(AttributeRollingWindow.REL_SUCCESS).get(0); runner.clearTransferState(); flowFile.assertAttributeEquals(ROLLING_WINDOW_VALUE_KEY, "8.0"); flowFile.assertAttributeEquals(ROLLING_WINDOW_COUNT_KEY, "4"); flowFile.assertAttributeEquals(ROLLING_WINDOW_MEAN_KEY, "2.0"); Thread.sleep(300L); runner.enqueue("2".getBytes(), attributes); runner.run(1); runner.assertAllFlowFilesTransferred(AttributeRollingWindow.REL_SUCCESS, 1); flowFile = runner.getFlowFilesForRelationship(AttributeRollingWindow.REL_SUCCESS).get(0); runner.clearTransferState(); flowFile.assertAttributeEquals(ROLLING_WINDOW_VALUE_KEY, "6.0"); flowFile.assertAttributeEquals(ROLLING_WINDOW_COUNT_KEY, "3"); flowFile.assertAttributeEquals(ROLLING_WINDOW_MEAN_KEY, "2.0"); runner.enqueue("2".getBytes(), attributes); runner.run(1); runner.assertAllFlowFilesTransferred(AttributeRollingWindow.REL_SUCCESS, 1); flowFile = runner.getFlowFilesForRelationship(AttributeRollingWindow.REL_SUCCESS).get(0); runner.clearTransferState(); flowFile.assertAttributeEquals(ROLLING_WINDOW_VALUE_KEY, "8.0"); flowFile.assertAttributeEquals(ROLLING_WINDOW_COUNT_KEY, "4"); flowFile.assertAttributeEquals(ROLLING_WINDOW_MEAN_KEY, "2.0"); }
Example 20
Source File: TestPutHiveQL.java From nifi with Apache License 2.0
@Test
public void testFailInMiddleWithBadNumberFormat() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(PutHiveQL.class);
    final File tempDir = folder.getRoot();
    final File dbDir = new File(tempDir, "db");
    final DBCPService service = new MockDBCPService(dbDir.getAbsolutePath());
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);

    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            stmt.executeUpdate(createPersonsAutoId);
        }
    }

    runner.setProperty(PutHiveQL.HIVE_DBCP_SERVICE, "dbcp");

    final Map<String, String> goodAttributes = new HashMap<>();
    goodAttributes.put("hiveql.args.1.type", String.valueOf(Types.INTEGER));
    goodAttributes.put("hiveql.args.1.value", "84");

    final Map<String, String> badAttributes = new HashMap<>();
    badAttributes.put("hiveql.args.1.type", String.valueOf(Types.INTEGER));
    badAttributes.put("hiveql.args.1.value", "NOT_NUMBER");

    final byte[] data = "INSERT INTO PERSONS (NAME, CODE) VALUES ('Mark', ?)".getBytes();
    runner.enqueue(data, goodAttributes);
    runner.enqueue(data, badAttributes);
    runner.enqueue(data, goodAttributes);
    runner.enqueue(data, goodAttributes);
    runner.run();

    runner.assertTransferCount(PutHiveQL.REL_SUCCESS, 3);
    runner.assertTransferCount(PutHiveQL.REL_FAILURE, 1);

    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            final ResultSet rs = stmt.executeQuery("SELECT * FROM PERSONS");
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertEquals("Mark", rs.getString(2));
            assertEquals(84, rs.getInt(3));
            assertTrue(rs.next());
            assertTrue(rs.next());
            assertFalse(rs.next());
        }
    }
}