Java Code Examples for org.apache.nifi.util.MockFlowFile#assertAttributeExists()
The following examples show how to use org.apache.nifi.util.MockFlowFile#assertAttributeExists().
You can go to the original project or source file by following the links above each example.
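All of the examples below follow the same basic pattern: run a processor with a TestRunner, pull a MockFlowFile from an output relationship, and call assertAttributeExists() on it. As a minimal, hypothetical sketch of that pattern (the choice of UpdateAttribute, the dynamic property name, and the string relationship name are illustrative assumptions, not taken from any project below):

import org.apache.nifi.processors.attributes.UpdateAttribute;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

public class MockFlowFileAssertionSketch {

    @Test
    public void testAttributeIsPresentAfterProcessing() {
        // Hypothetical setup: UpdateAttribute is used here only to produce an attribute we can assert on.
        final TestRunner runner = TestRunners.newTestRunner(new UpdateAttribute());
        runner.setProperty("example.attribute", "example-value"); // dynamic property becomes a FlowFile attribute

        runner.enqueue(new byte[0]);
        runner.run();

        runner.assertAllFlowFilesTransferred("success", 1);
        final MockFlowFile flowFile = runner.getFlowFilesForRelationship("success").get(0);

        // assertAttributeExists(...) fails the test if the named attribute is missing.
        flowFile.assertAttributeExists("example.attribute");
        flowFile.assertAttributeEquals("example.attribute", "example-value");
    }
}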
Example 1
Source File: TestJoltTransformJSON.java From nifi with Apache License 2.0
@Test
public void testTransformInputWithCustomTransformationWithJar() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new JoltTransformJSON());
    final String customJarPath = "src/test/resources/TestJoltTransformJson/TestCustomJoltTransform.jar";
    final String spec = new String(Files.readAllBytes(Paths.get("src/test/resources/TestJoltTransformJson/chainrSpec.json")));
    runner.setProperty(JoltTransformJSON.JOLT_SPEC, spec);
    runner.setProperty(JoltTransformJSON.CUSTOM_CLASS, "TestCustomJoltTransform");
    runner.setProperty(JoltTransformJSON.MODULES, customJarPath);
    runner.setProperty(JoltTransformJSON.JOLT_TRANSFORM, JoltTransformJSON.CUSTOMR);
    runner.enqueue(JSON_INPUT);
    runner.run();
    runner.assertAllFlowFilesTransferred(JoltTransformJSON.REL_SUCCESS);
    final MockFlowFile transformed = runner.getFlowFilesForRelationship(JoltTransformJSON.REL_SUCCESS).get(0);
    transformed.assertAttributeExists(CoreAttributes.MIME_TYPE.key());
    transformed.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/json");
    Object transformedJson = JsonUtils.jsonToObject(new ByteArrayInputStream(transformed.toByteArray()));
    Object compareJson = JsonUtils.jsonToObject(Files.newInputStream(Paths.get("src/test/resources/TestJoltTransformJson/chainrOutput.json")));
    assertTrue(DIFFY.diff(compareJson, transformedJson).isEmpty());
}
Example 2
Source File: ITPutKinesisStream.java From localization_nifi with Apache License 2.0
@Test
public void testThreeMessageWithBatch10MaxBufferSize1MBTRunOnceTwoMessageSent() {
    runner.setProperty(PutKinesisStream.BATCH_SIZE, "10");
    runner.setProperty(PutKinesisStream.MAX_MESSAGE_BUFFER_SIZE_MB, "1 MB");
    runner.assertValid();
    byte[] bytes = new byte[(PutKinesisStream.MAX_MESSAGE_SIZE)];
    for (int i = 0; i < bytes.length; i++) {
        bytes[i] = 'a';
    }
    runner.enqueue(bytes);
    runner.enqueue(bytes.clone());
    runner.enqueue(bytes.clone());
    runner.run(1);
    runner.assertAllFlowFilesTransferred(PutKinesisStream.REL_SUCCESS, 2);
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(PutKinesisStream.REL_SUCCESS);
    assertEquals(2, flowFiles.size());
    for (MockFlowFile flowFile : flowFiles) {
        flowFile.assertAttributeExists(PutKinesisStream.AWS_KINESIS_SEQUENCE_NUMBER);
        flowFile.assertAttributeExists(PutKinesisStream.AWS_KINESIS_SHARD_ID);
    }
}
Example 3
Source File: TestExtractMediaMetadata.java From localization_nifi with Apache License 2.0
@Test
public void testBmp() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new ExtractMediaMetadata());
    runner.setProperty(ExtractMediaMetadata.METADATA_KEY_PREFIX, "bmp.");
    runner.assertValid();
    runner.enqueue(new File("target/test-classes/16color-10x10.bmp").toPath());
    runner.run(2);
    runner.assertAllFlowFilesTransferred(ExtractMediaMetadata.SUCCESS, 1);
    runner.assertTransferCount(ExtractMediaMetadata.FAILURE, 0);
    final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(ExtractMediaMetadata.SUCCESS);
    MockFlowFile flowFile0 = successFiles.get(0);
    flowFile0.assertAttributeExists("filename");
    flowFile0.assertAttributeEquals("filename", "16color-10x10.bmp");
    flowFile0.assertAttributeExists("bmp.Content-Type");
    flowFile0.assertAttributeEquals("bmp.Content-Type", "image/x-ms-bmp");
    flowFile0.assertAttributeExists("bmp.X-Parsed-By");
    assertTrue(flowFile0.getAttribute("bmp.X-Parsed-By").contains("org.apache.tika.parser.DefaultParser"));
    // assertTrue(flowFile0.getAttribute("bmp.X-Parsed-By").contains("org.apache.tika.parser.image.ImageParser"));
    flowFile0.assertAttributeExists("bmp.height");
    flowFile0.assertAttributeEquals("bmp.height", "10");
    flowFile0.assertAttributeExists("bmp.width");
    flowFile0.assertAttributeEquals("bmp.width", "10");
}
Example 4
Source File: TestExtractMediaMetadata.java From nifi with Apache License 2.0
@Test
public void testWav() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new ExtractMediaMetadata());
    runner.setProperty(ExtractMediaMetadata.METADATA_KEY_FILTER, "");
    runner.setProperty(ExtractMediaMetadata.METADATA_KEY_PREFIX, "wav.");
    runner.assertValid();
    runner.enqueue(new File("target/test-classes/testWAV.wav").toPath());
    runner.run();
    runner.assertAllFlowFilesTransferred(ExtractMediaMetadata.SUCCESS, 1);
    runner.assertTransferCount(ExtractMediaMetadata.FAILURE, 0);
    final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(ExtractMediaMetadata.SUCCESS);
    MockFlowFile flowFile0 = successFiles.get(0);
    flowFile0.assertAttributeExists("filename");
    flowFile0.assertAttributeEquals("filename", "testWAV.wav");
    flowFile0.assertAttributeExists("wav.Content-Type");
    assertTrue(flowFile0.getAttribute("wav.Content-Type").startsWith("audio/vnd.wave"));
    flowFile0.assertAttributeExists("wav.X-Parsed-By");
    assertTrue(flowFile0.getAttribute("wav.X-Parsed-By").contains("org.apache.tika.parser.DefaultParser"));
    assertTrue(flowFile0.getAttribute("wav.X-Parsed-By").contains("org.apache.tika.parser.audio.AudioParser"));
    flowFile0.assertAttributeExists("wav.encoding");
    flowFile0.assertAttributeEquals("wav.encoding", "PCM_SIGNED");
}
Example 5
Source File: TestExtractMediaMetadata.java From nifi with Apache License 2.0
@Test
public void testJpg() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new ExtractMediaMetadata());
    runner.setProperty(ExtractMediaMetadata.METADATA_KEY_PREFIX, "jpg.");
    runner.assertValid();
    runner.enqueue(new File("target/test-classes/simple.jpg").toPath());
    runner.run(2);
    runner.assertAllFlowFilesTransferred(ExtractMediaMetadata.SUCCESS, 1);
    runner.assertTransferCount(ExtractMediaMetadata.FAILURE, 0);
    final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(ExtractMediaMetadata.SUCCESS);
    MockFlowFile flowFile0 = successFiles.get(0);
    flowFile0.assertAttributeExists("filename");
    flowFile0.assertAttributeEquals("filename", "simple.jpg");
    flowFile0.assertAttributeExists("jpg.tiff:Model");
    flowFile0.assertAttributeEquals("jpg.tiff:Model", "Canon PowerShot S330");
}
Example 6
Source File: TestFetchFileTransfer.java From nifi with Apache License 2.0
@Test
public void testFilenameContainsPath() {
    final String filenameWithPath = "./here/is/my/path/hello.txt";
    final TestableFetchFileTransfer proc = new TestableFetchFileTransfer();
    final TestRunner runner = TestRunners.newTestRunner(proc);
    runner.setProperty(FetchFileTransfer.HOSTNAME, "localhost");
    runner.setProperty(FetchFileTransfer.UNDEFAULTED_PORT, "11");
    runner.setProperty(FetchFileTransfer.REMOTE_FILENAME, "${filename}");
    proc.addContent(filenameWithPath, "world".getBytes());
    final Map<String, String> attrs = new HashMap<>();
    attrs.put("filename", filenameWithPath);
    runner.enqueue(new byte[0], attrs);
    runner.run(1, false, false);
    runner.assertAllFlowFilesTransferred(FetchFileTransfer.REL_SUCCESS, 1);
    assertFalse(proc.closed);
    MockFlowFile transferredFlowFile = runner.getFlowFilesForRelationship(FetchFileTransfer.REL_SUCCESS).get(0);
    transferredFlowFile.assertContentEquals("world");
    transferredFlowFile.assertAttributeExists(CoreAttributes.PATH.key());
    transferredFlowFile.assertAttributeEquals(CoreAttributes.PATH.key(), "./here/is/my/path");
}
Example 7
Source File: ITPutKinesisStream.java From localization_nifi with Apache License 2.0
@Test
public void testThreeMessageWithBatch2MaxBufferSize1MBRunTwiceThreeMessageSent() {
    runner.setProperty(PutKinesisStream.BATCH_SIZE, "2");
    runner.setProperty(PutKinesisStream.MAX_MESSAGE_BUFFER_SIZE_MB, "1 MB");
    runner.assertValid();
    byte[] bytes = new byte[(PutKinesisStream.MAX_MESSAGE_SIZE)];
    for (int i = 0; i < bytes.length; i++) {
        bytes[i] = 'a';
    }
    runner.enqueue(bytes);
    runner.enqueue(bytes.clone());
    runner.enqueue(bytes.clone());
    runner.run(2, true, true);
    runner.assertAllFlowFilesTransferred(PutKinesisStream.REL_SUCCESS, 3);
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(PutKinesisStream.REL_SUCCESS);
    assertEquals(3, flowFiles.size());
    for (MockFlowFile flowFile : flowFiles) {
        flowFile.assertAttributeExists(PutKinesisStream.AWS_KINESIS_SEQUENCE_NUMBER);
        flowFile.assertAttributeExists(PutKinesisStream.AWS_KINESIS_SHARD_ID);
    }
}
Example 8
Source File: TestJoltTransformRecord.java From nifi with Apache License 2.0
@Test
public void testTransformInputWithSortrPopulatedSpec() throws IOException {
    generateTestData(1, null);
    final String outputSchemaText = new String(Files.readAllBytes(Paths.get("src/test/resources/TestJoltTransformRecord/sortrOutputSchema.avsc")));
    runner.setProperty(writer, SchemaAccessUtils.SCHEMA_ACCESS_STRATEGY, SchemaAccessUtils.SCHEMA_TEXT_PROPERTY);
    runner.setProperty(writer, SchemaAccessUtils.SCHEMA_TEXT, outputSchemaText);
    runner.setProperty(writer, "Pretty Print JSON", "true");
    runner.enableControllerService(writer);
    runner.setProperty(JoltTransformRecord.JOLT_TRANSFORM, JoltTransformRecord.SORTR);
    runner.setProperty(JoltTransformRecord.JOLT_SPEC, "abcd");
    runner.enqueue(new byte[0]);
    runner.run();
    runner.assertTransferCount(JoltTransformRecord.REL_SUCCESS, 1);
    runner.assertTransferCount(JoltTransformRecord.REL_ORIGINAL, 1);
    final MockFlowFile transformed = runner.getFlowFilesForRelationship(JoltTransformRecord.REL_SUCCESS).get(0);
    transformed.assertAttributeExists(CoreAttributes.MIME_TYPE.key());
    transformed.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/json");
    assertEquals(new String(Files.readAllBytes(Paths.get("src/test/resources/TestJoltTransformRecord/sortrOutput.json"))),
            new String(transformed.toByteArray()));
}
Example 9
Source File: TestSplitContent.java From localization_nifi with Apache License 2.0
@Test
public void testWithLargerSplit() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new SplitContent());
    runner.setProperty(SplitContent.KEEP_SEQUENCE, "false");
    runner.setProperty(SplitContent.BYTE_SEQUENCE.getName(), "05050505");
    runner.enqueue(new byte[]{1, 2, 3, 4, 5, 5, 5, 5, 5, 5, 4, 3, 2, 1});
    runner.run();
    runner.assertTransferCount(SplitContent.REL_ORIGINAL, 1);
    final MockFlowFile originalFlowFile = runner.getFlowFilesForRelationship(SplitContent.REL_ORIGINAL).get(0);
    originalFlowFile.assertAttributeExists(FRAGMENT_ID);
    originalFlowFile.assertAttributeEquals(FRAGMENT_COUNT, "2");
    runner.assertTransferCount(SplitContent.REL_SPLITS, 2);
    runner.assertQueueEmpty();
    final List<MockFlowFile> splits = runner.getFlowFilesForRelationship(SplitContent.REL_SPLITS);
    final MockFlowFile split1 = splits.get(0);
    final MockFlowFile split2 = splits.get(1);
    split1.assertContentEquals(new byte[]{1, 2, 3, 4});
    split2.assertContentEquals(new byte[]{5, 5, 4, 3, 2, 1});
}
Example 10
Source File: TestSplitText.java From nifi with Apache License 2.0
@Test
public void testLastLineExceedsSizeLimit() {
    final TestRunner runner = TestRunners.newTestRunner(new SplitText());
    runner.setProperty(SplitText.HEADER_LINE_COUNT, "0");
    runner.setProperty(SplitText.LINE_SPLIT_COUNT, "2");
    runner.setProperty(SplitText.FRAGMENT_MAX_SIZE, "20 B");
    runner.enqueue("Line #1\nLine #2\nLine #3\nLong line exceeding limit");
    runner.run();
    runner.assertTransferCount(SplitText.REL_FAILURE, 0);
    runner.assertTransferCount(SplitText.REL_ORIGINAL, 1);
    final MockFlowFile originalFlowFile = runner.getFlowFilesForRelationship(SplitText.REL_ORIGINAL).get(0);
    originalFlowFile.assertAttributeEquals(SplitText.FRAGMENT_COUNT, "3");
    originalFlowFile.assertAttributeExists(SplitText.FRAGMENT_ID);
    runner.assertTransferCount(SplitText.REL_SPLITS, 3);
}
Example 11
Source File: TestExtractAvroMetadata.java From nifi with Apache License 2.0
@Test
public void testExtractionWithMetadataKey() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new ExtractAvroMetadata());
    runner.setProperty(ExtractAvroMetadata.METADATA_KEYS, AVRO_SCHEMA_ATTR); // test dynamic attribute avro.schema
    final Schema schema = new Schema.Parser().parse(new File("src/test/resources/user.avsc"));
    final ByteArrayOutputStream out = getOutputStreamWithOneUser(schema);
    runner.enqueue(out.toByteArray());
    runner.run();
    runner.assertAllFlowFilesTransferred(ExtractAvroMetadata.REL_SUCCESS, 1);
    final MockFlowFile flowFile = runner.getFlowFilesForRelationship(ExtractAvroMetadata.REL_SUCCESS).get(0);
    flowFile.assertAttributeExists(ExtractAvroMetadata.SCHEMA_FINGERPRINT_ATTR);
    flowFile.assertAttributeEquals(ExtractAvroMetadata.SCHEMA_TYPE_ATTR, Schema.Type.RECORD.getName());
    flowFile.assertAttributeEquals(ExtractAvroMetadata.SCHEMA_NAME_ATTR, "User");
    flowFile.assertAttributeEquals(AVRO_SCHEMA_ATTR, schema.toString());
}
Example 12
Source File: TestExtractMediaMetadata.java From nifi with Apache License 2.0
@Test
public void testBigTextFile() throws IOException {
    File textFile = new File("target/test-classes/textFileBig.txt");
    final TestRunner runner = TestRunners.newTestRunner(new ExtractMediaMetadata());
    runner.setProperty(ExtractMediaMetadata.METADATA_KEY_PREFIX, "txt.");
    runner.assertValid();
    runner.enqueue(textFile.toPath());
    runner.run(2);
    runner.assertAllFlowFilesTransferred(ExtractMediaMetadata.SUCCESS, 1);
    runner.assertTransferCount(ExtractMediaMetadata.FAILURE, 0);
    final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(ExtractMediaMetadata.SUCCESS);
    MockFlowFile flowFile0 = successFiles.get(0);
    flowFile0.assertAttributeExists("filename");
    flowFile0.assertAttributeEquals("filename", "textFileBig.txt");
    flowFile0.assertAttributeExists("txt.Content-Type");
    assertTrue(flowFile0.getAttribute("txt.Content-Type").startsWith("text/plain"));
    flowFile0.assertAttributeExists("txt.X-Parsed-By");
    assertTrue(flowFile0.getAttribute("txt.X-Parsed-By").contains("org.apache.tika.parser.DefaultParser"));
    flowFile0.assertAttributeExists("txt.Content-Encoding");
    assertEquals(flowFile0.getSize(), textFile.length());
}
Example 13
Source File: TestJoltTransformJSON.java From nifi with Apache License 2.0
@Test
public void testTransformInputCustomTransformationIgnored() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new JoltTransformJSON());
    final String customJarPath = "src/test/resources/TestJoltTransformJson/TestCustomJoltTransform.jar";
    final String spec = new String(Files.readAllBytes(Paths.get("src/test/resources/TestJoltTransformJson/defaultrSpec.json")));
    runner.setProperty(JoltTransformJSON.JOLT_SPEC, spec);
    runner.setProperty(JoltTransformJSON.CUSTOM_CLASS, "TestCustomJoltTransform");
    runner.setProperty(JoltTransformJSON.MODULES, customJarPath);
    runner.setProperty(JoltTransformJSON.JOLT_TRANSFORM, JoltTransformJSON.DEFAULTR);
    runner.enqueue(JSON_INPUT);
    runner.run();
    runner.assertAllFlowFilesTransferred(JoltTransformJSON.REL_SUCCESS);
    final MockFlowFile transformed = runner.getFlowFilesForRelationship(JoltTransformJSON.REL_SUCCESS).get(0);
    transformed.assertAttributeExists(CoreAttributes.MIME_TYPE.key());
    transformed.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/json");
    Object transformedJson = JsonUtils.jsonToObject(new ByteArrayInputStream(transformed.toByteArray()));
    Object compareJson = JsonUtils.jsonToObject(Files.newInputStream(Paths.get("src/test/resources/TestJoltTransformJson/defaultrOutput.json")));
    assertTrue(DIFFY.diff(compareJson, transformedJson).isEmpty());
}
Example 14
Source File: TestJoltTransformJSON.java From nifi with Apache License 2.0
@Test
public void testTransformInputWithChainr() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new JoltTransformJSON());
    final String spec = new String(Files.readAllBytes(Paths.get("src/test/resources/TestJoltTransformJson/chainrSpec.json")));
    runner.setProperty(JoltTransformJSON.JOLT_SPEC, spec);
    runner.enqueue(JSON_INPUT);
    runner.run();
    runner.assertAllFlowFilesTransferred(JoltTransformJSON.REL_SUCCESS);
    final MockFlowFile transformed = runner.getFlowFilesForRelationship(JoltTransformJSON.REL_SUCCESS).get(0);
    transformed.assertAttributeExists(CoreAttributes.MIME_TYPE.key());
    transformed.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/json");
    Object transformedJson = JsonUtils.jsonToObject(new ByteArrayInputStream(transformed.toByteArray()));
    Object compareJson = JsonUtils.jsonToObject(Files.newInputStream(Paths.get("src/test/resources/TestJoltTransformJson/chainrOutput.json")));
    assertTrue(DIFFY.diff(compareJson, transformedJson).isEmpty());
}
Example 15
Source File: TestSplitAvro.java From nifi with Apache License 2.0
@Test
public void testRecordSplitDatafileOutputWithSingleRecords() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new SplitAvro());
    final String filename = "users.avro";
    runner.enqueue(users.toByteArray(), new HashMap<String, String>() {{
        put(CoreAttributes.FILENAME.key(), filename);
    }});
    runner.run();
    runner.assertTransferCount(SplitAvro.REL_SPLIT, 100);
    runner.assertTransferCount(SplitAvro.REL_ORIGINAL, 1);
    runner.assertTransferCount(SplitAvro.REL_FAILURE, 0);
    final MockFlowFile originalFlowFile = runner.getFlowFilesForRelationship(SplitAvro.REL_ORIGINAL).get(0);
    originalFlowFile.assertAttributeExists(FRAGMENT_ID.key());
    originalFlowFile.assertAttributeEquals(FRAGMENT_COUNT.key(), "100");
    final List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(SplitAvro.REL_SPLIT);
    checkDataFileSplitSize(flowFiles, 1, true);
    final String fragmentIdentifier = flowFiles.get(0).getAttribute("fragment.identifier");
    IntStream.range(0, flowFiles.size()).forEach((i) -> {
        MockFlowFile flowFile = flowFiles.get(i);
        assertEquals(i, Integer.parseInt(flowFile.getAttribute("fragment.index")));
        assertEquals(fragmentIdentifier, flowFile.getAttribute("fragment.identifier"));
        assertEquals(flowFiles.size(), Integer.parseInt(flowFile.getAttribute(FRAGMENT_COUNT.key())));
        assertEquals(filename, flowFile.getAttribute("segment.original.filename"));
    });
}
Example 16
Source File: TestAttributesToCSV.java From nifi with Apache License 2.0
@Test
public void testSchemaWithCoreAttribuesToAttribute() throws Exception {
    final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToCSV());
    testRunner.setProperty(AttributesToCSV.DESTINATION, OUTPUT_NEW_ATTRIBUTE);
    testRunner.setProperty(AttributesToCSV.INCLUDE_CORE_ATTRIBUTES, "true");
    testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false");
    testRunner.setProperty(AttributesToCSV.INCLUDE_SCHEMA, "true");
    testRunner.setProperty(AttributesToCSV.ATTRIBUTES_REGEX, "beach-.*");
    Map<String, String> attrs = new HashMap<String, String>() {{
        put("beach-name", "Malibu Beach");
        put("beach-location", "California, US");
        put("attribute-should-be-eliminated", "This should not be in CSVData!");
    }};
    testRunner.enqueue(new byte[0], attrs);
    testRunner.run();
    testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1);
    testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0);
    MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0);
    flowFile.assertAttributeExists("CSVData");
    flowFile.assertAttributeExists("CSVSchema");
    final String path = flowFile.getAttribute("path");
    final String filename = flowFile.getAttribute("filename");
    final String uuid = flowFile.getAttribute("uuid");
    flowFile.assertAttributeEquals("CSVData", "Malibu Beach,\"California, US\"," + path + "," + filename + "," + uuid);
    flowFile.assertAttributeEquals("CSVSchema", "beach-name,beach-location,path,filename,uuid");
}
Example 17
Source File: ITPutKinesisStream.java From localization_nifi with Apache License 2.0
@Test
public void testTwoMessage2MBHelloWorldWithBatch10MaxBufferSize1MBRunOnceTwoMessageSent() throws Exception {
    runner.setProperty(PutKinesisStream.BATCH_SIZE, "10");
    runner.setProperty(PutKinesisStream.MAX_MESSAGE_BUFFER_SIZE_MB, "1 MB");
    runner.assertValid();
    byte[] bytes = new byte[(PutKinesisStream.MAX_MESSAGE_SIZE * 2)];
    for (int i = 0; i < bytes.length; i++) {
        bytes[i] = 'a';
    }
    runner.enqueue(bytes);
    runner.enqueue("HelloWorld".getBytes());
    runner.run(1, true, true);
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(PutKinesisStream.REL_SUCCESS);
    assertEquals(1, flowFiles.size());
    for (MockFlowFile flowFile : flowFiles) {
        flowFile.assertAttributeExists(PutKinesisStream.AWS_KINESIS_SEQUENCE_NUMBER);
        flowFile.assertAttributeExists(PutKinesisStream.AWS_KINESIS_SHARD_ID);
        flowFile.assertContentEquals("HelloWorld".getBytes());
    }
    List<MockFlowFile> flowFilesFailed = runner.getFlowFilesForRelationship(PutKinesisStream.REL_FAILURE);
    assertEquals(1, flowFilesFailed.size());
    for (MockFlowFile flowFileFailed : flowFilesFailed) {
        assertNotNull(flowFileFailed.getAttribute(PutKinesisStream.AWS_KINESIS_ERROR_MESSAGE));
    }
}
Example 18
Source File: ITPutKinesisStream.java From nifi with Apache License 2.0
@Test
public void testThreeMessageHello2MBThereWithBatch10MaxBufferSize1MBRunOnceTwoMessageSuccessOneFailed() {
    runner.setProperty(PutKinesisStream.BATCH_SIZE, "10");
    runner.setProperty(PutKinesisStream.MAX_MESSAGE_BUFFER_SIZE_MB, "1 MB");
    runner.assertValid();
    byte[] bytes = new byte[(PutKinesisStream.MAX_MESSAGE_SIZE * 2)];
    for (int i = 0; i < bytes.length; i++) {
        bytes[i] = 'a';
    }
    runner.enqueue("hello".getBytes());
    runner.enqueue(bytes);
    runner.enqueue("there".getBytes());
    runner.run(1, true, true);
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(PutKinesisStream.REL_SUCCESS);
    assertEquals(2, flowFiles.size());
    for (MockFlowFile flowFile : flowFiles) {
        flowFile.assertAttributeExists(PutKinesisStream.AWS_KINESIS_SEQUENCE_NUMBER);
        flowFile.assertAttributeExists(PutKinesisStream.AWS_KINESIS_SHARD_ID);
    }
    List<MockFlowFile> flowFilesFailed = runner.getFlowFilesForRelationship(PutKinesisStream.REL_FAILURE);
    assertEquals(1, flowFilesFailed.size());
    for (MockFlowFile flowFileFailed : flowFilesFailed) {
        assertNotNull(flowFileFailed.getAttribute(PutKinesisStream.AWS_KINESIS_ERROR_MESSAGE));
    }
}
Example 19
Source File: TestConvertJSONToSQL.java From localization_nifi with Apache License 2.0
@Test
public void testMultipleInsertsQuotedIdentifiers() throws InitializationException, ProcessException, SQLException, IOException {
    final TestRunner runner = TestRunners.newTestRunner(ConvertJSONToSQL.class);
    final File tempDir = folder.getRoot();
    final File dbDir = new File(tempDir, "db");
    final DBCPService service = new MockDBCPService(dbDir.getAbsolutePath());
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            stmt.executeUpdate(createPersons);
        }
    }
    runner.setProperty(ConvertJSONToSQL.CONNECTION_POOL, "dbcp");
    runner.setProperty(ConvertJSONToSQL.TABLE_NAME, "PERSONS");
    runner.setProperty(ConvertJSONToSQL.STATEMENT_TYPE, "INSERT");
    runner.setProperty(ConvertJSONToSQL.QUOTED_IDENTIFIERS, "true");
    runner.enqueue(Paths.get("src/test/resources/TestConvertJSONToSQL/persons.json"));
    runner.run();
    runner.assertTransferCount(ConvertJSONToSQL.REL_ORIGINAL, 1);
    runner.getFlowFilesForRelationship(ConvertJSONToSQL.REL_ORIGINAL).get(0).assertAttributeEquals(FRAGMENT_COUNT.key(), "5");
    runner.assertTransferCount(ConvertJSONToSQL.REL_SQL, 5);
    final List<MockFlowFile> mffs = runner.getFlowFilesForRelationship(ConvertJSONToSQL.REL_SQL);
    for (final MockFlowFile mff : mffs) {
        mff.assertContentEquals("INSERT INTO PERSONS (\"ID\", \"NAME\", \"CODE\") VALUES (?, ?, ?)");
        for (int i = 1; i <= 3; i++) {
            mff.assertAttributeExists("sql.args." + i + ".type");
            mff.assertAttributeExists("sql.args." + i + ".value");
        }
    }
}
Example 20
Source File: QueryDatabaseTableRecordTest.java From nifi with Apache License 2.0
@Test
public void testMaxRowsPerFlowFile() throws IOException, SQLException {
    // load test data to database
    final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
    Statement stmt = con.createStatement();
    MockFlowFile mff;

    try {
        stmt.execute("drop table TEST_QUERY_DB_TABLE");
    } catch (final SQLException sqle) {
        // Ignore this error, probably a "table does not exist" since Derby doesn't yet support DROP IF EXISTS [DERBY-4842]
    }

    stmt.execute("create table TEST_QUERY_DB_TABLE (id integer not null, name varchar(100), scale float, created_on timestamp, bignum bigint default 0)");
    int rowCount = 0;
    // create larger row set
    for (int batch = 0; batch < 100; batch++) {
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, name, scale, created_on) VALUES (" + rowCount + ", 'Joe Smith', 1.0, '1962-09-23 03:23:34.234')");
        rowCount++;
    }

    runner.setProperty(QueryDatabaseTableRecord.TABLE_NAME, "TEST_QUERY_DB_TABLE");
    runner.setIncomingConnection(false);
    runner.setProperty(QueryDatabaseTableRecord.MAX_VALUE_COLUMN_NAMES, "ID");
    runner.setProperty(QueryDatabaseTableRecord.MAX_ROWS_PER_FLOW_FILE, "${" + MAX_ROWS_KEY + "}");
    runner.setVariable(MAX_ROWS_KEY, "9");

    runner.run();
    runner.assertAllFlowFilesTransferred(QueryDatabaseTableRecord.REL_SUCCESS, 12);

    // ensure all but the last file have 9 records each
    for (int ff = 0; ff < 11; ff++) {
        mff = runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).get(ff);
        mff.assertAttributeEquals("record.count", "9");
        mff.assertAttributeExists("fragment.identifier");
        assertEquals(Integer.toString(ff), mff.getAttribute("fragment.index"));
        assertEquals("12", mff.getAttribute("fragment.count"));
    }

    // last file should have 1 record
    mff = runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).get(11);
    mff.assertAttributeEquals("record.count", "1");
    mff.assertAttributeExists("fragment.identifier");
    assertEquals(Integer.toString(11), mff.getAttribute("fragment.index"));
    assertEquals("12", mff.getAttribute("fragment.count"));
    runner.clearTransferState();

    // Run again, this time no flowfiles/rows should be transferred
    runner.run();
    runner.assertAllFlowFilesTransferred(QueryDatabaseTableRecord.REL_SUCCESS, 0);
    runner.clearTransferState();

    // Run again, this time should be a single partial flow file
    for (int batch = 0; batch < 5; batch++) {
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, name, scale, created_on) VALUES (" + rowCount + ", 'Joe Smith', 1.0, '1962-09-23 03:23:34.234')");
        rowCount++;
    }

    runner.run();
    runner.assertAllFlowFilesTransferred(QueryDatabaseTableRecord.REL_SUCCESS, 1);
    mff = runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).get(0);
    mff.assertAttributeExists("fragment.identifier");
    assertEquals(Integer.toString(0), mff.getAttribute("fragment.index"));
    assertEquals("1", mff.getAttribute("fragment.count"));
    mff.assertAttributeEquals("record.count", "5");
    runner.clearTransferState();

    // Run again, this time should be a full batch and a partial
    for (int batch = 0; batch < 14; batch++) {
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, name, scale, created_on) VALUES (" + rowCount + ", 'Joe Smith', 1.0, '1962-09-23 03:23:34.234')");
        rowCount++;
    }

    runner.run();
    runner.assertAllFlowFilesTransferred(QueryDatabaseTableRecord.REL_SUCCESS, 2);
    mff = runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).get(0);
    mff.assertAttributeEquals("record.count", "9");
    mff = runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).get(1);
    mff.assertAttributeEquals("record.count", "5");
    runner.clearTransferState();

    // Run again with a cleaned state. Should get all rows split into batches
    int ffCount = (int) Math.ceil(rowCount / 9D);
    runner.getStateManager().clear(Scope.CLUSTER);
    runner.run();
    runner.assertAllFlowFilesTransferred(QueryDatabaseTableRecord.REL_SUCCESS, ffCount);

    // ensure all but the last file have 9 records each
    for (int ff = 0; ff < ffCount - 1; ff++) {
        mff = runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).get(ff);
        mff.assertAttributeEquals("record.count", "9");
    }

    mff = runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).get(ffCount - 1);
    mff.assertAttributeEquals("record.count", Integer.toString(rowCount % 9));
    runner.clearTransferState();
}