Java Code Examples for org.apache.nifi.processor.ProcessSession#get()
The following examples show how to use org.apache.nifi.processor.ProcessSession#get().
Each example is taken from an open-source project; the originating source file, project, and license are noted above the code.
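Every example below follows the same basic shape: call session.get() at the top of onTrigger(), return early when it yields null (no FlowFile is currently queued), and transfer or remove the FlowFile before onTrigger returns. As a quick orientation, here is a minimal sketch of that skeleton; the processor name PassThroughExample and its single success relationship are illustrative placeholders, not taken from any of the projects below.

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;

// Hypothetical minimal processor illustrating the session.get() pattern.
public class PassThroughExample extends AbstractProcessor {

    static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("All FlowFiles are routed here")
            .build();

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        // session.get() returns the next queued FlowFile, or null if the queue is empty.
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return; // nothing to do on this trigger
        }
        // Every FlowFile obtained from the session must be transferred or removed;
        // AbstractProcessor commits the session after onTrigger returns.
        session.transfer(flowFile, REL_SUCCESS);
    }
}

Note that session.get(int), used in Example 2 below, is the batch variant: it returns up to the requested number of queued FlowFiles as a List, which suits high-throughput processors that transfer and commit in bulk.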
Example 1
Source File: WriteResourceToStream.java from localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    try {
        flowFile = session.write(flowFile, new OutputStreamCallback() {
            @Override
            public void process(OutputStream out) throws IOException {
                IOUtils.write(resourceData, out);
            }
        });
        session.transfer(flowFile, REL_SUCCESS);
    } catch (ProcessException ex) {
        getLogger().error("Unable to process", ex);
        session.transfer(flowFile, REL_FAILURE);
    }
}
Example 2
Source File: StandardFunnel.java from localization_nifi with Apache License 2.0
private void onTrigger(final ProcessContext context, final ProcessSession session) {
    readLock.lock();
    try {
        Set<Relationship> available = context.getAvailableRelationships();
        while (!available.isEmpty()) {
            final List<FlowFile> flowFiles = session.get(100);
            if (flowFiles.isEmpty()) {
                break;
            }
            session.transfer(flowFiles, Relationship.ANONYMOUS);
            session.commit();
            available = context.getAvailableRelationships();
        }
    } finally {
        readLock.unlock();
    }
}
Example 3
Source File: PutGridFS.java from nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = session.get();
    if (input == null) {
        return;
    }

    GridFSBucket bucket = getBucket(input, context);

    if (!canUploadFile(context, input, bucket.getBucketName())) {
        getLogger().error("Cannot upload the file because of the uniqueness policy configured.");
        session.transfer(input, REL_DUPLICATE);
        return;
    }

    final int chunkSize = context.getProperty(CHUNK_SIZE).evaluateAttributeExpressions(input).asDataSize(DataUnit.B).intValue();

    try (InputStream fileInput = session.read(input)) {
        String fileName = context.getProperty(FILE_NAME).evaluateAttributeExpressions(input).getValue();
        GridFSUploadOptions options = new GridFSUploadOptions()
                .chunkSizeBytes(chunkSize)
                .metadata(getMetadata(input, context));
        ObjectId id = bucket.uploadFromStream(fileName, fileInput, options);
        fileInput.close();

        if (id != null) {
            input = session.putAttribute(input, ID_ATTRIBUTE, id.toString());
            session.transfer(input, REL_SUCCESS);
            session.getProvenanceReporter().send(input, getTransitUri(id, input, context));
        } else {
            getLogger().error("ID was null, assuming failure.");
            session.transfer(input, REL_FAILURE);
        }
    } catch (Exception ex) {
        getLogger().error("Failed to upload file", ex);
        session.transfer(input, REL_FAILURE);
    }
}
Example 4
Source File: GetIgniteCache.java from localization_nifi with Apache License 2.0
/**
 * Handles the flow file and gets the entry from the cache based on the key attribute.
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    String key = context.getProperty(IGNITE_CACHE_ENTRY_KEY).evaluateAttributeExpressions(flowFile).getValue();
    if (StringUtils.isEmpty(key)) {
        flowFile = session.putAttribute(flowFile, IGNITE_GET_FAILED_REASON_ATTRIBUTE_KEY, IGNITE_GET_FAILED_MISSING_KEY_MESSAGE);
        session.transfer(flowFile, REL_FAILURE);
    } else {
        try {
            byte[] value = getIgniteCache().get(key);
            if (value == null || value.length == 0) {
                flowFile = session.putAttribute(flowFile, IGNITE_GET_FAILED_REASON_ATTRIBUTE_KEY, IGNITE_GET_FAILED_MISSING_ENTRY_MESSAGE);
                session.transfer(flowFile, REL_FAILURE);
            } else {
                ByteArrayInputStream bais = new ByteArrayInputStream(value);
                flowFile = session.importFrom(bais, flowFile);
                session.transfer(flowFile, REL_SUCCESS);
            }
        } catch (Exception exception) {
            flowFile = session.putAttribute(flowFile, IGNITE_GET_FAILED_REASON_ATTRIBUTE_KEY, IGNITE_GET_FAILED_MESSAGE_PREFIX + exception);
            getLogger().error("Failed to get value for key {} from IgniteDB due to {}", new Object[] { key, exception }, exception);
            session.transfer(flowFile, REL_FAILURE);
            context.yield();
        }
    }
}
Example 5
Source File: PutUDP.java from localization_nifi with Apache License 2.0
/**
 * Event handler method that handles the FlowFile being forwarded to the Processor by the framework. The FlowFile contents are sent out
 * as a UDP datagram using an acquired ChannelSender object. If the FlowFile contents were sent out successfully, the FlowFile is
 * forwarded to the success relationship; if an error occurred, the FlowFile is forwarded to the failure relationship.
 *
 * @param context - the current process context.
 * @param sessionFactory - a factory object to obtain a process session.
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
    final ProcessSession session = sessionFactory.createSession();
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        pruneIdleSenders(context.getProperty(IDLE_EXPIRATION).asTimePeriod(TimeUnit.MILLISECONDS).longValue());
        context.yield();
        return;
    }

    ChannelSender sender = acquireSender(context, session, flowFile);
    if (sender == null) {
        return;
    }

    try {
        byte[] content = readContent(session, flowFile);
        StopWatch stopWatch = new StopWatch(true);
        sender.send(content);
        session.getProvenanceReporter().send(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        session.transfer(flowFile, REL_SUCCESS);
        session.commit();
    } catch (Exception e) {
        getLogger().error("Exception while handling a process session, transferring {} to failure.", new Object[] { flowFile }, e);
        onFailure(context, session, flowFile);
    } finally {
        relinquishSender(sender);
    }
}
Example 6
Source File: AttributesToJSON.java from localization_nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final FlowFile original = session.get();
    if (original == null) {
        return;
    }

    final Map<String, String> atrList = buildAttributesMapForFlowFile(original, attributes, attributesToRemove, nullValueForEmptyString);

    try {
        if (destinationContent) {
            FlowFile conFlowfile = session.write(original, (in, out) -> {
                try (OutputStream outputStream = new BufferedOutputStream(out)) {
                    outputStream.write(objectMapper.writeValueAsBytes(atrList));
                }
            });
            conFlowfile = session.putAttribute(conFlowfile, CoreAttributes.MIME_TYPE.key(), APPLICATION_JSON);
            session.transfer(conFlowfile, REL_SUCCESS);
        } else {
            FlowFile atFlowfile = session.putAttribute(original, JSON_ATTRIBUTE_NAME, objectMapper.writeValueAsString(atrList));
            session.transfer(atFlowfile, REL_SUCCESS);
        }
    } catch (JsonProcessingException e) {
        getLogger().error(e.getMessage());
        session.transfer(original, REL_FAILURE);
    }
}
Example 7
Source File: TerminateOnce.java from nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile != null) {
        session.remove(flowFile);
    }
}
Example 8
Source File: DeleteHBaseCells.java from nifi with Apache License 2.0
@Override
protected void doDelete(ProcessContext context, ProcessSession session) throws Exception {
    FlowFile input = session.get();
    if (input == null) {
        return;
    }

    final String separator = context.getProperty(SEPARATOR).evaluateAttributeExpressions(input).getValue();
    final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(input).getValue();
    List<String> rowKeys = new ArrayList<>();
    int lineNum = 1;
    try (InputStream is = session.read(input)) {
        Scanner scanner = new Scanner(is);
        List<DeleteRequest> deletes = new ArrayList<>();
        while (scanner.hasNextLine()) {
            String line = scanner.nextLine().trim();
            if (line.equals("")) {
                continue;
            }
            String[] parts = line.split(separator);
            if (parts.length < 3 || parts.length > 4) {
                final String msg = String.format("Invalid line length. It must have 3 or 4 components. It had %d.", parts.length);
                is.close();
                input = writeErrorAttributes(lineNum, msg, input, session);
                session.transfer(input, REL_FAILURE);
                getLogger().error(msg);
                return;
            }
            String rowId = parts[0];
            String family = parts[1];
            String column = parts[2];
            String visibility = parts.length == 4 ? parts[3] : null;
            DeleteRequest request = new DeleteRequest(rowId.getBytes(), family.getBytes(), column.getBytes(), visibility);
            deletes.add(request);
            if (!rowKeys.contains(rowId)) {
                rowKeys.add(rowId);
            }
            if (getLogger().isDebugEnabled()) {
                logCell(rowId, family, column, visibility);
            }
            lineNum++;
        }
        is.close();
        clientService.deleteCells(tableName, deletes);
        for (int index = 0; index < rowKeys.size(); index++) { // Could be many row keys in one flowfile.
            session.getProvenanceReporter().invokeRemoteProcess(input, clientService.toTransitUri(tableName, rowKeys.get(index)));
        }
        session.transfer(input, REL_SUCCESS);
    } catch (Exception ex) {
        input = writeErrorAttributes(lineNum, ex.getMessage(), input, session);
        session.transfer(input, REL_FAILURE);
    }
}
Example 9
Source File: PutSQS.java from localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final long startNanos = System.nanoTime();
    final AmazonSQSClient client = getClient();
    final SendMessageBatchRequest request = new SendMessageBatchRequest();
    final String queueUrl = context.getProperty(QUEUE_URL).evaluateAttributeExpressions(flowFile).getValue();
    request.setQueueUrl(queueUrl);

    final Set<SendMessageBatchRequestEntry> entries = new HashSet<>();

    final SendMessageBatchRequestEntry entry = new SendMessageBatchRequestEntry();
    entry.setId(flowFile.getAttribute("uuid"));
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    session.exportTo(flowFile, baos);
    final String flowFileContent = baos.toString();
    entry.setMessageBody(flowFileContent);

    final Map<String, MessageAttributeValue> messageAttributes = new HashMap<>();
    for (final PropertyDescriptor descriptor : userDefinedProperties) {
        final MessageAttributeValue mav = new MessageAttributeValue();
        mav.setDataType("String");
        mav.setStringValue(context.getProperty(descriptor).evaluateAttributeExpressions(flowFile).getValue());
        messageAttributes.put(descriptor.getName(), mav);
    }
    entry.setMessageAttributes(messageAttributes);
    entry.setDelaySeconds(context.getProperty(DELAY).asTimePeriod(TimeUnit.SECONDS).intValue());
    entries.add(entry);
    request.setEntries(entries);

    try {
        client.sendMessageBatch(request);
    } catch (final Exception e) {
        getLogger().error("Failed to send messages to Amazon SQS due to {}; routing to failure", new Object[]{e});
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    getLogger().info("Successfully published message to Amazon SQS for {}", new Object[]{flowFile});
    session.transfer(flowFile, REL_SUCCESS);
    final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    session.getProvenanceReporter().send(flowFile, queueUrl, transmissionMillis);
}
Example 10
Source File: SegmentContent.java from localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final String segmentId = UUID.randomUUID().toString();
    final long segmentSize = context.getProperty(SIZE).asDataSize(DataUnit.B).longValue();

    final String originalFileName = flowFile.getAttribute(CoreAttributes.FILENAME.key());

    if (flowFile.getSize() <= segmentSize) {
        flowFile = session.putAttribute(flowFile, SEGMENT_ID, segmentId);
        flowFile = session.putAttribute(flowFile, SEGMENT_INDEX, "1");
        flowFile = session.putAttribute(flowFile, SEGMENT_COUNT, "1");
        flowFile = session.putAttribute(flowFile, SEGMENT_ORIGINAL_FILENAME, originalFileName);

        flowFile = session.putAttribute(flowFile, FRAGMENT_ID, segmentId);
        flowFile = session.putAttribute(flowFile, FRAGMENT_INDEX, "1");
        flowFile = session.putAttribute(flowFile, FRAGMENT_COUNT, "1");

        FlowFile clone = session.clone(flowFile);
        session.transfer(flowFile, REL_ORIGINAL);
        session.transfer(clone, REL_SEGMENTS);
        return;
    }

    int totalSegments = (int) (flowFile.getSize() / segmentSize);
    if (totalSegments * segmentSize < flowFile.getSize()) {
        totalSegments++;
    }

    final Map<String, String> segmentAttributes = new HashMap<>();
    segmentAttributes.put(SEGMENT_ID, segmentId);
    segmentAttributes.put(SEGMENT_COUNT, String.valueOf(totalSegments));
    segmentAttributes.put(SEGMENT_ORIGINAL_FILENAME, originalFileName);

    segmentAttributes.put(FRAGMENT_ID, segmentId);
    segmentAttributes.put(FRAGMENT_COUNT, String.valueOf(totalSegments));

    final Set<FlowFile> segmentSet = new HashSet<>();
    for (int i = 1; i <= totalSegments; i++) {
        final long segmentOffset = segmentSize * (i - 1);
        FlowFile segment = session.clone(flowFile, segmentOffset, Math.min(segmentSize, flowFile.getSize() - segmentOffset));
        segmentAttributes.put(SEGMENT_INDEX, String.valueOf(i));
        segmentAttributes.put(FRAGMENT_INDEX, String.valueOf(i));
        segment = session.putAllAttributes(segment, segmentAttributes);
        segmentSet.add(segment);
    }

    session.transfer(segmentSet, REL_SEGMENTS);
    flowFile = FragmentAttributes.copyAttributesToOriginal(session, flowFile, segmentId, totalSegments);
    session.transfer(flowFile, REL_ORIGINAL);

    if (totalSegments <= 10) {
        getLogger().info("Segmented {} into {} segments: {}", new Object[]{flowFile, totalSegments, segmentSet});
    } else {
        getLogger().info("Segmented {} into {} segments", new Object[]{flowFile, totalSegments});
    }
}
Example 11
Source File: PutMongoRecord.java from nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final RecordReaderFactory recordParserFactory = context.getProperty(RECORD_READER_FACTORY)
            .asControllerService(RecordReaderFactory.class);

    final WriteConcern writeConcern = getWriteConcern(context);

    List<Document> inserts = new ArrayList<>();
    int ceiling = context.getProperty(INSERT_COUNT).asInteger();
    int added = 0;
    boolean error = false;

    try (final InputStream inStream = session.read(flowFile);
         final RecordReader reader = recordParserFactory.createRecordReader(flowFile, inStream, getLogger())) {
        final MongoCollection<Document> collection = getCollection(context, flowFile).withWriteConcern(writeConcern);
        RecordSchema schema = reader.getSchema();

        Record record;
        while ((record = reader.nextRecord()) != null) {
            // Convert each Record to a HashMap and put it into the Mongo document
            Map<String, Object> contentMap = (Map<String, Object>) DataTypeUtils.convertRecordFieldtoObject(
                    record, RecordFieldType.RECORD.getRecordDataType(record.getSchema()));
            Document document = new Document();
            for (String name : schema.getFieldNames()) {
                document.put(name, contentMap.get(name));
            }
            inserts.add(convertArrays(document));
            if (inserts.size() == ceiling) {
                collection.insertMany(inserts);
                added += inserts.size();
                inserts = new ArrayList<>();
            }
        }
        if (inserts.size() > 0) {
            collection.insertMany(inserts);
        }
    } catch (SchemaNotFoundException | IOException | MalformedRecordException | MongoException e) {
        getLogger().error("PutMongoRecord failed with error:", e);
        session.transfer(flowFile, REL_FAILURE);
        error = true;
    } finally {
        if (!error) {
            String url = clientService != null
                    ? clientService.getURI()
                    : context.getProperty(URI).evaluateAttributeExpressions().getValue();
            session.getProvenanceReporter().send(flowFile, url, String.format("Added %d documents to MongoDB.", added));
            session.transfer(flowFile, REL_SUCCESS);
            getLogger().info("Inserted {} records into MongoDB", new Object[]{ added });
        }
    }
    session.commit();
}
Example 12
Source File: YandexTranslate.java from nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final StopWatch stopWatch = new StopWatch(true);
    final String key = context.getProperty(KEY).getValue();
    final String sourceLanguage = context.getProperty(SOURCE_LANGUAGE).evaluateAttributeExpressions(flowFile).getValue();
    final String targetLanguage = context.getProperty(TARGET_LANGUAGE).evaluateAttributeExpressions(flowFile).getValue();
    final String encoding = context.getProperty(CHARACTER_SET).evaluateAttributeExpressions(flowFile).getValue();

    final List<String> attributeNames = new ArrayList<>();
    final List<String> textValues = new ArrayList<>();
    for (final PropertyDescriptor descriptor : context.getProperties().keySet()) {
        if (descriptor.isDynamic()) {
            attributeNames.add(descriptor.getName()); // add to list so that we know the order when the translations come back.
            textValues.add(context.getProperty(descriptor).evaluateAttributeExpressions(flowFile).getValue());
        }
    }

    if (context.getProperty(TRANSLATE_CONTENT).asBoolean()) {
        final byte[] buff = new byte[(int) flowFile.getSize()];
        session.read(flowFile, new InputStreamCallback() {
            @Override
            public void process(final InputStream in) throws IOException {
                StreamUtils.fillBuffer(in, buff);
            }
        });
        final String content = new String(buff, Charset.forName(encoding));
        textValues.add(content);
    }

    final Invocation invocation = prepareResource(key, textValues, sourceLanguage, targetLanguage);

    final Response response;
    try {
        response = invocation.invoke();
    } catch (final Exception e) {
        getLogger().error("Failed to make request to Yandex to translate text for {} due to {}; routing to comms.failure", new Object[]{flowFile, e});
        session.transfer(flowFile, REL_COMMS_FAILURE);
        return;
    }

    if (response.getStatus() != Response.Status.OK.getStatusCode()) {
        getLogger().error("Failed to translate text using Yandex for {}; response was {}: {}; routing to {}", new Object[]{
                flowFile, response.getStatus(), response.getStatusInfo().getReasonPhrase(), REL_TRANSLATION_FAILED.getName()});
        flowFile = session.putAttribute(flowFile, "yandex.translate.failure.reason", response.getStatusInfo().getReasonPhrase());
        session.transfer(flowFile, REL_TRANSLATION_FAILED);
        return;
    }

    final Map<String, String> newAttributes = new HashMap<>();
    final Translation translation = response.readEntity(Translation.class);
    final List<String> texts = translation.getText();
    for (int i = 0; i < texts.size(); i++) {
        final String text = texts.get(i);

        if (i < attributeNames.size()) {
            final String attributeName = attributeNames.get(i);
            newAttributes.put(attributeName, text);
        } else {
            flowFile = session.write(flowFile, new OutputStreamCallback() {
                @Override
                public void process(final OutputStream out) throws IOException {
                    out.write(text.getBytes(encoding));
                }
            });
            newAttributes.put("language", targetLanguage);
        }
    }

    if (!newAttributes.isEmpty()) {
        flowFile = session.putAllAttributes(flowFile, newAttributes);
    }

    stopWatch.stop();
    session.transfer(flowFile, REL_SUCCESS);
    getLogger().info("Successfully translated {} items for {} from {} to {} in {}; routing to success",
            new Object[]{texts.size(), flowFile, sourceLanguage, targetLanguage, stopWatch.getDuration()});
}
Example 13
Source File: ExtractText.java from nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final ComponentLog logger = getLogger();
    final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).getValue());
    final int maxCaptureGroupLength = context.getProperty(MAX_CAPTURE_GROUP_LENGTH).asInteger();

    final String contentString;
    byte[] buffer = bufferQueue.poll();
    if (buffer == null) {
        final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
        buffer = new byte[maxBufferSize];
    }

    try {
        final byte[] byteBuffer = buffer;
        session.read(flowFile, new InputStreamCallback() {
            @Override
            public void process(InputStream in) throws IOException {
                StreamUtils.fillBuffer(in, byteBuffer, false);
            }
        });

        final long len = Math.min(byteBuffer.length, flowFile.getSize());
        contentString = new String(byteBuffer, 0, (int) len, charset);
    } finally {
        bufferQueue.offer(buffer);
    }

    final Map<String, String> regexResults = new HashMap<>();
    final Map<String, Pattern> patternMap = compiledPattersMapRef.get();
    final int startGroupIdx = context.getProperty(INCLUDE_CAPTURE_GROUP_ZERO).asBoolean() ? 0 : 1;

    for (final Map.Entry<String, Pattern> entry : patternMap.entrySet()) {
        final Matcher matcher = entry.getValue().matcher(contentString);
        int j = 0;

        while (matcher.find()) {
            final String baseKey = entry.getKey();
            int start = j == 0 ? startGroupIdx : 1;
            for (int i = start; i <= matcher.groupCount(); i++) {
                final String key = new StringBuilder(baseKey).append(".").append(i + j).toString();
                String value = matcher.group(i);
                if (value != null && !value.isEmpty()) {
                    if (value.length() > maxCaptureGroupLength) {
                        value = value.substring(0, maxCaptureGroupLength);
                    }
                    regexResults.put(key, value);
                    if (i == 1 && j == 0) {
                        regexResults.put(baseKey, value);
                    }
                }
            }
            j += matcher.groupCount();

            if (!context.getProperty(ENABLE_REPEATING_CAPTURE_GROUP).asBoolean()) {
                break;
            }
        }
    }

    if (!regexResults.isEmpty()) {
        flowFile = session.putAllAttributes(flowFile, regexResults);
        session.getProvenanceReporter().modifyAttributes(flowFile);
        session.transfer(flowFile, REL_MATCH);
        logger.info("Matched {} Regular Expressions and added attributes to FlowFile {}", new Object[]{regexResults.size(), flowFile});
    } else {
        session.transfer(flowFile, REL_NO_MATCH);
        logger.info("Did not match any Regular Expressions for FlowFile {}", new Object[]{flowFile});
    }
}
Example 14
Source File: QueryDNS.java from localization_nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    if (!initialized.get()) {
        initializeResolver(context);
        getLogger().warn("Resolver was initialized at onTrigger instead of onScheduled");
    }

    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final String queryType = context.getProperty(DNS_QUERY_TYPE).getValue();
    final String queryInput = context.getProperty(QUERY_INPUT).evaluateAttributeExpressions(flowFile).getValue();
    final String queryParser = context.getProperty(QUERY_PARSER).getValue();
    final String queryRegex = context.getProperty(QUERY_PARSER_INPUT).getValue();

    boolean found = false;
    try {
        Attributes results = doLookup(queryInput, queryType);
        // NOERROR & NODATA seem to return empty Attributes, handled below,
        // but defaulting to not found in any case
        if (results.size() < 1) {
            found = false;
        } else {
            int recordNumber = 0;
            NamingEnumeration<?> dnsEntryIterator = results.get(queryType).getAll();
            while (dnsEntryIterator.hasMoreElements()) {
                String dnsRecord = dnsEntryIterator.next().toString();
                // While NXDOMAIN is being generated by the doLookup catch
                // (compare by value, not by reference)
                if (!"NXDOMAIN".equals(dnsRecord)) {
                    Map<String, String> parsedResults = parseResponse(String.valueOf(recordNumber), dnsRecord, queryParser, queryRegex, "dns");
                    flowFile = session.putAllAttributes(flowFile, parsedResults);
                    found = true;
                } else {
                    // Otherwise treat as not found
                    found = false;
                }
                // Increase the counter and iterate over the next record....
                recordNumber++;
            }
        }
    } catch (NamingException e) {
        context.yield();
        throw new ProcessException("Unexpected NamingException while processing records. Please review your configuration.", e);
    }

    // Finally prepare to send the data down the pipeline
    if (found) {
        // Sending the resulting flowfile (with attributes) to REL_FOUND
        session.transfer(flowFile, REL_FOUND);
    } else {
        // NXDOMAIN received, accepting the fate but forwarding to REL_NOT_FOUND
        session.transfer(flowFile, REL_NOT_FOUND);
    }
}
Example 15
Source File: DeleteByQueryElasticsearch.java from nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = null;
    if (context.hasIncomingConnection()) {
        input = session.get();
        if (input == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    try {
        final String query = getQuery(input, context, session);
        final String index = context.getProperty(INDEX).evaluateAttributeExpressions(input).getValue();
        final String type = context.getProperty(TYPE).isSet()
                ? context.getProperty(TYPE).evaluateAttributeExpressions(input).getValue()
                : null;
        final String queryAttr = context.getProperty(QUERY_ATTRIBUTE).isSet()
                ? context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(input).getValue()
                : null;

        DeleteOperationResponse dor = clientService.deleteByQuery(query, index, type);

        if (input == null) {
            input = session.create();
        }

        Map<String, String> attrs = new HashMap<>();
        attrs.put(TOOK_ATTRIBUTE, String.valueOf(dor.getTook()));
        if (!StringUtils.isBlank(queryAttr)) {
            attrs.put(queryAttr, query);
        }

        input = session.putAllAttributes(input, attrs);
        session.transfer(input, REL_SUCCESS);
    } catch (Exception e) {
        if (input != null) {
            input = session.putAttribute(input, ERROR_ATTRIBUTE, e.getMessage());
            session.transfer(input, REL_FAILURE);
        }
        getLogger().error("Error running delete by query: ", e);
        context.yield();
    }
}
Example 16
Source File: ExecuteGraphQuery.java from nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    FlowFile output = flowFile != null ? session.create(flowFile) : session.create();
    try (OutputStream os = session.write(output)) {
        String query = getQuery(context, session, flowFile);
        long startTimeMillis = System.currentTimeMillis();

        os.write("[".getBytes());
        Map<String, String> resultAttrs = clientService.executeQuery(query, getParameters(context, output), (record, hasMore) -> {
            try {
                String obj = mapper.writeValueAsString(record);
                os.write(obj.getBytes());
                if (hasMore) {
                    os.write(",".getBytes());
                }
            } catch (Exception ex) {
                throw new ProcessException(ex);
            }
        });
        os.write("]".getBytes());
        os.close();

        final long endTimeMillis = System.currentTimeMillis();
        String executionTime = String.valueOf(endTimeMillis - startTimeMillis);
        resultAttrs.put(EXECUTION_TIME, executionTime);
        resultAttrs.put(CoreAttributes.MIME_TYPE.key(), "application/json");

        output = session.putAllAttributes(output, resultAttrs);
        session.transfer(output, REL_SUCCESS);
        session.getProvenanceReporter().invokeRemoteProcess(output, clientService.getTransitUrl(),
                String.format("The following query was executed in %s milliseconds: \"%s\"", executionTime, query));
        if (flowFile != null) {
            session.transfer(flowFile, REL_ORIGINAL);
        }
    } catch (Exception exception) {
        getLogger().error("Failed to execute graph statement due to {}", new Object[]{exception.getLocalizedMessage()}, exception);
        session.remove(output);
        if (flowFile != null) {
            flowFile = session.putAttribute(flowFile, ERROR_MESSAGE, String.valueOf(exception.getMessage()));
            session.transfer(flowFile, REL_FAILURE);
        }
        context.yield();
    }
}
Example 17
Source File: FetchGCSObject.java from nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final long startNanos = System.nanoTime();

    final String bucketName = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    final Long generation = context.getProperty(GENERATION).evaluateAttributeExpressions(flowFile).asLong();
    final String encryptionKey = context.getProperty(ENCRYPTION_KEY).evaluateAttributeExpressions(flowFile).getValue();

    final Storage storage = getCloudService();
    final BlobId blobId = BlobId.of(bucketName, key, generation);

    try {
        final List<Storage.BlobSourceOption> blobSourceOptions = new ArrayList<>(2);
        if (encryptionKey != null) {
            blobSourceOptions.add(Storage.BlobSourceOption.decryptionKey(encryptionKey));
        }
        if (generation != null) {
            blobSourceOptions.add(Storage.BlobSourceOption.generationMatch());
        }

        final Blob blob = storage.get(blobId);
        if (blob == null) {
            throw new StorageException(404, "Blob " + blobId + " not found");
        }

        final ReadChannel reader = storage.reader(blobId, blobSourceOptions.toArray(new Storage.BlobSourceOption[0]));
        flowFile = session.importFrom(Channels.newInputStream(reader), flowFile);

        final Map<String, String> attributes = StorageAttributes.createAttributes(blob);
        flowFile = session.putAllAttributes(flowFile, attributes);
    } catch (StorageException e) {
        getLogger().error("Failed to fetch GCS Object due to {}", new Object[] {e}, e);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    session.transfer(flowFile, REL_SUCCESS);

    final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    getLogger().info("Successfully retrieved GCS Object for {} in {} millis; routing to success", new Object[]{flowFile, millis});
    session.getProvenanceReporter().fetch(flowFile, "https://" + bucketName + ".storage.googleapis.com/" + key, millis);
}
Example 18
Source File: PutSplunk.java from localization_nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSessionFactory sessionFactory) throws ProcessException {
    // first complete any batches from previous executions
    FlowFileMessageBatch batch;
    while ((batch = completeBatches.poll()) != null) {
        batch.completeSession();
    }

    // create a session and try to get a FlowFile, if none available then close any idle senders
    final ProcessSession session = sessionFactory.createSession();
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        pruneIdleSenders(context.getProperty(IDLE_EXPIRATION).asTimePeriod(TimeUnit.MILLISECONDS).longValue());
        context.yield();
        return;
    }

    // get a sender from the pool, or create a new one if the pool is empty
    // if we can't create a new connection then route flow files to failure and yield
    // acquireSender will handle the routing to failure and yielding
    ChannelSender sender = acquireSender(context, session, flowFile);
    if (sender == null) {
        return;
    }

    try {
        String delimiter = context.getProperty(MESSAGE_DELIMITER).evaluateAttributeExpressions(flowFile).getValue();
        if (delimiter != null) {
            delimiter = delimiter.replace("\\n", "\n").replace("\\r", "\r").replace("\\t", "\t");
        }

        // if no delimiter then treat the whole FlowFile as a single message
        if (delimiter == null) {
            processSingleMessage(context, session, flowFile, sender);
        } else {
            processDelimitedMessages(context, session, flowFile, sender, delimiter);
        }
    } finally {
        relinquishSender(sender);
    }
}
Example 19
Source File: ConvertCharacterSet.java from localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final ComponentLog logger = getLogger();

    final Charset inputCharset = Charset.forName(context.getProperty(INPUT_CHARSET).evaluateAttributeExpressions(flowFile).getValue());
    final Charset outputCharset = Charset.forName(context.getProperty(OUTPUT_CHARSET).evaluateAttributeExpressions(flowFile).getValue());
    final CharBuffer charBuffer = CharBuffer.allocate(MAX_BUFFER_SIZE);

    final CharsetDecoder decoder = inputCharset.newDecoder();
    decoder.onMalformedInput(CodingErrorAction.REPLACE);
    decoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
    decoder.replaceWith("?");

    final CharsetEncoder encoder = outputCharset.newEncoder();
    encoder.onMalformedInput(CodingErrorAction.REPLACE);
    encoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
    encoder.replaceWith("?".getBytes(outputCharset));

    try {
        final StopWatch stopWatch = new StopWatch(true);
        flowFile = session.write(flowFile, new StreamCallback() {
            @Override
            public void process(final InputStream rawIn, final OutputStream rawOut) throws IOException {
                try (final BufferedReader reader = new BufferedReader(new InputStreamReader(rawIn, decoder), MAX_BUFFER_SIZE);
                     final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(rawOut, encoder), MAX_BUFFER_SIZE)) {
                    int charsRead;
                    while ((charsRead = reader.read(charBuffer)) != -1) {
                        charBuffer.flip();
                        writer.write(charBuffer.array(), 0, charsRead);
                    }
                    writer.flush();
                }
            }
        });

        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        logger.info("successfully converted characters from {} to {} for {}", new Object[]{inputCharset, outputCharset, flowFile});
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final Exception e) {
        throw new ProcessException(e);
    }
}
Example 20
Source File: FetchDistributedMapCache.java from nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final ComponentLog logger = getLogger();
    final String cacheKey = context.getProperty(PROP_CACHE_ENTRY_IDENTIFIER).evaluateAttributeExpressions(flowFile).getValue();
    // This block retains the previous behavior when only one Cache Entry Identifier was allowed, so as not to change the expected error message
    if (StringUtils.isBlank(cacheKey)) {
        logger.error("FlowFile {} has no attribute for given Cache Entry Identifier", new Object[]{flowFile});
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    List<String> cacheKeys = Arrays.stream(cacheKey.split(",")).filter(path -> !StringUtils.isEmpty(path)).map(String::trim).collect(Collectors.toList());
    for (int i = 0; i < cacheKeys.size(); i++) {
        if (StringUtils.isBlank(cacheKeys.get(i))) {
            // Log first missing identifier, route to failure, and return
            logger.error("FlowFile {} has no attribute for Cache Entry Identifier in position {}", new Object[]{flowFile, i});
            flowFile = session.penalize(flowFile);
            session.transfer(flowFile, REL_FAILURE);
            return;
        }
    }

    final DistributedMapCacheClient cache = context.getProperty(PROP_DISTRIBUTED_CACHE_SERVICE).asControllerService(DistributedMapCacheClient.class);

    try {
        final Map<String, byte[]> cacheValues;
        final boolean singleKey = cacheKeys.size() == 1;
        if (singleKey) {
            cacheValues = new HashMap<>(1);
            cacheValues.put(cacheKeys.get(0), cache.get(cacheKey, keySerializer, valueDeserializer));
        } else {
            cacheValues = cache.subMap(new HashSet<>(cacheKeys), keySerializer, valueDeserializer);
        }
        boolean notFound = false;
        for (Map.Entry<String, byte[]> cacheValueEntry : cacheValues.entrySet()) {
            final byte[] cacheValue = cacheValueEntry.getValue();

            if (cacheValue == null) {
                logger.info("Could not find an entry in cache for {}; routing to not-found", new Object[]{flowFile});
                notFound = true;
                break;
            } else {
                boolean putInAttribute = context.getProperty(PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE).isSet();
                if (putInAttribute) {
                    String attributeName = context.getProperty(PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE).evaluateAttributeExpressions(flowFile).getValue();
                    if (!singleKey) {
                        // Append key to attribute name if multiple keys
                        attributeName += "." + cacheValueEntry.getKey();
                    }
                    String attributeValue = new String(cacheValue, context.getProperty(PROP_CHARACTER_SET).getValue());

                    int maxLength = context.getProperty(PROP_PUT_ATTRIBUTE_MAX_LENGTH).asInteger();
                    if (maxLength < attributeValue.length()) {
                        attributeValue = attributeValue.substring(0, maxLength);
                    }

                    flowFile = session.putAttribute(flowFile, attributeName, attributeValue);
                } else if (cacheKeys.size() > 1) {
                    throw new IOException("Multiple Cache Value Identifiers specified without Put Cache Value In Attribute set");
                } else {
                    // Write single value to content
                    flowFile = session.write(flowFile, out -> out.write(cacheValue));
                }

                if (putInAttribute) {
                    logger.info("Found a cache key of {} and added an attribute to {} with its value.", new Object[]{cacheKey, flowFile});
                } else {
                    logger.info("Found a cache key of {} and replaced the contents of {} with its value.", new Object[]{cacheKey, flowFile});
                }
            }
        }
        // If the loop was exited because a cache entry was not found, route to REL_NOT_FOUND; otherwise route to REL_SUCCESS
        if (notFound) {
            session.transfer(flowFile, REL_NOT_FOUND);
        } else {
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (final IOException e) {
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        logger.error("Unable to communicate with cache when processing {} due to {}", new Object[]{flowFile, e});
    }
}