Java Code Examples for org.apache.nifi.processor.ProcessContext#hasIncomingConnection()
The following examples show how to use org.apache.nifi.processor.ProcessContext#hasIncomingConnection().
Each example notes the source file and project it was taken from.
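Most of the examples share the same two-step pattern: at scheduling time, require either a configured query property or an incoming connection, and at trigger time, only wait for a FlowFile when one is actually expected. The sketch below illustrates that pattern in a minimal, hypothetical processor; the class and property names (ExampleQueryProcessor, QUERY) are placeholders and are not taken from any of the projects referenced below.

import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

// Hypothetical processor sketching the common hasIncomingConnection() pattern.
public class ExampleQueryProcessor extends AbstractProcessor {

    // Hypothetical optional property; a real processor would also declare
    // relationships and expose this via getSupportedPropertyDescriptors().
    static final PropertyDescriptor QUERY = new PropertyDescriptor.Builder()
            .name("Query")
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    @OnScheduled
    public void setup(final ProcessContext context) {
        // Either the property must be set, or an upstream connection must supply FlowFiles.
        if (!context.getProperty(QUERY).isSet() && !context.hasIncomingConnection()) {
            throw new ProcessException("Either the Query property must be set or there must be an incoming connection");
        }
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        FlowFile flowFile = null;
        if (context.hasIncomingConnection()) {
            flowFile = session.get();
            // No FlowFile and at least one non-loop connection: another processor feeds
            // this one, so only run when a FlowFile actually arrives.
            if (flowFile == null && context.hasNonLoopConnection()) {
                return;
            }
        }
        // ... evaluate QUERY (optionally against FlowFile attributes) and do the work ...
    }
}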
Example 1
Source File: ExecuteSQL.java (from localization_nifi, Apache License 2.0)
@OnScheduled
public void setup(ProcessContext context) {
    // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization
    if (!context.getProperty(SQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) {
        final String errorString = "Either the Select Query must be specified or there must be an incoming connection "
                + "providing flowfile(s) containing a SQL select query";
        getLogger().error(errorString);
        throw new ProcessException(errorString);
    }
}
Example 2
Source File: SelectHiveQL.java (from localization_nifi, Apache License 2.0)
@OnScheduled
public void setup(ProcessContext context) {
    // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization
    if (!context.getProperty(HIVEQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) {
        final String errorString = "Either the Select Query must be specified or there must be an incoming connection "
                + "providing flowfile(s) containing a SQL select query";
        getLogger().error(errorString);
        throw new ProcessException(errorString);
    }
}
Example 3
Source File: ExecuteInfluxDBQuery.java (from nifi, Apache License 2.0)
@OnScheduled
public void onScheduled(final ProcessContext context) {
    super.onScheduled(context);
    // Either input connection or scheduled query is required
    if (!context.getProperty(INFLUX_DB_QUERY).isSet() && !context.hasIncomingConnection()) {
        String error = "The InfluxDB Query processor requires input connection or scheduled InfluxDB query";
        getLogger().error(error);
        throw new ProcessException(error);
    }
}
Example 4
Source File: AbstractExecuteSQL.java (from nifi, Apache License 2.0)
@OnScheduled
public void setup(ProcessContext context) {
    // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization
    if (!context.getProperty(SQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) {
        final String errorString = "Either the Select Query must be specified or there must be an incoming connection "
                + "providing flowfile(s) containing a SQL select query";
        getLogger().error(errorString);
        throw new ProcessException(errorString);
    }

    dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
}
Example 5
Source File: GenerateTableFetch.java (from nifi, Apache License 2.0)
@Override
@OnScheduled
public void setup(final ProcessContext context) {
    if (context.hasIncomingConnection() && !context.hasNonLoopConnection()) {
        getLogger().error("The failure relationship can be used only if there is another incoming connection to this processor.");
    }
}
Example 6
Source File: SelectHive3QL.java (from nifi, Apache License 2.0)
@OnScheduled
public void setup(ProcessContext context) {
    // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization
    if (!context.getProperty(HIVEQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) {
        final String errorString = "Either the Select Query must be specified or there must be an incoming connection "
                + "providing flowfile(s) containing a SQL select query";
        getLogger().error(errorString);
        throw new ProcessException(errorString);
    }
}
Example 7
Source File: SelectHiveQL.java (from nifi, Apache License 2.0)
@OnScheduled
public void setup(ProcessContext context) {
    // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization
    if (!context.getProperty(HIVEQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) {
        final String errorString = "Either the Select Query must be specified or there must be an incoming connection "
                + "providing flowfile(s) containing a SQL select query";
        getLogger().error(errorString);
        throw new ProcessException(errorString);
    }
}
Example 8
Source File: SelectHive_1_1QL.java (from nifi, Apache License 2.0)
@OnScheduled
public void setup(ProcessContext context) {
    // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization
    if (!context.getProperty(HIVEQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) {
        final String errorString = "Either the Select Query must be specified or there must be an incoming connection "
                + "providing flowfile(s) containing a SQL select query";
        getLogger().error(errorString);
        throw new ProcessException(errorString);
    }
}
Example 9
Source File: DeleteHDFS.java (from localization_nifi, Apache License 2.0)
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    String fileOrDirectoryName = null;
    FlowFile flowFile = session.get();

    // If this processor has an incoming connection, then do not run unless a
    // FlowFile is actually sent through
    if (flowFile == null && context.hasIncomingConnection()) {
        context.yield();
        return;
    }

    if (flowFile != null) {
        fileOrDirectoryName = context.getProperty(FILE_OR_DIRECTORY).evaluateAttributeExpressions(flowFile).getValue();
    } else {
        fileOrDirectoryName = context.getProperty(FILE_OR_DIRECTORY).evaluateAttributeExpressions().getValue();
    }

    final FileSystem fileSystem = getFileSystem();
    try {
        // Check if the user has supplied a file or directory pattern
        List<Path> pathList = Lists.newArrayList();
        if (GLOB_MATCHER.reset(fileOrDirectoryName).find()) {
            FileStatus[] fileStatuses = fileSystem.globStatus(new Path(fileOrDirectoryName));
            if (fileStatuses != null) {
                for (FileStatus fileStatus : fileStatuses) {
                    pathList.add(fileStatus.getPath());
                }
            }
        } else {
            pathList.add(new Path(fileOrDirectoryName));
        }

        Map<String, String> attributes = Maps.newHashMapWithExpectedSize(2);
        for (Path path : pathList) {
            attributes.put("filename", path.getName());
            attributes.put("path", path.getParent().toString());
            if (fileSystem.exists(path)) {
                fileSystem.delete(path, context.getProperty(RECURSIVE).asBoolean());
                if (!context.hasIncomingConnection()) {
                    flowFile = session.create();
                }
                session.transfer(session.putAllAttributes(flowFile, attributes), REL_SUCCESS);
            } else {
                getLogger().warn("File (" + path + ") does not exist");
                if (!context.hasIncomingConnection()) {
                    flowFile = session.create();
                }
                session.transfer(session.putAllAttributes(flowFile, attributes), REL_FAILURE);
            }
        }
    } catch (IOException e) {
        getLogger().warn("Error processing delete for file or directory", e);
        if (flowFile != null) {
            session.rollback(true);
        }
    }
}
Example 10
Source File: QueryElasticsearchHttp.java (from localization_nifi, Apache License 2.0)
private int getPage(final Response getResponse, final URL url, final ProcessContext context,
        final ProcessSession session, FlowFile flowFile, final ComponentLog logger,
        final long startNanos, boolean targetIsContent) throws IOException {
    List<FlowFile> page = new ArrayList<>();
    final int statusCode = getResponse.code();

    if (isSuccess(statusCode)) {
        ResponseBody body = getResponse.body();
        final byte[] bodyBytes = body.bytes();
        JsonNode responseJson = parseJsonResponse(new ByteArrayInputStream(bodyBytes));
        JsonNode hits = responseJson.get("hits").get("hits");

        for (int i = 0; i < hits.size(); i++) {
            JsonNode hit = hits.get(i);
            String retrievedId = hit.get("_id").asText();
            String retrievedIndex = hit.get("_index").asText();
            String retrievedType = hit.get("_type").asText();

            FlowFile documentFlowFile = null;
            if (flowFile != null) {
                documentFlowFile = targetIsContent ? session.create(flowFile) : session.clone(flowFile);
            } else {
                documentFlowFile = session.create();
            }

            JsonNode source = hit.get("_source");
            documentFlowFile = session.putAttribute(documentFlowFile, "es.id", retrievedId);
            documentFlowFile = session.putAttribute(documentFlowFile, "es.index", retrievedIndex);
            documentFlowFile = session.putAttribute(documentFlowFile, "es.type", retrievedType);

            if (targetIsContent) {
                documentFlowFile = session.putAttribute(documentFlowFile, "filename", retrievedId);
                documentFlowFile = session.putAttribute(documentFlowFile, "mime.type", "application/json");
                documentFlowFile = session.write(documentFlowFile, out -> {
                    out.write(source.toString().getBytes());
                });
            } else {
                Map<String, String> attributes = new HashMap<>();
                for (Iterator<Entry<String, JsonNode>> it = source.getFields(); it.hasNext(); ) {
                    Entry<String, JsonNode> entry = it.next();
                    attributes.put(ATTRIBUTE_PREFIX + entry.getKey(), entry.getValue().asText());
                }
                documentFlowFile = session.putAllAttributes(documentFlowFile, attributes);
            }
            page.add(documentFlowFile);
        }
        logger.debug("Elasticsearch retrieved " + responseJson.size() + " documents, routing to success");
        session.transfer(page, REL_SUCCESS);
    } else {
        try {
            // 5xx -> RETRY, but a server error might last a while, so yield
            if (statusCode / 100 == 5) {
                throw new RetryableException(String.format(
                        "Elasticsearch returned code %s with message %s, transferring flow file to retry. This is likely a server problem, yielding...",
                        statusCode, getResponse.message()));
            } else if (context.hasIncomingConnection()) {
                // 1xx, 3xx, 4xx -> NO RETRY
                throw new UnretryableException(String.format(
                        "Elasticsearch returned code %s with message %s, transferring flow file to failure",
                        statusCode, getResponse.message()));
            } else {
                logger.warn("Elasticsearch returned code {} with message {}", new Object[]{statusCode, getResponse.message()});
            }
        } finally {
            if (!page.isEmpty()) {
                session.remove(page);
                page.clear();
            }
        }
    }

    // emit provenance event
    final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    if (!page.isEmpty()) {
        if (context.hasNonLoopConnection()) {
            page.forEach(f -> session.getProvenanceReporter().fetch(f, url.toExternalForm(), millis));
        } else {
            page.forEach(f -> session.getProvenanceReporter().receive(f, url.toExternalForm(), millis));
        }
    }
    return page.size();
}
Example 11
Source File: ExecuteInfluxDBQuery.java (from nifi, Apache License 2.0)
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    String query = null;
    String database = null;
    TimeUnit queryResultTimeunit = null;
    Charset charset = null;
    FlowFile outgoingFlowFile = null;

    // If there are incoming connections, prepare query params from flow file
    if (context.hasIncomingConnection()) {
        FlowFile incomingFlowFile = session.get();

        if (incomingFlowFile == null && context.hasNonLoopConnection()) {
            return;
        }

        charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(incomingFlowFile).getValue());
        if (incomingFlowFile.getSize() == 0) {
            if (context.getProperty(INFLUX_DB_QUERY).isSet()) {
                query = context.getProperty(INFLUX_DB_QUERY).evaluateAttributeExpressions(incomingFlowFile).getValue();
            } else {
                String message = "FlowFile query is empty and no scheduled query is set";
                getLogger().error(message);
                incomingFlowFile = session.putAttribute(incomingFlowFile, INFLUX_DB_ERROR_MESSAGE, message);
                session.transfer(incomingFlowFile, REL_FAILURE);
                return;
            }
        } else {
            try {
                query = getQuery(session, charset, incomingFlowFile);
            } catch (IOException ioe) {
                getLogger().error("Exception while reading from FlowFile " + ioe.getLocalizedMessage(), ioe);
                throw new ProcessException(ioe);
            }
        }
        outgoingFlowFile = incomingFlowFile;
    } else {
        outgoingFlowFile = session.create();
        charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(outgoingFlowFile).getValue());
        query = context.getProperty(INFLUX_DB_QUERY).evaluateAttributeExpressions(outgoingFlowFile).getValue();
    }

    database = context.getProperty(DB_NAME).evaluateAttributeExpressions(outgoingFlowFile).getValue();
    queryResultTimeunit = TimeUnit.valueOf(context.getProperty(INFLUX_DB_QUERY_RESULT_TIMEUNIT).evaluateAttributeExpressions(outgoingFlowFile).getValue());

    try {
        long startTimeMillis = System.currentTimeMillis();
        int chunkSize = context.getProperty(INFLUX_DB_QUERY_CHUNK_SIZE).evaluateAttributeExpressions(outgoingFlowFile).asInteger();
        List<QueryResult> result = executeQuery(context, database, query, queryResultTimeunit, chunkSize);

        String json = result.size() == 1 ? gson.toJson(result.get(0)) : gson.toJson(result);
        if (getLogger().isDebugEnabled()) {
            getLogger().debug("Query result {} ", new Object[] {result});
        }

        ByteArrayInputStream bais = new ByteArrayInputStream(json.getBytes(charset));
        session.importFrom(bais, outgoingFlowFile);
        bais.close();

        final long endTimeMillis = System.currentTimeMillis();

        if (!hasErrors(result)) {
            outgoingFlowFile = session.putAttribute(outgoingFlowFile, INFLUX_DB_EXECUTED_QUERY, String.valueOf(query));
            session.getProvenanceReporter().send(outgoingFlowFile, makeProvenanceUrl(context, database), (endTimeMillis - startTimeMillis));
            session.transfer(outgoingFlowFile, REL_SUCCESS);
        } else {
            outgoingFlowFile = populateErrorAttributes(session, outgoingFlowFile, query, queryErrors(result));
            session.transfer(outgoingFlowFile, REL_FAILURE);
        }
    } catch (Exception exception) {
        outgoingFlowFile = populateErrorAttributes(session, outgoingFlowFile, query, exception.getMessage());
        if (exception.getCause() instanceof SocketTimeoutException) {
            getLogger().error("Failed to read from InfluxDB due SocketTimeoutException to {} and retrying",
                    new Object[]{exception.getCause().getLocalizedMessage()}, exception.getCause());
            session.transfer(outgoingFlowFile, REL_RETRY);
        } else {
            getLogger().error("Failed to read from InfluxDB due to {}",
                    new Object[]{exception.getLocalizedMessage()}, exception);
            session.transfer(outgoingFlowFile, REL_FAILURE);
        }
        context.yield();
    }
}
Example 12
Source File: InvokeGRPC.java (from nifi, Apache License 2.0)
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile fileToProcess = null;
    if (context.hasIncomingConnection()) {
        fileToProcess = session.get();

        // If we have no FlowFile, and all incoming connections are self-loops then we can continue on.
        // However, if we have no FlowFile and we have connections coming from other Processors, then
        // we know that we should run only if we have a FlowFile.
        if (fileToProcess == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    final ComponentLog logger = getLogger();
    final FlowFileServiceGrpc.FlowFileServiceBlockingStub blockingStub = blockingStubReference.get();
    final String host = context.getProperty(PROP_SERVICE_HOST).getValue();
    final String port = context.getProperty(PROP_SERVICE_PORT).getValue();
    fileToProcess = session.putAttribute(fileToProcess, SERVICE_HOST, host);
    fileToProcess = session.putAttribute(fileToProcess, SERVICE_PORT, port);
    FlowFile responseFlowFile = null;
    try {
        final FlowFileRequest.Builder requestBuilder = FlowFileRequest.newBuilder()
                .setId(fileToProcess.getId())
                .putAllAttributes(fileToProcess.getAttributes());

        // if the processor is configured to send the content, turn the content into bytes
        // and add it to the request.
        final boolean sendContent = context.getProperty(PROP_SEND_CONTENT).asBoolean();
        if (sendContent) {
            try (final InputStream contents = session.read(fileToProcess)) {
                requestBuilder.setContent(ByteString.readFrom(contents));
            }
            // emit provenance event
            session.getProvenanceReporter().send(fileToProcess, getRemote(host, port), true);
        }
        final FlowFileRequest flowFileRequest = requestBuilder.build();
        logRequest(logger, host, port, flowFileRequest);

        final FlowFileReply flowFileReply = blockingStub.send(flowFileRequest);
        logReply(logger, host, port, flowFileReply);

        final FlowFileReply.ResponseCode responseCode = flowFileReply.getResponseCode();
        final String body = flowFileReply.getBody();

        fileToProcess = session.putAttribute(fileToProcess, RESPONSE_CODE, String.valueOf(responseCode));
        fileToProcess = session.putAttribute(fileToProcess, RESPONSE_BODY, body);

        responseFlowFile = session.create(fileToProcess);
        route(fileToProcess, responseFlowFile, session, context, responseCode);
    } catch (final Exception e) {
        // penalize or yield
        if (fileToProcess != null) {
            logger.error("Routing to {} due to exception: {}", new Object[]{REL_FAILURE.getName(), e}, e);
            fileToProcess = session.penalize(fileToProcess);
            fileToProcess = session.putAttribute(fileToProcess, EXCEPTION_CLASS, e.getClass().getName());
            fileToProcess = session.putAttribute(fileToProcess, EXCEPTION_MESSAGE, e.getMessage());
            // transfer original to failure
            session.transfer(fileToProcess, REL_FAILURE);
        } else {
            logger.error("Yielding processor due to exception encountered as a source processor: {}", e);
            context.yield();
        }

        // cleanup
        try {
            if (responseFlowFile != null) {
                session.remove(responseFlowFile);
            }
        } catch (final Exception e1) {
            logger.error("Could not cleanup response flowfile due to exception: {}", new Object[]{e1}, e1);
        }
    }
}
Example 13
Source File: GetMongoRecord.java (from nifi, Apache License 2.0)
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = null;
    if (context.hasIncomingConnection()) {
        input = session.get();
        if (input == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    final String database = context.getProperty(DATABASE_NAME).evaluateAttributeExpressions(input).getValue();
    final String collection = context.getProperty(COLLECTION_NAME).evaluateAttributeExpressions(input).getValue();
    final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(input).getValue();
    final Document query = getQuery(context, session, input);

    MongoCollection mongoCollection = clientService.getDatabase(database).getCollection(collection);

    FindIterable<Document> find = mongoCollection.find(query);
    if (context.getProperty(SORT).isSet()) {
        find = find.sort(Document.parse(context.getProperty(SORT).evaluateAttributeExpressions(input).getValue()));
    }
    if (context.getProperty(PROJECTION).isSet()) {
        find = find.projection(Document.parse(context.getProperty(PROJECTION).evaluateAttributeExpressions(input).getValue()));
    }
    if (context.getProperty(LIMIT).isSet()) {
        find = find.limit(context.getProperty(LIMIT).evaluateAttributeExpressions(input).asInteger());
    }

    MongoCursor<Document> cursor = find.iterator();

    FlowFile output = input != null ? session.create(input) : session.create();
    final FlowFile inputPtr = input;
    try {
        final Map<String, String> attributes = getAttributes(context, input, query, mongoCollection);
        try (OutputStream out = session.write(output)) {
            Map<String, String> attrs = inputPtr != null ? inputPtr.getAttributes() : new HashMap<String, String>() {{
                put("schema.name", schemaName);
            }};
            RecordSchema schema = writerFactory.getSchema(attrs, null);
            RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out, attrs);
            long count = 0L;
            writer.beginRecordSet();
            while (cursor.hasNext()) {
                Document next = cursor.next();
                if (next.get("_id") instanceof ObjectId) {
                    next.put("_id", next.get("_id").toString());
                }
                Record record = new MapRecord(schema, next);
                writer.write(record);
                count++;
            }
            writer.finishRecordSet();
            writer.close();
            out.close();
            attributes.put("record.count", String.valueOf(count));
        } catch (SchemaNotFoundException e) {
            throw new RuntimeException(e);
        }

        output = session.putAllAttributes(output, attributes);
        session.getProvenanceReporter().fetch(output, getURI(context));
        session.transfer(output, REL_SUCCESS);
        if (input != null) {
            session.transfer(input, REL_ORIGINAL);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        getLogger().error("Error writing record set from Mongo query.", ex);
        session.remove(output);
        if (input != null) {
            session.transfer(input, REL_FAILURE);
        }
    }
}
Example 14
Source File: RunMongoAggregation.java (from nifi, Apache License 2.0)
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = null;
    if (context.hasIncomingConnection()) {
        flowFile = session.get();

        if (flowFile == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    final String query = context.getProperty(QUERY).evaluateAttributeExpressions(flowFile).getValue();
    final Boolean allowDiskUse = context.getProperty(ALLOW_DISK_USE).asBoolean();
    final String queryAttr = context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(flowFile).getValue();
    final Integer batchSize = context.getProperty(BATCH_SIZE).asInteger();
    final Integer resultsPerFlowfile = context.getProperty(RESULTS_PER_FLOWFILE).asInteger();
    final String jsonTypeSetting = context.getProperty(JSON_TYPE).getValue();
    final String dateFormat = context.getProperty(DATE_FORMAT).evaluateAttributeExpressions(flowFile).getValue();

    configureMapper(jsonTypeSetting, dateFormat);

    Map<String, String> attrs = new HashMap<>();
    if (queryAttr != null && queryAttr.trim().length() > 0) {
        attrs.put(queryAttr, query);
    }

    MongoCursor<Document> iter = null;

    try {
        MongoCollection<Document> collection = getCollection(context, flowFile);
        List<Bson> aggQuery = buildAggregationQuery(query);
        AggregateIterable<Document> it = collection.aggregate(aggQuery).allowDiskUse(allowDiskUse);
        it.batchSize(batchSize != null ? batchSize : 1);

        iter = it.iterator();
        List<Document> batch = new ArrayList<>();
        Boolean doneSomething = false;

        while (iter.hasNext()) {
            batch.add(iter.next());
            if (batch.size() == resultsPerFlowfile) {
                writeBatch(buildBatch(batch), flowFile, context, session, attrs, REL_RESULTS);
                batch = new ArrayList<>();
                doneSomething |= true;
            }
        }

        if (!batch.isEmpty()) {
            // Something remains in batch list, write it to RESULT
            writeBatch(buildBatch(batch), flowFile, context, session, attrs, REL_RESULTS);
        } else if (!doneSomething) {
            // The batch list is empty and no batch was written (empty result!), so write empty string to RESULT
            writeBatch("", flowFile, context, session, attrs, REL_RESULTS);
        }

        if (flowFile != null) {
            session.transfer(flowFile, REL_ORIGINAL);
        }
    } catch (Exception e) {
        getLogger().error("Error running MongoDB aggregation query.", e);
        if (flowFile != null) {
            session.transfer(flowFile, REL_FAILURE);
        }
    } finally {
        if (iter != null) {
            iter.close();
        }
    }
}
Example 15
Source File: DeleteByQueryElasticsearch.java (from nifi, Apache License 2.0)
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = null;
    if (context.hasIncomingConnection()) {
        input = session.get();

        if (input == null && context.hasNonLoopConnection()) {
            return;
        }
    }

    try {
        final String query = getQuery(input, context, session);
        final String index = context.getProperty(INDEX).evaluateAttributeExpressions(input).getValue();
        final String type = context.getProperty(TYPE).isSet()
                ? context.getProperty(TYPE).evaluateAttributeExpressions(input).getValue()
                : null;
        final String queryAttr = context.getProperty(QUERY_ATTRIBUTE).isSet()
                ? context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(input).getValue()
                : null;

        DeleteOperationResponse dor = clientService.deleteByQuery(query, index, type);

        if (input == null) {
            input = session.create();
        }

        Map<String, String> attrs = new HashMap<>();
        attrs.put(TOOK_ATTRIBUTE, String.valueOf(dor.getTook()));
        if (!StringUtils.isBlank(queryAttr)) {
            attrs.put(queryAttr, query);
        }

        input = session.putAllAttributes(input, attrs);
        session.transfer(input, REL_SUCCESS);
    } catch (Exception e) {
        if (input != null) {
            input = session.putAttribute(input, ERROR_ATTRIBUTE, e.getMessage());
            session.transfer(input, REL_FAILURE);
        }
        getLogger().error("Error running delete by query: ", e);
        context.yield();
    }
}