Java Code Examples for org.apache.nifi.logging.ComponentLog#debug()
The following examples show how to use
org.apache.nifi.logging.ComponentLog#debug().
Each example is taken from an open-source project; the project and source file are noted above each example.
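Before the examples, a quick primer: ComponentLog#debug() follows the SLF4J style. It accepts a plain message or a parameterized message whose {} placeholders are filled from an Object[] of arguments, and the call is skipped unless the DEBUG level is enabled for the component. The sketch below is illustrative only and is not taken from any of the projects that follow; the ExampleProcessor class and its "success" relationship are hypothetical.

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;

public class ExampleProcessor extends AbstractProcessor {

    // Hypothetical relationship, used only to make the sketch self-contained
    static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .build();

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        // Every processor inherits getLogger(), which returns its ComponentLog
        final ComponentLog logger = getLogger();

        final FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }

        // Plain message
        logger.debug("Received a FlowFile");

        // Parameterized message: each {} is replaced by the matching array element
        logger.debug("Processing {} with {} attributes",
                new Object[]{flowFile, flowFile.getAttributes().size()});

        // Guard argument construction that is itself expensive
        if (logger.isDebugEnabled()) {
            logger.debug("All attributes: {}", new Object[]{flowFile.getAttributes()});
        }

        session.transfer(flowFile, REL_SUCCESS);
    }
}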
Example 1
Source File: StandardOPCUAService.java From NIFI-OPCUA with Apache License 2.0
private boolean validateEndpoint(Client client, String security_policy, String discoveryServer, String url) {

    // TODO This method should provide feedback
    final ComponentLog logger = getLogger();

    // Retrieve end point list
    EndpointDescription[] endpoints = null;

    // This assumes the provided url is co-served with the discovery server
    try {
        endpoints = client.discoverEndpoints(discoveryServer);
    } catch (ServiceResultException e1) {
        logger.error(e1.getMessage());
    }

    // Finally confirm the provided endpoint is in the list
    endpoints = EndpointUtil.selectByUrl(endpoints, url);

    logger.debug(endpoints.length + " endpoints found");

    // There should only be one item left in the list
    // TODO Servers with multiple nic cards have more than one left in the list

    return true;
}
Example 2
Source File: HiveConfigurator.java From nifi with Apache License 2.0
public Collection<ValidationResult> validate(String configFiles, String principal, String keyTab, String password,
        AtomicReference<ValidationResources> validationResourceHolder, ComponentLog log) {

    final List<ValidationResult> problems = new ArrayList<>();
    ValidationResources resources = validationResourceHolder.get();

    // if no resources in the holder, or if the holder has different resources loaded,
    // then load the Configuration and set the new resources in the holder
    if (resources == null || !configFiles.equals(resources.getConfigResources())) {
        log.debug("Reloading validation resources");
        resources = new ValidationResources(configFiles, getConfigurationFromFiles(configFiles));
        validationResourceHolder.set(resources);
    }

    final Configuration hiveConfig = resources.getConfiguration();

    problems.addAll(KerberosProperties.validatePrincipalWithKeytabOrPassword(
            this.getClass().getSimpleName(), hiveConfig, principal, keyTab, password, log));

    return problems;
}
Example 3
Source File: ValidateXml.java From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final List<FlowFile> flowFiles = session.get(50);
    if (flowFiles.isEmpty()) {
        return;
    }

    final Schema schema = schemaRef.get();
    final Validator validator = schema.newValidator();
    final ComponentLog logger = getLogger();

    for (FlowFile flowFile : flowFiles) {
        final AtomicBoolean valid = new AtomicBoolean(true);
        final AtomicReference<Exception> exception = new AtomicReference<Exception>(null);

        session.read(flowFile, new InputStreamCallback() {
            @Override
            public void process(final InputStream in) throws IOException {
                try {
                    validator.validate(new StreamSource(in));
                } catch (final IllegalArgumentException | SAXException e) {
                    valid.set(false);
                    exception.set(e);
                }
            }
        });

        if (valid.get()) {
            logger.debug("Successfully validated {} against schema; routing to 'valid'", new Object[]{flowFile});
            session.getProvenanceReporter().route(flowFile, REL_VALID);
            session.transfer(flowFile, REL_VALID);
        } else {
            flowFile = session.putAttribute(flowFile, ERROR_ATTRIBUTE_KEY, exception.get().getLocalizedMessage());
            logger.info("Failed to validate {} against schema due to {}; routing to 'invalid'",
                    new Object[]{flowFile, exception.get().getLocalizedMessage()});
            session.getProvenanceReporter().route(flowFile, REL_INVALID);
            session.transfer(flowFile, REL_INVALID);
        }
    }
}
Example 4
Source File: AbstractCouchbaseProcessor.java From nifi with Apache License 2.0
/**
 * Handles the thrown CouchbaseException accordingly.
 *
 * @param context a process context
 * @param session a process session
 * @param logger a logger
 * @param inFile an input FlowFile
 * @param e the thrown CouchbaseException
 * @param errMsg a message to be logged
 */
protected void handleCouchbaseException(final ProcessContext context, final ProcessSession session,
        final ComponentLog logger, FlowFile inFile, CouchbaseException e, String errMsg) {
    logger.error(errMsg, e);
    if (inFile != null) {
        ErrorHandlingStrategy strategy = CouchbaseExceptionMappings.getStrategy(e);
        switch (strategy.penalty()) {
            case Penalize:
                if (logger.isDebugEnabled()) {
                    logger.debug("Penalized: {}", new Object[] {inFile});
                }
                inFile = session.penalize(inFile);
                break;
            case Yield:
                if (logger.isDebugEnabled()) {
                    logger.debug("Yielded context: {}", new Object[] {inFile});
                }
                context.yield();
                break;
            case None:
                break;
        }

        switch (strategy.result()) {
            case ProcessException:
                throw new ProcessException(errMsg, e);
            case Failure:
                inFile = session.putAttribute(inFile, CouchbaseAttributes.Exception.key(), e.getClass().getName());
                session.transfer(inFile, REL_FAILURE);
                break;
            case Retry:
                inFile = session.putAttribute(inFile, CouchbaseAttributes.Exception.key(), e.getClass().getName());
                session.transfer(inFile, REL_RETRY);
                break;
        }
    }
}
Example 5
Source File: ListFile.java From nifi with Apache License 2.0
public void logPerformance() {
    final ComponentLog logger = getLogger();
    if (!logger.isDebugEnabled()) {
        return;
    }

    final long earliestTimestamp = performanceTracker.getEarliestTimestamp();
    final long millis = System.currentTimeMillis() - earliestTimestamp;
    final long seconds = TimeUnit.MILLISECONDS.toSeconds(millis);

    for (final DiskOperation operation : DiskOperation.values()) {
        final OperationStatistics stats = performanceTracker.getOperationStatistics(operation);

        final StringBuilder sb = new StringBuilder();
        if (stats.getCount() == 0) {
            sb.append("Over the past ").append(seconds).append(" seconds, for Operation '").append(operation)
                .append("' there were no operations performed");
        } else {
            sb.append("Over the past ").append(seconds).append(" seconds, For Operation '").append(operation).append("' there were ")
                .append(stats.getCount()).append(" operations performed with an average time of ")
                .append(stats.getAverage()).append(" milliseconds; Standard Deviation = ").append(stats.getStandardDeviation())
                .append(" millis; Min Time = ").append(stats.getMin()).append(" millis, Max Time = ").append(stats.getMax()).append(" millis");

            if (logger.isDebugEnabled()) {
                final Map<String, Long> outliers = stats.getOutliers();

                sb.append("; ").append(stats.getOutliers().size()).append(" significant outliers: ");
                sb.append(outliers);
            }
        }

        logger.debug(sb.toString());
    }

    performanceLoggingTimestamp = System.currentTimeMillis();
}
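A design note on Example 5: the method returns immediately when logger.isDebugEnabled() is false, so the statistics strings are never built unless the DEBUG output will actually be written. This is the standard way to keep verbose diagnostics from taxing a component when debug logging is disabled.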
Example 6
Source File: AbstractAWSGatewayApiProcessor.java From nifi with Apache License 2.0
protected void logRequest(ComponentLog logger, URI endpoint, GenericApiGatewayRequest request) {
    try {
        logger.debug("\nRequest to remote service:\n\t{}\t{}\t\n{}",
                new Object[]{endpoint.toURL().toExternalForm(), request.getHttpMethod(), getLogString(request.getHeaders())});
    } catch (MalformedURLException e) {
        logger.debug(e.getMessage());
    }
}
Example 7
Source File: AbstractAWSGatewayApiProcessor.java From nifi with Apache License 2.0
protected void logResponse(ComponentLog logger, GenericApiGatewayResponse response) {
    try {
        logger.debug("\nResponse from remote service:\n\t{}\n{}",
                new Object[]{response.getHttpResponse().getHttpRequest().getURI().toURL().toExternalForm(),
                        getLogString(response.getHttpResponse().getHeaders())});
    } catch (MalformedURLException e) {
        logger.debug(e.getMessage());
    }
}
Example 8
Source File: ScrollElasticsearchHttp.java From nifi with Apache License 2.0
private void getPage(final Response getResponse, final URL url, final ProcessContext context,
        final ProcessSession session, FlowFile flowFile, final ComponentLog logger, final long startNanos,
        Charset charset) throws IOException {
    final int statusCode = getResponse.code();

    if (isSuccess(statusCode)) {
        ResponseBody body = getResponse.body();
        final byte[] bodyBytes = body.bytes();
        JsonNode responseJson = parseJsonResponse(new ByteArrayInputStream(bodyBytes));
        String scrollId = responseJson.get("_scroll_id").asText();

        StringBuilder builder = new StringBuilder();

        builder.append("{ \"hits\" : [");

        JsonNode hits = responseJson.get("hits").get("hits");
        if (hits.size() == 0) {
            finishQuery(context.getStateManager());
            session.remove(flowFile);
            return;
        }

        for (int i = 0; i < hits.size(); i++) {
            JsonNode hit = hits.get(i);
            String retrievedIndex = hit.get("_index").asText();
            String retrievedType = hit.get("_type").asText();

            JsonNode source = hit.get("_source");
            flowFile = session.putAttribute(flowFile, "es.index", retrievedIndex);
            flowFile = session.putAttribute(flowFile, "es.type", retrievedType);
            flowFile = session.putAttribute(flowFile, "mime.type", "application/json");

            builder.append(source.toString());
            if (i < hits.size() - 1) {
                builder.append(", ");
            }
        }

        builder.append("] }");

        logger.debug("Elasticsearch retrieved " + responseJson.size() + " documents, routing to success");

        flowFile = session.write(flowFile, out -> {
            out.write(builder.toString().getBytes(charset));
        });
        session.transfer(flowFile, REL_SUCCESS);

        saveScrollId(context.getStateManager(), scrollId);

        // emit provenance event
        final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
        session.getProvenanceReporter().receive(flowFile, url.toExternalForm(), millis);
    } else {
        // 5xx -> RETRY, but a server error might last a while, so yield
        if (statusCode / 100 == 5) {
            logger.warn("Elasticsearch returned code {} with message {}, removing the flow file. This is likely a server problem, yielding...",
                    new Object[]{statusCode, getResponse.message()});
            session.remove(flowFile);
            context.yield();
        } else {
            logger.warn("Elasticsearch returned code {} with message {}", new Object[]{statusCode, getResponse.message()});
            session.remove(flowFile);
        }
    }
}
Example 9
Source File: JMSPublisher.java From nifi with Apache License 2.0
JMSPublisher(CachingConnectionFactory connectionFactory, JmsTemplate jmsTemplate, ComponentLog processLog) {
    super(connectionFactory, jmsTemplate, processLog);
    processLog.debug("Created Message Publisher for {}", new Object[] {jmsTemplate});
}
Example 10
Source File: GetNodeIds.java From NIFI-OPCUA with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {

    final ComponentLog logger = getLogger();
    StringBuilder stringBuilder = new StringBuilder();

    // Submit to getValue
    final OPCUAService opcUAService = context.getProperty(OPCUA_SERVICE)
            .asControllerService(OPCUAService.class);

    if (opcUAService.updateSession()) {
        logger.debug("Session current");
    } else {
        logger.debug("Session update failed");
    }

    // Set the starting node and parse the node tree
    if (starting_node == null) {
        logger.debug("Parse the root node " + new ExpandedNodeId(Identifiers.RootFolder));
        stringBuilder.append(opcUAService.getNameSpace(print_indentation, max_recursiveDepth,
                new ExpandedNodeId(Identifiers.RootFolder)));
    } else {
        logger.debug("Parse the result list for node " + new ExpandedNodeId(NodeId.parseNodeId(starting_node)));
        stringBuilder.append(opcUAService.getNameSpace(print_indentation, max_recursiveDepth,
                new ExpandedNodeId(NodeId.parseNodeId(starting_node))));
    }

    // Write the results back out to a flow file
    FlowFile flowFile = session.create();
    if (flowFile == null) {
        logger.error("Flowfile is null");
    }

    flowFile = session.write(flowFile, new OutputStreamCallback() {
        public void process(OutputStream out) throws IOException {
            out.write(stringBuilder.toString().getBytes());
        }
    });

    // Transfer data to flow file
    session.transfer(flowFile, SUCCESS);
}
Example 11
Source File: PutEmail.java From nifi with Apache License 2.0
/**
 * Uses the mapping of javax.mail properties to NiFi PropertyDescriptors to build the required Properties object to be used for sending this email
 *
 * @param context context
 * @param flowFile flowFile
 * @return mail properties
 */
private Properties getMailPropertiesFromFlowFile(final ProcessContext context, final FlowFile flowFile) {
    final Properties properties = new Properties();
    final ComponentLog logger = this.getLogger();

    for (Entry<String, PropertyDescriptor> entry : propertyToContext.entrySet()) {
        // Evaluate the property descriptor against the flow file
        String flowFileValue = context.getProperty(entry.getValue()).evaluateAttributeExpressions(flowFile).getValue();
        String property = entry.getKey();

        logger.debug("Evaluated Mail Property: {} with Value: {}", new Object[]{property, flowFileValue});

        // Nullable values are not allowed, so filter out
        if (null != flowFileValue) {
            properties.setProperty(property, flowFileValue);
        }
    }

    return properties;
}
Example 12
Source File: AbstractElasticsearch5TransportClientProcessor.java From localization_nifi with Apache License 2.0
/**
 * Instantiate ElasticSearch Client. This should be called by subclasses' @OnScheduled method to create a client
 * if one does not yet exist. If called when scheduled, closeClient() should be called by the subclasses' @OnStopped
 * method so the client will be destroyed when the processor is stopped.
 *
 * @param context The context for this processor
 * @throws ProcessException if an error occurs while creating an Elasticsearch client
 */
@Override
protected void createElasticsearchClient(ProcessContext context) throws ProcessException {

    ComponentLog log = getLogger();
    if (esClient.get() != null) {
        return;
    }

    log.debug("Creating ElasticSearch Client");
    try {
        final String clusterName = context.getProperty(CLUSTER_NAME).evaluateAttributeExpressions().getValue();
        final String pingTimeout = context.getProperty(PING_TIMEOUT).evaluateAttributeExpressions().getValue();
        final String samplerInterval = context.getProperty(SAMPLER_INTERVAL).evaluateAttributeExpressions().getValue();
        final String username = context.getProperty(USERNAME).evaluateAttributeExpressions().getValue();
        final String password = context.getProperty(PASSWORD).getValue();

        final SSLContextService sslService = context.getProperty(PROP_SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);

        Settings.Builder settingsBuilder = Settings.builder()
                .put("cluster.name", clusterName)
                .put("client.transport.ping_timeout", pingTimeout)
                .put("client.transport.nodes_sampler_interval", samplerInterval);

        String xPackUrl = context.getProperty(PROP_XPACK_LOCATION).evaluateAttributeExpressions().getValue();
        if (sslService != null) {
            settingsBuilder.put("xpack.security.transport.ssl.enabled", "true");
            if (!StringUtils.isEmpty(sslService.getKeyStoreFile())) {
                settingsBuilder.put("xpack.ssl.keystore.path", sslService.getKeyStoreFile());
            }
            if (!StringUtils.isEmpty(sslService.getKeyStorePassword())) {
                settingsBuilder.put("xpack.ssl.keystore.password", sslService.getKeyStorePassword());
            }
            if (!StringUtils.isEmpty(sslService.getKeyPassword())) {
                settingsBuilder.put("xpack.ssl.keystore.key_password", sslService.getKeyPassword());
            }
            if (!StringUtils.isEmpty(sslService.getTrustStoreFile())) {
                settingsBuilder.put("xpack.ssl.truststore.path", sslService.getTrustStoreFile());
            }
            if (!StringUtils.isEmpty(sslService.getTrustStorePassword())) {
                settingsBuilder.put("xpack.ssl.truststore.password", sslService.getTrustStorePassword());
            }
        }

        // Set username and password for X-Pack
        if (!StringUtils.isEmpty(username)) {
            StringBuffer secureUser = new StringBuffer(username);
            if (!StringUtils.isEmpty(password)) {
                secureUser.append(":");
                secureUser.append(password);
            }
            settingsBuilder.put("xpack.security.user", secureUser);
        }

        final String hosts = context.getProperty(HOSTS).evaluateAttributeExpressions().getValue();
        esHosts = getEsHosts(hosts);

        Client transportClient = getTransportClient(settingsBuilder, xPackUrl, username, password, esHosts, log);
        esClient.set(transportClient);

    } catch (Exception e) {
        log.error("Failed to create Elasticsearch client due to {}", new Object[]{e}, e);
        throw new ProcessException(e);
    }
}
Example 13
Source File: FetchElasticsearch.java From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final String index = context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
    final String docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
    final String docType = context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();
    final Charset charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(flowFile).getValue());

    final ComponentLog logger = getLogger();

    try {
        logger.debug("Fetching {}/{}/{} from Elasticsearch", new Object[]{index, docType, docId});
        final long startNanos = System.nanoTime();

        GetRequestBuilder getRequestBuilder = esClient.get().prepareGet(index, docType, docId);
        if (authToken != null) {
            getRequestBuilder.putHeader("Authorization", authToken);
        }
        final GetResponse getResponse = getRequestBuilder.execute().actionGet();

        if (getResponse == null || !getResponse.isExists()) {
            logger.debug("Failed to read {}/{}/{} from Elasticsearch: Document not found", new Object[]{index, docType, docId});

            // We couldn't find the document, so penalize it and send it to "not found"
            flowFile = session.penalize(flowFile);
            session.transfer(flowFile, REL_NOT_FOUND);
        } else {
            flowFile = session.putAttribute(flowFile, "filename", docId);
            flowFile = session.putAttribute(flowFile, "es.index", index);
            flowFile = session.putAttribute(flowFile, "es.type", docType);
            flowFile = session.write(flowFile, new OutputStreamCallback() {
                @Override
                public void process(OutputStream out) throws IOException {
                    out.write(getResponse.getSourceAsString().getBytes(charset));
                }
            });
            logger.debug("Elasticsearch document " + docId + " fetched, routing to success");

            final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
            final String uri = context.getProperty(HOSTS).evaluateAttributeExpressions().getValue() + "/" + index + "/" + docType + "/" + docId;
            session.getProvenanceReporter().fetch(flowFile, uri, millis);
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (NoNodeAvailableException
            | ElasticsearchTimeoutException
            | ReceiveTimeoutTransportException
            | NodeClosedException exceptionToRetry) {

        logger.error("Failed to read into Elasticsearch due to {}, this may indicate an error in configuration "
                        + "(hosts, username/password, etc.). Routing to retry",
                new Object[]{exceptionToRetry.getLocalizedMessage()}, exceptionToRetry);
        session.transfer(flowFile, REL_RETRY);
        context.yield();

    } catch (Exception e) {
        logger.error("Failed to read {} from Elasticsearch due to {}", new Object[]{flowFile, e.getLocalizedMessage()}, e);
        session.transfer(flowFile, REL_FAILURE);
        context.yield();
    }
}
Example 14
Source File: ScrollElasticsearchHttp.java From localization_nifi with Apache License 2.0
private void getPage(final Response getResponse, final URL url, final ProcessContext context,
        final ProcessSession session, FlowFile flowFile, final ComponentLog logger, final long startNanos)
        throws IOException {
    final int statusCode = getResponse.code();

    if (isSuccess(statusCode)) {
        ResponseBody body = getResponse.body();
        final byte[] bodyBytes = body.bytes();
        JsonNode responseJson = parseJsonResponse(new ByteArrayInputStream(bodyBytes));
        String scrollId = responseJson.get("_scroll_id").asText();

        StringBuilder builder = new StringBuilder();

        builder.append("{ \"hits\" : [");

        JsonNode hits = responseJson.get("hits").get("hits");
        if (hits.size() == 0) {
            finishQuery(context.getStateManager());
            session.remove(flowFile);
            return;
        }

        for (int i = 0; i < hits.size(); i++) {
            JsonNode hit = hits.get(i);
            String retrievedIndex = hit.get("_index").asText();
            String retrievedType = hit.get("_type").asText();

            JsonNode source = hit.get("_source");
            flowFile = session.putAttribute(flowFile, "es.index", retrievedIndex);
            flowFile = session.putAttribute(flowFile, "es.type", retrievedType);
            flowFile = session.putAttribute(flowFile, "mime.type", "application/json");

            builder.append(source.toString());
            if (i < hits.size() - 1) {
                builder.append(", ");
            }
        }

        builder.append("] }");

        logger.debug("Elasticsearch retrieved " + responseJson.size() + " documents, routing to success");

        flowFile = session.write(flowFile, out -> {
            out.write(builder.toString().getBytes());
        });
        session.transfer(flowFile, REL_SUCCESS);

        saveScrollId(context.getStateManager(), scrollId);

        // emit provenance event
        final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
        session.getProvenanceReporter().receive(flowFile, url.toExternalForm(), millis);
    } else {
        // 5xx -> RETRY, but a server error might last a while, so yield
        if (statusCode / 100 == 5) {
            logger.warn("Elasticsearch returned code {} with message {}, removing the flow file. This is likely a server problem, yielding...",
                    new Object[]{statusCode, getResponse.message()});
            session.remove(flowFile);
            context.yield();
        } else {
            logger.warn("Elasticsearch returned code {} with message {}", new Object[]{statusCode, getResponse.message()});
            session.remove(flowFile);
        }
    }
}
Example 15
Source File: LivySessionController.java From nifi with Apache License 2.0
private JSONObject openSession() throws IOException, JSONException, InterruptedException {
    ComponentLog log = getLogger();
    JSONObject newSessionInfo;
    final ObjectMapper mapper = new ObjectMapper();

    String sessionsUrl = livyUrl + "/sessions";
    StringBuilder payload = new StringBuilder("{\"kind\":\"" + controllerKind + "\"");
    if (jars != null) {
        List<String> jarsArray = Arrays.stream(jars.split(","))
                .filter(StringUtils::isNotBlank)
                .map(String::trim).collect(Collectors.toList());
        String jarsJsonArray = mapper.writeValueAsString(jarsArray);
        payload.append(",\"jars\":");
        payload.append(jarsJsonArray);
    }
    if (files != null) {
        List<String> filesArray = Arrays.stream(files.split(","))
                .filter(StringUtils::isNotBlank)
                .map(String::trim).collect(Collectors.toList());
        String filesJsonArray = mapper.writeValueAsString(filesArray);
        payload.append(",\"files\":");
        payload.append(filesJsonArray);
    }

    payload.append("}");
    log.debug("openSession() Session Payload: " + payload.toString());
    Map<String, String> headers = new HashMap<>();
    headers.put("Content-Type", APPLICATION_JSON);
    headers.put("X-Requested-By", USER);
    newSessionInfo = readJSONObjectFromUrlPOST(sessionsUrl, headers, payload.toString());
    Thread.sleep(1000);
    while (newSessionInfo.getString("state").equalsIgnoreCase("starting")) {
        log.debug("openSession() Waiting for session to start...");
        newSessionInfo = getSessionInfo(newSessionInfo.getInt("id"));
        log.debug("openSession() newSessionInfo: " + newSessionInfo);
        Thread.sleep(1000);
    }
    return newSessionInfo;
}
Example 16
Source File: CryptographicHashAttribute.java From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).getValue());
    final Map<String, String> attributeToGeneratedNameMap = attributeToGenerateNameMapRef.get();
    final ComponentLog logger = getLogger();

    final SortedMap<String, String> relevantAttributes = getRelevantAttributes(flowFile, attributeToGeneratedNameMap);
    if (relevantAttributes.isEmpty()) {
        if (context.getProperty(FAIL_WHEN_EMPTY).asBoolean()) {
            logger.info("Routing {} to 'failure' because of missing all attributes: {}",
                    new Object[]{flowFile, getMissingKeysString(null, attributeToGeneratedNameMap.keySet())});
            session.transfer(flowFile, REL_FAILURE);
            return;
        }
    }
    if (relevantAttributes.size() != attributeToGeneratedNameMap.size()) {
        if (PartialAttributePolicy.valueOf(context.getProperty(PARTIAL_ATTR_ROUTE_POLICY).getValue()) == PartialAttributePolicy.PROHIBIT) {
            logger.info("Routing {} to 'failure' because of missing attributes: {}",
                    new Object[]{flowFile, getMissingKeysString(relevantAttributes.keySet(), attributeToGeneratedNameMap.keySet())});
            session.transfer(flowFile, REL_FAILURE);
            return;
        }
    }

    // Determine the algorithm to use
    final String algorithmName = context.getProperty(HASH_ALGORITHM).getValue();
    logger.debug("Using algorithm {}", new Object[]{algorithmName});
    HashAlgorithm algorithm = HashAlgorithm.fromName(algorithmName);

    // Generate a hash with the configured algorithm for each attribute value
    // and create a new attribute with the configured name
    for (final Map.Entry<String, String> entry : relevantAttributes.entrySet()) {
        logger.debug("Generating {} hash of attribute '{}'", new Object[]{algorithmName, entry.getKey()});
        String value = hashValue(algorithm, entry.getValue(), charset);
        // FlowFiles are immutable, so keep the updated FlowFile returned by putAttribute
        flowFile = session.putAttribute(flowFile, attributeToGeneratedNameMap.get(entry.getKey()), value);
    }

    session.getProvenanceReporter().modifyAttributes(flowFile);
    session.transfer(flowFile, REL_SUCCESS);
}
Example 17
Source File: DeleteElasticsearch5.java From nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

    synchronized (esClient) {
        if (esClient.get() == null) {
            setup(context);
        }
    }

    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final String index = context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
    final String documentId = context.getProperty(DOCUMENT_ID).evaluateAttributeExpressions(flowFile).getValue();
    final String documentType = context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();

    final ComponentLog logger = getLogger();

    if (StringUtils.isBlank(index)) {
        logger.debug("Index is required but was empty {}", new Object[]{index});
        flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, "Index is required but was empty");
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    if (StringUtils.isBlank(documentType)) {
        logger.debug("Document type is required but was empty {}", new Object[]{documentType});
        flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, "Document type is required but was empty");
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    if (StringUtils.isBlank(documentId)) {
        logger.debug("Document id is required but was empty {}", new Object[]{documentId});
        flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, "Document id is required but was empty");
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    flowFile = session.putAllAttributes(flowFile, new HashMap<String, String>() {{
        put(ES_FILENAME, documentId);
        put(ES_INDEX, index);
        put(ES_TYPE, documentType);
    }});

    try {
        logger.debug("Deleting document {}/{}/{} from Elasticsearch", new Object[]{index, documentType, documentId});
        DeleteRequestBuilder requestBuilder = prepareDeleteRequest(index, documentId, documentType);
        final DeleteResponse response = doDelete(requestBuilder);

        if (response.status() != RestStatus.OK) {
            logger.warn("Failed to delete document {}/{}/{} from Elasticsearch: Status {}",
                    new Object[]{index, documentType, documentId, response.status()});
            flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, UNABLE_TO_DELETE_DOCUMENT_MESSAGE);
            flowFile = session.putAttribute(flowFile, ES_REST_STATUS, response.status().toString());
            context.yield();
            if (response.status() == RestStatus.NOT_FOUND) {
                session.transfer(flowFile, REL_NOT_FOUND);
            } else {
                session.transfer(flowFile, REL_FAILURE);
            }
        } else {
            logger.debug("Elasticsearch document " + documentId + " deleted");
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (ElasticsearchTimeoutException | ReceiveTimeoutTransportException exception) {
        logger.error("Failed to delete document {} from Elasticsearch due to {}",
                new Object[]{documentId, exception.getLocalizedMessage()}, exception);
        flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, exception.getLocalizedMessage());
        session.transfer(flowFile, REL_RETRY);
        context.yield();
    } catch (Exception e) {
        logger.error("Failed to delete document {} from Elasticsearch due to {}",
                new Object[]{documentId, e.getLocalizedMessage()}, e);
        flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, e.getLocalizedMessage());
        session.transfer(flowFile, REL_FAILURE);
        context.yield();
    }
}
Example 18
Source File: GetHDFSSequenceFile.java From localization_nifi with Apache License 2.0
@Override
protected void processBatchOfFiles(final List<Path> files, final ProcessContext context, final ProcessSession session) {
    final Configuration conf = getConfiguration();
    final FileSystem hdfs = getFileSystem();
    final String flowFileContentValue = context.getProperty(FLOWFILE_CONTENT).getValue();
    final boolean keepSourceFiles = context.getProperty(KEEP_SOURCE_FILE).asBoolean();
    final Double bufferSizeProp = context.getProperty(BUFFER_SIZE).asDataSize(DataUnit.B);
    if (bufferSizeProp != null) {
        int bufferSize = bufferSizeProp.intValue();
        conf.setInt(BUFFER_SIZE_KEY, bufferSize);
    }
    ComponentLog logger = getLogger();
    final SequenceFileReader<Set<FlowFile>> reader;
    if (flowFileContentValue.equalsIgnoreCase(VALUE_ONLY)) {
        reader = new ValueReader(session);
    } else {
        reader = new KeyValueReader(session);
    }
    Set<FlowFile> flowFiles = Collections.emptySet();
    for (final Path file : files) {
        if (!this.isScheduled()) {
            break; // This processor should stop running immediately.
        }

        final StopWatch stopWatch = new StopWatch(false);
        try {
            stopWatch.start();
            if (!hdfs.exists(file)) {
                continue; // If file is no longer here move on.
            }
            logger.debug("Reading file");
            flowFiles = getFlowFiles(conf, hdfs, reader, file);
            if (!keepSourceFiles && !hdfs.delete(file, false)) {
                logger.warn("Unable to delete path " + file.toString() + " from HDFS. Will likely be picked up over and over...");
            }
        } catch (Throwable t) {
            logger.error("Error retrieving file {} from HDFS due to {}", new Object[]{file, t});
            session.rollback();
            context.yield();
        } finally {
            stopWatch.stop();
            long totalSize = 0;
            for (FlowFile flowFile : flowFiles) {
                totalSize += flowFile.getSize();
                session.getProvenanceReporter().receive(flowFile, file.toString());
            }
            if (totalSize > 0) {
                final String dataRate = stopWatch.calculateDataRate(totalSize);
                final long millis = stopWatch.getDuration(TimeUnit.MILLISECONDS);
                logger.info("Created {} flowFiles from SequenceFile {}. Ingested in {} milliseconds at a rate of {}",
                        new Object[]{flowFiles.size(), file.toUri().toASCIIString(), millis, dataRate});
                logger.info("Transferred flowFiles {} to success", new Object[]{flowFiles});
                session.transfer(flowFiles, REL_SUCCESS);
            }
        }
    }
}
Example 19
Source File: DetectDuplicate.java From localization_nifi with Apache License 2.0
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final ComponentLog logger = getLogger();
    final String cacheKey = context.getProperty(CACHE_ENTRY_IDENTIFIER).evaluateAttributeExpressions(flowFile).getValue();
    if (StringUtils.isBlank(cacheKey)) {
        logger.error("FlowFile {} has no attribute for given Cache Entry Identifier", new Object[]{flowFile});
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final DistributedMapCacheClient cache = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(DistributedMapCacheClient.class);
    final Long durationMS = context.getProperty(AGE_OFF_DURATION).asTimePeriod(TimeUnit.MILLISECONDS);
    final long now = System.currentTimeMillis();

    try {
        final String flowFileDescription = context.getProperty(FLOWFILE_DESCRIPTION).evaluateAttributeExpressions(flowFile).getValue();
        final CacheValue cacheValue = new CacheValue(flowFileDescription, now);
        final CacheValue originalCacheValue;

        final boolean shouldCacheIdentifier = context.getProperty(CACHE_IDENTIFIER).asBoolean();
        if (shouldCacheIdentifier) {
            originalCacheValue = cache.getAndPutIfAbsent(cacheKey, cacheValue, keySerializer, valueSerializer, valueDeserializer);
        } else {
            originalCacheValue = cache.get(cacheKey, keySerializer, valueDeserializer);
        }

        boolean duplicate = originalCacheValue != null;
        if (duplicate && durationMS != null && (now >= originalCacheValue.getEntryTimeMS() + durationMS)) {
            boolean status = cache.remove(cacheKey, keySerializer);
            logger.debug("Removal of expired cached entry with key {} returned {}", new Object[]{cacheKey, status});

            // both should typically result in duplicate being false...but, better safe than sorry
            if (shouldCacheIdentifier) {
                duplicate = !cache.putIfAbsent(cacheKey, cacheValue, keySerializer, valueSerializer);
            } else {
                duplicate = cache.containsKey(cacheKey, keySerializer);
            }
        }

        if (duplicate) {
            session.getProvenanceReporter().route(flowFile, REL_DUPLICATE, "Duplicate of: " + ORIGINAL_DESCRIPTION_ATTRIBUTE_NAME);
            String originalFlowFileDescription = originalCacheValue.getDescription();
            flowFile = session.putAttribute(flowFile, ORIGINAL_DESCRIPTION_ATTRIBUTE_NAME, originalFlowFileDescription);
            session.transfer(flowFile, REL_DUPLICATE);
            logger.info("Found {} to be a duplicate of FlowFile with description {}", new Object[]{flowFile, originalFlowFileDescription});
            session.adjustCounter("Duplicates Detected", 1L, false);
        } else {
            session.getProvenanceReporter().route(flowFile, REL_NON_DUPLICATE);
            session.transfer(flowFile, REL_NON_DUPLICATE);
            logger.info("Could not find a duplicate entry in cache for {}; routing to non-duplicate", new Object[]{flowFile});
            session.adjustCounter("Non-Duplicate Files Processed", 1L, false);
        }
    } catch (final IOException e) {
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        logger.error("Unable to communicate with cache when processing {} due to {}", new Object[]{flowFile, e});
    }
}
Example 20
Source File: InvokeHTTP.java From localization_nifi with Apache License 2.0
private void logResponse(ComponentLog logger, URL url, Response response) {
    logger.debug("\nResponse from remote service:\n\t{}\n{}",
            new Object[]{url.toExternalForm(), getLogString(response.headers().toMultimap())});
}
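A closing note: the examples above pass placeholder arguments as an explicit Object[], matching the ComponentLog overloads available when this code was written. More recent NiFi releases also provide varargs overloads, so the same call can be written as logger.debug("Processing {}", flowFile); the Object[] form remains source-compatible.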