Java Code Examples for org.apache.nifi.processor.ProcessContext#getMaxConcurrentTasks()
The following examples show how to use org.apache.nifi.processor.ProcessContext#getMaxConcurrentTasks(). The method returns the maximum number of tasks that may invoke the processor concurrently, so it is most often called from an @OnScheduled method to size per-task resource pools.
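Before the examples, here is a minimal sketch of the recurring pattern: because at most getMaxConcurrentTasks() invocations of onTrigger() can run at once, a processor can pre-allocate one pooled resource per task at schedule time. The processor class and the pooled type below are hypothetical, not taken from any of the projects listed.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;

// Hypothetical processor illustrating the one-resource-per-task pattern.
public class PooledResourceProcessor extends AbstractProcessor {

    private final BlockingQueue<StringBuilder> pool = new LinkedBlockingQueue<>();

    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        // One pooled object per concurrent task: since no more than
        // getMaxConcurrentTasks() onTrigger() calls run at the same time,
        // the pool cannot be empty when a task polls it.
        for (int i = 0; i < context.getMaxConcurrentTasks(); i++) {
            pool.offer(new StringBuilder());
        }
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        final StringBuilder buffer = pool.poll();
        try {
            // ... use the pooled object to process a FlowFile ...
        } finally {
            if (buffer != null) {
                buffer.setLength(0);    // reset before returning it
                pool.offer(buffer);     // always return it for the next task
            }
        }
    }
}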
Example 1
Source File: PutAzureEventHub.java From localization_nifi with Apache License 2.0
@OnScheduled
public final void setupClient(final ProcessContext context) throws ProcessException {
    final String policyName = context.getProperty(ACCESS_POLICY).getValue();
    final String policyKey = context.getProperty(POLICY_PRIMARY_KEY).getValue();
    final String namespace = context.getProperty(NAMESPACE).getValue();
    final String eventHubName = context.getProperty(EVENT_HUB_NAME).getValue();

    final int numThreads = context.getMaxConcurrentTasks();
    senderQueue = new LinkedBlockingQueue<>(numThreads);
    for (int i = 0; i < numThreads; i++) {
        final EventHubClient client = createEventHubClient(namespace, eventHubName, policyName, policyKey);
        if (null != client) {
            senderQueue.offer(client);
        }
    }
}
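The onTrigger() side of this file is not shown; a plausible counterpart (a sketch under that assumption, not the actual PutAzureEventHub code) would borrow a client from the queue and return it in a finally block:

// Sketch only: borrow a pooled client, return it when done (assumed onTrigger shape).
EventHubClient client = senderQueue.poll();
try {
    // ... publish the FlowFile content via the client ...
} finally {
    if (client != null) {
        senderQueue.offer(client); // make the client available to the next task
    }
}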
Example 2
Source File: ExecuteScript.java From localization_nifi with Apache License 2.0
/**
 * Performs setup operations when the processor is scheduled to run. This includes evaluating the processor's
 * properties, as well as reloading the script (from file or the "Script Body" property).
 *
 * @param context the context in which to perform the setup operations
 */
@OnScheduled
public void setup(final ProcessContext context) {
    scriptingComponentHelper.setupVariables(context);

    // Create a script engine for each possible task
    int maxTasks = context.getMaxConcurrentTasks();
    scriptingComponentHelper.setup(maxTasks, getLogger());
    scriptToRun = scriptingComponentHelper.getScriptBody();

    try {
        if (scriptToRun == null && scriptingComponentHelper.getScriptPath() != null) {
            try (final FileInputStream scriptStream = new FileInputStream(scriptingComponentHelper.getScriptPath())) {
                scriptToRun = IOUtils.toString(scriptStream, Charset.defaultCharset());
            }
        }
    } catch (IOException ioe) {
        throw new ProcessException(ioe);
    }
}
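The maxTasks value matters here because most JSR-223 ScriptEngine implementations are not guaranteed thread-safe, so the helper can keep one engine per possible concurrent task. A sketch of that idea (an assumed shape, not the actual ScriptingComponentHelper internals):

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;

class EnginePoolSketch {
    // One ScriptEngine per possible concurrent task (assumed helper internals).
    static BlockingQueue<ScriptEngine> buildPool(final int maxTasks) {
        final BlockingQueue<ScriptEngine> engines = new LinkedBlockingQueue<>(maxTasks);
        final ScriptEngineManager manager = new ScriptEngineManager();
        for (int i = 0; i < maxTasks; i++) {
            engines.offer(manager.getEngineByName("groovy")); // engine name is illustrative
        }
        return engines;
    }
}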
Example 3
Source File: ConsumeKafka_0_10.java From localization_nifi with Apache License 2.0
protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
    final int maxLeases = context.getMaxConcurrentTasks();
    final long maxUncommittedTime = context.getProperty(MAX_UNCOMMITTED_TIME).asTimePeriod(TimeUnit.MILLISECONDS);
    final byte[] demarcator = context.getProperty(ConsumeKafka_0_10.MESSAGE_DEMARCATOR).isSet()
            ? context.getProperty(ConsumeKafka_0_10.MESSAGE_DEMARCATOR).evaluateAttributeExpressions().getValue().getBytes(StandardCharsets.UTF_8)
            : null;

    final Map<String, Object> props = new HashMap<>();
    KafkaProcessorUtils.buildCommonKafkaProperties(context, ConsumerConfig.class, props);
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());

    final String topicListing = context.getProperty(ConsumeKafka_0_10.TOPICS).evaluateAttributeExpressions().getValue();
    final List<String> topics = new ArrayList<>();
    for (final String topic : topicListing.split(",", 100)) {
        final String trimmedName = topic.trim();
        if (!trimmedName.isEmpty()) {
            topics.add(trimmedName);
        }
    }

    final String keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).getValue();
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).getValue();
    return new ConsumerPool(maxLeases, demarcator, props, topics, maxUncommittedTime, keyEncoding, securityProtocol, bootstrapServers, log);
}
Example 4
Source File: ExecuteScript.java From nifi-script-tester with Apache License 2.0
/**
 * Performs setup operations when the processor is scheduled to run. This includes evaluating the processor's
 * properties, as well as reloading the script (from file or the "Script Body" property).
 *
 * @param context the context in which to perform the setup operations
 */
@OnScheduled
public void setup(final ProcessContext context) {
    scriptingComponentHelper.setupVariables(context);

    // Create a script engine for each possible task
    int maxTasks = context.getMaxConcurrentTasks();
    scriptingComponentHelper.setup(maxTasks, getLogger());
    scriptToRun = scriptingComponentHelper.getScriptBody();

    try {
        if (scriptToRun == null && scriptingComponentHelper.getScriptPath() != null) {
            try (final FileInputStream scriptStream = new FileInputStream(scriptingComponentHelper.getScriptPath())) {
                scriptToRun = IOUtils.toString(scriptStream, Charset.defaultCharset());
            }
        }
    } catch (IOException ioe) {
        throw new ProcessException(ioe);
    }
}
Example 5
Source File: ExtractText.java From nifi with Apache License 2.0
@OnScheduled
public final void onScheduled(final ProcessContext context) throws IOException {
    final Map<String, Pattern> compiledPatternsMap = new HashMap<>();

    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        if (!entry.getKey().isDynamic()) {
            continue;
        }
        final int flags = getCompileFlags(context);
        final Pattern pattern = Pattern.compile(entry.getValue(), flags);
        compiledPatternsMap.put(entry.getKey().getName(), pattern);
    }
    compiledPattersMapRef.set(compiledPatternsMap);

    for (int i = 0; i < context.getMaxConcurrentTasks(); i++) {
        final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
        final byte[] buffer = new byte[maxBufferSize];
        bufferQueue.add(buffer);
    }
}
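Pre-allocating one buffer per task avoids repeated large allocations on the hot path; the consuming side would then recycle buffers through the queue. A sketch of that side (assumed, not the actual ExtractText onTrigger):

// Sketch only: reuse a pre-allocated buffer, returning it afterwards.
byte[] buffer = bufferQueue.poll();
if (buffer == null) {
    // defensive fallback; with one buffer per concurrent task this should not occur
    buffer = new byte[context.getProperty(MAX_BUFFER_SIZE).asDataSize(DataUnit.B).intValue()];
}
try {
    // ... copy FlowFile content into the buffer and apply the compiled patterns ...
} finally {
    bufferQueue.offer(buffer); // recycle the buffer for the next task
}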
Example 6
Source File: ExecuteScript.java From nifi with Apache License 2.0
/**
 * Performs setup operations when the processor is scheduled to run. This includes evaluating the processor's
 * properties, as well as reloading the script (from file or the "Script Body" property).
 *
 * @param context the context in which to perform the setup operations
 */
@OnScheduled
public void setup(final ProcessContext context) {
    scriptingComponentHelper.setupVariables(context);

    // Create a script engine for each possible task
    int maxTasks = context.getMaxConcurrentTasks();
    scriptingComponentHelper.setup(maxTasks, getLogger());
    scriptToRun = scriptingComponentHelper.getScriptBody();

    try {
        if (scriptToRun == null && scriptingComponentHelper.getScriptPath() != null) {
            try (final FileInputStream scriptStream = new FileInputStream(scriptingComponentHelper.getScriptPath())) {
                scriptToRun = IOUtils.toString(scriptStream, Charset.defaultCharset());
            }
        }
    } catch (IOException ioe) {
        throw new ProcessException(ioe);
    }
}
Example 7
Source File: PutHiveStreaming.java From nifi with Apache License 2.0
@OnScheduled
public void setup(final ProcessContext context) {
    ComponentLog log = getLogger();

    final Integer heartbeatInterval = context.getProperty(HEARTBEAT_INTERVAL).evaluateAttributeExpressions().asInteger();
    final String configFiles = context.getProperty(HIVE_CONFIGURATION_RESOURCES).evaluateAttributeExpressions().getValue();
    hiveConfig = hiveConfigurator.getConfigurationFromFiles(configFiles);

    // If more than one concurrent task, force 'hcatalog.hive.client.cache.disabled' to true
    if (context.getMaxConcurrentTasks() > 1) {
        hiveConfig.setBoolean(CLIENT_CACHE_DISABLED_PROPERTY, true);
    }

    // add any dynamic properties to the Hive configuration
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();
        if (descriptor.isDynamic()) {
            hiveConfig.set(descriptor.getName(), entry.getValue());
        }
    }

    hiveConfigurator.preload(hiveConfig);

    if (SecurityUtil.isSecurityEnabled(hiveConfig)) {
        final String explicitPrincipal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
        final String explicitKeytab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
        final String explicitPassword = context.getProperty(kerberosProperties.getKerberosPassword()).getValue();
        final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);

        final String resolvedPrincipal;
        final String resolvedKeytab;
        if (credentialsService == null) {
            resolvedPrincipal = explicitPrincipal;
            resolvedKeytab = explicitKeytab;
        } else {
            resolvedPrincipal = credentialsService.getPrincipal();
            resolvedKeytab = credentialsService.getKeytab();
        }

        if (resolvedKeytab != null) {
            kerberosUserReference.set(new KerberosKeytabUser(resolvedPrincipal, resolvedKeytab));
            log.info("Hive Security Enabled, logging in as principal {} with keytab {}", new Object[] {resolvedPrincipal, resolvedKeytab});
        } else if (explicitPassword != null) {
            kerberosUserReference.set(new KerberosPasswordUser(resolvedPrincipal, explicitPassword));
            log.info("Hive Security Enabled, logging in as principal {} with password", new Object[] {resolvedPrincipal});
        } else {
            throw new ProcessException("Unable to authenticate with Kerberos, no keytab or password was provided");
        }

        try {
            ugi = hiveConfigurator.authenticate(hiveConfig, kerberosUserReference.get());
        } catch (AuthenticationFailedException ae) {
            throw new ProcessException("Kerberos authentication failed for Hive Streaming", ae);
        }

        log.info("Successfully logged in as principal " + resolvedPrincipal);
    } else {
        ugi = null;
        kerberosUserReference.set(null);
    }

    callTimeout = context.getProperty(CALL_TIMEOUT).evaluateAttributeExpressions().asInteger() * 1000; // milliseconds
    String timeoutName = "put-hive-streaming-%d";
    this.callTimeoutPool = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setNameFormat(timeoutName).build());

    sendHeartBeat.set(true);
    heartBeatTimer = new Timer();
    setupHeartBeatTimer(heartbeatInterval);
}
Example 8
Source File: AbstractPutEventProcessor.java From localization_nifi with Apache License 2.0
@OnScheduled
public void onScheduled(final ProcessContext context) throws IOException {
    // initialize the queue of senders, one per task, senders will get created on the fly in onTrigger
    this.senderPool = new LinkedBlockingQueue<>(context.getMaxConcurrentTasks());
    this.transitUri = createTransitUri(context);
}
Example 9
Source File: PutHive3Streaming.java From nifi with Apache License 2.0
@OnScheduled
public void setup(final ProcessContext context) throws IOException {
    ComponentLog log = getLogger();
    rollbackOnFailure = context.getProperty(ROLLBACK_ON_FAILURE).asBoolean();

    final String configFiles = context.getProperty(HIVE_CONFIGURATION_RESOURCES).evaluateAttributeExpressions().getValue();
    hiveConfig = hiveConfigurator.getConfigurationFromFiles(configFiles);

    // If more than one concurrent task, force 'hcatalog.hive.client.cache.disabled' to true
    if (context.getMaxConcurrentTasks() > 1) {
        hiveConfig.setBoolean(CLIENT_CACHE_DISABLED_PROPERTY, true);
    }

    // add any dynamic properties to the Hive configuration
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();
        if (descriptor.isDynamic()) {
            hiveConfig.set(descriptor.getName(), entry.getValue());
        }
    }

    hiveConfigurator.preload(hiveConfig);

    if (SecurityUtil.isSecurityEnabled(hiveConfig)) {
        final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
        final String explicitPrincipal = context.getProperty(KERBEROS_PRINCIPAL).evaluateAttributeExpressions().getValue();
        final String explicitPassword = context.getProperty(KERBEROS_PASSWORD).getValue();

        final String resolvedPrincipal = credentialsService != null ? credentialsService.getPrincipal() : explicitPrincipal;
        final String resolvedKeytab = credentialsService != null ? credentialsService.getKeytab() : null;

        if (resolvedKeytab != null) {
            kerberosUserReference.set(new KerberosKeytabUser(resolvedPrincipal, resolvedKeytab));
            log.info("Hive Security Enabled, logging in as principal {} with keytab {}", new Object[] {resolvedPrincipal, resolvedKeytab});
        } else if (explicitPassword != null) {
            kerberosUserReference.set(new KerberosPasswordUser(resolvedPrincipal, explicitPassword));
            log.info("Hive Security Enabled, logging in as principal {} with password", new Object[] {resolvedPrincipal});
        } else {
            throw new ProcessException("Unable to authenticate with Kerberos, no keytab or password was provided");
        }

        try {
            ugi = hiveConfigurator.authenticate(hiveConfig, kerberosUserReference.get());
        } catch (AuthenticationFailedException ae) {
            log.error(ae.getMessage(), ae);
            throw new ProcessException(ae);
        }

        log.info("Successfully logged in as principal " + resolvedPrincipal);
    } else {
        ugi = SecurityUtil.loginSimple(hiveConfig);
        kerberosUserReference.set(null);
    }

    callTimeout = context.getProperty(CALL_TIMEOUT).evaluateAttributeExpressions().asInteger() * 1000; // milliseconds
    String timeoutName = "put-hive3-streaming-%d";
    this.callTimeoutPool = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setNameFormat(timeoutName).build());
}
Example 10
Source File: ConsumeKafkaRecord_0_11.java From nifi with Apache License 2.0
protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
    final int maxLeases = context.getMaxConcurrentTasks();
    final long maxUncommittedTime = context.getProperty(MAX_UNCOMMITTED_TIME).asTimePeriod(TimeUnit.MILLISECONDS);

    final Map<String, Object> props = new HashMap<>();
    KafkaProcessorUtils.buildCommonKafkaProperties(context, ConsumerConfig.class, props);
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());

    final String topicListing = context.getProperty(ConsumeKafkaRecord_0_11.TOPICS).evaluateAttributeExpressions().getValue();
    final String topicType = context.getProperty(ConsumeKafkaRecord_0_11.TOPIC_TYPE).evaluateAttributeExpressions().getValue();
    final List<String> topics = new ArrayList<>();
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();

    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final boolean honorTransactions = context.getProperty(HONOR_TRANSACTIONS).asBoolean();

    final String charsetName = context.getProperty(MESSAGE_HEADER_ENCODING).evaluateAttributeExpressions().getValue();
    final Charset charset = Charset.forName(charsetName);

    final String headerNameRegex = context.getProperty(HEADER_NAME_REGEX).getValue();
    final Pattern headerNamePattern = headerNameRegex == null ? null : Pattern.compile(headerNameRegex);

    if (topicType.equals(TOPIC_NAME.getValue())) {
        for (final String topic : topicListing.split(",", 100)) {
            final String trimmedName = topic.trim();
            if (!trimmedName.isEmpty()) {
                topics.add(trimmedName);
            }
        }

        return new ConsumerPool(maxLeases, readerFactory, writerFactory, props, topics, maxUncommittedTime, securityProtocol,
                bootstrapServers, log, honorTransactions, charset, headerNamePattern);
    } else if (topicType.equals(TOPIC_PATTERN.getValue())) {
        final Pattern topicPattern = Pattern.compile(topicListing.trim());
        return new ConsumerPool(maxLeases, readerFactory, writerFactory, props, topicPattern, maxUncommittedTime, securityProtocol,
                bootstrapServers, log, honorTransactions, charset, headerNamePattern);
    } else {
        getLogger().error("Subscription type has an unknown value {}", new Object[] {topicType});
        return null;
    }
}
Example 11
Source File: ConsumeJMS.java From nifi with Apache License 2.0
@OnScheduled
public void onSchedule(ProcessContext context) {
    if (context.getMaxConcurrentTasks() > 1 && isDurableSubscriber(context) && !isShared(context)) {
        throw new ProcessException("Durable non shared subscriptions cannot work on multiple threads. Check javax/jms/Session#createDurableConsumer API doc.");
    }
}
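A usage note on this example: the guard runs at schedule time rather than during property validation, presumably because the concurrency setting is not part of the ValidationContext passed to customValidate, whereas ProcessContext#getMaxConcurrentTasks() is available once the processor is scheduled.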
Example 12
Source File: PutSyslog.java From nifi with Apache License 2.0
@OnScheduled
public void onScheduled(final ProcessContext context) throws IOException {
    // initialize the queue of senders, one per task, senders will get created on the fly in onTrigger
    this.senderPool = new LinkedBlockingQueue<>(context.getMaxConcurrentTasks());
}
Example 13
Source File: GetKafka.java From nifi with Apache License 2.0
public void createConsumers(final ProcessContext context) {
    final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions().getValue();

    final Properties props = new Properties();
    props.setProperty("zookeeper.connect", context.getProperty(ZOOKEEPER_CONNECTION_STRING).evaluateAttributeExpressions().getValue());
    props.setProperty("group.id", context.getProperty(GROUP_ID).evaluateAttributeExpressions().getValue());
    props.setProperty("client.id", context.getProperty(CLIENT_NAME).getValue());
    props.setProperty("auto.commit.interval.ms", String.valueOf(context.getProperty(ZOOKEEPER_COMMIT_DELAY).asTimePeriod(TimeUnit.MILLISECONDS)));
    props.setProperty("auto.offset.reset", context.getProperty(AUTO_OFFSET_RESET).getValue());
    props.setProperty("zookeeper.connection.timeout.ms", context.getProperty(ZOOKEEPER_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).toString());
    props.setProperty("socket.timeout.ms", context.getProperty(KAFKA_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).toString());

    for (final Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        PropertyDescriptor descriptor = entry.getKey();
        if (descriptor.isDynamic()) {
            if (props.containsKey(descriptor.getName())) {
                this.getLogger().warn("Overriding existing property '" + descriptor.getName() + "' which had value of '"
                        + props.getProperty(descriptor.getName()) + "' with dynamically set value '" + entry.getValue() + "'.");
            }
            props.setProperty(descriptor.getName(), entry.getValue());
        }
    }

    /*
     * Unless user sets it to some explicit value we are setting it to the
     * lowest possible value of 1 millisecond to ensure the
     * consumerStream.hasNext() doesn't block. See
     * http://kafka.apache.org/documentation.html#configuration as well as
     * the comment in 'catch ConsumerTimeoutException' in onTrigger() for more
     * explanation as to the reasoning behind it.
     */
    if (!props.containsKey("consumer.timeout.ms")) {
        this.getLogger().info("Setting 'consumer.timeout.ms' to 1 milliseconds to avoid consumer"
                + " block in the event when no events are present in Kafka topic. If you wish to change this value "
                + " set it as dynamic property. If you wish to explicitly enable consumer block (at your own risk)"
                + " set its value to -1.");
        props.setProperty("consumer.timeout.ms", "1");
    }

    int partitionCount = KafkaUtils.retrievePartitionCountForTopic(
            context.getProperty(ZOOKEEPER_CONNECTION_STRING).evaluateAttributeExpressions().getValue(),
            context.getProperty(TOPIC).evaluateAttributeExpressions().getValue());

    final ConsumerConfig consumerConfig = new ConsumerConfig(props);
    consumer = Consumer.createJavaConsumerConnector(consumerConfig);

    final Map<String, Integer> topicCountMap = new HashMap<>(1);

    int concurrentTaskToUse = context.getMaxConcurrentTasks();
    if (context.getMaxConcurrentTasks() < partitionCount) {
        this.getLogger().warn("The amount of concurrent tasks '" + context.getMaxConcurrentTasks() + "' configured for "
                + "this processor is less than the amount of partitions '" + partitionCount + "' for topic '"
                + context.getProperty(TOPIC).evaluateAttributeExpressions().getValue() + "'. "
                + "Consider making it equal to the amount of partition count for most efficient event consumption.");
    } else if (context.getMaxConcurrentTasks() > partitionCount) {
        concurrentTaskToUse = partitionCount;
        this.getLogger().warn("The amount of concurrent tasks '" + context.getMaxConcurrentTasks() + "' configured for "
                + "this processor is greater than the amount of partitions '" + partitionCount + "' for topic '"
                + context.getProperty(TOPIC).evaluateAttributeExpressions().getValue() + "'. "
                + "Therefore those tasks would never see a message. To avoid that the '" + partitionCount
                + "' (partition count) will be used to consume events.");
    }

    topicCountMap.put(topic, concurrentTaskToUse);

    final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    final List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);

    this.streamIterators.clear();

    for (final KafkaStream<byte[], byte[]> stream : streams) {
        streamIterators.add(stream.iterator());
    }
    this.consumerStreamsReady.set(true);
}
Example 14
Source File: AbstractPutEventProcessor.java From nifi with Apache License 2.0
@OnScheduled
public void onScheduled(final ProcessContext context) throws IOException {
    // initialize the queue of senders, one per task, senders will get created on the fly in onTrigger
    this.senderPool = new LinkedBlockingQueue<>(context.getMaxConcurrentTasks());
    this.transitUri = createTransitUri(context);
}
Example 15
Source File: ConsumeKafkaRecord_2_0.java From nifi with Apache License 2.0
protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
    final int maxLeases = context.getMaxConcurrentTasks();
    final long maxUncommittedTime = context.getProperty(MAX_UNCOMMITTED_TIME).asTimePeriod(TimeUnit.MILLISECONDS);

    final Map<String, Object> props = new HashMap<>();
    KafkaProcessorUtils.buildCommonKafkaProperties(context, ConsumerConfig.class, props);
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());

    final String topicListing = context.getProperty(ConsumeKafkaRecord_2_0.TOPICS).evaluateAttributeExpressions().getValue();
    final String topicType = context.getProperty(ConsumeKafkaRecord_2_0.TOPIC_TYPE).evaluateAttributeExpressions().getValue();
    final List<String> topics = new ArrayList<>();
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();

    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final boolean honorTransactions = context.getProperty(HONOR_TRANSACTIONS).asBoolean();

    final int commsTimeoutMillis = context.getProperty(COMMS_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue();
    props.put(ConsumerConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, commsTimeoutMillis);

    final String charsetName = context.getProperty(MESSAGE_HEADER_ENCODING).evaluateAttributeExpressions().getValue();
    final Charset charset = Charset.forName(charsetName);

    final String headerNameRegex = context.getProperty(HEADER_NAME_REGEX).getValue();
    final Pattern headerNamePattern = headerNameRegex == null ? null : Pattern.compile(headerNameRegex);

    if (topicType.equals(TOPIC_NAME.getValue())) {
        for (final String topic : topicListing.split(",", 100)) {
            final String trimmedName = topic.trim();
            if (!trimmedName.isEmpty()) {
                topics.add(trimmedName);
            }
        }

        return new ConsumerPool(maxLeases, readerFactory, writerFactory, props, topics, maxUncommittedTime, securityProtocol,
                bootstrapServers, log, honorTransactions, charset, headerNamePattern);
    } else if (topicType.equals(TOPIC_PATTERN.getValue())) {
        final Pattern topicPattern = Pattern.compile(topicListing.trim());
        return new ConsumerPool(maxLeases, readerFactory, writerFactory, props, topicPattern, maxUncommittedTime, securityProtocol,
                bootstrapServers, log, honorTransactions, charset, headerNamePattern);
    } else {
        getLogger().error("Subscription type has an unknown value {}", new Object[] {topicType});
        return null;
    }
}
Example 16
Source File: GetKafka.java From localization_nifi with Apache License 2.0
public void createConsumers(final ProcessContext context) {
    final String topic = context.getProperty(TOPIC).getValue();

    final Properties props = new Properties();
    props.setProperty("zookeeper.connect", context.getProperty(ZOOKEEPER_CONNECTION_STRING).getValue());
    props.setProperty("group.id", context.getProperty(GROUP_ID).getValue());
    props.setProperty("client.id", context.getProperty(CLIENT_NAME).getValue());
    props.setProperty("auto.commit.interval.ms", String.valueOf(context.getProperty(ZOOKEEPER_COMMIT_DELAY).asTimePeriod(TimeUnit.MILLISECONDS)));
    props.setProperty("auto.offset.reset", context.getProperty(AUTO_OFFSET_RESET).getValue());
    props.setProperty("zookeeper.connection.timeout.ms", context.getProperty(ZOOKEEPER_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).toString());
    props.setProperty("socket.timeout.ms", context.getProperty(KAFKA_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).toString());

    for (final Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        PropertyDescriptor descriptor = entry.getKey();
        if (descriptor.isDynamic()) {
            if (props.containsKey(descriptor.getName())) {
                this.getLogger().warn("Overriding existing property '" + descriptor.getName() + "' which had value of '"
                        + props.getProperty(descriptor.getName()) + "' with dynamically set value '" + entry.getValue() + "'.");
            }
            props.setProperty(descriptor.getName(), entry.getValue());
        }
    }

    /*
     * Unless user sets it to some explicit value we are setting it to the
     * lowest possible value of 1 millisecond to ensure the
     * consumerStream.hasNext() doesn't block. See
     * http://kafka.apache.org/documentation.html#configuration as well as
     * the comment in 'catch ConsumerTimeoutException' in onTrigger() for more
     * explanation as to the reasoning behind it.
     */
    if (!props.containsKey("consumer.timeout.ms")) {
        this.getLogger().info("Setting 'consumer.timeout.ms' to 1 milliseconds to avoid consumer"
                + " block in the event when no events are present in Kafka topic. If you wish to change this value "
                + " set it as dynamic property. If you wish to explicitly enable consumer block (at your own risk)"
                + " set its value to -1.");
        props.setProperty("consumer.timeout.ms", "1");
    }

    int partitionCount = KafkaUtils.retrievePartitionCountForTopic(
            context.getProperty(ZOOKEEPER_CONNECTION_STRING).getValue(), context.getProperty(TOPIC).getValue());

    final ConsumerConfig consumerConfig = new ConsumerConfig(props);
    consumer = Consumer.createJavaConsumerConnector(consumerConfig);

    final Map<String, Integer> topicCountMap = new HashMap<>(1);

    int concurrentTaskToUse = context.getMaxConcurrentTasks();
    if (context.getMaxConcurrentTasks() < partitionCount) {
        this.getLogger().warn("The amount of concurrent tasks '" + context.getMaxConcurrentTasks() + "' configured for "
                + "this processor is less than the amount of partitions '" + partitionCount + "' for topic '"
                + context.getProperty(TOPIC).getValue() + "'. "
                + "Consider making it equal to the amount of partition count for most efficient event consumption.");
    } else if (context.getMaxConcurrentTasks() > partitionCount) {
        concurrentTaskToUse = partitionCount;
        this.getLogger().warn("The amount of concurrent tasks '" + context.getMaxConcurrentTasks() + "' configured for "
                + "this processor is greater than the amount of partitions '" + partitionCount + "' for topic '"
                + context.getProperty(TOPIC).getValue() + "'. "
                + "Therefore those tasks would never see a message. To avoid that the '" + partitionCount
                + "' (partition count) will be used to consume events.");
    }

    topicCountMap.put(topic, concurrentTaskToUse);

    final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
    final List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);

    this.streamIterators.clear();

    for (final KafkaStream<byte[], byte[]> stream : streams) {
        streamIterators.add(stream.iterator());
    }
    this.consumerStreamsReady.set(true);
}
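The partition-count handling in Examples 13 and 16 boils down to clamping the task count; a condensed equivalent (a sketch, not the project's code):

// Sketch: never request more Kafka streams than partitions, since the
// surplus streams (and the tasks draining them) would sit idle.
final int maxTasks = context.getMaxConcurrentTasks();
final int concurrentTaskToUse = Math.min(maxTasks, partitionCount);
if (maxTasks != partitionCount) {
    getLogger().warn("Concurrent tasks (" + maxTasks + ") and partition count ("
            + partitionCount + ") differ for topic '" + topic + "'.");
}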
Example 17
Source File: ConsumeKafka_2_0.java From nifi with Apache License 2.0
protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
    final int maxLeases = context.getMaxConcurrentTasks();
    final long maxUncommittedTime = context.getProperty(MAX_UNCOMMITTED_TIME).asTimePeriod(TimeUnit.MILLISECONDS);
    final byte[] demarcator = context.getProperty(ConsumeKafka_2_0.MESSAGE_DEMARCATOR).isSet()
            ? context.getProperty(ConsumeKafka_2_0.MESSAGE_DEMARCATOR).evaluateAttributeExpressions().getValue().getBytes(StandardCharsets.UTF_8)
            : null;

    final Map<String, Object> props = new HashMap<>();
    KafkaProcessorUtils.buildCommonKafkaProperties(context, ConsumerConfig.class, props);
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());

    final String topicListing = context.getProperty(ConsumeKafka_2_0.TOPICS).evaluateAttributeExpressions().getValue();
    final String topicType = context.getProperty(ConsumeKafka_2_0.TOPIC_TYPE).evaluateAttributeExpressions().getValue();
    final List<String> topics = new ArrayList<>();
    final String keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).getValue();
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();
    final boolean honorTransactions = context.getProperty(HONOR_TRANSACTIONS).asBoolean();

    final int commsTimeoutMillis = context.getProperty(COMMS_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue();
    props.put(ConsumerConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, commsTimeoutMillis);

    final String charsetName = context.getProperty(MESSAGE_HEADER_ENCODING).evaluateAttributeExpressions().getValue();
    final Charset charset = Charset.forName(charsetName);

    final String headerNameRegex = context.getProperty(HEADER_NAME_REGEX).getValue();
    final Pattern headerNamePattern = headerNameRegex == null ? null : Pattern.compile(headerNameRegex);

    if (topicType.equals(TOPIC_NAME.getValue())) {
        for (final String topic : topicListing.split(",", 100)) {
            final String trimmedName = topic.trim();
            if (!trimmedName.isEmpty()) {
                topics.add(trimmedName);
            }
        }

        return new ConsumerPool(maxLeases, demarcator, props, topics, maxUncommittedTime, keyEncoding, securityProtocol,
                bootstrapServers, log, honorTransactions, charset, headerNamePattern);
    } else if (topicType.equals(TOPIC_PATTERN.getValue())) {
        final Pattern topicPattern = Pattern.compile(topicListing.trim());
        return new ConsumerPool(maxLeases, demarcator, props, topicPattern, maxUncommittedTime, keyEncoding, securityProtocol,
                bootstrapServers, log, honorTransactions, charset, headerNamePattern);
    } else {
        getLogger().error("Subscription type has an unknown value {}", new Object[] {topicType});
        return null;
    }
}
Example 18
Source File: PutSyslog.java From localization_nifi with Apache License 2.0
@OnScheduled
public void onScheduled(final ProcessContext context) throws IOException {
    // initialize the queue of senders, one per task, senders will get created on the fly in onTrigger
    this.senderPool = new LinkedBlockingQueue<>(context.getMaxConcurrentTasks());
}
Example 19
Source File: ConsumeKafka_1_0.java From nifi with Apache License 2.0
protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
    final int maxLeases = context.getMaxConcurrentTasks();
    final long maxUncommittedTime = context.getProperty(MAX_UNCOMMITTED_TIME).asTimePeriod(TimeUnit.MILLISECONDS);
    final byte[] demarcator = context.getProperty(ConsumeKafka_1_0.MESSAGE_DEMARCATOR).isSet()
            ? context.getProperty(ConsumeKafka_1_0.MESSAGE_DEMARCATOR).evaluateAttributeExpressions().getValue().getBytes(StandardCharsets.UTF_8)
            : null;

    final Map<String, Object> props = new HashMap<>();
    KafkaProcessorUtils.buildCommonKafkaProperties(context, ConsumerConfig.class, props);
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());

    final String topicListing = context.getProperty(ConsumeKafka_1_0.TOPICS).evaluateAttributeExpressions().getValue();
    final String topicType = context.getProperty(ConsumeKafka_1_0.TOPIC_TYPE).evaluateAttributeExpressions().getValue();
    final List<String> topics = new ArrayList<>();
    final String keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).getValue();
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();
    final boolean honorTransactions = context.getProperty(HONOR_TRANSACTIONS).asBoolean();

    final String charsetName = context.getProperty(MESSAGE_HEADER_ENCODING).evaluateAttributeExpressions().getValue();
    final Charset charset = Charset.forName(charsetName);

    final String headerNameRegex = context.getProperty(HEADER_NAME_REGEX).getValue();
    final Pattern headerNamePattern = headerNameRegex == null ? null : Pattern.compile(headerNameRegex);

    if (topicType.equals(TOPIC_NAME.getValue())) {
        for (final String topic : topicListing.split(",", 100)) {
            final String trimmedName = topic.trim();
            if (!trimmedName.isEmpty()) {
                topics.add(trimmedName);
            }
        }

        return new ConsumerPool(maxLeases, demarcator, props, topics, maxUncommittedTime, keyEncoding, securityProtocol,
                bootstrapServers, log, honorTransactions, charset, headerNamePattern);
    } else if (topicType.equals(TOPIC_PATTERN.getValue())) {
        final Pattern topicPattern = Pattern.compile(topicListing.trim());
        return new ConsumerPool(maxLeases, demarcator, props, topicPattern, maxUncommittedTime, keyEncoding, securityProtocol,
                bootstrapServers, log, honorTransactions, charset, headerNamePattern);
    } else {
        getLogger().error("Subscription type has an unknown value {}", new Object[] {topicType});
        return null;
    }
}
Example 20
Source File: ExtractGrok.java From localization_nifi with Apache License 2.0
@OnScheduled
public void onScheduled(final ProcessContext context) throws GrokException {
    for (int i = 0; i < context.getMaxConcurrentTasks(); i++) {
        final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
        final byte[] buffer = new byte[maxBufferSize];
        bufferQueue.add(buffer);
    }

    grok.addPatternFromFile(context.getProperty(GROK_PATTERN_FILE).getValue());
    grok.compile(context.getProperty(GROK_EXPRESSION).getValue());
}