Java Code Examples for org.apache.flume.Context#getString()
The following examples show how to use org.apache.flume.Context#getString().
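Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the two getString overloads: getString(key) returns null when the property is absent, while getString(key, defaultValue) falls back to the supplied default. The property names used here are purely illustrative.

import org.apache.flume.Context;

public class GetStringSketch {
    public static void main(String[] args) {
        // Build a Context by hand; in a real agent Flume populates it from the
        // agent's properties file and passes it to configure().
        Context context = new Context();
        context.put("hostname", "irc.example.org");

        String hostname = context.getString("hostname");      // "irc.example.org"
        String nick = context.getString("nick");               // null -- key not set
        String chan = context.getString("chan", "#flume");     // default used: "#flume"

        System.out.println(hostname + " " + nick + " " + chan);
    }
}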
Example 1
Source File: CsvSerializer.java From flume-elasticsearch-sink with Apache License 2.0
/**
 * Configure the field and its type with the custom delimiter
 */
@Override
public void configure(Context context) {
    String fields = context.getString(ES_CSV_FIELDS);
    if (fields == null) {
        Throwables.propagate(new Exception("Fields for csv files are not configured,"
            + " please configure the property " + ES_CSV_FIELDS));
    }
    try {
        delimiter = context.getString(ES_CSV_DELIMITER, DEFAULT_ES_CSV_DELIMITER);
        String[] fieldTypes = fields.split(COMMA);
        for (String fieldType : fieldTypes) {
            names.add(getValue(fieldType, 0));
            types.add(getValue(fieldType, 1));
        }
    } catch (Exception e) {
        Throwables.propagate(e);
    }
}
Example 2
Source File: IRCSink.java From mt-flume with Apache License 2.0
public void configure(Context context) {
    hostname = context.getString("hostname");
    String portStr = context.getString("port");
    nick = context.getString("nick");
    password = context.getString("password");
    user = context.getString("user");
    name = context.getString("name");
    chan = context.getString("chan");
    splitLines = context.getBoolean("splitlines");
    splitChars = context.getString("splitchars");

    if (portStr != null) {
        port = Integer.parseInt(portStr);
    } else {
        port = DEFAULT_PORT;
    }

    if (splitChars == null) {
        splitChars = DEFAULT_SPLIT_CHARS;
    }

    Preconditions.checkState(hostname != null, "No hostname specified");
    Preconditions.checkState(nick != null, "No nick specified");
    Preconditions.checkState(chan != null, "No chan specified");
}
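Example 2 reads the port as a string and parses it by hand. Context also exposes typed accessors, so the same lookup can be written without Integer.parseInt; the one-liner below is a sketch only, reusing the sink's DEFAULT_PORT constant for illustration.

    // Sketch only: typed lookup with a default instead of getString + parseInt.
    port = context.getInteger("port", DEFAULT_PORT);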
Example 3
Source File: HDFSSequenceFile.java From mt-flume with Apache License 2.0
@Override
public void configure(Context context) {
    super.configure(context);
    // use binary writable serialize by default
    writeFormat = context.getString("hdfs.writeFormat",
        SequenceFileSerializerType.Writable.name());
    useRawLocalFileSystem = context.getBoolean("hdfs.useRawLocalFileSystem", false);
    serializerContext = new Context(
        context.getSubProperties(SequenceFileSerializerFactory.CTX_PREFIX));
    serializer = SequenceFileSerializerFactory
        .getSerializer(writeFormat, serializerContext);
    logger.info("writeFormat = " + writeFormat + ", UseRawLocalFileSystem = "
        + useRawLocalFileSystem);
}
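Example 3 builds a nested Context from getSubProperties, which selects every property whose key starts with a given prefix and strips that prefix. A small standalone sketch of that behaviour follows; the key names are illustrative, not taken from the HDFS sink.

    // Sketch only: getSubProperties("serializer.") keeps the matching entries
    // with the "serializer." prefix removed; wrapping the result in a new
    // Context lets the serializer read its own keys with getString().
    Context parent = new Context();
    parent.put("serializer.appendNewline", "true");
    parent.put("hdfs.writeFormat", "Text");

    Context serializerContext = new Context(parent.getSubProperties("serializer."));
    String appendNewline = serializerContext.getString("appendNewline");  // "true"
    String writeFormat = serializerContext.getString("writeFormat");      // null -- different prefix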
Example 4
Source File: WatchDir.java From flume-taildirectory-source with Apache License 2.0
private void loadConfiguration(Context context) {
    timeToUnlockFile = context.getLong(UNLOCK_TIME, 1L);
    fileHeader = new Boolean(context.getBoolean(FILE_HEADER, false));
    fileHeaderKey = new String(context.getString(FILE_HEADER_KEY, "file"));
    basenameHeader = new Boolean(context.getBoolean(BASENAME_HEADER, false));
    basenameHeaderKey = new String(context.getString(BASENAME_HEADER_KEY, "basename"));
    followLinks = new Boolean(context.getBoolean(FOLLOW_LINKS, false));
}
Example 5
Source File: FlumeHBaseWordCountInterceptor.java From SparkOnALog with Apache License 2.0
@Override
public void configure(Context context) {
    tableName = context.getString("hbaseTableName", "flumeCounter");
    columnFamilyName = context.getString("hbaseColumnFamily", "C");
    flushIntervals = Integer.parseInt(context.getString(
        "hbase-flush-intervals", "3000"));
}
Example 6
Source File: RabbitMQSource.java From rabbitmq-flume-plugin with BSD 3-Clause "New" or "Revised" License
@Override
public void configure(Context context) {
    // Only the queue name does not have a default value
    Configurables.ensureRequiredNonNull(context, QUEUE_KEY);

    // Assign all of the configured values
    hostname = context.getString(HOST_KEY, ConnectionFactory.DEFAULT_HOST);
    port = context.getInteger(PORT_KEY, ConnectionFactory.DEFAULT_AMQP_PORT);
    enableSSL = context.getBoolean(SSL_KEY, false);
    virtualHost = context.getString(VHOST_KEY, ConnectionFactory.DEFAULT_VHOST);
    username = context.getString(USER_KEY, ConnectionFactory.DEFAULT_USER);
    password = context.getString(PASSWORD_KEY, ConnectionFactory.DEFAULT_PASS);
    queue = context.getString(QUEUE_KEY, null);
    exchange = context.getString(EXCHANGE_KEY, null);
    autoAck = context.getBoolean(AUTOACK_KEY, false);
    requeuing = context.getBoolean(REQUEUING, false);
    prefetchCount = context.getInteger(PREFETCH_COUNT_KEY, 0);
    timeout = context.getInteger(TIMEOUT_KEY, -1);
    consumerThreads = context.getInteger(THREAD_COUNT_KEY, 1);

    // Ensure that Flume can connect to RabbitMQ
    testRabbitMQConnection();

    // Create and configure the counters
    sourceCounter = new SourceCounter(getName());
    counterGroup = new CounterGroup();
    counterGroup.setName(getName());
}
Example 7
Source File: LoadBalancingSinkProcessor.java From mt-flume with Apache License 2.0
@Override
public void configure(Context context) {
    Preconditions.checkState(getSinks().size() > 1,
        "The LoadBalancingSinkProcessor cannot be used for a single sink. "
        + "Please configure more than one sink and try again.");

    String selectorTypeName = context.getString(CONFIG_SELECTOR,
        SELECTOR_NAME_ROUND_ROBIN);
    Boolean shouldBackOff = context.getBoolean(CONFIG_BACKOFF, false);
    switchon = context.getBoolean(CONFIG_SWITCHON, true);

    selector = null;

    if (selectorTypeName.equalsIgnoreCase(SELECTOR_NAME_ROUND_ROBIN)) {
        selector = new RoundRobinSinkSelector(shouldBackOff);
    } else if (selectorTypeName.equalsIgnoreCase(SELECTOR_NAME_RANDOM)) {
        selector = new RandomOrderSinkSelector(shouldBackOff);
    } else {
        try {
            @SuppressWarnings("unchecked")
            Class<? extends SinkSelector> klass = (Class<? extends SinkSelector>)
                Class.forName(selectorTypeName);
            selector = klass.newInstance();
        } catch (Exception ex) {
            throw new FlumeException("Unable to instantiate sink selector: "
                + selectorTypeName, ex);
        }
    }

    selector.setSinks(getSinks());
    selector.configure(
        new Context(context.getSubProperties(CONFIG_SELECTOR_PREFIX)));

    LOGGER.debug("Sink selector: " + selector + " initialized");
}
Example 8
Source File: HDFSCompressedDataStream.java From mt-flume with Apache License 2.0
@Override
public void configure(Context context) {
    super.configure(context);

    serializerType = context.getString("serializer", "TEXT");
    useRawLocalFileSystem = context.getBoolean("hdfs.useRawLocalFileSystem", false);
    serializerContext = new Context(
        context.getSubProperties(EventSerializer.CTX_PREFIX));
    logger.info("Serializer = " + serializerType + ", UseRawLocalFileSystem = "
        + useRawLocalFileSystem);
}
Example 9
Source File: SyslogAvroEventSerializer.java From flume-plugins with MIT License
@Override
public EventSerializer build(Context context, OutputStream out) {
    path = context.getString(PATH, PATH_DEFAULT);
    customerHeader = context.getString(CUSTOMER_HEADER, CUSTOMER_HEADER_DEFAULT);
    hostHeader = context.getString(HOST_HEADER, HOST_HEADER_DEFAULT);

    SyslogAvroEventSerializer writer = null;
    try {
        writer = new SyslogAvroEventSerializer(out, path, customerHeader, hostHeader);
        writer.configure(context);
    } catch (IOException e) {
        log.error("Unable to parse schema file. Exception follows.", e);
    }
    return writer;
}
Example 10
Source File: NGSINameMappingsInterceptor.java From fiware-cygnus with GNU Affero General Public License v3.0
@Override
public void configure(Context context) {
    nameMappingsConfFile = context.getString("name_mappings_conf_file");

    if (nameMappingsConfFile == null) {
        invalidConfiguration = true;
        LOGGER.error("[nmi] Invalid configuration (name_mappings_conf_file = null) -- Must be configured");
    } else if (nameMappingsConfFile.length() == 0) {
        invalidConfiguration = true;
        LOGGER.error("[nmi] Invalid configuration (nameMappingsConfFile = ) -- Cannot be empty");
    } else {
        LOGGER.info("[nmi] Reading configuration (nameMappingsConfFile=" + nameMappingsConfFile + ")");
    } // if else
}
Example 11
Source File: ElasticSearchSerializerWithMapping.java From ingestion with Apache License 2.0
@Override
public void configure(Context context) {
    String mappingFile = context.getString(CONF_MAPPING_FILE);
    try {
        jsonMapping = readFile(new File(mappingFile));
    } catch (IOException e) {
        Throwables.propagate(e);
    }
}
Example 12
Source File: AvroEventSerializer.java From Transwarp-Sample-Code with MIT License
@Override
public void configure(Context context) {
    syncIntervalBytes = context.getInteger(SYNC_INTERVAL_BYTES, DEFAULT_SYNC_INTERVAL_BYTES);
    compressionCodec = context.getString(COMPRESSION_CODEC, DEFAULT_COMPRESSION_CODEC);
}
Example 13
Source File: ModifyMessageBodyPreprocessor.java From flume-ng-kafka-sink with Apache License 2.0
@Override
public String extractTopic(Event event, Context context) {
    return context.getString(Constants.TOPIC);
}
Example 14
Source File: SimpleIndexNameBuilder.java From ingestion with Apache License 2.0
@Override
public void configure(Context context) {
    indexName = context.getString(ElasticSearchSinkConstants.INDEX_NAME);
}
Example 15
Source File: HostToCustomerInterceptor.java From flume-plugins with MIT License
@Override
public void configure(Context context) {
    path = context.getString(PATH, PATH_DEFAULT);
    customerHeader = context.getString(CUSTOMER_HEADER, CUSTOMER_HEADER_DEFAULT);
    hostHeader = context.getString(HOST_HEADER, HOST_HEADER_DEFAULT);
}
Example 16
Source File: MultiLineParser.java From flume_monitor_source with Apache License 2.0
@Override
public void Configure(Context context) {
    // TODO Auto-generated method stub
    LOG.info("Config MultiLineParser");
    buffer_size_ = context.getInteger("read_buffer_size",
        FlumeConstants.READ_BUFFER_SIZE).intValue();
    read_buffer_ = new byte[buffer_size_];
    max_buffer_size_ = context.getInteger("max_read_buffer_size",
        FlumeConstants.MAX_READ_BUFFER_SIZE).intValue();
    max_record_size_ = context.getInteger("max_record_size",
        FlumeConstants.MAX_RECORD_LENGH);

    file_content_include_pattern_str_ = context.getString(
        FlumeConstants.FILE_CONTENT_INCLUDE, default_content_include_str_);
    file_content_exclude_pattern_str_ = context
        .getString(FlumeConstants.FILE_CONTENT_EXCLUDE);
    record_include_pattern_ = Pattern.compile(file_content_include_pattern_str_);
    if (file_content_exclude_pattern_str_ != null) {
        record_exclude_pattern_ = Pattern.compile(file_content_exclude_pattern_str_);
    }

    first_line_pattern_str_ = context.getString(FlumeConstants.FIRST_LINE_PATTERN);
    last_line_pattern_str_ = context.getString(FlumeConstants.LAST_LINE_PATTERN);
    if (first_line_pattern_str_ != null) {
        first_line_pattern_ = Pattern.compile(first_line_pattern_str_);
    }
    if (last_line_pattern_str_ != null) {
        last_line_pattern_ = Pattern.compile(last_line_pattern_str_);
    }

    if (first_line_pattern_ != null && last_line_pattern_ != null) {
        parse_type_ = ParseType.MIX;
    } else if (first_line_pattern_ != null) {
        parse_type_ = ParseType.FIRST;
    } else if (last_line_pattern_ != null) {
        parse_type_ = ParseType.LAST;
    } else {
        parse_type_ = ParseType.NONE;
    }

    StringBuilder builder = new StringBuilder();
    builder.append("Config MultiLineParser with [");
    builder.append("read_buffer_size(init)=");
    builder.append(buffer_size_);
    builder.append(",max_buffer_size=");
    builder.append(max_buffer_size_);
    builder.append(",max_record_size=");
    builder.append(max_record_size_);
    builder.append(",first_line_pattern_str_=");
    builder.append(first_line_pattern_str_);
    builder.append(",last_line_pattern_str_=");
    builder.append(last_line_pattern_str_);
    builder.append(",file_content_include_pattern_str_=");
    builder.append(file_content_include_pattern_str_);
    builder.append(",record_include_pattern_=");
    builder.append(record_include_pattern_ == null ? "null"
        : record_include_pattern_.toString());
    builder.append(",file_content_exclude_pattern_str_=");
    builder.append(file_content_exclude_pattern_str_);
    builder.append(",record_exclude_pattern_=");
    builder.append(record_exclude_pattern_ == null ? "null"
        : record_exclude_pattern_.toString());
    builder.append(", parse_type=");
    builder.append("" + parse_type_);
    builder.append("]");
    LOG.info(builder.toString());
    builder = null;
}
Example 17
Source File: UUIDInterceptor.java From mt-flume with Apache License 2.0
protected UUIDInterceptor(Context context) {
    headerName = context.getString(HEADER_NAME, "id");
    preserveExisting = context.getBoolean(PRESERVE_EXISTING_NAME, true);
    prefix = context.getString(PREFIX_NAME, "");
}
Example 18
Source File: NGSIRestHandler.java From fiware-cygnus with GNU Affero General Public License v3.0
@Override
public void configure(Context context) {
    notificationTarget = context.getString(NGSIConstants.PARAM_NOTIFICATION_TARGET, "/notify");

    if (notificationTarget.startsWith("/")) {
        LOGGER.debug("[NGSIRestHandler] Reading configuration ("
            + NGSIConstants.PARAM_NOTIFICATION_TARGET + "=" + notificationTarget + ")");
    } else {
        invalidConfiguration = true;
        LOGGER.error("[NGSIRestHandler] Bad configuration ("
            + NGSIConstants.PARAM_NOTIFICATION_TARGET + "=" + notificationTarget
            + ") -- Must start with '/'");
    } // if else

    defaultService = context.getString(NGSIConstants.PARAM_DEFAULT_SERVICE, "default");

    if (defaultService.length() > NGSIConstants.SERVICE_HEADER_MAX_LEN) {
        invalidConfiguration = true;
        LOGGER.error("[NGSIRestHandler] Bad configuration ('"
            + NGSIConstants.PARAM_DEFAULT_SERVICE + "' parameter length greater than "
            + NGSIConstants.SERVICE_HEADER_MAX_LEN + ")");
    } else if (CommonUtils.isMAdeOfAlphaNumericsOrUnderscores(defaultService)) {
        LOGGER.debug("[NGSIRestHandler] Reading configuration ("
            + NGSIConstants.PARAM_DEFAULT_SERVICE + "=" + defaultService + ")");
    } else {
        invalidConfiguration = true;
        LOGGER.error("[NGSIRestHandler] Bad configuration ('"
            + NGSIConstants.PARAM_DEFAULT_SERVICE
            + "' parameter can only contain alphanumerics or underscores)");
    } // if else

    defaultServicePath = context.getString(NGSIConstants.PARAM_DEFAULT_SERVICE_PATH, "/");

    if (defaultServicePath.length() > NGSIConstants.SERVICE_PATH_HEADER_MAX_LEN) {
        invalidConfiguration = true;
        LOGGER.error("[NGSIRestHandler] Bad configuration ('"
            + NGSIConstants.PARAM_DEFAULT_SERVICE_PATH + "' parameter length greater "
            + "than " + NGSIConstants.SERVICE_PATH_HEADER_MAX_LEN + ")");
    } else if (!defaultServicePath.startsWith("/")) {
        invalidConfiguration = true;
        LOGGER.error("[NGSIRestHandler] Bad configuration ('"
            + NGSIConstants.PARAM_DEFAULT_SERVICE_PATH + "' must start with '/')");
    } else if (CommonUtils.isMAdeOfAlphaNumericsOrUnderscores(defaultServicePath.substring(1))) {
        LOGGER.debug("[NGSIRestHandler] Reading configuration ("
            + NGSIConstants.PARAM_DEFAULT_SERVICE_PATH + "=" + defaultServicePath + ")");
    } else {
        invalidConfiguration = true;
        LOGGER.error("[NGSIRestHandler] Bad configuration ('"
            + NGSIConstants.PARAM_DEFAULT_SERVICE_PATH
            + "' parameter can only contain alphanumerics or underscores");
    } // else

    LOGGER.info("[NGSIRestHandler] Startup completed");
}
Example 19
Source File: SimpleIndexNameBuilder.java From ns4_gear_watchdog with Apache License 2.0
@Override
public void configure(Context context) {
    indexName = context.getString(ElasticSearchHighSinkConstants.INDEX_NAME);
}
Example 20
Source File: SimpleMessagePreprocessor.java From flume-ng-kafka-sink with Apache License 2.0
/**
 * A custom property is read from the Flume config.
 * @param event This is the Flume event that will be sent to Kafka
 * @param context The Flume runtime context.
 * @return topic provided as a custom property
 */
@Override
public String extractTopic(Event event, Context context) {
    return context.getString("custom-topic", "default-topic");
}