org.apache.flume.Event Java Examples
The following examples show how to use
org.apache.flume.Event.
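Before the project examples, here is a minimal sketch of the core org.apache.flume.Event API that the snippets below rely on: building an event with EventBuilder, then reading or replacing its headers and body. The class name and header keys ("host", "timestamp") are illustrative only and not taken from any of the projects below.

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;

public class EventApiSketch {
  public static void main(String[] args) {
    // Build an event from a byte[] body plus an optional header map.
    Map<String, String> headers = new HashMap<String, String>();
    headers.put("host", "example-host");
    Event event = EventBuilder.withBody("hello flume".getBytes(StandardCharsets.UTF_8), headers);

    // Read the body and a header back.
    String body = new String(event.getBody(), StandardCharsets.UTF_8);
    System.out.println(body + " from " + event.getHeaders().get("host"));

    // Replace the header map wholesale, as several examples below do after enriching it.
    Map<String, String> newHeaders = new HashMap<String, String>(event.getHeaders());
    newHeaders.put("timestamp", String.valueOf(System.currentTimeMillis()));
    event.setHeaders(newHeaders);
  }
}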
Example #1
Source File: MemoryChannel.java From mt-flume with Apache License 2.0 | 6 votes |
@Override
protected Event doTake() throws InterruptedException {
  channelCounter.incrementEventTakeAttemptCount();
  if (takeList.remainingCapacity() == 0) {
    throw new ChannelException("Take list for MemoryTransaction, capacity " +
        takeList.size() + " full, consider committing more frequently, " +
        "increasing capacity, or increasing thread count");
  }
  if (!queueStored.tryAcquire(keepAlive, TimeUnit.SECONDS)) {
    return null;
  }
  Event event;
  synchronized (queueLock) {
    event = queue.poll();
  }
  Preconditions.checkNotNull(event, "Queue.poll returned NULL despite semaphore " +
      "signalling existence of entry");
  takeList.put(event);
  int eventByteSize = (int) Math.ceil(estimateEventSize(event) / byteCapacitySlotSize);
  takeByteCounter += eventByteSize;
  return event;
}
Example #2
Source File: TestLineDeserializer.java From mt-flume with Apache License 2.0 | 6 votes |
private void validateMiniParse(EventDeserializer des) throws IOException {
  Event evt;

  evt = des.readEvent();
  Assert.assertEquals(new String(evt.getBody()), "line 1");
  des.mark();

  evt = des.readEvent();
  Assert.assertEquals(new String(evt.getBody()), "line 2");
  des.reset(); // reset!

  evt = des.readEvent();
  Assert.assertEquals("Line 2 should be repeated, " +
      "because we reset() the stream", new String(evt.getBody()), "line 2");

  evt = des.readEvent();
  Assert.assertNull("Event should be null because there are no lines " +
      "left to read", evt);

  des.mark();
  des.close();
}
Example #3
Source File: DualChannel.java From mt-flume with Apache License 2.0 | 6 votes |
@Override
protected void doPut(Event event) throws InterruptedException {
  channelCounter.incrementEventPutAttemptCount();
  handleEventCount.incrementAndGet();
  if (switchon && putToMemChannel.get()) {
    memHandleEventCount.incrementAndGet();
    memTransaction.put(event);

    /**
     * If memChannel is full (queue remaining down to 30%) or fileChannel still
     * has queued events, route the next event to fileChannel.
     */
    if (memChannel.isFull() || fileChannel.getQueueSize() > 100) {
      putToMemChannel.set(false);
    }
  } else {
    fileHandleEventCount.incrementAndGet();
    fileTransaction.put(event);
  }
}
Example #4
Source File: RegexExtractorInterceptor.java From mt-flume with Apache License 2.0 | 6 votes |
@Override
public Event intercept(Event event) {
  Matcher matcher = regex.matcher(new String(event.getBody(), Charsets.UTF_8));
  Map<String, String> headers = event.getHeaders();
  if (matcher.find()) {
    for (int group = 0, count = matcher.groupCount(); group < count; group++) {
      int groupIndex = group + 1;
      if (groupIndex > serializers.size()) {
        if (logger.isDebugEnabled()) {
          logger.debug("Skipping group {} to {} due to missing serializer",
              group, count);
        }
        break;
      }
      NameAndSerializer serializer = serializers.get(group);
      if (logger.isDebugEnabled()) {
        logger.debug("Serializing {} using {}", serializer.headerName,
            serializer.serializer);
      }
      headers.put(serializer.headerName,
          serializer.serializer.serialize(matcher.group(groupIndex)));
    }
  }
  return event;
}
Example #5
Source File: DruidSinkIT.java From ingestion with Apache License 2.0 | 6 votes |
private Event getTrackerEvent() {
  Random random = new Random();
  String[] users = new String[] { "[email protected]", "[email protected]",
      "[email protected]", "[email protected]" };
  String[] isoCode = new String[] { "DE", "ES", "US", "FR" };
  TimeUnit[] offset = new TimeUnit[] { TimeUnit.DAYS, TimeUnit.HOURS, TimeUnit.SECONDS };
  ObjectNode jsonBody = new ObjectNode(JsonNodeFactory.instance);
  Map<String, String> headers;
  ObjectMapper mapper = new ObjectMapper();
  JsonNode jsonNode = null;
  final String fileName = "/trackerSample" + random.nextInt(4) + ".json";
  try {
    jsonNode = mapper.readTree(getClass().getResourceAsStream(fileName));
  } catch (IOException e) {
    e.printStackTrace();
  }
  headers = mapper.convertValue(jsonNode, Map.class);
  headers.put("timestamp", String.valueOf(new Date().getTime()
      + getOffset(offset[random.nextInt(3)]) * random.nextInt(100)));
  headers.put("santanderID", users[random.nextInt(4)]);
  headers.put("isoCode", isoCode[random.nextInt(4)]);

  return EventBuilder.withBody(jsonBody.toString().getBytes(Charsets.UTF_8), headers);
}
Example #6
Source File: TestElasticSearchTransportClient.java From ingestion with Apache License 2.0 | 6 votes |
@Before
public void setUp() throws IOException {
  initMocks(this);
  BytesReference bytesReference = mock(BytesReference.class);
  BytesStream bytesStream = mock(BytesStream.class);

  when(nameBuilder.getIndexName(any(Event.class))).thenReturn("foo_index");
  when(bytesReference.toBytes()).thenReturn("{\"body\":\"test\"}".getBytes());
  when(bytesStream.bytes()).thenReturn(bytesReference);
  when(serializer.getContentBuilder(any(Event.class))).thenReturn(bytesStream);
  when(elasticSearchClient.prepareIndex(anyString(), anyString()))
      .thenReturn(indexRequestBuilder);
  when(indexRequestBuilder.setSource(bytesReference)).thenReturn(indexRequestBuilder);

  fixture = new ElasticSearchTransportClient(elasticSearchClient, serializer);
  fixture.setBulkRequestBuilder(bulkRequestBuilder);
}
Example #7
Source File: LoadBalancingRpcClient.java From mt-flume with Apache License 2.0 | 6 votes |
@Override
public void append(Event event) throws EventDeliveryException {
  throwIfClosed();
  boolean eventSent = false;
  Iterator<HostInfo> it = selector.createHostIterator();

  while (it.hasNext()) {
    HostInfo host = it.next();
    try {
      RpcClient client = getClient(host);
      client.append(event);
      eventSent = true;
      break;
    } catch (Exception ex) {
      selector.informFailure(host);
      LOGGER.warn("Failed to send event to host " + host, ex);
    }
  }
  if (!eventSent) {
    throw new EventDeliveryException("Unable to send event to any host");
  }
}
Example #8
Source File: Consumer.java From rabbitmq-flume-plugin with BSD 3-Clause "New" or "Revised" License | 6 votes |
private Event parseMessage(Envelope envelope, AMQP.BasicProperties props, byte[] body) {
  // Create the event passing in the body
  Event event = EventBuilder.withBody(body);

  // Get the headers from properties, exchange, and routing-key
  Map<String, String> headers = buildHeaders(props);

  String exchange = envelope.getExchange();
  if (exchange != null && !exchange.isEmpty()) {
    headers.put("exchange", exchange);
  }

  String routingKey = envelope.getRoutingKey();
  if (routingKey != null && !routingKey.isEmpty()) {
    headers.put("routing-key", routingKey);
  }

  event.setHeaders(headers);
  return event;
}
Example #9
Source File: WatchDir.java From flume-taildirectory-source with Apache License 2.0 | 6 votes |
private void sendEvent(FileSet fileSet) {
  LOGGER.trace("WatchDir: sendEvent");

  if (fileSet.getBufferList().isEmpty())
    return;

  StringBuilder sb = fileSet.getAllLines();
  Event event = EventBuilder.withBody(String.valueOf(sb).getBytes(), fileSet.getHeaders());

  Map<String, String> headers = new HashMap<String, String>();
  if (fileHeader)
    headers.put(fileHeaderKey, fileSet.getFilePath().toString());
  if (basenameHeader)
    headers.put(basenameHeaderKey, fileSet.getFileName().toString());
  if (!headers.isEmpty())
    event.setHeaders(headers);

  source.getChannelProcessor().processEvent(event);
  counter.increaseCounterMessageSent();

  fileSet.clear();
}
Example #10
Source File: AvroSerializer.java From flume-elasticsearch-sink with Apache License 2.0 | 6 votes |
/**
 * Converts the avro binary data to the json format
 */
@Override
public XContentBuilder serialize(Event event) {
  XContentBuilder builder = null;
  try {
    if (datumReader != null) {
      Decoder decoder = new DecoderFactory().binaryDecoder(event.getBody(), null);
      GenericRecord data = datumReader.read(null, decoder);
      logger.trace("Record in event " + data);
      XContentParser parser = XContentFactory
          .xContent(XContentType.JSON)
          .createParser(NamedXContentRegistry.EMPTY,
              DeprecationHandler.THROW_UNSUPPORTED_OPERATION, data.toString());
      builder = jsonBuilder().copyCurrentStructure(parser);
      parser.close();
    } else {
      logger.error("Schema File is not configured");
    }
  } catch (IOException e) {
    logger.error("Exception in parsing avro format data but continuing serialization to process further records",
        e.getMessage(), e);
  }
  return builder;
}
Example #11
Source File: FlumePersistentAppenderTest.java From logging-log4j2 with Apache License 2.0 | 6 votes |
@Test
public void testMultiple() {
  for (int i = 0; i < 10; ++i) {
    final StructuredDataMessage msg = new StructuredDataMessage("Test", "Test Multiple " + i, "Test");
    msg.put("counter", Integer.toString(i));
    EventLogger.logEvent(msg);
  }
  final boolean[] fields = new boolean[10];
  for (int i = 0; i < 10; ++i) {
    final Event event = primary.poll();
    Assert.assertNotNull("Received " + i + " events. Event " + (i + 1) + " is null", event);
    final String value = event.getHeaders().get("counter");
    Assert.assertNotNull("Missing 'counter' in map " + event.getHeaders() + ", i = " + i, value);
    final int counter = Integer.parseInt(value);
    if (fields[counter]) {
      Assert.fail("Duplicate event");
    } else {
      fields[counter] = true;
    }
  }
  for (int i = 0; i < 10; ++i) {
    Assert.assertTrue("Channel contained event, but not expected message " + i, fields[i]);
  }
}
Example #12
Source File: TestBucketWriter.java From mt-flume with Apache License 2.0 | 6 votes |
@Test
public void testFileSuffixNotGiven() throws IOException, InterruptedException {
  final int ROLL_INTERVAL = 1000; // seconds. Make sure it doesn't change in course of test
  final String suffix = null;

  MockHDFSWriter hdfsWriter = new MockHDFSWriter();
  BucketWriter bucketWriter = new BucketWriter(ROLL_INTERVAL, 0, 0, 0, ctx,
      "/tmp", "file", "", ".tmp", suffix, null, SequenceFile.CompressionType.NONE,
      hdfsWriter, timedRollerPool, null,
      new SinkCounter("test-bucket-writer-" + System.currentTimeMillis()), 0,
      null, null, 30000, Executors.newSingleThreadExecutor());

  // Need to override system time use for test so we know what to expect
  final long testTime = System.currentTimeMillis();
  Clock testClock = new Clock() {
    public long currentTimeMillis() {
      return testTime;
    }
  };
  bucketWriter.setClock(testClock);

  Event e = EventBuilder.withBody("foo", Charsets.UTF_8);
  bucketWriter.append(e);

  Assert.assertTrue("Incorrect suffix", hdfsWriter.getOpenedFilePath()
      .endsWith(Long.toString(testTime + 1) + ".tmp"));
}
Example #13
Source File: EventCodec.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
@Override
public Slice toByteArray(Event event) {
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  Output output = new Output(os);

  Map<String, String> headers = event.getHeaders();
  if (headers != null && headers.getClass() != HashMap.class) {
    HashMap<String, String> tmp = new HashMap<String, String>(headers.size());
    tmp.putAll(headers);
    headers = tmp;
  }
  kryo.writeObjectOrNull(output, headers, HashMap.class);
  kryo.writeObjectOrNull(output, event.getBody(), byte[].class);
  output.flush();
  final byte[] bytes = os.toByteArray();
  return new Slice(bytes, 0, bytes.length);
}
Example #14
Source File: TestLog4jAppender.java From mt-flume with Apache License 2.0 | 6 votes |
private void sendAndAssertFail(Logger logger) throws Throwable {
  /*
   * Log4j internally defines levels as multiples of 10000. So if we
   * create levels directly using count, the level will be set as the
   * default.
   */
  int level = 20000;
  try {
    logger.log(Level.toLevel(level), "Test Msg");
  } catch (FlumeException ex) {
    ex.printStackTrace();
    throw ex.getCause();
  }
  Transaction transaction = ch.getTransaction();
  transaction.begin();
  Event event = ch.take();
  Assert.assertNull(event);
  transaction.commit();
  transaction.close();
}
Example #15
Source File: TestThriftLegacySource.java From mt-flume with Apache License 2.0 | 6 votes |
@Test
public void testRequest() throws InterruptedException, IOException {
  bind();

  Map flumeMap = new HashMap<CharSequence, ByteBuffer>();
  ThriftFlumeEvent thriftEvent = new ThriftFlumeEvent(
      1, Priority.INFO, ByteBuffer.wrap("foo".getBytes()),
      0, "fooHost", flumeMap);
  FlumeClient fClient = new FlumeClient("0.0.0.0", selectedPort);
  fClient.append(thriftEvent);

  // check if the event has arrived in the channel through OG thrift source
  Transaction transaction = channel.getTransaction();
  transaction.begin();
  Event event = channel.take();
  Assert.assertNotNull(event);
  Assert.assertEquals("Channel contained our event", "foo", new String(event.getBody()));
  transaction.commit();
  transaction.close();

  stop();
}
Example #16
Source File: MultiplexingChannelSelector.java From mt-flume with Apache License 2.0 | 6 votes |
@Override
public List<Channel> getRequiredChannels(Event event) {
  String headerValue = event.getHeaders().get(headerName);
  if (headerValue == null || headerValue.trim().length() == 0) {
    return defaultChannels;
  }

  List<Channel> channels = channelMapping.get(headerValue);

  // This header value does not point to anything.
  // Return default channel(s) here.
  if (channels == null) {
    channels = defaultChannels;
  }

  return channels;
}
Example #17
Source File: FlumeAvroManager.java From logback-flume-appender with MIT License | 6 votes |
private FlumeAvroManager(final Properties props,
                         final Long reportingWindowReq,
                         final Integer batchSizeReq,
                         final Integer reporterMaxThreadPoolSizeReq,
                         final Integer reporterMaxQueueSizeReq,
                         final ContextAware context) {
  this.loggingContext = context;

  final int reporterMaxThreadPoolSize = reporterMaxThreadPoolSizeReq == null ?
      DEFAULT_REPORTER_MAX_THREADPOOL_SIZE : reporterMaxThreadPoolSizeReq;
  final int reporterMaxQueueSize = reporterMaxQueueSizeReq == null ?
      DEFAULT_REPORTER_MAX_QUEUE_SIZE : reporterMaxQueueSizeReq;

  this.reporter = new EventReporter(props, loggingContext, reporterMaxThreadPoolSize, reporterMaxQueueSize);
  this.evQueue = new ArrayBlockingQueue<Event>(1000);
  final long reportingWindow = hamonizeReportingWindow(reportingWindowReq);
  final int batchSize = batchSizeReq == null ? DEFAULT_BATCH_SIZE : batchSizeReq;
  this.asyncThread = new AsyncThread(evQueue, batchSize, reportingWindow);
  loggingContext.addInfo("Created a new flume agent with properties: " + props.toString());
  asyncThread.start();
}
Example #18
Source File: RestSource.java From ingestion with Apache License 2.0 | 6 votes |
/**
 * {@inheritDoc}
 */
@Override
public Status process() throws EventDeliveryException {
  Status status = Status.READY;
  try {
    Event e = poll();
    if (e != null) {
      getChannelProcessor().processEvent(e);
      status = Status.READY;
    }
  } catch (Throwable t) {
    status = Status.BACKOFF;
    log.error("RestSource error. " + t.getMessage());
  }
  return status;
}
Example #19
Source File: TestExecSource.java From mt-flume with Apache License 2.0 | 5 votes |
@Test
public void testProcess() throws InterruptedException, LifecycleException,
    EventDeliveryException, IOException {

  context.put("command", "cat /etc/passwd");
  context.put("keep-alive", "1");
  context.put("capacity", "1000");
  context.put("transactionCapacity", "1000");
  Configurables.configure(source, context);

  source.start();
  Transaction transaction = channel.getTransaction();
  transaction.begin();
  Event event;

  FileOutputStream outputStream = new FileOutputStream(
      "/tmp/flume-execsource." + Thread.currentThread().getId());

  while ((event = channel.take()) != null) {
    outputStream.write(event.getBody());
    outputStream.write('\n');
  }

  outputStream.close();
  transaction.commit();
  transaction.close();

  File file1 = new File("/tmp/flume-execsource." + Thread.currentThread().getId());
  File file2 = new File("/etc/passwd");
  Assert.assertEquals(FileUtils.checksumCRC32(file1), FileUtils.checksumCRC32(file2));
  FileUtils.forceDelete(file1);
}
Example #20
Source File: HDFSCompressedDataStream.java From Transwarp-Sample-Code with MIT License | 5 votes |
@Override
public void append(Event e) throws IOException {
  if (isFinished) {
    cmpOut.resetState();
    isFinished = false;
  }
  serializer.write(e);
}
Example #21
Source File: TestRegexFilteringInterceptor.java From mt-flume with Apache License 2.0 | 5 votes |
@Test
public void testExclusion() throws ClassNotFoundException,
    InstantiationException, IllegalAccessException {

  Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
      InterceptorType.REGEX_FILTER.toString());

  Context ctx = new Context();
  ctx.put(Constants.REGEX, ".*DEBUG.*");
  ctx.put(Constants.EXCLUDE_EVENTS, "true");

  builder.configure(ctx);
  Interceptor interceptor = builder.build();

  Event shouldPass1 = EventBuilder.withBody("INFO: some message", Charsets.UTF_8);
  Assert.assertNotNull(interceptor.intercept(shouldPass1));

  Event shouldPass2 = EventBuilder.withBody("WARNING: some message", Charsets.UTF_8);
  Assert.assertNotNull(interceptor.intercept(shouldPass2));

  Event shouldNotPass = EventBuilder.withBody("this message has DEBUG in it", Charsets.UTF_8);
  Assert.assertNull(interceptor.intercept(shouldNotPass));
  builder.configure(ctx);
}
Example #22
Source File: TestJSONHandler.java From mt-flume with Apache License 2.0 | 5 votes |
@Test
public void testSingleHTMLEvent() throws Exception {
  String json = "[{\"headers\": {\"a\": \"b\"},"
      + "\"body\": \"<html><body>test</body></html>\"}]";
  HttpServletRequest req = new FlumeHttpServletRequestWrapper(json);
  List<Event> deserialized = handler.getEvents(req);
  Event e = deserialized.get(0);
  Assert.assertEquals("b", e.getHeaders().get("a"));
  Assert.assertEquals("<html><body>test</body></html>", new String(e.getBody(), "UTF-8"));
}
Example #23
Source File: PseudoTxnMemoryChannel.java From mt-flume with Apache License 2.0 | 5 votes |
@Override
public void put(Event event) {
  Preconditions.checkState(queue != null,
      "No queue defined (Did you forget to configure me?");

  channelCounter.incrementEventPutAttemptCount();
  try {
    queue.put(event);
  } catch (InterruptedException ex) {
    throw new ChannelException("Failed to put(" + event + ")", ex);
  }
  channelCounter.addToEventPutSuccessCount(1);
  channelCounter.setChannelSize(queue.size());
}
Example #24
Source File: FlumeEmbeddedAppenderTest.java From logging-log4j2 with Apache License 2.0 | 5 votes |
@Test
@Ignore
public void testHeaderAddedByInterceptor() {
  final StructuredDataMessage msg = new StructuredDataMessage("Test", "Test Log4j", "Test");
  EventLogger.logEvent(msg);

  final Event event = primary.poll();
  Assert.assertNotNull("Event should not be null", event);
  final String environmentHeader = event.getHeaders().get("environment");
  Assert.assertEquals("local", environmentHeader);
}
Example #25
Source File: TestRegexHbaseEventSerializer.java From mt-flume with Apache License 2.0 | 5 votes |
@Test
public void testRowKeyGeneration() {
  Context context = new Context();
  RegexHbaseEventSerializer s1 = new RegexHbaseEventSerializer();
  s1.configure(context);
  RegexHbaseEventSerializer s2 = new RegexHbaseEventSerializer();
  s2.configure(context);

  // Reset shared nonce to zero
  RegexHbaseEventSerializer.nonce.set(0);
  String randomString = RegexHbaseEventSerializer.randomKey;

  Event e1 = EventBuilder.withBody(Bytes.toBytes("body"));
  Event e2 = EventBuilder.withBody(Bytes.toBytes("body"));
  Event e3 = EventBuilder.withBody(Bytes.toBytes("body"));

  Calendar cal = mock(Calendar.class);
  when(cal.getTimeInMillis()).thenReturn(1L);

  s1.initialize(e1, "CF".getBytes());
  String rk1 = new String(s1.getRowKey(cal));
  assertEquals("1-" + randomString + "-0", rk1);

  when(cal.getTimeInMillis()).thenReturn(10L);
  s1.initialize(e2, "CF".getBytes());
  String rk2 = new String(s1.getRowKey(cal));
  assertEquals("10-" + randomString + "-1", rk2);

  when(cal.getTimeInMillis()).thenReturn(100L);
  s2.initialize(e3, "CF".getBytes());
  String rk3 = new String(s2.getRowKey(cal));
  assertEquals("100-" + randomString + "-2", rk3);
}
Example #26
Source File: JsonRestSourceHandlerTest.java From ingestion with Apache License 2.0 | 5 votes |
@Test
public void getEventsFromNotValidJson() throws Exception {
  final List<Event> events = jsonHandler.getEvents(
      "[{\"field1\":\"value1\"},{\"field2\":}]",
      ImmutableMap.<String, String>builder().build());

  assertThat(events).isEmpty();
}
Example #27
Source File: ApplicationDiscoveryTest.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
@Override
public Collection<Partition<AbstractFlumeInputOperator<Event>>> definePartitions(
    Collection<Partition<AbstractFlumeInputOperator<Event>>> partitions, PartitioningContext context) {
  if (first) {
    first = false;
    zkListener.setup(null);
  }
  Collection<Discovery.Service<byte[]>> addresses;
  addresses = zkListener.discover();
  discoveredFlumeSinks.set(addresses);
  return super.definePartitions(partitions, context);
}
Example #28
Source File: CustomLastfmHeaderAndBodyTextEventSerializer.java From sequenceiq-samples with Apache License 2.0 | 5 votes |
@Override
public void write(Event e) throws IOException {
  try {
    String message = e.getHeaders().get("message");
    out.write(message.getBytes(Charset.forName("UTF-8")));
    if (appendNewline) {
      out.write('\n');
    }
  } catch (Exception ex) {
    LOGGER.info("There was no message in the header...");
  }
}
Example #29
Source File: TestDefaultJMSMessageConverter.java From mt-flume with Apache License 2.0 | 5 votes |
@Test
public void testNoHeaders() throws Exception {
  createTextMessage();
  createHeaders();
  Event event = converter.convert(message).iterator().next();
  assertEquals(Collections.EMPTY_MAP, event.getHeaders());
  assertEquals(TEXT, new String(event.getBody(), Charsets.UTF_8));
}
Example #30
Source File: ColumnFilteringInterceptor.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
@Override
public List<Event> intercept(List<Event> events) {
  for (Event event : events) {
    intercept(event);
  }
  return events;
}