Java Code Examples for com.datatorrent.api.DAG#addOperator()
The following examples show how to use com.datatorrent.api.DAG#addOperator(). Each example is taken from an open source project; the originating project and source file are noted above each example.
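Before the examples, here is a minimal, hypothetical sketch of the pattern they all share: populateDAG() registers each operator under a unique name with addOperator() and wires operators together with addStream(). As the examples below show, addOperator() accepts either a pre-built operator instance or an operator class that the engine instantiates. The MyInputOperator and MyOutputOperator types and their ports are placeholders invented for illustration, not real Malhar operators.

import org.apache.hadoop.conf.Configuration;

import com.datatorrent.api.DAG;
import com.datatorrent.api.StreamingApplication;

public class MinimalApplication implements StreamingApplication
{
  @Override
  public void populateDAG(DAG dag, Configuration conf)
  {
    // Variant 1: register a pre-configured operator instance under a unique name.
    // MyInputOperator is a hypothetical operator with an output port named "out".
    MyInputOperator in = dag.addOperator("input", new MyInputOperator());

    // Variant 2: pass the operator class; the engine instantiates it and
    // returns the instance so it can be configured further.
    MyOutputOperator out = dag.addOperator("output", MyOutputOperator.class);

    // Streams connect an upstream output port to one or more downstream input ports.
    dag.addStream("data", in.out, out.in);
  }
}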
Example 1
Source File: HDHTBenchmarkApplication.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  dag.setAttribute(DAG.APPLICATION_NAME, "HDHTBenchmarkApplication");
  Generator gen = dag.addOperator("Generator", new Generator());
  gen.setTupleBlast(1000);
  gen.setSleepms(0);
  dag.getOperatorMeta("Generator").getAttributes().put(Context.OperatorContext.APPLICATION_WINDOW_COUNT, 1);

  HDSOperator hdsOut = dag.addOperator("Store", new HDSOperator());
  TFileImpl.DTFileImpl hdsFile = new TFileImpl.DTFileImpl();
  hdsFile.setBasePath("WALBenchMarkDir");
  hdsOut.setFileStore(hdsFile);
  dag.getOperatorMeta("Store").getAttributes().put(Context.OperatorContext.COUNTERS_AGGREGATOR, new HDHTWriter.BucketIOStatAggregator());

  dag.addStream("s1", gen.out, hdsOut.input).setLocality(DAG.Locality.THREAD_LOCAL);
}
Example 2
Source File: ManagedStateBenchmarkApp.java From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  TestStatsListener sl = new TestStatsListener();
  sl.adjustRate = conf.getBoolean("dt.ManagedStateBenchmark.adjustRate", false);

  TestGenerator gen = dag.addOperator("Generator", new TestGenerator());
  gen.setRange(timeRange);
  dag.setAttribute(gen, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)sl));

  storeOperator = new StoreOperator();
  storeOperator.setStore(createStore(conf));
  storeOperator.setTimeRange(timeRange);
  storeOperator = dag.addOperator("Store", storeOperator);
  dag.setAttribute(storeOperator, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)sl));

  dag.addStream("Events", gen.data, storeOperator.input).setLocality(Locality.CONTAINER_LOCAL);
}
Example 3
Source File: Application.java From attic-apex-malhar with Apache License 2.0
public DimensionTimeBucketSumOperator getMysqlDimensionTimeBucketSumOperator(String name, DAG dag)
{
  DimensionTimeBucketSumOperator oper = dag.addOperator(name, DimensionTimeBucketSumOperator.class);
  oper.addDimensionKeyName(MYSQL_KEYS.user.value);
  oper.addDimensionKeyName(MYSQL_KEYS.query_time.value);
  oper.addDimensionKeyName(MYSQL_KEYS.rows_sent.value);
  oper.addDimensionKeyName(MYSQL_KEYS.rows_examined.value);
  oper.addValueKeyName(MYSQL_KEYS.lock_time.value);

  Set<String> dimensionKey = new HashSet<String>();
  dimensionKey.add(MYSQL_KEYS.user.value);
  try {
    oper.addCombination(dimensionKey);
  } catch (NoSuchFieldException e) {
    throw new RuntimeException("Exception while adding operator " + name, e);
  }
  oper.setTimeBucketFlags(AbstractDimensionTimeBucketOperator.TIMEBUCKET_MINUTE);
  return oper;
}
Example 4
Source File: xmlParserApplication.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  EmployeeDataGenerator dataGenerator = dag.addOperator("dataGenerator", new EmployeeDataGenerator());
  XmlParser parserOperator = dag.addOperator("xmlParser", new XmlParser());
  XmlDocumentFormatter resultCollector = dag.addOperator("resultCollector", new XmlDocumentFormatter());
  FileOutputOperator pojoOutput = dag.addOperator("pojoOutput", new FileOutputOperator());
  FileOutputOperator dataOutput = dag.addOperator("dataOutput", new FileOutputOperator());
  FileOutputOperator errorOutput = dag.addOperator("errorOutput", new FileOutputOperator());

  JavaSerializationStreamCodec codec = new JavaSerializationStreamCodec();
  dag.setInputPortAttribute(resultCollector.input, Context.PortContext.STREAM_CODEC, codec);

  dag.addStream("inputData", dataGenerator.output, parserOperator.in);
  dag.addStream("parsedDoc", parserOperator.parsedOutput, resultCollector.input);
  dag.addStream("formattedData", resultCollector.output, dataOutput.input);
  dag.addStream("errorData", parserOperator.err, errorOutput.input);
  dag.addStream("pojotoFile", parserOperator.out, pojoOutput.input);
}
Example 5
Source File: Application.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // create operators
  RandomInteger random = dag.addOperator("random", new RandomInteger());
  RangeFinder rf = dag.addOperator("range", new RangeFinder());
  ToConsole cons = dag.addOperator("console", new ToConsole());

  // create streams
  dag.addStream("randomData", random.out, rf.in);
  dag.addStream("rangeData", rf.out, cons.in);
}
Example 6
Source File: DevNullCounterBenchmark.java From attic-apex-malhar with Apache License 2.0
/**
 * Tests both string and non-string schemas.
 *
 * @param dag
 * @param conf
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // RandomEventGenerator rand = dag.addOperator("rand", new RandomEventGenerator());
  // rand.setMinvalue(0);
  // rand.setMaxvalue(999999);
  // rand.setTuplesBlastIntervalMillis(50);
  // dag.getMeta(rand).getMeta(rand.integer_data).getAttributes().put(PortContext.QUEUE_CAPACITY, QUEUE_CAPACITY);
  IntegerOperator intInput = dag.addOperator("intInput", new IntegerOperator());
  DevNullCounter oper = dag.addOperator("oper", new DevNullCounter());
  dag.getMeta(oper).getMeta(oper.data).getAttributes().put(PortContext.QUEUE_CAPACITY, QUEUE_CAPACITY);
  dag.addStream("dev", intInput.integer_data, oper.data).setLocality(locality);
}
Example 7
Source File: CsvFormatterTest.java From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  PojoEmitter input = dag.addOperator("data", new PojoEmitter());
  CsvFormatter formatter = dag.addOperator("formatter", new CsvFormatter());
  dag.getMeta(formatter).getMeta(formatter.in).getAttributes().put(Context.PortContext.TUPLE_CLASS, Ad.class);
  formatter.setSchema(SchemaUtils.jarResourceFileToString("schema.json"));

  ConsoleOutputOperator output = dag.addOperator("output", new ConsoleOutputOperator());
  ConsoleOutputOperator error = dag.addOperator("error", new ConsoleOutputOperator());
  output.setDebug(true);

  dag.addStream("input", input.output, formatter.in);
  dag.addStream("output", formatter.out, output.input);
  dag.addStream("err", formatter.err, error.input);
}
Example 8
Source File: Application.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  LineByLineFileInputOperator in = dag.addOperator("lines", LineByLineFileInputOperator.class);
  KafkaSinglePortOutputOperator<String, String> out = dag.addOperator("kafkaOutput", new KafkaSinglePortOutputOperator<String, String>());
  dag.addStream("data", in.output, out.inputPort).setLocality(Locality.CONTAINER_LOCAL);
}
Example 9
Source File: StreamingJsonParserTest.java From attic-apex-malhar with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  FileInputOperator fileInput = dag.addOperator("fileInput", getFileInput());
  StreamingJsonParser parser = dag.addOperator("parser", getParser());
  dag.getMeta(parser).getMeta(parser.output).getAttributes().put(Context.PortContext.TUPLE_CLASS, Person.class);
  ConsoleOutputOperator consoleOutput = dag.addOperator("output", new ConsoleOutputOperator());
  dag.addStream("Input", fileInput.output, parser.in).setLocality(Locality.CONTAINER_LOCAL);
  dag.addStream("pojo", parser.output, consoleOutput.input).setLocality(Locality.CONTAINER_LOCAL);
}
Example 10
Source File: Application.java From attic-apex-malhar with Apache License 2.0
public InputPort<Map<String, Map<String, Number>>> getRedisOutput(String name, DAG dag, int dbIndex)
{
  @SuppressWarnings("unchecked")
  RedisNumberSummationMapOutputOperator<String, Map<String, Number>> oper =
      dag.addOperator(name, RedisNumberSummationMapOutputOperator.class);
  oper.getStore().setDbIndex(dbIndex);
  return oper.input;
}
Example 11
Source File: CustomerServiceHbaseOutputOperatorTester.java From examples with Apache License 2.0
/**
 * This is the DAG to read tuples from HBase.
 * @param dag
 * @param conf
 */
protected void populateInputDAG(DAG dag, Configuration conf)
{
  HBasePOJOInputOperator pojoInput = new HBasePOJOInputOperator();
  pojoInput.setStore(createHBaseStore());
  pojoInput.setPojoTypeName(LoadedCustomerService.class.getName());

  {
    TableInfo<HBaseFieldInfo> tableInfo = new TableInfo<HBaseFieldInfo>();
    tableInfo.setRowOrIdExpression("imsi");

    final String familyName = "f1";
    List<HBaseFieldInfo> fieldsInfo = new ArrayList<HBaseFieldInfo>();
    fieldsInfo.add(new HBaseFieldInfo("totalDuration", "totalDuration", SupportType.INTEGER, familyName));
    fieldsInfo.add(new HBaseFieldInfo("wait", "wait", SupportType.INTEGER, familyName));
    fieldsInfo.add(new HBaseFieldInfo("zipCode", "zipCode", SupportType.STRING, familyName));
    fieldsInfo.add(new HBaseFieldInfo("issueType", "issueType", SupportType.STRING, familyName));
    fieldsInfo.add(new HBaseFieldInfo("satisfied", "satisfied", SupportType.BOOLEAN, familyName));

    tableInfo.setFieldsInfo(fieldsInfo);
    pojoInput.setTableInfo(tableInfo);
  }

  dag.addOperator("HbaseInput", pojoInput);

  hbaseInputCacheOperator = new TupleCacheOperator<>("hbaseInputCacheOperatorData");
  dag.addOperator("InputCache", hbaseInputCacheOperator);
  hbaseInputCacheOperator.outputPort.setSink(new ArrayListTestSink());

  dag.addStream("InputStream", pojoInput.outputPort, hbaseInputCacheOperator.inputPort).setLocality(Locality.CONTAINER_LOCAL);
}
Example 12
Source File: Application.java From examples with Apache License 2.0
public void populateDAG(DAG dag, Configuration conf)
{
  POJOGenerator generator = dag.addOperator("POJOGenerator", POJOGenerator.class);
  BytesFileOutputOperator fileOutput = dag.addOperator("fileOutput", BytesFileOutputOperator.class);
  dag.addStream("data", generator.out, fileOutput.input);
}
Example 13
Source File: csvParserApplication.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  AdDataGenerator dataGenerator = dag.addOperator("dataGenerator", new AdDataGenerator());
  CsvParser parserOperator = dag.addOperator("csvParser", new CsvParser());
  FileOutputOperator dataOutput = dag.addOperator("dataOutput", new FileOutputOperator());
  FileOutputOperator errorOutput = dag.addOperator("errorOutput", new FileOutputOperator());
  ConsoleOutputOperator consoleOutput = dag.addOperator("consoleOutput", new ConsoleOutputOperator());

  dag.addStream("inputData", dataGenerator.out, parserOperator.in);
  dag.addStream("parsedData", parserOperator.parsedOutput, dataOutput.input);
  dag.addStream("errorData", parserOperator.err, errorOutput.input);
  dag.addStream("pojoData", parserOperator.out, consoleOutput.input);
}
Example 14
Source File: fixedWidthParserApplication.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  com.datatorrent.tutorial.fixedwidthparser.AdDataGenerator dataGenerator =
      dag.addOperator("dataGenerator", new com.datatorrent.tutorial.fixedwidthparser.AdDataGenerator());
  FixedWidthParser parserOperator = dag.addOperator("fixedWidthParser", new FixedWidthParser());
  FileOutputOperator dataOutput = dag.addOperator("dataOutput", new FileOutputOperator());
  FileOutputOperator errorOutput = dag.addOperator("errorOutput", new FileOutputOperator());
  FileOutputOperator pojoOutput = dag.addOperator("pojoOutput", new FileOutputOperator());

  dag.addStream("inputData", dataGenerator.out, parserOperator.in);
  dag.addStream("parsedData", parserOperator.parsedOutput, dataOutput.input);
  dag.addStream("errorData", parserOperator.err, errorOutput.input);
  dag.addStream("pojoData", parserOperator.out, pojoOutput.input);
}
Example 15
Source File: LogicalPlanConfigurationTest.java From attic-apex-core with Apache License 2.0
@Override public void populateDAG(DAG dag, Configuration conf) { dag.addOperator("operator1", gt1); dag.addOperator("operator2", gt2); dag.addOperator("operator3", gt3); dag.addStream("s1", gt1.outport1, gt2.inport1); dag.addStream("s2", gt2.outport1, gt3.inport1, gt3.inport2); }
Example 16
Source File: TelecomDimensionsDemo.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  //Set input properties
  String eventSchema = SchemaUtils.jarResourceFileToString(eventSchemaLocation);

  //input
  if (inputOperator == null) {
    inputOperator = new EnrichedCDRHbaseInputOperator();
  }
  dag.addOperator("InputGenerator", inputOperator);

  //dimension
  DimensionsComputationFlexibleSingleSchemaPOJO dimensions =
      dag.addOperator("DimensionsComputation", DimensionsComputationFlexibleSingleSchemaPOJO.class);
  dag.getMeta(dimensions).getAttributes().put(Context.OperatorContext.APPLICATION_WINDOW_COUNT, 4);
  dag.getMeta(dimensions).getAttributes().put(Context.OperatorContext.CHECKPOINT_WINDOW_COUNT, 4);

  //Set operator properties
  //key expression
  {
    Map<String, String> keyToExpression = Maps.newHashMap();
    keyToExpression.put("imsi", "getImsi()");
    keyToExpression.put("Carrier", "getOperatorCode()");
    keyToExpression.put("imei", "getImei()");
    dimensions.setKeyToExpression(keyToExpression);
  }

  EnrichedCDR cdr = new EnrichedCDR();
  cdr.getOperatorCode();
  cdr.getDuration();

  //aggregate expression
  {
    Map<String, String> aggregateToExpression = Maps.newHashMap();
    aggregateToExpression.put("duration", "getDuration()");
    aggregateToExpression.put("terminatedAbnomally", "getTerminatedAbnomally()");
    aggregateToExpression.put("terminatedNomally", "getTerminatedNomally()");
    aggregateToExpression.put("called", "getCalled()");
    dimensions.setAggregateToExpression(aggregateToExpression);
  }

  //event schema
  dimensions.setConfigurationSchemaJSON(eventSchema);

  dimensions.setUnifier(new DimensionsComputationUnifierImpl<InputEvent, Aggregate>());
  dag.getMeta(dimensions).getMeta(dimensions.output).getUnifierMeta().getAttributes().put(OperatorContext.MEMORY_MB, 8092);

  //store
  AppDataSingleSchemaDimensionStoreHDHT store = dag.addOperator("Store", AppDataSingleSchemaDimensionStoreHDHT.class);
  String basePath = conf.get(PROP_STORE_PATH);
  if (basePath == null || basePath.isEmpty()) {
    basePath = Preconditions.checkNotNull(conf.get(PROP_STORE_PATH),
        "base path should be specified in the properties.xml");
  }
  TFileImpl hdsFile = new TFileImpl.DTFileImpl();
  basePath += System.currentTimeMillis();
  hdsFile.setBasePath(basePath);

  store.setFileStore(hdsFile);
  dag.setAttribute(store, Context.OperatorContext.COUNTERS_AGGREGATOR, new BasicCounters.LongAggregator<MutableLong>());
  store.setConfigurationSchemaJSON(eventSchema);
  //store.setDimensionalSchemaStubJSON(eventSchema);

  PubSubWebSocketAppDataQuery query = createAppDataQuery();
  store.setEmbeddableQueryInfoProvider(query);

  //wsOut
  PubSubWebSocketAppDataResult wsOut = createAppDataResult();
  dag.addOperator("QueryResult", wsOut);

  //Set remaining dag options
  dag.setAttribute(store, Context.OperatorContext.COUNTERS_AGGREGATOR, new BasicCounters.LongAggregator<MutableLong>());

  dag.addStream("InputStream", inputOperator.outputPort, dimensions.input).setLocality(Locality.CONTAINER_LOCAL);
  dag.addStream("DimensionalData", dimensions.output, store.input);
  dag.addStream("QueryResult", store.queryResult, wsOut.input);
}
Example 17
Source File: KinesisInputOperatorTest.java From attic-apex-malhar with Apache License 2.0
@Test
public void testKinesisByteArrayInputOperator() throws Exception
{
  int totalCount = 10;

  // initialize the latch for this test
  latch = new CountDownLatch(1);

  // Start producer
  KinesisTestProducer p = new KinesisTestProducer(streamName);
  p.setSendCount(totalCount);
  p.setBatchSize(9);
  new Thread(p).start();

  // Create DAG for testing.
  LocalMode lma = LocalMode.newInstance();
  DAG dag = lma.getDAG();

  // Create KinesisByteArrayInputOperator and set some properties with respect to consumer.
  KinesisByteArrayInputOperator node = dag.addOperator("Kinesis message consumer", KinesisByteArrayInputOperator.class);
  // supply the AWS access key id and secret key to the matching setters
  node.setAccessKey(credentials.getCredentials().getAWSAccessKeyId());
  node.setSecretKey(credentials.getCredentials().getAWSSecretKey());
  KinesisConsumer consumer = new KinesisConsumer();
  consumer.setStreamName(streamName);
  consumer.setRecordsLimit(totalCount);
  node.setConsumer(consumer);

  // Create Test tuple collector
  CollectorModule<byte[]> collector = dag.addOperator("TestMessageCollector", new CollectorModule<byte[]>());

  // Connect ports
  dag.addStream("Kinesis message", node.outputPort, collector.inputPort).setLocality(Locality.CONTAINER_LOCAL);

  // Create local cluster
  final LocalMode.Controller lc = lma.getController();
  lc.setHeartbeatMonitoringEnabled(false);
  lc.runAsync();

  // Wait up to 45s for the consumer to finish consuming all the messages
  latch.await(45000, TimeUnit.MILLISECONDS);

  // Check results
  Assert.assertEquals("Collections size", 1, collections.size());
  Assert.assertEquals("Tuple count", totalCount, collections.get(collector.inputPort.id).size());
  logger.debug(String.format("Number of emitted tuples: %d", collections.get(collector.inputPort.id).size()));

  lc.shutdown();
}
Example 18
Source File: KafkaPartitionableInputOperatorTest.java From attic-apex-malhar with Apache License 2.0
public void testPartitionableInputOperator(KafkaConsumer consumer) throws Exception
{
  // each broker should get an END_TUPLE message
  latch = new CountDownLatch(totalBrokers);

  int totalCount = 10000;

  // Start producer
  KafkaTestProducer p = new KafkaTestProducer(TEST_TOPIC, hasMultiPartition, hasMultiCluster);
  p.setSendCount(totalCount);
  new Thread(p).start();

  // Create DAG for testing.
  LocalMode lma = LocalMode.newInstance();
  DAG dag = lma.getDAG();

  // Create KafkaSinglePortStringInputOperator
  KafkaSinglePortStringInputOperator node = dag.addOperator("Kafka message consumer", KafkaSinglePortStringInputOperator.class);
  node.setInitialPartitionCount(1);

  // set topic
  consumer.setTopic(TEST_TOPIC);
  consumer.setInitialOffset("earliest");
  node.setConsumer(consumer);

  String clusterString = "cluster1::localhost:" + TEST_ZOOKEEPER_PORT[0]
      + (hasMultiCluster ? ";cluster2::localhost:" + TEST_ZOOKEEPER_PORT[1] : "");
  node.setZookeeper(clusterString);

  // Create Test tuple collector
  CollectorModule<String> collector = dag.addOperator("TestMessageCollector", new CollectorModule<String>());

  // Connect ports
  dag.addStream("Kafka message", node.outputPort, collector.inputPort).setLocality(Locality.CONTAINER_LOCAL);

  // Create local cluster
  final LocalMode.Controller lc = lma.getController();
  lc.setHeartbeatMonitoringEnabled(false);
  lc.runAsync();

  // Wait up to 40s for the consumer to finish consuming all the messages
  Assert.assertTrue("TIMEOUT: 40s ", latch.await(40000, TimeUnit.MILLISECONDS));

  // Check results
  Assert.assertEquals("Collections size", 1, collections.size());
  Assert.assertEquals("Tuple count", totalCount, collections.get(collector.inputPort.id).size());
  logger.debug(String.format("Number of emitted tuples: %d", collections.get(collector.inputPort.id).size()));

  p.close();
  lc.shutdown();

  // Kafka has a shutdown bug in the connector: make sure the Kafka client resources
  // have been cleaned up before shutting down the broker
  Thread.sleep(5000);
}
Example 19
Source File: SqsApplication.java From examples with Apache License 2.0
@Override
public void populateDAG(DAG dag, final Configuration conf)
{
  JMSStringInputOperator sqsInput = dag.addOperator("sqsIn", new JMSStringInputOperator());

  MyConnectionFactoryBuilder factoryBuilder = new MyConnectionFactoryBuilder();
  factoryBuilder.accessKeyId = conf.get("dt.operator.sqsIn.prop.aws.key.id");
  factoryBuilder.secretKey = conf.get("dt.operator.sqsIn.prop.aws.key.secret");
  factoryBuilder.awsRegionName = conf.get("dt.operator.sqsIn.prop.aws.region");
  factoryBuilder.endpoint = conf.get("dt.operator.sqsIn.prop.aws.endpoint");
  sqsInput.setConnectionFactoryBuilder(factoryBuilder);

  LineOutputOperator out = dag.addOperator("fileOut", new LineOutputOperator());

  dag.addStream("data", sqsInput.output, out.input);
}
Example 20
Source File: YahooFinanceApplication.java From attic-apex-malhar with Apache License 2.0
/**
 * Instantiate {@link org.apache.apex.malhar.lib.stream.ConsolidatorKeyVal} to send
 * Chart (Merge minute volume and minute high-low).
 * @param name Operator name
 * @param dag Application DAG graph.
 * @return ConsolidatorKeyVal instance.
 */
public ConsolidatorKeyVal<String, HighLow<Double>, Long, ?, ?, ?> getChartOperator(String name, DAG dag)
{
  ConsolidatorKeyVal<String, HighLow<Double>, Long, Object, Object, Object> oper =
      dag.addOperator(name, new ConsolidatorKeyVal<String, HighLow<Double>, Long, Object, Object, Object>());
  return oper;
}