com.datatorrent.api.DAG.Locality Java Examples
The following examples show how to use
com.datatorrent.api.DAG.Locality.
You can vote up the examples you find helpful or vote down those you don't,
and follow the link above each example to view it in the original project or source file. You may also check out the related API usage on the sidebar.
Example #1
Source File: OiOStreamTest.java From attic-apex-core with Apache License 2.0 | 6 votes |
// Verifies that a THREAD_LOCAL ("OiO") diamond passes logical-plan validation:
// one input operator fans out to two intermediate operators which re-converge
// on a single operator with two input ports, all streams thread-local.
@Test
public void validatePositiveOiOiOdiamond()
{
  logger.info("Checking the logic for sanity checking of OiO");
  LogicalPlan plan = new LogicalPlan();

  // Build the diamond: input -> {intermediate1, intermediate2} -> output.
  ThreadIdValidatingInputOperator inputOperator = plan.addOperator("inputOperator", new ThreadIdValidatingInputOperator());
  ThreadIdValidatingGenericIntermediateOperator intermediateOperator1 = plan.addOperator("intermediateOperator1", new ThreadIdValidatingGenericIntermediateOperator());
  ThreadIdValidatingGenericIntermediateOperator intermediateOperator2 = plan.addOperator("intermediateOperator2", new ThreadIdValidatingGenericIntermediateOperator());
  ThreadIdValidatingGenericOperatorWithTwoInputPorts outputOperator = plan.addOperator("outputOperator", new ThreadIdValidatingGenericOperatorWithTwoInputPorts());

  // Every stream is THREAD_LOCAL so the whole diamond must collapse into one thread.
  plan.addStream("OiOin", inputOperator.output, intermediateOperator1.input, intermediateOperator2.input).setLocality(Locality.THREAD_LOCAL);
  plan.addStream("OiOout1", intermediateOperator1.output, outputOperator.input).setLocality(Locality.THREAD_LOCAL);
  plan.addStream("OiOout2", intermediateOperator2.output, outputOperator.input2).setLocality(Locality.THREAD_LOCAL);

  // Validation must succeed for this topology; a constraint violation fails the test.
  try {
    plan.validate();
    Assert.assertTrue("OiOiO diamond validation", true);
  } catch (ConstraintViolationException ex) {
    Assert.fail("OIOIO diamond validation");
  }
}
Example #2
Source File: TwitterTopWordsApplication.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
/**
 * Wires the top-words pipeline: sample tweets, split into words, count unique
 * words per window, and keep a sliding top-N that is pushed to console output.
 *
 * @param dag  the application DAG being assembled
 * @param conf launch-time configuration (unused here)
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  TwitterSampleInput twitterFeed = dag.addOperator("TweetSampler", new TwitterSampleInput());
  TwitterStatusWordExtractor wordExtractor = dag.addOperator("WordExtractor", TwitterStatusWordExtractor.class);
  UniqueCounter<String> uniqueCounter = dag.addOperator("UniqueWordCounter", new UniqueCounter<String>());

  WindowedTopCounter<String> topCounts = dag.addOperator("TopCounter", new WindowedTopCounter<String>());
  topCounts.setDagWindowWidth(1);
  topCounts.setSlidingWindowWidth(120);

  dag.addStream("TweetStream", twitterFeed.text, wordExtractor.input);
  dag.addStream("TwittedWords", wordExtractor.output, uniqueCounter.data);
  // Keep the counter-to-top-counter hop container-local.
  dag.addStream("UniqueWordCounts", uniqueCounter.count, topCounts.input).setLocality(Locality.CONTAINER_LOCAL);

  TwitterTopCounterApplication.consoleOutput(dag, "topWords", topCounts.output, SNAPSHOT_SCHEMA, "word");
}
Example #3
Source File: ApplicationLocalLog.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
// Tails a local Apache access log, parses each line into a field map, enriches
// the "ip", "agent" and "time" fields via registered extractors, and prints
// the parsed records to the console.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  TailFsInputOperator log = dag.addOperator("log", new TailFsInputOperator());
  log.setDelimiter('\n');
  log.setFilePath("/var/log/apache2/access.log");

  ApacheLogParseMapOutputOperator parse = dag.addOperator("parse", new ApacheLogParseMapOutputOperator());
  GeoIPExtractor geoIPExtractor = new GeoIPExtractor();
  // Can't put this file in resources until licensing issue is straightened out
  geoIPExtractor.setDatabasePath("/home/david/GeoLiteCity.dat");
  parse.registerInformationExtractor("ip", geoIPExtractor);
  parse.registerInformationExtractor("agent", new UserAgentExtractor());
  TimestampExtractor timestampExtractor = new TimestampExtractor();
  // Format matches the Apache common-log timestamp, e.g. 10/Oct/2000:13:55:36 -0700.
  timestampExtractor.setDateFormatString("dd/MMM/yyyy:HH:mm:ss Z");
  parse.registerInformationExtractor("time", timestampExtractor);

  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

  dag.addStream("log-parse", log.output, parse.data);
  // CONTAINER_LOCAL keeps parser and console in the same container.
  dag.addStream("parse-console", parse.output, console.input).setLocality(Locality.CONTAINER_LOCAL);
}
Example #4
Source File: Application.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
@Override public void populateDAG(DAG dag, Configuration conf) { // Sample DAG with 2 operators // Replace this code with the DAG you want to build SeedEventGenerator seedGen = dag.addOperator("seedGen", SeedEventGenerator.class); seedGen.setSeedStart(1); seedGen.setSeedEnd(10); seedGen.addKeyData("x", 0, 10); seedGen.addKeyData("y", 0, 100); ConsoleOutputOperator cons = dag.addOperator("console", new ConsoleOutputOperator()); cons.setStringFormat("hello: %s"); dag.addStream("seeddata", seedGen.val_list, cons.input).setLocality(Locality.CONTAINER_LOCAL); }
Example #5
Source File: ManagedStateBenchmarkApp.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
// Benchmark DAG: a rate-adjustable generator feeding the managed-state store
// operator, with a shared stats listener attached to both ends.
// NOTE(review): reads/writes the instance fields storeOperator and timeRange,
// and calls createStore(conf) defined elsewhere in this class.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  TestStatsListener sl = new TestStatsListener();
  // Rate adjustment is opt-in via configuration.
  sl.adjustRate = conf.getBoolean("dt.ManagedStateBenchmark.adjustRate", false);

  TestGenerator gen = dag.addOperator("Generator", new TestGenerator());
  gen.setRange(timeRange);
  dag.setAttribute(gen, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)sl));

  storeOperator = new StoreOperator();
  storeOperator.setStore(createStore(conf));
  storeOperator.setTimeRange(timeRange);
  storeOperator = dag.addOperator("Store", storeOperator);
  dag.setAttribute(storeOperator, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)sl));

  dag.addStream("Events", gen.data, storeOperator.input).setLocality(Locality.CONTAINER_LOCAL);
}
Example #6
Source File: RubyOperatorBenchmarkApplication.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
// Benchmark DAG that pushes random key/value maps through a Ruby operator
// (invoking an inline "square" script) and into a console sink.
// NOTE(review): uses the instance field `locality` for the last two streams.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  RandomEventGenerator rand = dag.addOperator("rand", new RandomEventGenerator());
  rand.setMaxvalue(3000);
  rand.setTuplesBlast(120);

  RandomMapOutput randMap = dag.addOperator("randMap", new RandomMapOutput());
  randMap.setKey("val");

  // Ruby operator set up to square each incoming value and pass tuples through.
  RubyOperator ruby = dag.addOperator("ruby", new RubyOperator());
  String setupScript = "def square(val)\n";
  setupScript += "  return val*val\nend\n";
  ruby.addSetupScript(setupScript);
  ruby.setInvoke("square");
  ruby.setPassThru(true);

  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

  // Deepen the input queues on the two downstream ports for the benchmark.
  dag.getMeta(console).getMeta(console.input).getAttributes().put(PortContext.QUEUE_CAPACITY, QUEUE_CAPACITY);
  dag.getMeta(ruby).getMeta(ruby.result).getAttributes().put(PortContext.QUEUE_CAPACITY, QUEUE_CAPACITY);

  dag.addStream("rand_randMap", rand.integer_data, randMap.input).setLocality(Locality.THREAD_LOCAL);
  dag.addStream("randMap_ruby", randMap.map_data, ruby.inBindings).setLocality(locality);
  dag.addStream("ruby_console", ruby.result, console.input).setLocality(locality);
}
Example #7
Source File: CustomerServiceHbaseOutputOperatorTester.java From examples with Apache License 2.0 | 6 votes |
/**
 * Builds the DAG that writes customer-service tuples into HBase: a generator
 * feeds a caching pass-through operator, which feeds the HBase output operator.
 * NOTE(review): assigns the instance fields customerServiceGenerator and
 * cacheOperator as a side effect.
 *
 * @param dag  the DAG under construction
 * @param conf launch-time configuration (unused here)
 */
protected void populateOutputDAG(DAG dag, Configuration conf)
{
  customerServiceGenerator = new SpecificCustomerServiceGenerateOperator();
  customerServiceGenerator.capacity = CAPACITY;
  dag.addOperator("CustomerService-Generator", customerServiceGenerator);

  cacheOperator = new TupleCacheOperator<>("cacheOperatorData");
  dag.addOperator("Cache", cacheOperator);

  dag.addStream("GenerateStream", customerServiceGenerator.outputPort, cacheOperator.inputPort).setLocality(Locality.CONTAINER_LOCAL);

  {
    CustomerServiceHbaseOutputOperator hbaseOutput = new CustomerServiceHbaseOutputOperator();
    hbaseOutput.setStartOver(true); //remove old table and create new
    dag.addOperator("CustomerService-Output", hbaseOutput);

    dag.addStream("CustomerService", cacheOperator.outputPort, hbaseOutput.input).setLocality(Locality.CONTAINER_LOCAL);
  }
}
Example #8
Source File: TwitterDumpApplication.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
// Dumps sampled tweets straight into a MySQL table: twitter sample input
// feeding a status-to-database writer, container-local.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  //dag.setAttribute(DAGContext.APPLICATION_NAME, "TweetsDump");

  TwitterSampleInput twitterStream = dag.addOperator("TweetSampler", new TwitterSampleInput());

  //ConsoleOutputOperator dbWriter = dag.addOperator("DatabaseWriter", new ConsoleOutputOperator());

  Status2Database dbWriter = dag.addOperator("DatabaseWriter", new Status2Database());
  // NOTE(review): hard-coded connection details; presumably meant to be
  // overridden via configuration in real deployments.
  dbWriter.getStore().setDatabaseDriver("com.mysql.jdbc.Driver");
  dbWriter.getStore().setDatabaseUrl("jdbc:mysql://node6.morado.com:3306/twitter");
  dbWriter.getStore().setConnectionProperties("user:twitter");

  dag.addStream("Statuses", twitterStream.status, dbWriter.input).setLocality(Locality.CONTAINER_LOCAL);
}
Example #9
Source File: StreamingContainerManagerTest.java From attic-apex-core with Apache License 2.0 | 6 votes |
// Helper: builds a two-operator DAG whose downstream operator is partitioned
// into 2, applies the given stream locality, and checks that the physical plan
// still fits into a single container (3 physical operators), then runs it in a
// local cluster for up to 5 s.
private void testDownStreamPartition(Locality locality) throws Exception
{
  TestGeneratorInputOperator o1 = dag.addOperator("o1", TestGeneratorInputOperator.class);
  GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
  dag.setOperatorAttribute(o2, OperatorContext.PARTITIONER, new StatelessPartitioner<GenericTestOperator>(2));
  dag.addStream("o1Output1", o1.outport, o2.inport1).setLocality(locality);

  int maxContainers = 5;
  dag.setAttribute(LogicalPlan.CONTAINERS_MAX_COUNT, maxContainers);
  dag.setAttribute(OperatorContext.STORAGE_AGENT, new StramTestSupport.MemoryStorageAgent());
  dag.validate();

  PhysicalPlan plan = new PhysicalPlan(dag, new TestPlanContext());
  Assert.assertEquals("number of containers", 1, plan.getContainers().size());
  PTContainer container1 = plan.getContainers().get(0);
  // o1 plus the two partitions of o2.
  Assert.assertEquals("number operators " + container1, 3, container1.getOperators().size());

  StramLocalCluster slc = new StramLocalCluster(dag);
  slc.run(5000);
}
Example #10
Source File: ProcessingModeTests.java From attic-apex-core with Apache License 2.0 | 6 votes |
// Recovery test for a linear, container-local pipeline: both operators simulate
// failure and the cluster must recover under the configured processing mode.
// NOTE(review): uses the instance fields dag, maxTuples and processingMode,
// plus static collections on CollectorOperator.
public void testLinearInlineOperatorsRecovery() throws Exception
{
  RecoverableInputOperator.initGenTuples();
  CollectorOperator.collection.clear();
  CollectorOperator.duplicates.clear();

  dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
  dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
  // Force both operators into the same (single) container.
  dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 1);

  RecoverableInputOperator rip = dag.addOperator("LongGenerator", RecoverableInputOperator.class);
  rip.setMaximumTuples(maxTuples);
  rip.setSimulateFailure(true);

  CollectorOperator cm = dag.addOperator("LongCollector", CollectorOperator.class);
  cm.setSimulateFailure(true);
  dag.getMeta(cm).getAttributes().put(OperatorContext.PROCESSING_MODE, processingMode);

  dag.addStream("connection", rip.output, cm.input).setLocality(Locality.CONTAINER_LOCAL);

  StramLocalCluster lc = new StramLocalCluster(dag);
  lc.run();
}
Example #11
Source File: LogicalPlanTest.java From attic-apex-core with Apache License 2.0 | 6 votes |
// Verifies that THREAD_LOCAL locality is rejected for an operator with more
// than one input stream, and accepted again once the locality is cleared.
@Test
public void testLocalityValidation()
{
  TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
  GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);

  // One THREAD_LOCAL input stream is valid.
  StreamMeta s1 = dag.addStream("input1.outport", input1.outport, o1.inport1).setLocality(Locality.THREAD_LOCAL);
  dag.validate();

  // A second input stream makes the THREAD_LOCAL setting invalid.
  TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
  dag.addStream("input2.outport", input2.outport, o1.inport2);

  try {
    dag.validate();
    Assert.fail("Exception expected for " + o1);
  } catch (ValidationException ve) {
    Assert.assertThat("", ve.getMessage(), RegexMatcher.matches("Locality THREAD_LOCAL invalid for operator .* with multiple input streams .*"));
  }

  // Clearing the locality restores validity.
  s1.setLocality(null);
  dag.validate();
}
Example #12
Source File: JdbcHDFSApp.java From examples with Apache License 2.0 | 6 votes |
// Reads POJOs from a JDBC table and writes them line-by-line to files,
// keeping both operators in the same container.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  JdbcPOJOInputOperator jdbcInputOperator = dag.addOperator("JdbcInput", new JdbcPOJOInputOperator());
  /**
   * The class given below can be updated to the user defined class based on
   * input table schema The addField infos method needs to be updated
   * accordingly This line can be commented and class can be set from the
   * properties file
   */
  // dag.setOutputPortAttribute(jdbcInputOperator.outputPort, Context.PortContext.TUPLE_CLASS, PojoEvent.class);
  jdbcInputOperator.setFieldInfos(addFieldInfos());

  JdbcStore store = new JdbcStore();
  jdbcInputOperator.setStore(store);

  FileLineOutputOperator fileOutput = dag.addOperator("FileOutputOperator", new FileLineOutputOperator());

  dag.addStream("POJO's", jdbcInputOperator.outputPort, fileOutput.input).setLocality(Locality.CONTAINER_LOCAL);
}
Example #13
Source File: OiOStreamTest.java From attic-apex-core with Apache License 2.0 | 6 votes |
// Like validatePositiveOiOiOdiamond, but each branch of the diamond contains a
// chain of two intermediate operators; all streams THREAD_LOCAL. Validation
// must still succeed.
@Test
public void validatePositiveOiOiOExtendeddiamond()
{
  logger.info("Checking the logic for sanity checking of OiO");
  LogicalPlan plan = new LogicalPlan();

  ThreadIdValidatingInputOperator inputOperator = plan.addOperator("inputOperator", new ThreadIdValidatingInputOperator());
  ThreadIdValidatingGenericIntermediateOperator intermediateOperator1 = plan.addOperator("intermediateOperator1", new ThreadIdValidatingGenericIntermediateOperator());
  ThreadIdValidatingGenericIntermediateOperator intermediateOperator2 = plan.addOperator("intermediateOperator2", new ThreadIdValidatingGenericIntermediateOperator());
  ThreadIdValidatingGenericIntermediateOperator intermediateOperator3 = plan.addOperator("intermediateOperator3", new ThreadIdValidatingGenericIntermediateOperator());
  ThreadIdValidatingGenericIntermediateOperator intermediateOperator4 = plan.addOperator("intermediateOperator4", new ThreadIdValidatingGenericIntermediateOperator());
  ThreadIdValidatingGenericOperatorWithTwoInputPorts outputOperator = plan.addOperator("outputOperator", new ThreadIdValidatingGenericOperatorWithTwoInputPorts());

  // Branch A: input -> 1 -> 2 -> output.input; branch B: input -> 3 -> 4 -> output.input2.
  plan.addStream("OiOin", inputOperator.output, intermediateOperator1.input, intermediateOperator3.input).setLocality(Locality.THREAD_LOCAL);
  plan.addStream("OiOIntermediate1", intermediateOperator1.output, intermediateOperator2.input).setLocality(Locality.THREAD_LOCAL);
  plan.addStream("OiOIntermediate2", intermediateOperator3.output, intermediateOperator4.input).setLocality(Locality.THREAD_LOCAL);
  plan.addStream("OiOout1", intermediateOperator2.output, outputOperator.input).setLocality(Locality.THREAD_LOCAL);
  plan.addStream("OiOout2", intermediateOperator4.output, outputOperator.input2).setLocality(Locality.THREAD_LOCAL);

  try {
    plan.validate();
    Assert.assertTrue("OiOiO extended diamond validation", true);
  } catch (ConstraintViolationException ex) {
    Assert.fail("OIOIO extended diamond validation");
  }
}
Example #14
Source File: UniqueKeyValCountExample.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
/**
 * Builds a pipeline that generates random key-value pairs, counts unique
 * values within each window, converts the count map into key/value pairs, and
 * prints them on the console.
 *
 * @param dag     the application DAG being assembled
 * @param entries launch-time configuration (unused here)
 */
@Override
public void populateDAG(DAG dag, Configuration entries)
{
  /* Generate random key-value pairs */
  RandomDataGenerator randGen = dag.addOperator("randomgen", new RandomDataGenerator());

  /* Initialize with three partitions to start with */
  UniqueCounter<KeyValPair<String, Object>> uniqCount = dag.addOperator("uniqevalue", new UniqueCounter<KeyValPair<String, Object>>());
  MapToKeyHashValuePairConverter<KeyValPair<String, Object>, Integer> converter = dag.addOperator("converter", new MapToKeyHashValuePairConverter());
  uniqCount.setCumulative(false);
  // FIX: the partitioner was attached to randGen, although its generic type is
  // StatelessPartitioner<UniqueCounter<...>> and the comment above says the
  // counter should start with three partitions — attach it to uniqCount instead.
  dag.setAttribute(uniqCount, Context.OperatorContext.PARTITIONER, new StatelessPartitioner<UniqueCounter<KeyValPair<String, Object>>>(3));

  ConsoleOutputOperator output = dag.addOperator("output", new ConsoleOutputOperator());

  dag.addStream("datain", randGen.outPort, uniqCount.data);
  dag.addStream("convert", uniqCount.count, converter.input).setLocality(Locality.THREAD_LOCAL);
  dag.addStream("consoutput", converter.output, output.input);
}
Example #15
Source File: WindowedOperatorBenchmarkApp.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
// Connects the benchmark generator directly to the windowed operator with a
// container-local stream (single-container benchmark path).
@SuppressWarnings("unchecked")
@Override
protected void connectGeneratorToWindowedOperator(DAG dag, WindowedGenerator generator, MyWindowedOperator windowedOperator)
{
  dag.addStream("Data", generator.data, windowedOperator.input).setLocality(Locality.CONTAINER_LOCAL);
}
Example #16
Source File: Application2.java From attic-apex-core with Apache License 2.0 | 5 votes |
@Override public void populateDAG(DAG dag, Configuration conf) { // Sample DAG with 2 operators // Replace this code with the DAG you want to build RandomNumberGenerator rand = dag.addOperator("rand", RandomNumberGenerator.class); StdoutOperator stdout = dag.addOperator("stdout", new StdoutOperator()); dag.addStream("data", rand.out, stdout.in).setLocality(Locality.CONTAINER_LOCAL); }
Example #17
Source File: Application.java From attic-apex-core with Apache License 2.0 | 5 votes |
// Sample DAG identical to the basic rand->stdout app, plus an intentionally
// unconnected test module to exercise module handling.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Sample DAG with 2 operators
  // Replace this code with the DAG you want to build
  RandomNumberGenerator rand = dag.addOperator("rand", RandomNumberGenerator.class);
  StdoutOperator stdout = dag.addOperator("stdout", new StdoutOperator());

  // This module will be added to dag for testing purpose but will not be connected in a dag.
  Module testModule = dag.addModule("testModule", com.example.mydtapp.TestModule.class);

  dag.addStream("data", rand.out, stdout.in).setLocality(Locality.CONTAINER_LOCAL);
}
Example #18
Source File: Application.java From attic-apex-core with Apache License 2.0 | 5 votes |
@Override public void populateDAG(DAG dag, Configuration conf) { // Sample DAG with 2 operators // Replace this code with the DAG you want to build RandomNumberGenerator randomGenerator = dag.addOperator("randomGenerator", RandomNumberGenerator.class); randomGenerator.setNumTuples(500); ConsoleOutputOperator cons = dag.addOperator("console", new ConsoleOutputOperator()); dag.addStream("randomData", randomGenerator.out, cons.input).setLocality(Locality.CONTAINER_LOCAL); }
Example #19
Source File: KeyedWindowedOperatorBenchmarkApp.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
// Connects the keyed benchmark generator to the keyed windowed operator with a
// container-local stream (single-container benchmark path).
@SuppressWarnings("unchecked")
@Override
protected void connectGeneratorToWindowedOperator(DAG dag, KeyedWindowedGenerator generator, KeyedWindowedOperatorBenchmarkApp.MyKeyedWindowedOperator windowedOperator)
{
  dag.addStream("Data", generator.data, windowedOperator.input).setLocality(Locality.CONTAINER_LOCAL);
}
Example #20
Source File: PhysicalPlanTest.java From attic-apex-core with Apache License 2.0 | 5 votes |
// Verifies vcore aggregation when CONTAINER_LOCAL and THREAD_LOCAL streams
// force six operators into one container: the container's required vcores
// must come out to 12 for the configured per-operator vcore attributes.
@Test
public void testContainerCores()
{
  LogicalPlan dag = new LogicalPlan();
  dag.setAttribute(OperatorContext.STORAGE_AGENT, new StramTestSupport.MemoryStorageAgent());
  GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
  GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
  GenericTestOperator o3 = dag.addOperator("o3", GenericTestOperator.class);
  GenericTestOperator o4 = dag.addOperator("o4", GenericTestOperator.class);
  GenericTestOperator o5 = dag.addOperator("o5", GenericTestOperator.class);
  GenericTestOperator o6 = dag.addOperator("o6", GenericTestOperator.class);

  dag.setOperatorAttribute(o1,OperatorContext.VCORES,1);
  dag.setOperatorAttribute(o2,OperatorContext.VCORES,2);
  dag.setOperatorAttribute(o3,OperatorContext.VCORES,3);
  dag.setOperatorAttribute(o4,OperatorContext.VCORES,4);
  dag.setOperatorAttribute(o5,OperatorContext.VCORES,5);
  dag.setOperatorAttribute(o6,OperatorContext.VCORES,6);

  // Chain o1..o6 into a single container via local stream settings.
  dag.addStream("o1.outport1", o1.outport1, o2.inport1).setLocality(Locality.CONTAINER_LOCAL);
  dag.addStream("o2.outport1", o2.outport1, o3.inport1, o4.inport1).setLocality(Locality.THREAD_LOCAL);
  dag.addStream("o3.output1", o3.outport1, o5.inport1).setLocality(Locality.THREAD_LOCAL);
  dag.addStream("o4.output1", o4.outport1, o5.inport2).setLocality(Locality.THREAD_LOCAL);
  dag.addStream("o5.output1", o5.outport1, o6.inport1).setLocality(Locality.CONTAINER_LOCAL);

  dag.setAttribute(LogicalPlan.CONTAINERS_MAX_COUNT, 2);

  PhysicalPlan plan = new PhysicalPlan(dag, new TestPlanContext());
  Assert.assertEquals("number of containers", 1, plan.getContainers().size());
  Assert.assertEquals("vcores container 1 is 12", 12, plan.getContainers().get(0).getRequiredVCores());
}
Example #21
Source File: InputOperatorTest.java From attic-apex-core with Apache License 2.0 | 5 votes |
// End-to-end check of an input operator emitting even and odd integers on two
// ports into a collector; verifies both ports receive tuples and their counts
// stay within one of each other.
@Test
public void testSomeMethod() throws Exception
{
  LogicalPlan dag = new LogicalPlan();
  String testWorkDir = new File("target").getAbsolutePath();
  dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testWorkDir, null));
  EvenOddIntegerGeneratorInputOperator generator = dag.addOperator("NumberGenerator", EvenOddIntegerGeneratorInputOperator.class);
  final CollectorModule<Number> collector = dag.addOperator("NumberCollector", new CollectorModule<Number>());
  dag.addStream("EvenIntegers", generator.even, collector.even).setLocality(Locality.CONTAINER_LOCAL);
  dag.addStream("OddIntegers", generator.odd, collector.odd).setLocality(Locality.CONTAINER_LOCAL);

  final StramLocalCluster lc = new StramLocalCluster(dag);
  lc.setHeartbeatMonitoringEnabled(false);
  lc.runAsync();

  // Wait (up to 2 s) until more than two tuples have been collected, then stop.
  WaitCondition c = new WaitCondition()
  {
    @Override
    public boolean isComplete()
    {
      return tupleCount.get() > 2;
    }
  };
  StramTestSupport.awaitCompletion(c, 2000);
  lc.shutdown();

  Assert.assertEquals("Collections size", 2, collections.size());
  Assert.assertFalse("Zero tuple count", collections.get(collector.even.id).isEmpty() && collections.get(collector.odd.id).isEmpty());
  // Even/odd streams must stay balanced within one tuple.
  Assert.assertTrue("Tuple count", collections.get(collector.even.id).size() - collections.get(collector.odd.id).size() <= 1);
}
Example #22
Source File: KafkaInputBenchmark.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
// Benchmark DAG: a Kafka input operator (high-level or simple consumer,
// selected via the "kafka.consumertype" property) feeding a blackhole
// collector, container-local with parallel partitioning on the sink.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  dag.setAttribute(DAG.APPLICATION_NAME, "KafkaInputOperatorPartitionDemo");
  BenchmarkKafkaInputOperator bpkio = new BenchmarkKafkaInputOperator();

  String type = conf.get("kafka.consumertype", "simple");
  KafkaConsumer consumer = null;
  if (type.equals("highlevel")) {
    // Create template high-level consumer
    Properties props = new Properties();
    props.put("group.id", "main_group");
    props.put("auto.offset.reset", "smallest");
    consumer = new HighlevelKafkaConsumer(props);
  } else {
    // topic is set via property file
    consumer = new SimpleKafkaConsumer(null, 10000, 100000, "test_kafka_autop_client", null);
  }

  bpkio.setZookeeper(conf.get("dt.kafka.zookeeper"));
  bpkio.setInitialPartitionCount(1);
  //bpkio.setTuplesBlast(1024 * 1024);
  bpkio.setConsumer(consumer);
  bpkio = dag.addOperator("KafkaBenchmarkConsumer", bpkio);

  CollectorModule cm = dag.addOperator("DataBlackhole", CollectorModule.class);
  dag.addStream("end", bpkio.oport, cm.inputPort).setLocality(Locality.CONTAINER_LOCAL);
  // Collector partitions track the input operator's partitions.
  dag.setInputPortAttribute(cm.inputPort, PortContext.PARTITION_PARALLEL, true);
  dag.setAttribute(bpkio, OperatorContext.COUNTERS_AGGREGATOR, new KafkaConsumer.KafkaMeterStatsAggregator());
  // dag.setAttribute(bpkio, OperatorContext.STATS_LISTENER, KafkaMeterStatsListener.class);
}
Example #23
Source File: HostLocalTest.java From attic-apex-core with Apache License 2.0 | 5 votes |
// Verifies that when a container needs more vcores (3) than any reported node
// offers (2), the resource request handler returns no host.
@Test
public void testUnavailableResources()
{
  LogicalPlan dag = new LogicalPlan();
  dag.getAttributes().put(com.datatorrent.api.Context.DAGContext.APPLICATION_PATH, new File("target", HostLocalTest.class.getName()).getAbsolutePath());
  dag.setAttribute(OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());

  GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
  dag.getMeta(o1).getAttributes().put(OperatorContext.LOCALITY_HOST, "host2");
  GenericTestOperator partitioned = dag.addOperator("partitioned", GenericTestOperator.class);
  dag.addStream("o1_outport1", o1.outport1, partitioned.inport1).setLocality(Locality.CONTAINER_LOCAL);
  dag.setOperatorAttribute(o1, OperatorContext.MEMORY_MB, 256);
  // Combined vcores (2 + 1 = 3) exceed what each node below advertises.
  dag.setOperatorAttribute(o1, OperatorContext.VCORES, 2);
  dag.setOperatorAttribute(partitioned, OperatorContext.VCORES, 1);

  StreamingContainerManager scm = new StreamingContainerManager(dag);
  ResourceRequestHandler rr = new ResourceRequestHandler();

  int containerMem = 1000;
  Map<String, NodeReport> nodeReports = Maps.newHashMap();
  NodeReport nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host1", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
  nodeReports.put(nr.getNodeId().getHost(), nr);
  nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host2", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
  nodeReports.put(nr.getNodeId().getHost(), nr);

  // set resources
  rr.updateNodeReports(Lists.newArrayList(nodeReports.values()));
  Assert.assertEquals("number of containers is 1", 1, scm.containerStartRequests.size());
  for (ContainerStartRequest csr : scm.containerStartRequests) {
    String host = rr.getHost(csr, true);
    Assert.assertEquals("number of vcores", 3, csr.container.getRequiredVCores());
    Assert.assertNull("Host is null", host);
  }
}
Example #24
Source File: TwitterTopCounterApplication.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
// Top-URL pipeline: sample tweets, extract URLs, count unique URLs over a
// 5-minute sliding window (600 app windows, sliding by 1), keep the top 10,
// and publish via consoleOutput.
// NOTE(review): uses the instance field `locality` for the TwittedURLs stream.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Setup the operator to get the data from twitter sample stream injected into the system.
  TwitterSampleInput twitterFeed = new TwitterSampleInput();
  twitterFeed = dag.addOperator("TweetSampler", twitterFeed);

  // Setup the operator to get the URLs extracted from the twitter statuses
  TwitterStatusURLExtractor urlExtractor = dag.addOperator("URLExtractor", TwitterStatusURLExtractor.class);

  // Setup a node to count the unique urls within a window.
  UniqueCounter<String> uniqueCounter = dag.addOperator("UniqueURLCounter", new UniqueCounter<String>());
  // Get the aggregated url counts and count them over last 5 mins.
  dag.setAttribute(uniqueCounter, Context.OperatorContext.APPLICATION_WINDOW_COUNT, 600);
  dag.setAttribute(uniqueCounter, Context.OperatorContext.SLIDE_BY_WINDOW_COUNT, 1);

  WindowedTopCounter<String> topCounts = dag.addOperator("TopCounter", new WindowedTopCounter<String>());
  topCounts.setTopCount(10);
  topCounts.setSlidingWindowWidth(1);
  topCounts.setDagWindowWidth(1);

  // Feed the statuses from feed into the input of the url extractor.
  dag.addStream("TweetStream", twitterFeed.status, urlExtractor.input).setLocality(Locality.CONTAINER_LOCAL);
  // Start counting the urls coming out of URL extractor
  dag.addStream("TwittedURLs", urlExtractor.url, uniqueCounter.data).setLocality(locality);
  // Count unique urls
  dag.addStream("UniqueURLCounts", uniqueCounter.count, topCounts.input);

  consoleOutput(dag, "topURLs", topCounts.output, SNAPSHOT_SCHEMA, "url");
}
Example #25
Source File: HostLocalTest.java From attic-apex-core with Apache License 2.0 | 5 votes |
// Verifies that a CONTAINER_LOCAL pair with a LOCALITY_HOST of "host2" is
// assigned to host2 by the resource request handler.
@Test
public void testContainerLocal()
{
  LogicalPlan dag = new LogicalPlan();
  dag.getAttributes().put(com.datatorrent.api.Context.DAGContext.APPLICATION_PATH, new File("target", HostLocalTest.class.getName()).getAbsolutePath());
  dag.setAttribute(OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());

  GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
  // Pin o1 (and its container-local partner) to host2.
  dag.getMeta(o1).getAttributes().put(OperatorContext.LOCALITY_HOST, "host2");
  GenericTestOperator partitioned = dag.addOperator("partitioned", GenericTestOperator.class);
  dag.addStream("o1_outport1", o1.outport1, partitioned.inport1).setLocality(Locality.CONTAINER_LOCAL);
  dag.setOperatorAttribute(o1, OperatorContext.MEMORY_MB, 256);
  dag.setOperatorAttribute(partitioned,OperatorContext.MEMORY_MB,256);

  StreamingContainerManager scm = new StreamingContainerManager(dag);
  ResourceRequestHandler rr = new ResourceRequestHandler();

  int containerMem = 1000;
  Map<String, NodeReport> nodeReports = Maps.newHashMap();
  NodeReport nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host1", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
  nodeReports.put(nr.getNodeId().getHost(), nr);
  nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host2", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
  nodeReports.put(nr.getNodeId().getHost(), nr);

  // set resources
  rr.updateNodeReports(Lists.newArrayList(nodeReports.values()));
  Assert.assertEquals("number of containers is 1", 1, scm.containerStartRequests.size());
  for (ContainerStartRequest csr : scm.containerStartRequests) {
    String host = rr.getHost(csr, true);
    csr.container.host = host;
    Assert.assertEquals("Hosts set to host2", "host2", host);
  }
}
Example #26
Source File: Application.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
/**
 * File-to-Kafka pipeline: reads lines from files and publishes each line to a
 * Kafka topic, keeping both operators in the same container.
 *
 * @param dag  the application DAG being assembled
 * @param conf launch-time configuration (unused here)
 */
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  KafkaSinglePortOutputOperator<String,String> kafkaWriter =
      dag.addOperator("kafkaOutput", new KafkaSinglePortOutputOperator<String,String>());
  LineByLineFileInputOperator fileReader =
      dag.addOperator("lines", LineByLineFileInputOperator.class);

  dag.addStream("data", fileReader.output, kafkaWriter.inputPort).setLocality(Locality.CONTAINER_LOCAL);
}
Example #27
Source File: HostLocalTest.java From attic-apex-core with Apache License 2.0 | 5 votes |
// Verifies that a NODE_LOCAL stream whose downstream operator is pinned to
// "host1" causes the resource request handler to place the container on host1.
@Test
public void testNodeLocal()
{
  LogicalPlan dag = new LogicalPlan();
  dag.getAttributes().put(com.datatorrent.api.Context.DAGContext.APPLICATION_PATH, new File("target", HostLocalTest.class.getName()).getAbsolutePath());
  dag.setAttribute(OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());

  GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
  dag.setOperatorAttribute(o1,OperatorContext.MEMORY_MB,256);
  GenericTestOperator partitioned = dag.addOperator("partitioned", GenericTestOperator.class);
  dag.setOperatorAttribute(partitioned,OperatorContext.MEMORY_MB,256);
  // Pin the downstream operator to host1; NODE_LOCAL pulls o1 along with it.
  dag.getMeta(partitioned).getAttributes().put(OperatorContext.LOCALITY_HOST, "host1");
  dag.addStream("o1_outport1", o1.outport1, partitioned.inport1).setLocality(Locality.NODE_LOCAL);

  StreamingContainerManager scm = new StreamingContainerManager(dag);
  ResourceRequestHandler rr = new ResourceRequestHandler();

  int containerMem = 1000;
  Map<String, NodeReport> nodeReports = Maps.newHashMap();
  NodeReport nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host1", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
  nodeReports.put(nr.getNodeId().getHost(), nr);
  nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host2", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
  nodeReports.put(nr.getNodeId().getHost(), nr);

  // set resources
  rr.updateNodeReports(Lists.newArrayList(nodeReports.values()));
  for (ContainerStartRequest csr : scm.containerStartRequests) {
    String host = rr.getHost(csr, true);
    csr.container.host = host;
    Assert.assertEquals("Hosts set to host1", "host1", host);
  }
}
Example #28
Source File: OiOStreamTest.java From attic-apex-core with Apache License 2.0 | 5 votes |
@Test
public void validatePositiveOiOiOdiamondWithCores()
{
  // Builds an extended OiO (thread-local) diamond:
  //   input -> (op1 -> op2) and (op3 -> op4) -> output
  // with every stream THREAD_LOCAL, then checks that validation passes and
  // that the physical plan collapses into a single container whose vcore
  // requirement matches the thread-local core accounting (expected 5).
  logger.info("Checking the logic for sanity checking of OiO");
  LogicalPlan plan = new LogicalPlan();
  ThreadIdValidatingInputOperator inputOperator = plan.addOperator("inputOperator", new ThreadIdValidatingInputOperator());
  ThreadIdValidatingGenericIntermediateOperator intermediateOperator1 = plan.addOperator("intermediateOperator1", new ThreadIdValidatingGenericIntermediateOperator());
  ThreadIdValidatingGenericIntermediateOperator intermediateOperator2 = plan.addOperator("intermediateOperator2", new ThreadIdValidatingGenericIntermediateOperator());
  ThreadIdValidatingGenericIntermediateOperator intermediateOperator3 = plan.addOperator("intermediateOperator3", new ThreadIdValidatingGenericIntermediateOperator());
  ThreadIdValidatingGenericIntermediateOperator intermediateOperator4 = plan.addOperator("intermediateOperator4", new ThreadIdValidatingGenericIntermediateOperator());
  ThreadIdValidatingGenericOperatorWithTwoInputPorts outputOperator = plan.addOperator("outputOperator", new ThreadIdValidatingGenericOperatorWithTwoInputPorts());

  plan.addStream("OiOin", inputOperator.output, intermediateOperator1.input, intermediateOperator3.input).setLocality(Locality.THREAD_LOCAL);
  plan.addStream("OiOIntermediate1", intermediateOperator1.output, intermediateOperator2.input).setLocality(Locality.THREAD_LOCAL);
  plan.addStream("OiOIntermediate2", intermediateOperator3.output, intermediateOperator4.input).setLocality(Locality.THREAD_LOCAL);
  plan.addStream("OiOout1", intermediateOperator2.output, outputOperator.input).setLocality(Locality.THREAD_LOCAL);
  plan.addStream("OiOout2", intermediateOperator4.output, outputOperator.input2).setLocality(Locality.THREAD_LOCAL);

  plan.setOperatorAttribute(inputOperator, OperatorContext.VCORES, 1);
  plan.setOperatorAttribute(intermediateOperator1, OperatorContext.VCORES, 1);
  plan.setOperatorAttribute(intermediateOperator2, OperatorContext.VCORES, 2);
  plan.setOperatorAttribute(intermediateOperator3, OperatorContext.VCORES, 3);
  plan.setOperatorAttribute(intermediateOperator4, OperatorContext.VCORES, 5);
  plan.setAttribute(OperatorContext.STORAGE_AGENT, new StramTestSupport.MemoryStorageAgent());

  try {
    plan.validate();
  } catch (ConstraintViolationException ex) {
    // Include the violation detail so a failure is diagnosable (the original
    // message dropped it entirely).
    Assert.fail("OIOIO extended diamond validation: " + ex);
  }

  PhysicalPlan physicalPlan = new PhysicalPlan(plan, new TestPlanContext());
  // assertEquals instead of assertTrue(5 == x): on failure it reports
  // expected vs actual rather than a bare "false".
  Assert.assertEquals("number of containers", 1, physicalPlan.getContainers().size());
  Assert.assertEquals("number of vcores", 5, (int)physicalPlan.getContainers().get(0).getRequiredVCores());
}
Example #29
Source File: Application.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
@Override public void populateDAG(DAG dag, Configuration conf) { locality = Locality.CONTAINER_LOCAL; dag.getAttributes().put(DAG.STREAMING_WINDOW_SIZE_MILLIS, 1000); SimpleSinglePortZeroMQPullStringInputOperator input = dag.addOperator("input", new SimpleSinglePortZeroMQPullStringInputOperator(addr)); ApacheLogParseOperator parse = dag.addOperator("parse", new ApacheLogParseOperator()); UniqueCounter<String> ipAddrCount = dag.addOperator("ipAddrCount", new UniqueCounter<String>()); UniqueCounter<String> urlCount = dag.addOperator("urlCount", new UniqueCounter<String>()); UniqueCounter<String> httpStatusCount = dag.addOperator("httpStatusCount", new UniqueCounter<String>()); Sum<Long> numOfBytesSum = dag.addOperator("numOfBytesSum", new Sum<Long>()); //ArrayListAggregator<Long> agg = dag.addOperator("agg", new ArrayListAggregator<Long>()); //dag.getOperatorWrapper(agg).getAttributes().put(OperatorContext.APPLICATION_WINDOW_COUNT, 3); dag.getMeta(numOfBytesSum).getAttributes().put(OperatorContext.APPLICATION_WINDOW_COUNT, 3); dag.addStream("input-parse", input.outputPort, parse.data).setLocality(locality); dag.addStream("parse-ipAddrCount", parse.outputIPAddress, ipAddrCount.data).setLocality(locality); dag.addStream("parse-urlCount", parse.outputUrl, urlCount.data).setLocality(locality); dag.addStream("parse-httpStatusCount", parse.outputStatusCode, httpStatusCount.data).setLocality(locality); dag.addStream("parse-numOfBytesSum", parse.outputBytes, numOfBytesSum.data).setLocality(locality); //dag.addStream("numOfBytesSum-agg", numOfBytesSum.sumLong, agg.input); ConsoleOutputOperator consoleOperator1 = dag.addOperator("console1", new ConsoleOutputOperator()); ConsoleOutputOperator consoleOperator2 = dag.addOperator("console2", new ConsoleOutputOperator()); ConsoleOutputOperator consoleOperator3 = dag.addOperator("console3", new ConsoleOutputOperator()); ConsoleOutputOperator consoleOperator4 = dag.addOperator("console4", new ConsoleOutputOperator()); 
dag.addStream("ipAddrCount-console", ipAddrCount.count, consoleOperator1.input); dag.addStream("urlCount-console", urlCount.count, consoleOperator2.input); dag.addStream("httpStatusCount-console", httpStatusCount.count, consoleOperator3.input); //dag.addStream("agg-console", agg.output, consoleOperator4.input); dag.addStream("numOfBytesSum-console", numOfBytesSum.sumLong, consoleOperator4.input); }
Example #30
Source File: LogicalPlanModificationTest.java From attic-apex-core with Apache License 2.0 | 5 votes |
@Test
public void testRemoveOperator2()
{
  // Removes one of two CONTAINER_LOCAL sinks (o2) of a fan-out stream via
  // PlanModifier and verifies that the stream itself survives with the
  // remaining sink (o3), and that o2's physical operators are undeployed.
  // NOTE(review): `dag` is a field of the enclosing test class.
  GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
  OperatorMeta o1Meta = dag.getMeta(o1);
  GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
  OperatorMeta o2Meta = dag.getMeta(o2);
  GenericTestOperator o3 = dag.addOperator("o3", GenericTestOperator.class);
  OperatorMeta o3Meta = dag.getMeta(o3);
  // One stream fanning out from o1 to both o2 and o3, container-local.
  LogicalPlan.StreamMeta s1 = dag.addStream("o1.outport1", o1.outport1, o2.inport1, o3.inport1).setLocality(Locality.CONTAINER_LOCAL);

  TestPlanContext ctx = new TestPlanContext();
  dag.setAttribute(OperatorContext.STORAGE_AGENT, ctx);
  PhysicalPlan plan = new PhysicalPlan(dag, ctx);
  // Discard deploy/undeploy events produced by initial plan construction so
  // later assertions see only the effect of the modification.
  ctx.deploy.clear();
  ctx.undeploy.clear();

  // Baseline: everything container-local -> 1 container, 3 physical
  // operators, and the fan-out stream has 2 sinks.
  Assert.assertEquals("containers " + plan.getContainers(), 1, plan.getContainers().size());
  Assert.assertEquals("physical operators " + plan.getAllOperators(), 3, plan.getAllOperators().size());
  Assert.assertEquals("sinks s1 " + s1.getSinks(), 2, s1.getSinks().size());

  List<PTOperator> o2PhysicalOpers = plan.getOperators(o2Meta);
  Assert.assertEquals("instances " + o2Meta, 1, o2PhysicalOpers.size());

  PlanModifier pm = new PlanModifier(plan);
  pm.removeOperator(o2Meta.getName()); // remove operator w/o removing the stream
  pm.applyChanges(ctx);

  // The stream stays, now with only o3 as sink; o2's physical operators
  // must have been undeployed.
  Assert.assertEquals("sinks s1 " + s1.getSinks(), 1, s1.getSinks().size());
  Assert.assertTrue("undeploy " + ctx.undeploy, ctx.undeploy.containsAll(o2PhysicalOpers));

  Set<PTOperator> expDeploy = Sets.newHashSet();
  // TODO: container local operators should be included in undeploy/deploy
  //expDeploy.addAll(plan.getOperators(o1Meta));
  //expDeploy.addAll(plan.getOperators(o3Meta));
  Assert.assertEquals("deploy " + ctx.deploy, ctx.deploy, expDeploy);

  // Logical plan now holds one stream and the two surviving operators.
  Assert.assertEquals("streams " + dag.getAllStreams(), 1, dag.getAllStreams().size());
  Assert.assertEquals("operators " + dag.getAllOperators(), 2, dag.getAllOperators().size());
  Assert.assertTrue("operators " + dag.getAllOperators(), dag.getAllOperators().containsAll(Sets.newHashSet(o1Meta,
      o3Meta)));
}