Java Code Examples for org.apache.flink.api.java.operators.IterativeDataSet#map()
The following examples show how to use org.apache.flink.api.java.operators.IterativeDataSet#map(). They are taken from open-source projects; the project and source file are noted above each example.
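Every example below calls map() on an IterativeDataSet, which applies a MapFunction to the partial solution in each superstep of a bulk iteration; the mapped result is then fed back through closeWith(). As context, here is a minimal, self-contained sketch of that pattern. The program and its values are illustrative only and are not taken from the examples below.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.IterativeDataSet;

public class BulkIterationMapSketch {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // start a bulk iteration over the initial data set, bounded at 10 supersteps
        IterativeDataSet<Integer> iteration = env.fromElements(1, 2, 3).iterate(10);

        // map() transforms the partial solution in every superstep
        DataSet<Integer> nextPartialSolution = iteration.map(new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer value) {
                return value + 1;
            }
        });

        // feed the mapped result back and close the iteration
        DataSet<Integer> result = iteration.closeWith(nextPartialSolution);
        result.print();
    }
}

The iteration runs for the fixed number of supersteps unless a convergence criterion or termination criterion is registered, which is exactly what the examples below demonstrate.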
Example 1
Source File: AggregatorsITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testAggregatorWithoutParameterForIterate() throws Exception {
    /*
     * Test aggregator without parameter for iterate
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);

    DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
    IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

    // register aggregator
    LongSumAggregator aggr = new LongSumAggregator();
    iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);

    // register convergence criterion
    iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterion());

    DataSet<Integer> updatedDs = iteration.map(new SubtractOneMap());
    List<Integer> result = iteration.closeWith(updatedDs).collect();
    Collections.sort(result);

    List<Integer> expected = Arrays.asList(-3, -2, -2, -1, -1, -1, 0, 0, 0, 0, 1, 1, 1, 1, 1);

    assertEquals(expected, result);
}
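The helper classes SubtractOneMap and NegativeElementsConvergenceCriterion are not shown on this page. The sketch below is a plausible reconstruction based on Flink's iteration-aggregator API (RichMapFunction, IterationRuntimeContext#getIterationAggregator, ConvergenceCriterion); the aggregator name and the convergence threshold are assumptions chosen to be consistent with the expected output above, not the original test code.

import org.apache.flink.api.common.aggregators.ConvergenceCriterion;
import org.apache.flink.api.common.aggregators.LongSumAggregator;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.types.LongValue;

// Hypothetical reconstruction: subtracts one per superstep and counts how many
// elements are negative in the aggregator registered under NEGATIVE_ELEMENTS_AGGR.
class SubtractOneMap extends RichMapFunction<Integer, Integer> {

    // must match the name passed to registerAggregator(); the value here is illustrative
    static final String NEGATIVE_ELEMENTS_AGGR = "count.negative.elements";

    private LongSumAggregator aggr;

    @Override
    public void open(Configuration parameters) {
        aggr = getIterationRuntimeContext().getIterationAggregator(NEGATIVE_ELEMENTS_AGGR);
    }

    @Override
    public Integer map(Integer value) {
        Integer newValue = value - 1;
        if (newValue < 0) {
            aggr.aggregate(1L); // count elements that are negative in this superstep
        }
        return newValue;
    }
}

// Hypothetical reconstruction: converge once more than three elements are negative.
class NegativeElementsConvergenceCriterion implements ConvergenceCriterion<LongValue> {
    @Override
    public boolean isConverged(int iteration, LongValue value) {
        return value.getValue() > 3; // illustrative threshold, consistent with four supersteps above
    }
}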
Example 2
Source File: AggregatorsITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testAggregatorWithParameterForIterate() throws Exception {
    /*
     * Test aggregator with parameter for iterate
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);

    DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
    IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

    // register aggregator
    LongSumAggregatorWithParameter aggr = new LongSumAggregatorWithParameter(0);
    iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);

    // register convergence criterion
    iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterion());

    DataSet<Integer> updatedDs = iteration.map(new SubtractOneMapWithParam());
    List<Integer> result = iteration.closeWith(updatedDs).collect();
    Collections.sort(result);

    List<Integer> expected = Arrays.asList(-3, -2, -2, -1, -1, -1, 0, 0, 0, 0, 1, 1, 1, 1, 1);

    assertEquals(expected, result);
}
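Example 2 differs from Example 1 only in that the aggregator and the mapper carry a constructor parameter. A plausible shape for such a parameterized aggregator is sketched below as an assumption, not the original source; the mapper would read the parameter from the aggregator it obtains via getIterationAggregator().

import org.apache.flink.api.common.aggregators.LongSumAggregator;

// Hypothetical reconstruction: a LongSumAggregator that additionally carries a
// configuration value, so iteration functions can read the parameter at runtime.
class LongSumAggregatorWithParameter extends LongSumAggregator {

    private final int coefficient;

    LongSumAggregatorWithParameter(int coefficient) {
        this.coefficient = coefficient;
    }

    public int getCoefficient() {
        return coefficient;
    }
}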
Example 3
Source File: AggregatorsITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testConvergenceCriterionWithParameterForIterate() throws Exception {
    /*
     * Test convergence criterion with parameter for iterate
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);

    DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
    IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

    // register aggregator
    LongSumAggregator aggr = new LongSumAggregator();
    iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);

    // register convergence criterion
    iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterionWithParam(3));

    DataSet<Integer> updatedDs = iteration.map(new SubtractOneMap());
    List<Integer> result = iteration.closeWith(updatedDs).collect();
    Collections.sort(result);

    List<Integer> expected = Arrays.asList(-3, -2, -2, -1, -1, -1, 0, 0, 0, 0, 1, 1, 1, 1, 1);

    assertEquals(expected, result);
}
Example 4
Source File: CollectionExecutionIterationTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testBulkIterationWithTerminationCriterion() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

        IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(100);

        DataSet<Integer> iterationResult = iteration.map(new AddSuperstepNumberMapper());

        DataSet<Integer> terminationCriterion = iterationResult.filter(new FilterFunction<Integer>() {
            public boolean filter(Integer value) {
                return value < 50;
            }
        });

        List<Integer> collected = new ArrayList<Integer>();

        iteration.closeWith(iterationResult, terminationCriterion)
                .output(new LocalCollectionOutputFormat<Integer>(collected));

        env.execute();

        assertEquals(1, collected.size());
        assertEquals(56, collected.get(0).intValue());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
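Here the iteration keeps running as long as the termination-criterion data set is non-empty, i.e. as long as some element is still below 50. AddSuperstepNumberMapper is not shown on this page; the sketch below is an assumption consistent with the asserted result of 56 (1 + 1 + 2 + ... + 10), namely a mapper that adds the current superstep number to each element.

import org.apache.flink.api.common.functions.RichMapFunction;

// Hypothetical reconstruction: adds the current superstep number (1, 2, 3, ...) to each
// element, so the single input value 1 grows by 1 + 2 + ... + k after k supersteps.
class AddSuperstepNumberMapper extends RichMapFunction<Integer, Integer> {
    @Override
    public Integer map(Integer value) {
        int superstep = getIterationRuntimeContext().getSuperstepNumber();
        return value + superstep;
    }
}

After the 10th superstep the value reaches 56, the filter produces an empty data set, and the iteration terminates well before the bound of 100.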
Example 5
Source File: AggregatorsITCase.java From flink with Apache License 2.0
@Test
public void testAggregatorWithoutParameterForIterate() throws Exception {
    /*
     * Test aggregator without parameter for iterate
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);

    DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
    IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

    // register aggregator
    LongSumAggregator aggr = new LongSumAggregator();
    iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);

    // register convergence criterion
    iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterion());

    DataSet<Integer> updatedDs = iteration.map(new SubtractOneMap());
    List<Integer> result = iteration.closeWith(updatedDs).collect();
    Collections.sort(result);

    List<Integer> expected = Arrays.asList(-3, -2, -2, -1, -1, -1, 0, 0, 0, 0, 1, 1, 1, 1, 1);

    assertEquals(expected, result);
}
Example 6
Source File: AggregatorsITCase.java From flink with Apache License 2.0
@Test
public void testAggregatorWithParameterForIterate() throws Exception {
    /*
     * Test aggregator with parameter for iterate
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);

    DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
    IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

    // register aggregator
    LongSumAggregatorWithParameter aggr = new LongSumAggregatorWithParameter(0);
    iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);

    // register convergence criterion
    iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterion());

    DataSet<Integer> updatedDs = iteration.map(new SubtractOneMapWithParam());
    List<Integer> result = iteration.closeWith(updatedDs).collect();
    Collections.sort(result);

    List<Integer> expected = Arrays.asList(-3, -2, -2, -1, -1, -1, 0, 0, 0, 0, 1, 1, 1, 1, 1);

    assertEquals(expected, result);
}
Example 7
Source File: AggregatorsITCase.java From flink with Apache License 2.0
@Test
public void testConvergenceCriterionWithParameterForIterate() throws Exception {
    /*
     * Test convergence criterion with parameter for iterate
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);

    DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
    IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

    // register aggregator
    LongSumAggregator aggr = new LongSumAggregator();
    iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);

    // register convergence criterion
    iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterionWithParam(3));

    DataSet<Integer> updatedDs = iteration.map(new SubtractOneMap());
    List<Integer> result = iteration.closeWith(updatedDs).collect();
    Collections.sort(result);

    List<Integer> expected = Arrays.asList(-3, -2, -2, -1, -1, -1, 0, 0, 0, 0, 1, 1, 1, 1, 1);

    assertEquals(expected, result);
}
Example 8
Source File: CollectionExecutionIterationTest.java From flink with Apache License 2.0
@Test
public void testBulkIterationWithTerminationCriterion() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

        IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(100);

        DataSet<Integer> iterationResult = iteration.map(new AddSuperstepNumberMapper());

        DataSet<Integer> terminationCriterion = iterationResult.filter(new FilterFunction<Integer>() {
            public boolean filter(Integer value) {
                return value < 50;
            }
        });

        List<Integer> collected = new ArrayList<Integer>();

        iteration.closeWith(iterationResult, terminationCriterion)
                .output(new LocalCollectionOutputFormat<Integer>(collected));

        env.execute();

        assertEquals(1, collected.size());
        assertEquals(56, collected.get(0).intValue());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example 9
Source File: BulkIterationTranslationTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testCorrectTranslation() {
    final String jobName = "Test JobName";

    final int numIterations = 13;

    final int defaultParallelism = 133;
    final int iterationParallelism = 77;

    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // ------------ construct the test program ------------------

    {
        env.setParallelism(defaultParallelism);

        @SuppressWarnings("unchecked")
        DataSet<Tuple3<Double, Long, String>> initialDataSet = env.fromElements(new Tuple3<>(3.44, 5L, "abc"));

        IterativeDataSet<Tuple3<Double, Long, String>> bulkIteration = initialDataSet.iterate(numIterations);
        bulkIteration.setParallelism(iterationParallelism);

        // test that multiple iteration consumers are supported
        DataSet<Tuple3<Double, Long, String>> identity = bulkIteration
                .map(new IdentityMapper<Tuple3<Double, Long, String>>());

        DataSet<Tuple3<Double, Long, String>> result = bulkIteration.closeWith(identity);

        result.output(new DiscardingOutputFormat<Tuple3<Double, Long, String>>());
        result.writeAsText("/dev/null");
    }

    Plan p = env.createProgramPlan(jobName);

    // ------------- validate the plan ----------------

    BulkIterationBase<?> iteration = (BulkIterationBase<?>) p.getDataSinks().iterator().next().getInput();

    assertEquals(jobName, p.getJobName());
    assertEquals(defaultParallelism, p.getDefaultParallelism());
    assertEquals(iterationParallelism, iteration.getParallelism());
}
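IdentityMapper is likewise not shown on this page. It is presumably a simple pass-through MapFunction along the lines of the sketch below (an assumption, not the original class); the test only needs some map step between the iteration and closeWith() so that the plan translation can be checked.

import org.apache.flink.api.common.functions.MapFunction;

// Hypothetical reconstruction: returns its input unchanged.
class IdentityMapper<T> implements MapFunction<T, T> {
    @Override
    public T map(T value) {
        return value;
    }
}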
Example 10
Source File: BulkIterationTranslationTest.java From flink with Apache License 2.0
@Test
public void testCorrectTranslation() {
    final String jobName = "Test JobName";

    final int numIterations = 13;

    final int defaultParallelism = 133;
    final int iterationParallelism = 77;

    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // ------------ construct the test program ------------------

    {
        env.setParallelism(defaultParallelism);

        @SuppressWarnings("unchecked")
        DataSet<Tuple3<Double, Long, String>> initialDataSet = env.fromElements(new Tuple3<>(3.44, 5L, "abc"));

        IterativeDataSet<Tuple3<Double, Long, String>> bulkIteration = initialDataSet.iterate(numIterations);
        bulkIteration.setParallelism(iterationParallelism);

        // test that multiple iteration consumers are supported
        DataSet<Tuple3<Double, Long, String>> identity = bulkIteration
                .map(new IdentityMapper<Tuple3<Double, Long, String>>());

        DataSet<Tuple3<Double, Long, String>> result = bulkIteration.closeWith(identity);

        result.output(new DiscardingOutputFormat<Tuple3<Double, Long, String>>());
        result.writeAsText("/dev/null");
    }

    Plan p = env.createProgramPlan(jobName);

    // ------------- validate the plan ----------------

    BulkIterationBase<?> iteration = (BulkIterationBase<?>) p.getDataSinks().iterator().next().getInput();

    assertEquals(jobName, p.getJobName());
    assertEquals(defaultParallelism, p.getDefaultParallelism());
    assertEquals(iterationParallelism, iteration.getParallelism());
}