Java Code Examples for org.apache.flink.runtime.JobException#printStackTrace()
The following examples show how to use
org.apache.flink.runtime.JobException#printStackTrace().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: PointwisePatternTest.java From flink with Apache License 2.0 | 5 votes |
@Test public void test2NToN() throws Exception { final int N = 17; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(2 * N); v2.setParallelism(N); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(2, inEdges.length); assertEquals(ev.getParallelSubtaskIndex() * 2, inEdges[0].getSource().getPartitionNumber()); assertEquals(ev.getParallelSubtaskIndex() * 2 + 1, inEdges[1].getSource().getPartitionNumber()); } }
Example 2
Source File: PointwisePatternTest.java From flink with Apache License 2.0 | 5 votes |
@Test public void testNToN() throws Exception { final int N = 23; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(N); v2.setParallelism(N); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(1, inEdges.length); assertEquals(ev.getParallelSubtaskIndex(), inEdges[0].getSource().getPartitionNumber()); } }
Example 3
Source File: PointwisePatternTest.java From flink with Apache License 2.0 | 5 votes |
@Test public void testNToN() throws Exception { final int N = 23; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(N); v2.setParallelism(N); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(1, inEdges.length); assertEquals(ev.getParallelSubtaskIndex(), inEdges[0].getSource().getPartitionNumber()); } }
Example 4
Source File: PointwisePatternTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Test public void test2NToN() throws Exception { final int N = 17; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(2 * N); v2.setParallelism(N); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(2, inEdges.length); assertEquals(ev.getParallelSubtaskIndex() * 2, inEdges[0].getSource().getPartitionNumber()); assertEquals(ev.getParallelSubtaskIndex() * 2 + 1, inEdges[1].getSource().getPartitionNumber()); } }
Example 5
Source File: PointwisePatternTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Test public void test3NToN() throws Exception { final int N = 17; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(3 * N); v2.setParallelism(N); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(3, inEdges.length); assertEquals(ev.getParallelSubtaskIndex() * 3, inEdges[0].getSource().getPartitionNumber()); assertEquals(ev.getParallelSubtaskIndex() * 3 + 1, inEdges[1].getSource().getPartitionNumber()); assertEquals(ev.getParallelSubtaskIndex() * 3 + 2, inEdges[2].getSource().getPartitionNumber()); } }
Example 6
Source File: PointwisePatternTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Test public void testNTo2N() throws Exception { final int N = 41; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(N); v2.setParallelism(2 * N); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(1, inEdges.length); assertEquals(ev.getParallelSubtaskIndex() / 2, inEdges[0].getSource().getPartitionNumber()); } }
Example 7
Source File: ExecutionGraphConstructionTest.java From flink with Apache License 2.0 | 5 votes |
/** * Creates a JobGraph of the following form: * * <pre> * v1--->v2-->\ * \ * v4 --->\ * ----->/ \ * v3-->/ v5 * \ / * ------------->/ * </pre> */ @Test public void testCreateSimpleGraphBipartite() throws Exception { JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(5); v2.setParallelism(7); v3.setParallelism(2); v4.setParallelism(11); v5.setParallelism(4); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v5.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput(v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v4, v5)); ExecutionGraph eg = createExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } verifyTestGraph(eg, v1, v2, v3, v4, v5); }
Example 8
Source File: PointwisePatternTest.java From flink with Apache License 2.0 | 5 votes |
@Test public void test3NToN() throws Exception { final int N = 17; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(3 * N); v2.setParallelism(N); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(3, inEdges.length); assertEquals(ev.getParallelSubtaskIndex() * 3, inEdges[0].getSource().getPartitionNumber()); assertEquals(ev.getParallelSubtaskIndex() * 3 + 1, inEdges[1].getSource().getPartitionNumber()); assertEquals(ev.getParallelSubtaskIndex() * 3 + 2, inEdges[2].getSource().getPartitionNumber()); } }
Example 9
Source File: PointwisePatternTest.java From flink with Apache License 2.0 | 5 votes |
@Test public void testNTo2N() throws Exception { final int N = 41; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(N); v2.setParallelism(2 * N); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(1, inEdges.length); assertEquals(ev.getParallelSubtaskIndex() / 2, inEdges[0].getSource().getPartitionNumber()); } }
Example 10
Source File: PointwisePatternTest.java From flink with Apache License 2.0 | 5 votes |
@Test public void test3NToN() throws Exception { final int N = 17; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(3 * N); v2.setParallelism(N); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(3, inEdges.length); assertEquals(ev.getParallelSubtaskIndex() * 3, inEdges[0].getSource().getPartitionNumber()); assertEquals(ev.getParallelSubtaskIndex() * 3 + 1, inEdges[1].getSource().getPartitionNumber()); assertEquals(ev.getParallelSubtaskIndex() * 3 + 2, inEdges[2].getSource().getPartitionNumber()); } }
Example 11
Source File: RestartPipelinedRegionStrategyTest.java From flink with Apache License 2.0 | 4 votes |
/** * Creates a JobGraph of the following form: * * <pre> * v1-->v2-->|\ * \ * v4 * / * v3------>/ * </pre> */ @Test public void testMultiRegionNotAllToAll() throws Exception { final JobID jobId = new JobID(); final String jobName = "Test Job Sample Name"; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(2); v2.setParallelism(2); v3.setParallelism(5); v4.setParallelism(5); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); v4.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v4)); final JobInformation jobInformation = new DummyJobInformation( jobId, jobName); ExecutionGraph eg = new ExecutionGraph( jobInformation, TestingUtils.defaultExecutor(), TestingUtils.defaultExecutor(), AkkaUtils.getDefaultTimeout(), new NoRestartStrategy(), new RestartPipelinedRegionStrategy.Factory(), new TestingSlotProvider(ignored -> new CompletableFuture<>()), ExecutionGraph.class.getClassLoader(), VoidBlobWriter.getInstance(), AkkaUtils.getDefaultTimeout()); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } // All in one failover region RestartPipelinedRegionStrategy strategy = (RestartPipelinedRegionStrategy)eg.getFailoverStrategy(); ExecutionJobVertex ejv1 = eg.getJobVertex(v1.getID()); ExecutionJobVertex ejv2 = eg.getJobVertex(v2.getID()); ExecutionJobVertex ejv3 = 
eg.getJobVertex(v3.getID()); ExecutionJobVertex ejv4 = eg.getJobVertex(v4.getID()); FailoverRegion region11 = strategy.getFailoverRegion(ejv1.getTaskVertices()[0]); FailoverRegion region12 = strategy.getFailoverRegion(ejv1.getTaskVertices()[1]); FailoverRegion region21 = strategy.getFailoverRegion(ejv2.getTaskVertices()[0]); FailoverRegion region22 = strategy.getFailoverRegion(ejv2.getTaskVertices()[1]); FailoverRegion region3 = strategy.getFailoverRegion(ejv3.getTaskVertices()[0]); FailoverRegion region4 = strategy.getFailoverRegion(ejv4.getTaskVertices()[3]); //There should be 3 failover regions. v1 v2 in two, v3 and v4 in one assertEquals(region11, region21); assertEquals(region12, region22); assertFalse(region11.equals(region12)); assertFalse(region3.equals(region4)); }
Example 12
Source File: PointwisePatternTest.java From flink with Apache License 2.0 | 4 votes |
private void testHighToLow(int highDop, int lowDop) throws Exception { if (highDop < lowDop) { throw new IllegalArgumentException(); } final int factor = highDop / lowDop; final int delta = highDop % lowDop == 0 ? 0 : 1; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(highDop); v2.setParallelism(lowDop); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); int[] timesUsed = new int[highDop]; for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertTrue(inEdges.length >= factor && inEdges.length <= factor + delta); for (ExecutionEdge ee : inEdges) { timesUsed[ee.getSource().getPartitionNumber()]++; } } for (int used : timesUsed) { assertEquals(1, used); } }
Example 13
Source File: FailoverRegionTest.java From flink with Apache License 2.0 | 4 votes |
private ExecutionGraph createSingleRegionExecutionGraph(RestartStrategy restartStrategy) throws Exception { final JobID jobId = new JobID(); final String jobName = "Test Job Sample Name"; final SimpleSlotProvider slotProvider = new SimpleSlotProvider(jobId, 14); JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); v1.setParallelism(3); v2.setParallelism(2); v3.setParallelism(2); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v3.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v3.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<>(Arrays.asList(v1, v2, v3)); ExecutionGraph eg = new ExecutionGraph( new DummyJobInformation( jobId, jobName), TestingUtils.defaultExecutor(), TestingUtils.defaultExecutor(), AkkaUtils.getDefaultTimeout(), restartStrategy, new FailoverPipelinedRegionWithDirectExecutor(), slotProvider); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } enableCheckpointing(eg); eg.start(ComponentMainThreadExecutorServiceAdapter.forMainThread()); eg.scheduleForExecution(); attachPendingCheckpoints(eg); return eg; }
Example 14
Source File: PointwisePatternTest.java From flink with Apache License 2.0 | 4 votes |
private void testLowToHigh(int lowDop, int highDop) throws Exception { if (highDop < lowDop) { throw new IllegalArgumentException(); } final int factor = highDop / lowDop; final int delta = highDop % lowDop == 0 ? 0 : 1; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(lowDop); v2.setParallelism(highDop); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); int[] timesUsed = new int[lowDop]; for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(1, inEdges.length); timesUsed[inEdges[0].getSource().getPartitionNumber()]++; } for (int used : timesUsed) { assertTrue(used >= factor && used <= factor + delta); } }
Example 15
Source File: RestartPipelinedRegionStrategyTest.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
/** * Creates a JobGraph of the following form: * * <pre> * v1--->v2-->\ * \ * v4 --->|\ * ----->/ \ * v3-->/ v5 * \ / * ------------->/ * </pre> */ @Test public void testSingleRegionWithMixedInput() throws Exception { final JobID jobId = new JobID(); final String jobName = "Test Job Sample Name"; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(3); v2.setParallelism(2); v3.setParallelism(2); v4.setParallelism(5); v5.setParallelism(2); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v5.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput(v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v4, v5)); final JobInformation jobInformation = new DummyJobInformation( jobId, jobName); ExecutionGraph eg = new ExecutionGraph( jobInformation, TestingUtils.defaultExecutor(), TestingUtils.defaultExecutor(), AkkaUtils.getDefaultTimeout(), new NoRestartStrategy(), new RestartPipelinedRegionStrategy.Factory(), new TestingSlotProvider(ignored -> new CompletableFuture<>()), ExecutionGraph.class.getClassLoader(), VoidBlobWriter.getInstance(), AkkaUtils.getDefaultTimeout()); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + 
e.getMessage()); } // All in one failover region RestartPipelinedRegionStrategy strategy = (RestartPipelinedRegionStrategy)eg.getFailoverStrategy(); ExecutionJobVertex ejv1 = eg.getJobVertex(v1.getID()); ExecutionJobVertex ejv2 = eg.getJobVertex(v2.getID()); ExecutionJobVertex ejv3 = eg.getJobVertex(v3.getID()); ExecutionJobVertex ejv4 = eg.getJobVertex(v4.getID()); ExecutionJobVertex ejv5 = eg.getJobVertex(v5.getID()); FailoverRegion region1 = strategy.getFailoverRegion(ejv1.getTaskVertices()[1]); FailoverRegion region2 = strategy.getFailoverRegion(ejv2.getTaskVertices()[0]); FailoverRegion region4 = strategy.getFailoverRegion(ejv4.getTaskVertices()[3]); FailoverRegion region3 = strategy.getFailoverRegion(ejv3.getTaskVertices()[0]); FailoverRegion region5 = strategy.getFailoverRegion(ejv5.getTaskVertices()[1]); assertEquals(region1, region2); assertEquals(region2, region4); assertEquals(region3, region2); assertEquals(region1, region5); }
Example 16
Source File: RestartPipelinedRegionStrategyTest.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
/** * Creates a JobGraph of the following form: * * <pre> * v2 ------->\ * \ * v1---------> v4 --->|\ * \ * v5 * / * v3--------------->|/ * </pre> */ @Test public void testMultipleFailoverRegions() throws Exception { final JobID jobId = new JobID(); final String jobName = "Test Job Sample Name"; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(3); v2.setParallelism(2); v3.setParallelism(2); v4.setParallelism(5); v5.setParallelism(2); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v5.setInvokableClass(AbstractInvokable.class); v4.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput(v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); v5.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v4, v5)); final JobInformation jobInformation = new DummyJobInformation( jobId, jobName); ExecutionGraph eg = new ExecutionGraph( jobInformation, TestingUtils.defaultExecutor(), TestingUtils.defaultExecutor(), AkkaUtils.getDefaultTimeout(), new NoRestartStrategy(), new RestartPipelinedRegionStrategy.Factory(), new TestingSlotProvider(ignored -> new CompletableFuture<>()), ExecutionGraph.class.getClassLoader(), VoidBlobWriter.getInstance(), AkkaUtils.getDefaultTimeout()); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } // All in one failover region RestartPipelinedRegionStrategy strategy = 
(RestartPipelinedRegionStrategy)eg.getFailoverStrategy(); ExecutionJobVertex ejv1 = eg.getJobVertex(v1.getID()); ExecutionJobVertex ejv2 = eg.getJobVertex(v2.getID()); ExecutionJobVertex ejv3 = eg.getJobVertex(v3.getID()); ExecutionJobVertex ejv4 = eg.getJobVertex(v4.getID()); ExecutionJobVertex ejv5 = eg.getJobVertex(v5.getID()); FailoverRegion region1 = strategy.getFailoverRegion(ejv1.getTaskVertices()[1]); FailoverRegion region2 = strategy.getFailoverRegion(ejv2.getTaskVertices()[0]); FailoverRegion region4 = strategy.getFailoverRegion(ejv4.getTaskVertices()[3]); FailoverRegion region31 = strategy.getFailoverRegion(ejv3.getTaskVertices()[0]); FailoverRegion region32 = strategy.getFailoverRegion(ejv3.getTaskVertices()[1]); FailoverRegion region51 = strategy.getFailoverRegion(ejv5.getTaskVertices()[0]); FailoverRegion region52 = strategy.getFailoverRegion(ejv5.getTaskVertices()[1]); //There should be 5 failover regions. v1 v2 v4 in one, v3 has two, v5 has two assertEquals(region1, region2); assertEquals(region2, region4); assertFalse(region31.equals(region32)); assertFalse(region51.equals(region52)); }
Example 17
Source File: FailoverRegionTest.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
/**
 * Tests that when two failover regions fail over at the same time, they do not
 * influence each other: each region independently goes RUNNING -> CANCELLING ->
 * RUNNING as its own vertices fail and finish cancelling.
 *
 * @throws Exception if graph construction, attachment, or scheduling fails
 */
@Test
public void testMultiRegionFailoverAtSameTime() throws Exception {
    final JobID jobId = new JobID();
    final String jobName = "Test Job Sample Name";

    final SimpleSlotProvider slotProvider = new SimpleSlotProvider(jobId, 16);

    JobVertex v1 = new JobVertex("vertex1");
    JobVertex v2 = new JobVertex("vertex2");
    JobVertex v3 = new JobVertex("vertex3");
    JobVertex v4 = new JobVertex("vertex4");

    v1.setParallelism(2);
    v2.setParallelism(2);
    v3.setParallelism(2);
    v4.setParallelism(2);

    v1.setInvokableClass(AbstractInvokable.class);
    v2.setInvokableClass(AbstractInvokable.class);
    v3.setInvokableClass(AbstractInvokable.class);
    v4.setInvokableClass(AbstractInvokable.class);

    v2.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
    v4.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING);
    v4.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);

    List<JobVertex> ordered = Arrays.asList(v1, v2, v3, v4);

    ExecutionGraph eg = new ExecutionGraph(
        new DummyJobInformation(
            jobId,
            jobName),
        TestingUtils.defaultExecutor(),
        TestingUtils.defaultExecutor(),
        AkkaUtils.getDefaultTimeout(),
        new InfiniteDelayRestartStrategy(10),
        new RestartPipelinedRegionStrategy.Factory(),
        slotProvider);

    // Let a JobException propagate (method declares "throws Exception") rather than
    // printStackTrace() + fail(getMessage()), which loses the stack trace.
    eg.attachJobGraph(ordered);

    eg.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
    eg.scheduleForExecution();

    RestartPipelinedRegionStrategy strategy = (RestartPipelinedRegionStrategy) eg.getFailoverStrategy();

    ExecutionVertex ev11 = eg.getJobVertex(v1.getID()).getTaskVertices()[0];
    ExecutionVertex ev12 = eg.getJobVertex(v1.getID()).getTaskVertices()[1];
    ExecutionVertex ev31 = eg.getJobVertex(v3.getID()).getTaskVertices()[0];
    ExecutionVertex ev32 = eg.getJobVertex(v3.getID()).getTaskVertices()[1];

    assertEquals(JobStatus.RUNNING, strategy.getFailoverRegion(ev11).getState());
    assertEquals(JobStatus.RUNNING, strategy.getFailoverRegion(ev31).getState());

    // Fail one vertex in each of the two regions "at the same time".
    ev11.getCurrentExecutionAttempt().fail(new Exception("new fail"));
    ev31.getCurrentExecutionAttempt().fail(new Exception("new fail"));

    assertEquals(JobStatus.CANCELLING, strategy.getFailoverRegion(ev11).getState());
    assertEquals(JobStatus.CANCELLING, strategy.getFailoverRegion(ev31).getState());

    // Each region recovers independently once its own vertices finish cancelling.
    ev32.getCurrentExecutionAttempt().completeCancelling();
    waitUntilFailoverRegionState(strategy.getFailoverRegion(ev31), JobStatus.RUNNING, 1000);

    ev12.getCurrentExecutionAttempt().completeCancelling();
    waitUntilFailoverRegionState(strategy.getFailoverRegion(ev11), JobStatus.RUNNING, 1000);
}
Example 18
Source File: PointwisePatternTest.java From flink with Apache License 2.0 | 4 votes |
private void testLowToHigh(int lowDop, int highDop) throws Exception { if (highDop < lowDop) { throw new IllegalArgumentException(); } final int factor = highDop / lowDop; final int delta = highDop % lowDop == 0 ? 0 : 1; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(lowDop); v2.setParallelism(highDop); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); int[] timesUsed = new int[lowDop]; for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertEquals(1, inEdges.length); timesUsed[inEdges[0].getSource().getPartitionNumber()]++; } for (int used : timesUsed) { assertTrue(used >= factor && used <= factor + delta); } }
Example 19
Source File: PointwisePatternTest.java From flink with Apache License 2.0 | 4 votes |
private void testHighToLow(int highDop, int lowDop) throws Exception { if (highDop < lowDop) { throw new IllegalArgumentException(); } final int factor = highDop / lowDop; final int delta = highDop % lowDop == 0 ? 0 : 1; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); v1.setParallelism(highDop); v2.setParallelism(lowDop); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2)); ExecutionGraph eg = getDummyExecutionGraph(); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + e.getMessage()); } ExecutionJobVertex target = eg.getAllVertices().get(v2.getID()); int[] timesUsed = new int[highDop]; for (ExecutionVertex ev : target.getTaskVertices()) { assertEquals(1, ev.getNumberOfInputs()); ExecutionEdge[] inEdges = ev.getInputEdges(0); assertTrue(inEdges.length >= factor && inEdges.length <= factor + delta); for (ExecutionEdge ee : inEdges) { timesUsed[ee.getSource().getPartitionNumber()]++; } } for (int used : timesUsed) { assertEquals(1, used); } }
Example 20
Source File: RestartPipelinedRegionStrategyTest.java From flink with Apache License 2.0 | 4 votes |
/** * Creates a JobGraph of the following form: * * <pre> * v1--->v2-->\ * \ * v4 --->|\ * ----->/ \ * v3-->/ v5 * \ / * ------------->/ * </pre> */ @Test public void testSingleRegionWithMixedInput() throws Exception { final JobID jobId = new JobID(); final String jobName = "Test Job Sample Name"; JobVertex v1 = new JobVertex("vertex1"); JobVertex v2 = new JobVertex("vertex2"); JobVertex v3 = new JobVertex("vertex3"); JobVertex v4 = new JobVertex("vertex4"); JobVertex v5 = new JobVertex("vertex5"); v1.setParallelism(3); v2.setParallelism(2); v3.setParallelism(2); v4.setParallelism(5); v5.setParallelism(2); v1.setInvokableClass(AbstractInvokable.class); v2.setInvokableClass(AbstractInvokable.class); v3.setInvokableClass(AbstractInvokable.class); v4.setInvokableClass(AbstractInvokable.class); v5.setInvokableClass(AbstractInvokable.class); v2.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput(v3, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); v5.connectNewDataSetAsInput(v4, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING); List<JobVertex> ordered = new ArrayList<JobVertex>(Arrays.asList(v1, v2, v3, v4, v5)); final JobInformation jobInformation = new DummyJobInformation( jobId, jobName); ExecutionGraph eg = new ExecutionGraph( jobInformation, TestingUtils.defaultExecutor(), TestingUtils.defaultExecutor(), AkkaUtils.getDefaultTimeout(), new NoRestartStrategy(), new RestartPipelinedRegionStrategy.Factory(), new TestingSlotProvider(ignored -> new CompletableFuture<>()), ExecutionGraph.class.getClassLoader(), VoidBlobWriter.getInstance(), AkkaUtils.getDefaultTimeout()); try { eg.attachJobGraph(ordered); } catch (JobException e) { e.printStackTrace(); fail("Job failed with exception: " + 
e.getMessage()); } // All in one failover region RestartPipelinedRegionStrategy strategy = (RestartPipelinedRegionStrategy)eg.getFailoverStrategy(); ExecutionJobVertex ejv1 = eg.getJobVertex(v1.getID()); ExecutionJobVertex ejv2 = eg.getJobVertex(v2.getID()); ExecutionJobVertex ejv3 = eg.getJobVertex(v3.getID()); ExecutionJobVertex ejv4 = eg.getJobVertex(v4.getID()); ExecutionJobVertex ejv5 = eg.getJobVertex(v5.getID()); FailoverRegion region1 = strategy.getFailoverRegion(ejv1.getTaskVertices()[1]); FailoverRegion region2 = strategy.getFailoverRegion(ejv2.getTaskVertices()[0]); FailoverRegion region4 = strategy.getFailoverRegion(ejv4.getTaskVertices()[3]); FailoverRegion region3 = strategy.getFailoverRegion(ejv3.getTaskVertices()[0]); FailoverRegion region5 = strategy.getFailoverRegion(ejv5.getTaskVertices()[1]); assertEquals(region1, region2); assertEquals(region2, region4); assertEquals(region3, region2); assertEquals(region1, region5); }