org.apache.flink.runtime.metrics.groups.TaskMetricGroup Java Examples
The following examples show how to use org.apache.flink.runtime.metrics.groups.TaskMetricGroup.
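For orientation, the snippet below sketches where a TaskMetricGroup sits in the TaskManager-side metric group hierarchy (TaskManagerMetricGroup -> TaskManagerJobMetricGroup -> TaskMetricGroup). It is a minimal sketch assembled from the constructor and accessor calls that appear in the examples that follow; the method name, registry, host, job and task names are placeholders, and the exact constructor signatures vary between Flink versions.

// Minimal sketch (placeholder names and IDs): wiring a TaskMetricGroup into the
// TaskManager-side metric group hierarchy. "registry" is an existing MetricRegistryImpl.
static TaskMetricGroup createExampleTaskGroup(MetricRegistryImpl registry) {
    TaskManagerMetricGroup tmGroup = new TaskManagerMetricGroup(registry, "localhost", "taskManager-1");
    TaskManagerJobMetricGroup jobGroup = new TaskManagerJobMetricGroup(registry, tmGroup, new JobID(), "exampleJob");
    TaskMetricGroup taskGroup = new TaskMetricGroup(registry, jobGroup, new JobVertexID(), new AbstractID(), "exampleTask", 0, 0);

    // Task-level metrics hang off the group itself or off its TaskIOMetricGroup.
    Counter recordsIn = taskGroup.getIOMetricGroup().getNumRecordsInCounter();
    Counter custom = taskGroup.counter("exampleCounter");
    return taskGroup;
}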
Example #1
Source File: OperatorScopeFormat.java From flink with Apache License 2.0 | 6 votes |
public String[] formatScope(TaskMetricGroup parent, OperatorID operatorID, String operatorName) {
    final String[] template = copyTemplate();
    final String[] values = {
        parent.parent().parent().hostname(),
        parent.parent().parent().taskManagerId(),
        valueOrNull(parent.parent().jobId()),
        valueOrNull(parent.parent().jobName()),
        valueOrNull(parent.vertexId()),
        valueOrNull(parent.executionId()),
        valueOrNull(parent.taskName()),
        String.valueOf(parent.subtaskIndex()),
        String.valueOf(parent.attemptNumber()),
        valueOrNull(operatorID),
        valueOrNull(operatorName)
    };
    return bindVariables(template, values);
}
Example #2
Source File: OperatorScopeFormat.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
public String[] formatScope(TaskMetricGroup parent, OperatorID operatorID, String operatorName) {
    final String[] template = copyTemplate();
    final String[] values = {
        parent.parent().parent().hostname(),
        parent.parent().parent().taskManagerId(),
        valueOrNull(parent.parent().jobId()),
        valueOrNull(parent.parent().jobName()),
        valueOrNull(parent.vertexId()),
        valueOrNull(parent.executionId()),
        valueOrNull(parent.taskName()),
        String.valueOf(parent.subtaskIndex()),
        String.valueOf(parent.attemptNumber()),
        valueOrNull(operatorID),
        valueOrNull(operatorName)
    };
    return bindVariables(template, values);
}
Example #3
Source File: MultipleInputStreamTaskTest.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testLatencyMarker() throws Exception {
    final Map<String, Metric> metrics = new ConcurrentHashMap<>();
    final TaskMetricGroup taskMetricGroup = new StreamTaskTestHarness.TestTaskMetricGroup(metrics);

    try (StreamTaskMailboxTestHarness<String> testHarness =
            new MultipleInputStreamTaskTestHarnessBuilder<>(MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
                .addInput(BasicTypeInfo.STRING_TYPE_INFO)
                .addInput(BasicTypeInfo.INT_TYPE_INFO)
                .addInput(BasicTypeInfo.DOUBLE_TYPE_INFO)
                .setupOutputForSingletonOperatorChain(new MapToStringMultipleInputOperatorFactory())
                .setTaskMetricGroup(taskMetricGroup)
                .build()) {
        ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
        OperatorID sourceId = new OperatorID();
        LatencyMarker latencyMarker = new LatencyMarker(42L, sourceId, 0);
        testHarness.processElement(latencyMarker);
        expectedOutput.add(latencyMarker);

        assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));

        testHarness.endInput();
        testHarness.waitForTaskCompletion();
    }
}
Example #4
Source File: MultipleInputStreamTaskTest.java From flink with Apache License 2.0 | 6 votes |
/**
 * Tests the checkpoint related metrics are registered into {@link TaskIOMetricGroup}
 * correctly while generating the {@link TwoInputStreamTask}.
 */
@Test
public void testCheckpointBarrierMetrics() throws Exception {
    final Map<String, Metric> metrics = new ConcurrentHashMap<>();
    final TaskMetricGroup taskMetricGroup = new StreamTaskTestHarness.TestTaskMetricGroup(metrics);

    try (StreamTaskMailboxTestHarness<String> testHarness =
            new MultipleInputStreamTaskTestHarnessBuilder<>(MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
                .addInput(BasicTypeInfo.STRING_TYPE_INFO, 2)
                .addInput(BasicTypeInfo.INT_TYPE_INFO, 2)
                .addInput(BasicTypeInfo.DOUBLE_TYPE_INFO, 2)
                .setupOutputForSingletonOperatorChain(new MapToStringMultipleInputOperatorFactory())
                .setTaskMetricGroup(taskMetricGroup)
                .build()) {
        assertThat(metrics, IsMapContaining.hasKey(MetricNames.CHECKPOINT_ALIGNMENT_TIME));
        assertThat(metrics, IsMapContaining.hasKey(MetricNames.CHECKPOINT_START_DELAY_TIME));

        testHarness.endInput();
        testHarness.waitForTaskCompletion();
    }
}
Example #5
Source File: OneInputStreamTaskTest.java From flink with Apache License 2.0 | 6 votes |
/**
 * Tests the checkpoint related metrics are registered into {@link TaskIOMetricGroup}
 * correctly while generating the {@link OneInputStreamTask}.
 */
@Test
public void testCheckpointBarrierMetrics() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    streamConfig.setStreamOperator(new TestOperator());

    final Map<String, Metric> metrics = new ConcurrentHashMap<>();
    final TaskMetricGroup taskMetricGroup = new StreamTaskTestHarness.TestTaskMetricGroup(metrics);
    final StreamMockEnvironment environment = testHarness.createEnvironment();
    environment.setTaskMetricGroup(taskMetricGroup);

    testHarness.invoke(environment);
    testHarness.waitForTaskRunning();

    assertThat(metrics, IsMapContaining.hasKey(MetricNames.CHECKPOINT_ALIGNMENT_TIME));
    assertThat(metrics, IsMapContaining.hasKey(MetricNames.CHECKPOINT_START_DELAY_TIME));

    testHarness.endInput();
    testHarness.waitForTaskCompletion();
}
Example #6
Source File: PrometheusReporterTaskScopeTest.java From flink with Apache License 2.0 | 5 votes |
@Before
public void setupReporter() {
    registry = new MetricRegistryImpl(
        MetricRegistryConfiguration.defaultMetricRegistryConfiguration(),
        Collections.singletonList(createReporterSetup("test1", "9400-9500")));
    reporter = (PrometheusReporter) registry.getReporters().get(0);

    TaskManagerMetricGroup tmMetricGroup = new TaskManagerMetricGroup(registry, TASK_MANAGER_HOST, TASK_MANAGER_ID);
    TaskManagerJobMetricGroup tmJobMetricGroup = new TaskManagerJobMetricGroup(registry, tmMetricGroup, jobId, JOB_NAME);
    taskMetricGroup1 = new TaskMetricGroup(registry, tmJobMetricGroup, taskId1, taskAttemptId1, TASK_NAME, SUBTASK_INDEX_1, ATTEMPT_NUMBER);
    taskMetricGroup2 = new TaskMetricGroup(registry, tmJobMetricGroup, taskId2, taskAttemptId2, TASK_NAME, SUBTASK_INDEX_2, ATTEMPT_NUMBER);
}
Example #7
Source File: PrometheusReporterTaskScopeTest.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Before
public void setupReporter() {
    registry = new MetricRegistryImpl(MetricRegistryConfiguration.fromConfiguration(createConfigWithOneReporter("test1", "9400-9500")));
    reporter = (PrometheusReporter) registry.getReporters().get(0);

    TaskManagerMetricGroup tmMetricGroup = new TaskManagerMetricGroup(registry, TASK_MANAGER_HOST, TASK_MANAGER_ID);
    TaskManagerJobMetricGroup tmJobMetricGroup = new TaskManagerJobMetricGroup(registry, tmMetricGroup, jobId, JOB_NAME);
    taskMetricGroup1 = new TaskMetricGroup(registry, tmJobMetricGroup, taskId1, taskAttemptId1, TASK_NAME, SUBTASK_INDEX_1, ATTEMPT_NUMBER);
    taskMetricGroup2 = new TaskMetricGroup(registry, tmJobMetricGroup, taskId2, taskAttemptId2, TASK_NAME, SUBTASK_INDEX_2, ATTEMPT_NUMBER);
}
Example #8
Source File: TwoInputStreamTaskTest.java From flink with Apache License 2.0 | 5 votes |
/**
 * Tests the checkpoint related metrics are registered into {@link TaskIOMetricGroup}
 * correctly while generating the {@link TwoInputStreamTask}.
 */
@Test
public void testCheckpointBarrierMetrics() throws Exception {
    final TwoInputStreamTaskTestHarness<String, Integer, String> testHarness = new TwoInputStreamTaskTestHarness<>(
        TwoInputStreamTask::new,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    final StreamConfig streamConfig = testHarness.getStreamConfig();
    final CoStreamMap<String, Integer, String> coMapOperator = new CoStreamMap<>(new IdentityMap());
    testHarness.setupOutputForSingletonOperatorChain();
    streamConfig.setStreamOperator(coMapOperator);

    final Map<String, Metric> metrics = new ConcurrentHashMap<>();
    final TaskMetricGroup taskMetricGroup = new StreamTaskTestHarness.TestTaskMetricGroup(metrics);
    final StreamMockEnvironment environment = testHarness.createEnvironment();
    environment.setTaskMetricGroup(taskMetricGroup);

    testHarness.invoke(environment);
    testHarness.waitForTaskRunning();

    assertThat(metrics, IsMapContaining.hasKey(MetricNames.CHECKPOINT_ALIGNMENT_TIME));
    assertThat(metrics, IsMapContaining.hasKey(MetricNames.CHECKPOINT_START_DELAY_TIME));

    testHarness.endInput();
    testHarness.waitForTaskCompletion();
}
Example #9
Source File: TwoInputStreamTaskTest.java From flink with Apache License 2.0 | 4 votes |
@Test
public void testOperatorMetricReuse() throws Exception {
    final TwoInputStreamTaskTestHarness<String, String, String> testHarness = new TwoInputStreamTaskTestHarness<>(
        isInputSelectable ? TwoInputSelectableStreamTask::new : TwoInputStreamTask::new,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOperatorChain(new OperatorID(), new DuplicatingOperator())
        .chain(new OperatorID(), new OneInputStreamTaskTest.DuplicatingOperator(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
        .chain(new OperatorID(), new OneInputStreamTaskTest.DuplicatingOperator(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
        .finish();

    final TaskMetricGroup taskMetricGroup = new UnregisteredMetricGroups.UnregisteredTaskMetricGroup() {
        @Override
        public OperatorMetricGroup getOrAddOperator(OperatorID operatorID, String name) {
            return new OperatorMetricGroup(NoOpMetricRegistry.INSTANCE, this, operatorID, name);
        }
    };

    final StreamMockEnvironment env = new StreamMockEnvironment(
        testHarness.jobConfig, testHarness.taskConfig, testHarness.memorySize,
        new MockInputSplitProvider(), testHarness.bufferSize, new TestTaskStateManager()) {
        @Override
        public TaskMetricGroup getMetricGroup() {
            return taskMetricGroup;
        }
    };

    final Counter numRecordsInCounter = taskMetricGroup.getIOMetricGroup().getNumRecordsInCounter();
    final Counter numRecordsOutCounter = taskMetricGroup.getIOMetricGroup().getNumRecordsOutCounter();

    testHarness.invoke(env);
    testHarness.waitForTaskRunning();

    final int numRecords1 = 5;
    final int numRecords2 = 3;

    for (int x = 0; x < numRecords1; x++) {
        testHarness.processElement(new StreamRecord<>("hello"), 0, 0);
    }
    for (int x = 0; x < numRecords2; x++) {
        testHarness.processElement(new StreamRecord<>("hello"), 1, 0);
    }

    testHarness.waitForInputProcessing();
    assertEquals(numRecords1 + numRecords2, numRecordsInCounter.getCount());
    assertEquals((numRecords1 + numRecords2) * 2 * 2 * 2, numRecordsOutCounter.getCount());

    testHarness.endInput();
    testHarness.waitForTaskCompletion();
}
Example #10
Source File: MailboxProcessor.java From flink with Apache License 2.0 | 4 votes |
public void initMetric(TaskMetricGroup metricGroup) {
    idleTime = metricGroup.getIOMetricGroup().getIdleTimeMsPerSecond();
}
Example #11
Source File: DummyEnvironment.java From flink with Apache License 2.0 | 4 votes |
@Override
public TaskMetricGroup getMetricGroup() {
    return UnregisteredMetricGroups.createUnregisteredTaskMetricGroup();
}
Example #12
Source File: StreamMockEnvironment.java From flink with Apache License 2.0 | 4 votes |
@Override
public TaskMetricGroup getMetricGroup() {
    return this.taskMetricGroup;
}
Example #13
Source File: StreamMockEnvironment.java From flink with Apache License 2.0 | 4 votes |
public void setTaskMetricGroup(TaskMetricGroup taskMetricGroup) {
    this.taskMetricGroup = taskMetricGroup;
}
Example #14
Source File: TwoInputStreamTaskTest.java From flink with Apache License 2.0 | 4 votes |
@Test
public void testOperatorMetricReuse() throws Exception {
    final TwoInputStreamTaskTestHarness<String, String, String> testHarness = new TwoInputStreamTaskTestHarness<>(
        TwoInputStreamTask::new,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOperatorChain(new OperatorID(), new DuplicatingOperator())
        .chain(new OperatorID(), new OneInputStreamTaskTest.DuplicatingOperator(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
        .chain(new OperatorID(), new OneInputStreamTaskTest.DuplicatingOperator(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
        .finish();

    final TaskMetricGroup taskMetricGroup = new UnregisteredMetricGroups.UnregisteredTaskMetricGroup() {
        @Override
        public OperatorMetricGroup getOrAddOperator(OperatorID operatorID, String name) {
            return new OperatorMetricGroup(NoOpMetricRegistry.INSTANCE, this, operatorID, name);
        }
    };

    final StreamMockEnvironment env = new StreamMockEnvironment(
        testHarness.jobConfig, testHarness.taskConfig, testHarness.memorySize,
        new MockInputSplitProvider(), testHarness.bufferSize, new TestTaskStateManager()) {
        @Override
        public TaskMetricGroup getMetricGroup() {
            return taskMetricGroup;
        }
    };

    final Counter numRecordsInCounter = taskMetricGroup.getIOMetricGroup().getNumRecordsInCounter();
    final Counter numRecordsOutCounter = taskMetricGroup.getIOMetricGroup().getNumRecordsOutCounter();

    testHarness.invoke(env);
    testHarness.waitForTaskRunning();

    final int numRecords1 = 5;
    final int numRecords2 = 3;

    for (int x = 0; x < numRecords1; x++) {
        testHarness.processElement(new StreamRecord<>("hello"), 0, 0);
    }
    for (int x = 0; x < numRecords2; x++) {
        testHarness.processElement(new StreamRecord<>("hello"), 1, 0);
    }

    testHarness.waitForInputProcessing();
    assertEquals(numRecords1 + numRecords2, numRecordsInCounter.getCount());
    assertEquals((numRecords1 + numRecords2) * 2 * 2 * 2, numRecordsOutCounter.getCount());

    testHarness.endInput();
    testHarness.waitForTaskCompletion();
}
Example #15
Source File: MultipleInputStreamTaskTest.java From flink with Apache License 2.0 | 4 votes |
@Test
public void testOperatorMetricReuse() throws Exception {
    TaskMetricGroup taskMetricGroup = new UnregisteredMetricGroups.UnregisteredTaskMetricGroup() {
        @Override
        public OperatorMetricGroup getOrAddOperator(OperatorID operatorID, String name) {
            return new OperatorMetricGroup(NoOpMetricRegistry.INSTANCE, this, operatorID, name);
        }
    };

    try (StreamTaskMailboxTestHarness<String> testHarness =
            new MultipleInputStreamTaskTestHarnessBuilder<>(MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
                .addInput(BasicTypeInfo.STRING_TYPE_INFO)
                .addInput(BasicTypeInfo.STRING_TYPE_INFO)
                .addInput(BasicTypeInfo.STRING_TYPE_INFO)
                .setupOperatorChain(new DuplicatingOperatorFactory())
                .chain(new OneInputStreamTaskTest.DuplicatingOperator(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
                .chain(new OneInputStreamTaskTest.DuplicatingOperator(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
                .finish()
                .setTaskMetricGroup(taskMetricGroup)
                .build()) {
        Counter numRecordsInCounter = taskMetricGroup.getIOMetricGroup().getNumRecordsInCounter();
        Counter numRecordsOutCounter = taskMetricGroup.getIOMetricGroup().getNumRecordsOutCounter();

        int numRecords1 = 5;
        int numRecords2 = 3;
        int numRecords3 = 2;
        for (int x = 0; x < numRecords1; x++) {
            testHarness.processElement(new StreamRecord<>("hello"), 0, 0);
        }
        for (int x = 0; x < numRecords2; x++) {
            testHarness.processElement(new StreamRecord<>("hello"), 1, 0);
        }
        for (int x = 0; x < numRecords3; x++) {
            testHarness.processElement(new StreamRecord<>("hello"), 2, 0);
        }

        int totalRecords = numRecords1 + numRecords2 + numRecords3;
        assertEquals(totalRecords, numRecordsInCounter.getCount());
        assertEquals((totalRecords) * 2 * 2 * 2, numRecordsOutCounter.getCount());

        testHarness.waitForTaskCompletion();
    }
}
Example #16
Source File: ScheduledDropwizardReporterTest.java From flink with Apache License 2.0 | 4 votes |
/**
 * Tests that the registered metrics' names don't contain invalid characters.
 */
@Test
public void testAddingMetrics() throws Exception {
    Configuration configuration = new Configuration();

    String taskName = "test\"Ta\"..sk";
    String jobName = "testJ\"ob:-!ax..?";
    String hostname = "loc<>al\"::host\".:";
    String taskManagerId = "tas:kMana::ger";
    String counterName = "testCounter";

    configuration.setString(MetricOptions.SCOPE_NAMING_TASK, "<host>.<tm_id>.<job_name>");
    configuration.setString(MetricOptions.SCOPE_DELIMITER, "_");

    MetricRegistryConfiguration metricRegistryConfiguration = MetricRegistryConfiguration.fromConfiguration(configuration);

    MetricRegistryImpl metricRegistry = new MetricRegistryImpl(
        metricRegistryConfiguration,
        Collections.singletonList(ReporterSetup.forReporter("test", new TestingScheduledDropwizardReporter())));

    char delimiter = metricRegistry.getDelimiter();

    TaskManagerMetricGroup tmMetricGroup = new TaskManagerMetricGroup(metricRegistry, hostname, taskManagerId);
    TaskManagerJobMetricGroup tmJobMetricGroup = new TaskManagerJobMetricGroup(metricRegistry, tmMetricGroup, new JobID(), jobName);
    TaskMetricGroup taskMetricGroup = new TaskMetricGroup(metricRegistry, tmJobMetricGroup, new JobVertexID(), new AbstractID(), taskName, 0, 0);

    SimpleCounter myCounter = new SimpleCounter();
    com.codahale.metrics.Meter dropwizardMeter = new com.codahale.metrics.Meter();

    DropwizardMeterWrapper meterWrapper = new DropwizardMeterWrapper(dropwizardMeter);

    taskMetricGroup.counter(counterName, myCounter);
    taskMetricGroup.meter("meter", meterWrapper);

    List<MetricReporter> reporters = metricRegistry.getReporters();

    assertTrue(reporters.size() == 1);

    MetricReporter metricReporter = reporters.get(0);

    assertTrue("Reporter should be of type ScheduledDropwizardReporter", metricReporter instanceof ScheduledDropwizardReporter);

    TestingScheduledDropwizardReporter reporter = (TestingScheduledDropwizardReporter) metricReporter;

    Map<Counter, String> counters = reporter.getCounters();
    assertTrue(counters.containsKey(myCounter));

    Map<Meter, String> meters = reporter.getMeters();
    assertTrue(meters.containsKey(meterWrapper));

    String expectedCounterName = reporter.filterCharacters(hostname)
        + delimiter
        + reporter.filterCharacters(taskManagerId)
        + delimiter
        + reporter.filterCharacters(jobName)
        + delimiter
        + reporter.filterCharacters(counterName);

    assertEquals(expectedCounterName, counters.get(myCounter));

    metricRegistry.shutdown().get();
}
Example #17
Source File: StatsDReporterTest.java From flink with Apache License 2.0 | 4 votes |
/**
 * Tests that the registered metrics' names don't contain invalid characters.
 */
@Test
public void testAddingMetrics() throws Exception {
    Configuration configuration = new Configuration();

    String taskName = "testTask";
    String jobName = "testJob:-!ax..?";
    String hostname = "local::host:";
    String taskManagerId = "tas:kMana::ger";
    String counterName = "testCounter";

    configuration.setString(MetricOptions.SCOPE_NAMING_TASK, "<host>.<tm_id>.<job_name>");
    configuration.setString(MetricOptions.SCOPE_DELIMITER, "_");

    MetricRegistryImpl metricRegistry = new MetricRegistryImpl(
        MetricRegistryConfiguration.fromConfiguration(configuration),
        Collections.singletonList(ReporterSetup.forReporter("test", new TestingStatsDReporter())));

    char delimiter = metricRegistry.getDelimiter();

    TaskManagerMetricGroup tmMetricGroup = new TaskManagerMetricGroup(metricRegistry, hostname, taskManagerId);
    TaskManagerJobMetricGroup tmJobMetricGroup = new TaskManagerJobMetricGroup(metricRegistry, tmMetricGroup, new JobID(), jobName);
    TaskMetricGroup taskMetricGroup = new TaskMetricGroup(metricRegistry, tmJobMetricGroup, new JobVertexID(), new AbstractID(), taskName, 0, 0);

    SimpleCounter myCounter = new SimpleCounter();

    taskMetricGroup.counter(counterName, myCounter);

    List<MetricReporter> reporters = metricRegistry.getReporters();

    assertTrue(reporters.size() == 1);

    MetricReporter metricReporter = reporters.get(0);

    assertTrue("Reporter should be of type StatsDReporter", metricReporter instanceof StatsDReporter);

    TestingStatsDReporter reporter = (TestingStatsDReporter) metricReporter;

    Map<Counter, String> counters = reporter.getCounters();

    assertTrue(counters.containsKey(myCounter));

    String expectedCounterName = reporter.filterCharacters(hostname)
        + delimiter
        + reporter.filterCharacters(taskManagerId)
        + delimiter
        + reporter.filterCharacters(jobName)
        + delimiter
        + reporter.filterCharacters(counterName);

    assertEquals(expectedCounterName, counters.get(myCounter));

    metricRegistry.shutdown().get();
}
Example #18
Source File: OneInputStreamTaskTest.java From flink with Apache License 2.0 | 4 votes |
@Test
@SuppressWarnings("unchecked")
public void testWatermarkMetrics() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

    OneInputStreamOperator<String, String> headOperator = new WatermarkMetricOperator();
    OperatorID headOperatorId = new OperatorID();

    OneInputStreamOperator<String, String> chainedOperator = new WatermarkMetricOperator();
    OperatorID chainedOperatorId = new OperatorID();

    testHarness.setupOperatorChain(headOperatorId, headOperator)
        .chain(chainedOperatorId, chainedOperator, BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
        .finish();

    InterceptingOperatorMetricGroup headOperatorMetricGroup = new InterceptingOperatorMetricGroup();
    InterceptingOperatorMetricGroup chainedOperatorMetricGroup = new InterceptingOperatorMetricGroup();
    InterceptingTaskMetricGroup taskMetricGroup = new InterceptingTaskMetricGroup() {
        @Override
        public OperatorMetricGroup getOrAddOperator(OperatorID id, String name) {
            if (id.equals(headOperatorId)) {
                return headOperatorMetricGroup;
            } else if (id.equals(chainedOperatorId)) {
                return chainedOperatorMetricGroup;
            } else {
                return super.getOrAddOperator(id, name);
            }
        }
    };

    StreamMockEnvironment env = new StreamMockEnvironment(
        testHarness.jobConfig, testHarness.taskConfig, testHarness.memorySize,
        new MockInputSplitProvider(), testHarness.bufferSize, new TestTaskStateManager()) {
        @Override
        public TaskMetricGroup getMetricGroup() {
            return taskMetricGroup;
        }
    };

    testHarness.invoke(env);
    testHarness.waitForTaskRunning();

    Gauge<Long> taskInputWatermarkGauge = (Gauge<Long>) taskMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
    Gauge<Long> headInputWatermarkGauge = (Gauge<Long>) headOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
    Gauge<Long> headOutputWatermarkGauge = (Gauge<Long>) headOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);
    Gauge<Long> chainedInputWatermarkGauge = (Gauge<Long>) chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
    Gauge<Long> chainedOutputWatermarkGauge = (Gauge<Long>) chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);

    Assert.assertEquals("A metric was registered multiple times.",
        5,
        new HashSet<>(Arrays.asList(
            taskInputWatermarkGauge,
            headInputWatermarkGauge,
            headOutputWatermarkGauge,
            chainedInputWatermarkGauge,
            chainedOutputWatermarkGauge))
            .size());

    Assert.assertEquals(Long.MIN_VALUE, taskInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(Long.MIN_VALUE, headInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(Long.MIN_VALUE, headOutputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(Long.MIN_VALUE, chainedInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(Long.MIN_VALUE, chainedOutputWatermarkGauge.getValue().longValue());

    testHarness.processElement(new Watermark(1L));
    testHarness.waitForInputProcessing();
    Assert.assertEquals(1L, taskInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(1L, headInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(2L, headOutputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(2L, chainedInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(4L, chainedOutputWatermarkGauge.getValue().longValue());

    testHarness.processElement(new Watermark(2L));
    testHarness.waitForInputProcessing();
    Assert.assertEquals(2L, taskInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(2L, headInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(4L, headOutputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(4L, chainedInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(8L, chainedOutputWatermarkGauge.getValue().longValue());

    testHarness.endInput();
    testHarness.waitForTaskCompletion();
}
Example #19
Source File: MockEnvironment.java From flink with Apache License 2.0 | 4 votes |
protected MockEnvironment(
        JobID jobID,
        JobVertexID jobVertexID,
        String taskName,
        MockInputSplitProvider inputSplitProvider,
        int bufferSize,
        Configuration taskConfiguration,
        ExecutionConfig executionConfig,
        IOManager ioManager,
        TaskStateManager taskStateManager,
        GlobalAggregateManager aggregateManager,
        int maxParallelism,
        int parallelism,
        int subtaskIndex,
        ClassLoader userCodeClassLoader,
        TaskMetricGroup taskMetricGroup,
        TaskManagerRuntimeInfo taskManagerRuntimeInfo,
        MemoryManager memManager,
        ExternalResourceInfoProvider externalResourceInfoProvider) {
    this.jobID = jobID;
    this.jobVertexID = jobVertexID;
    this.taskInfo = new TaskInfo(taskName, maxParallelism, subtaskIndex, parallelism, 0);
    this.jobConfiguration = new Configuration();
    this.taskConfiguration = taskConfiguration;
    this.inputs = new LinkedList<>();
    this.outputs = new LinkedList<ResultPartitionWriter>();
    this.memManager = memManager;
    this.ioManager = ioManager;
    this.taskManagerRuntimeInfo = taskManagerRuntimeInfo;
    this.executionConfig = executionConfig;
    this.inputSplitProvider = inputSplitProvider;
    this.bufferSize = bufferSize;
    this.accumulatorRegistry = new AccumulatorRegistry(jobID, getExecutionId());
    this.kvStateRegistry = new KvStateRegistry();
    this.taskKvStateRegistry = kvStateRegistry.createTaskRegistry(jobID, getJobVertexId());
    this.userCodeClassLoader = Preconditions.checkNotNull(userCodeClassLoader);
    this.taskStateManager = Preconditions.checkNotNull(taskStateManager);
    this.aggregateManager = Preconditions.checkNotNull(aggregateManager);
    this.taskMetricGroup = taskMetricGroup;
    this.externalResourceInfoProvider = Preconditions.checkNotNull(externalResourceInfoProvider);
}
Example #20
Source File: OneInputStreamTaskTest.java From flink with Apache License 2.0 | 4 votes |
@Test
public void testWatermarkMetrics() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

    OneInputStreamOperator<String, String> headOperator = new WatermarkMetricOperator();
    OperatorID headOperatorId = new OperatorID();

    OneInputStreamOperator<String, String> chainedOperator = new WatermarkMetricOperator();
    OperatorID chainedOperatorId = new OperatorID();

    testHarness.setupOperatorChain(headOperatorId, headOperator)
        .chain(chainedOperatorId, chainedOperator, BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()))
        .finish();

    InterceptingOperatorMetricGroup headOperatorMetricGroup = new InterceptingOperatorMetricGroup();
    InterceptingOperatorMetricGroup chainedOperatorMetricGroup = new InterceptingOperatorMetricGroup();
    InterceptingTaskMetricGroup taskMetricGroup = new InterceptingTaskMetricGroup() {
        @Override
        public OperatorMetricGroup getOrAddOperator(OperatorID id, String name) {
            if (id.equals(headOperatorId)) {
                return headOperatorMetricGroup;
            } else if (id.equals(chainedOperatorId)) {
                return chainedOperatorMetricGroup;
            } else {
                return super.getOrAddOperator(id, name);
            }
        }
    };

    StreamMockEnvironment env = new StreamMockEnvironment(
        testHarness.jobConfig, testHarness.taskConfig, testHarness.memorySize,
        new MockInputSplitProvider(), testHarness.bufferSize, new TestTaskStateManager()) {
        @Override
        public TaskMetricGroup getMetricGroup() {
            return taskMetricGroup;
        }
    };

    testHarness.invoke(env);
    testHarness.waitForTaskRunning();

    Gauge<Long> taskInputWatermarkGauge = (Gauge<Long>) taskMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
    Gauge<Long> headInputWatermarkGauge = (Gauge<Long>) headOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
    Gauge<Long> headOutputWatermarkGauge = (Gauge<Long>) headOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);
    Gauge<Long> chainedInputWatermarkGauge = (Gauge<Long>) chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
    Gauge<Long> chainedOutputWatermarkGauge = (Gauge<Long>) chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);

    Assert.assertEquals("A metric was registered multiple times.",
        5,
        new HashSet<>(Arrays.asList(
            taskInputWatermarkGauge,
            headInputWatermarkGauge,
            headOutputWatermarkGauge,
            chainedInputWatermarkGauge,
            chainedOutputWatermarkGauge))
            .size());

    Assert.assertEquals(Long.MIN_VALUE, taskInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(Long.MIN_VALUE, headInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(Long.MIN_VALUE, headOutputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(Long.MIN_VALUE, chainedInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(Long.MIN_VALUE, chainedOutputWatermarkGauge.getValue().longValue());

    testHarness.processElement(new Watermark(1L));
    testHarness.waitForInputProcessing();
    Assert.assertEquals(1L, taskInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(1L, headInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(2L, headOutputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(2L, chainedInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(4L, chainedOutputWatermarkGauge.getValue().longValue());

    testHarness.processElement(new Watermark(2L));
    testHarness.waitForInputProcessing();
    Assert.assertEquals(2L, taskInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(2L, headInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(4L, headOutputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(4L, chainedInputWatermarkGauge.getValue().longValue());
    Assert.assertEquals(8L, chainedOutputWatermarkGauge.getValue().longValue());

    testHarness.endInput();
    testHarness.waitForTaskCompletion();
}
Example #21
Source File: TaskAsyncCallTest.java From flink with Apache License 2.0 | 4 votes |
private Task createTask(Class<? extends AbstractInvokable> invokableClass) throws Exception {
    final TestingClassLoaderLease classLoaderHandle = TestingClassLoaderLease.newBuilder()
        .setGetOrResolveClassLoaderFunction((permanentBlobKeys, urls) -> new TestUserCodeClassLoader())
        .build();

    ResultPartitionConsumableNotifier consumableNotifier = new NoOpResultPartitionConsumableNotifier();
    PartitionProducerStateChecker partitionProducerStateChecker = mock(PartitionProducerStateChecker.class);
    Executor executor = mock(Executor.class);
    TaskMetricGroup taskMetricGroup = UnregisteredMetricGroups.createUnregisteredTaskMetricGroup();

    JobInformation jobInformation = new JobInformation(
        new JobID(),
        "Job Name",
        new SerializedValue<>(new ExecutionConfig()),
        new Configuration(),
        Collections.emptyList(),
        Collections.emptyList());

    TaskInformation taskInformation = new TaskInformation(
        new JobVertexID(),
        "Test Task",
        1,
        1,
        invokableClass.getName(),
        new Configuration());

    return new Task(
        jobInformation,
        taskInformation,
        new ExecutionAttemptID(),
        new AllocationID(),
        0,
        0,
        Collections.<ResultPartitionDeploymentDescriptor>emptyList(),
        Collections.<InputGateDeploymentDescriptor>emptyList(),
        0,
        mock(MemoryManager.class),
        mock(IOManager.class),
        shuffleEnvironment,
        new KvStateService(new KvStateRegistry(), null, null),
        mock(BroadcastVariableManager.class),
        new TaskEventDispatcher(),
        ExternalResourceInfoProvider.NO_EXTERNAL_RESOURCES,
        new TestTaskStateManager(),
        mock(TaskManagerActions.class),
        mock(InputSplitProvider.class),
        mock(CheckpointResponder.class),
        new NoOpTaskOperatorEventGateway(),
        new TestGlobalAggregateManager(),
        classLoaderHandle,
        mock(FileCache.class),
        new TestingTaskManagerRuntimeInfo(),
        taskMetricGroup,
        consumableNotifier,
        partitionProducerStateChecker,
        executor);
}
Example #22
Source File: TestTaskBuilder.java From flink with Apache License 2.0 | 4 votes |
public Task build() throws Exception {
    final JobVertexID jobVertexId = new JobVertexID();

    final SerializedValue<ExecutionConfig> serializedExecutionConfig = new SerializedValue<>(executionConfig);

    final JobInformation jobInformation = new JobInformation(
        jobId,
        "Test Job",
        serializedExecutionConfig,
        new Configuration(),
        requiredJarFileBlobKeys,
        Collections.emptyList());

    final TaskInformation taskInformation = new TaskInformation(
        jobVertexId,
        "Test Task",
        1,
        1,
        invokable.getName(),
        taskConfig);

    final TaskMetricGroup taskMetricGroup = UnregisteredMetricGroups.createUnregisteredTaskMetricGroup();

    return new Task(
        jobInformation,
        taskInformation,
        executionAttemptId,
        allocationID,
        0,
        0,
        resultPartitions,
        inputGates,
        0,
        MemoryManagerBuilder.newBuilder().setMemorySize(1024 * 1024).build(),
        mock(IOManager.class),
        shuffleEnvironment,
        kvStateService,
        new BroadcastVariableManager(),
        new TaskEventDispatcher(),
        externalResourceInfoProvider,
        new TestTaskStateManager(),
        taskManagerActions,
        new MockInputSplitProvider(),
        new TestCheckpointResponder(),
        new NoOpTaskOperatorEventGateway(),
        new TestGlobalAggregateManager(),
        classLoaderHandle,
        mock(FileCache.class),
        new TestingTaskManagerRuntimeInfo(taskManagerConfig),
        taskMetricGroup,
        consumableNotifier,
        partitionProducerStateChecker,
        executor);
}
Example #23
Source File: RuntimeEnvironment.java From flink with Apache License 2.0 | 4 votes |
@Override
public TaskMetricGroup getMetricGroup() {
    return metrics;
}
Example #24
Source File: RuntimeEnvironment.java From flink with Apache License 2.0 | 4 votes |
public RuntimeEnvironment(
        JobID jobId,
        JobVertexID jobVertexId,
        ExecutionAttemptID executionId,
        ExecutionConfig executionConfig,
        TaskInfo taskInfo,
        Configuration jobConfiguration,
        Configuration taskConfiguration,
        ClassLoader userCodeClassLoader,
        MemoryManager memManager,
        IOManager ioManager,
        BroadcastVariableManager bcVarManager,
        TaskStateManager taskStateManager,
        GlobalAggregateManager aggregateManager,
        AccumulatorRegistry accumulatorRegistry,
        TaskKvStateRegistry kvStateRegistry,
        InputSplitProvider splitProvider,
        Map<String, Future<Path>> distCacheEntries,
        ResultPartitionWriter[] writers,
        IndexedInputGate[] inputGates,
        TaskEventDispatcher taskEventDispatcher,
        CheckpointResponder checkpointResponder,
        TaskOperatorEventGateway operatorEventGateway,
        TaskManagerRuntimeInfo taskManagerInfo,
        TaskMetricGroup metrics,
        Task containingTask,
        ExternalResourceInfoProvider externalResourceInfoProvider) {
    this.jobId = checkNotNull(jobId);
    this.jobVertexId = checkNotNull(jobVertexId);
    this.executionId = checkNotNull(executionId);
    this.taskInfo = checkNotNull(taskInfo);
    this.executionConfig = checkNotNull(executionConfig);
    this.jobConfiguration = checkNotNull(jobConfiguration);
    this.taskConfiguration = checkNotNull(taskConfiguration);
    this.userCodeClassLoader = checkNotNull(userCodeClassLoader);
    this.memManager = checkNotNull(memManager);
    this.ioManager = checkNotNull(ioManager);
    this.bcVarManager = checkNotNull(bcVarManager);
    this.taskStateManager = checkNotNull(taskStateManager);
    this.aggregateManager = checkNotNull(aggregateManager);
    this.accumulatorRegistry = checkNotNull(accumulatorRegistry);
    this.kvStateRegistry = checkNotNull(kvStateRegistry);
    this.splitProvider = checkNotNull(splitProvider);
    this.distCacheEntries = checkNotNull(distCacheEntries);
    this.writers = checkNotNull(writers);
    this.inputGates = checkNotNull(inputGates);
    this.taskEventDispatcher = checkNotNull(taskEventDispatcher);
    this.checkpointResponder = checkNotNull(checkpointResponder);
    this.operatorEventGateway = checkNotNull(operatorEventGateway);
    this.taskManagerInfo = checkNotNull(taskManagerInfo);
    this.containingTask = containingTask;
    this.metrics = metrics;
    this.externalResourceInfoProvider = checkNotNull(externalResourceInfoProvider);
}
Example #25
Source File: Task.java From flink with Apache License 2.0 | 4 votes |
public TaskMetricGroup getMetricGroup() {
    return metrics;
}
Example #26
Source File: SavepointEnvironment.java From flink with Apache License 2.0 | 4 votes |
@Override
public TaskMetricGroup getMetricGroup() {
    return UnregisteredMetricGroups.createUnregisteredTaskMetricGroup();
}