org.springframework.batch.item.ExecutionContext Java Examples
The following examples show how to use
org.springframework.batch.item.ExecutionContext.
Each example notes its original project, source file, and license.
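Before diving into the examples, a quick orientation: an ExecutionContext is essentially a key-value map that Spring Batch persists with each step and job execution, which is what allows state to survive a restart. The following minimal sketch (not from any of the projects below) shows the core accessors that recur throughout the examples:

import org.springframework.batch.item.ExecutionContext;

public class ExecutionContextBasics {

    public static void main(String[] args) {
        ExecutionContext context = new ExecutionContext();

        // Typed put/get pairs exist for strings and common primitives.
        context.putString("fileName", "input-1.csv");
        context.putLong("read.count", 42L);

        System.out.println(context.getString("fileName"));     // input-1.csv
        System.out.println(context.getLong("read.count"));     // 42
        System.out.println(context.containsKey("read.count")); // true

        // The dirty flag signals that the context has unsaved changes.
        System.out.println(context.isDirty());                 // true
    }
}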
Example #1
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbNoSkipJob_Failed() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbNoSkipJob", "metrics/flatFileToDbNoSkipJob_Failed.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.FAILED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 3L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(1L).withStreamOpenCount(1L).withStreamUpdateCount(2L).withStreamCloseCount(0L)
            .withBeforeReadCount(6L).withReadCount(6L).withAfterReadCount(5L).withReadErrorCount(0L)
            .withBeforeProcessCount(3L).withProcessCount(3L).withAfterProcessCount(3L).withProcessErrorCount(1L)
            .withBeforeWriteCount(3L).withWriteCount(writeCount).withAfterWriteCount(3L).withAfterChunkCount(1L)
            .withChunkErrorCount(1L).withSkipInReadCount(0L).withSkipInProcessCount(0L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbNoSkipJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #2
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbSkipJob_SkipInProcess_Failed() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipJob", "metrics/flatFileToDbSkipJob_SkipInProcess_Failed.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.FAILED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(3L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(12L).withReadCount(12L).withAfterReadCount(12L).withReadErrorCount(0L)
            .withBeforeProcessCount(7L).withProcessCount(7L).withAfterProcessCount(7L).withProcessErrorCount(5L)
            .withBeforeWriteCount(7L).withWriteCount(writeCount).withAfterWriteCount(7L).withAfterChunkCount(3L)
            .withChunkErrorCount(6L).withSkipInReadCount(0L).withSkipInProcessCount(2L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbSkipJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #3
Source File: SeedMultiResourceItemReader.java From seed with Apache License 2.0
/**
 * Reads the next item, jumping to the next resource if necessary.
 */
@Override
public T read() throws Exception, UnexpectedInputException, ParseException {
    if (noInput) {
        return null;
    }
    // If there is no current resource, then this is the first item: set the
    // current resource to 0 and open the first delegate.
    if (currentResource == -1) {
        currentResource = 0;
        delegate.setResource(resources[currentResource]);
        delegate.open(new ExecutionContext());
    }
    return readNextItem();
}
Example #4
Source File: CustomMultiResourcePartitioner.java From tutorials with MIT License
/**
 * Assign the filename of each of the injected resources to an
 * {@link ExecutionContext}.
 *
 * @see Partitioner#partition(int)
 */
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    Map<String, ExecutionContext> map = new HashMap<String, ExecutionContext>(gridSize);
    int i = 0, k = 1;
    for (Resource resource : resources) {
        ExecutionContext context = new ExecutionContext();
        Assert.state(resource.exists(), "Resource does not exist: " + resource);
        context.putString(keyName, resource.getFilename());
        context.putString("opFileName", "output" + k++ + ".xml");
        map.put(PARTITION_KEY + i, context);
        i++;
    }
    return map;
}
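For context, a partitioner like this is normally wired into a manager step that launches one worker step execution per returned ExecutionContext. A minimal sketch, assuming a stepBuilderFactory field and an existing workerStep bean (both names are illustrative, not from the tutorial):

@Bean
public Step managerStep(Step workerStep, CustomMultiResourcePartitioner partitioner) {
    // Each entry in the map returned by partition() becomes one
    // execution of workerStep with that entry's ExecutionContext.
    return stepBuilderFactory.get("managerStep")
            .partitioner("workerStep", partitioner)
            .step(workerStep)
            .gridSize(4) // hint passed through to partition(gridSize)
            .taskExecutor(new SimpleAsyncTaskExecutor())
            .build();
}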
Example #5
Source File: SpringBatchBuildReportHandler.java From spring-cloud-release-tools with Apache License 2.0
private List<Table> buildTable(List<StepExecution> stepContexts) {
    return stepContexts.stream().map(step -> {
        String date = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")
                .format(step.getStartTime());
        long millis = ChronoUnit.MILLIS.between(step.getStartTime().toInstant(),
                step.getEndTime().toInstant());
        ExecutionContext context = step.getExecutionContext();
        ExecutionResultReport entity = (ExecutionResultReport) context.get("entity");
        if (entity == null) {
            return null;
        }
        String projectName = TrainPostReleaseReleaserTask.class
                .isAssignableFrom(entity.getReleaserTaskType()) ? "postRelease"
                        : entity.getProjectName();
        return new Table(date, time(millis), projectName, entity.getShortName(),
                entity.getDescription(), entity.getState(), entity.getExceptions());
    }).filter(Objects::nonNull).collect(Collectors.toCollection(LinkedList::new));
}
Example #6
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbSkipJob_SkipInWrite_ProcessorNonTransactional() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipProcessorNonTransactionalJob",
            "metrics/flatFileToDbSkipJob_SkipInWrite.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(4L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(9L).withReadCount(9L).withAfterReadCount(8L).withReadErrorCount(0L)
            .withBeforeProcessCount(8L).withProcessCount(8L).withAfterProcessCount(8L).withProcessErrorCount(0L)
            .withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(7L).withWriteErrorCount(4L)
            .withAfterChunkCount(4L).withChunkErrorCount(2L).withSkipInReadCount(0L).withSkipInProcessCount(0L)
            .withSkipInWriteCount(1L).build();
    // TODO Bug in beforeWrite listener in Spring Batch?
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbSkipProcessorNonTransactionalJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #7
Source File: CogstackJobPartitioner.java From CogStack-Pipeline with Apache License 2.0
private boolean populateMap(ScheduledPartitionParams params, Map<String, ExecutionContext> result,
        long start, long end, int counter) {
    checkForEmptyPartitions = true;
    if (checkForEmptyPartitions) {
        long recordCountThisPartition = getRecordCountThisPartition(Long.toString(start), Long.toString(end),
                params.getMinTimeStamp().toString(), params.getMaxTimeStamp().toString());
        if (recordCountThisPartition > 0L) {
            result.put("partition" + counter, getNewExecutionContext(params, start, end));
            logger.info("partition " + counter + " created");
            return true;
        } else {
            return false;
        }
    } else {
        result.put("partition" + counter, getNewExecutionContext(params, start, end));
        logger.info("partition " + counter + " created");
        return true;
    }
}
Example #8
Source File: NamedColumnJdbcItemReaderTests.java From spring-cloud-task-app-starters with Apache License 2.0
@Test
public void testGetSqlRows() throws Exception {
    NamedColumnJdbcItemReaderFactory factory = new NamedColumnJdbcItemReaderFactory();
    factory.setDataSource(dataSource);
    factory.setSql("SELECT name, id FROM test");
    factory.afterPropertiesSet();
    NamedColumnJdbcItemReader reader = factory.getObject();
    reader.afterPropertiesSet();
    reader.open(new ExecutionContext(new HashMap<String, Object>()));
    reader.setDelimiter(",");
    verifyRead(reader, "Bob,1");
    verifyRead(reader, "Jane,2");
    verifyRead(reader, "John,3");
    verifyRead(reader, null);
}
Example #9
Source File: IncrementalColumnRangePartitionerTests.java From spring-cloud-task-app-starters with Apache License 2.0
@Test
public void testTwoPartitions() {
    jdbc.execute("insert into bar (foo) values (1), (2), (3), (4)");
    partitioner.setColumn("foo");
    partitioner.setTable("bar");
    partitioner.setPartitions(2);
    partitioner.beforeStep(new StepExecution("step1", new JobExecution(5L)));
    Map<String, ExecutionContext> partitions = partitioner.partition(1);
    assertEquals(2, partitions.size());
    assertTrue(partitions.containsKey("partition0"));
    assertEquals("WHERE (foo BETWEEN 1 AND 2)", partitions.get("partition0").get("partClause"));
    assertEquals("-p0", partitions.get("partition0").get("partSuffix"));
    assertTrue(partitions.containsKey("partition1"));
    assertEquals("WHERE (foo BETWEEN 3 AND 4)", partitions.get("partition1").get("partClause"));
    assertEquals("-p1", partitions.get("partition1").get("partSuffix"));
}
Example #10
Source File: StepExecutionJacksonMixInTests.java From spring-cloud-dataflow with Apache License 2.0
/**
 * Assert that, by using the {@link ExecutionContextJacksonMixIn}, Jackson renders
 * the Step Execution Context correctly.
 *
 * @throws JsonProcessingException if a JSON generation error occurs.
 */
@Test
public void testSerializationOfSingleStepExecution() throws JsonProcessingException {
    final ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.addMixIn(StepExecution.class, StepExecutionJacksonMixIn.class);
    objectMapper.addMixIn(ExecutionContext.class, ExecutionContextJacksonMixIn.class);
    final StepExecution stepExecution = getStepExecution();
    final String result = objectMapper.writeValueAsString(stepExecution);
    assertThat(result, not(containsString("\"executionContext\":{\"dirty\":true,\"empty\":false}")));
    assertThat(result, containsString("\"executionContext\":{\"dirty\":true,\"empty\":false,\"values\":[{"));
    assertThat(result, containsString("{\"counter\":1234}"));
    assertThat(result, containsString("{\"myDouble\":1.123456}"));
    assertThat(result, containsString("{\"Josh\":4444444444}"));
    assertThat(result, containsString("{\"awesomeString\":\"Yep\"}"));
    assertThat(result, containsString("{\"hello\":\"world\""));
    assertThat(result, containsString("{\"counter2\":9999}"));
}
Example #11
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbSkipJob_SkipInProcess() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipJob", "metrics/flatFileToDbSkipJob_SkipInProcess.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(3L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(9L).withReadCount(9L).withAfterReadCount(8L).withReadErrorCount(0L)
            .withBeforeProcessCount(7L).withProcessCount(7L).withAfterProcessCount(7L).withProcessErrorCount(1L)
            .withBeforeWriteCount(7L).withWriteCount(writeCount).withAfterWriteCount(7L).withAfterChunkCount(3L)
            .withChunkErrorCount(1L).withSkipInReadCount(0L).withSkipInProcessCount(1L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbSkipJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #12
Source File: DefaultExceptionJobDemo.java From SpringAll with MIT License
@Bean
public Job defaultExceptionJob() {
    return jobBuilderFactory.get("defaultExceptionJob")
            .start(
                stepBuilderFactory.get("step")
                    .tasklet((stepContribution, chunkContext) -> {
                        // Get the execution context
                        ExecutionContext executionContext = chunkContext.getStepContext()
                                .getStepExecution().getExecutionContext();
                        if (executionContext.containsKey("success")) {
                            System.out.println("Job executed successfully");
                            return RepeatStatus.FINISHED;
                        } else {
                            String errorMessage = "An exception occurred while processing the task";
                            System.out.println(errorMessage);
                            executionContext.put("success", true);
                            throw new RuntimeException(errorMessage);
                        }
                    }).build()
            ).build();
}
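This works because the step's ExecutionContext is persisted in the job repository: the "success" flag written just before the exception survives, so a restart of the failed job finds the key and completes. If a later step also needed to read such a flag, one common option is Spring Batch's ExecutionContextPromotionListener, which copies selected keys from the step context into the job context. A minimal sketch, reusing the key from the example above:

@Bean
public ExecutionContextPromotionListener promotionListener() {
    ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
    // Keys listed here are copied from the step ExecutionContext into
    // the job ExecutionContext once the step finishes.
    listener.setKeys(new String[] { "success" });
    return listener;
}

The listener would then be registered on the step builder via .listener(promotionListener()) before .build().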
Example #13
Source File: ExecutionContextDeserializationTests.java From spring-cloud-dataflow with Apache License 2.0
/**
 * Edge case: if the JSON data contains values in the execution context but the
 * {@code empty} property is {@code true}, the deserialized object will return
 * {@code false}; the property is ignored during deserialization.
 *
 * @throws IOException
 */
@Test
public void testFaultyExecutionContext() throws IOException {
    final ObjectMapper objectMapper = DataFlowTemplate.prepareObjectMapper(new ObjectMapper());
    final InputStream inputStream = ExecutionContextDeserializationTests.class
            .getResourceAsStream("/FaultyExecutionContextJson.txt");
    final String json = new String(StreamUtils.copyToByteArray(inputStream));
    ExecutionContext executionContext = objectMapper.readValue(json,
            new TypeReference<ExecutionContext>() {
            });
    assertEquals(2, executionContext.entrySet().size());
    assertEquals("org.springframework.cloud.task.app.timestamp.batch.TimestampBatchTaskConfiguration$1",
            executionContext.get("batch.taskletType"));
    assertEquals("org.springframework.batch.core.step.tasklet.TaskletStep",
            executionContext.get("batch.stepType"));
    assertTrue(executionContext.isDirty());
    assertFalse(executionContext.isEmpty());
}
Example #14
Source File: JobStepExecutionControllerTests.java From spring-cloud-dataflow with Apache License 2.0
@Before
public void setupMockMVC() {
    this.mockMvc = MockMvcBuilders.webAppContextSetup(wac)
            .defaultRequest(get("/").accept(MediaType.APPLICATION_JSON)).build();
    if (!initialized) {
        createStepExecution(JOB_NAME_ORIG, STEP_NAME_ORIG);
        createStepExecution(JOB_NAME_FOO, STEP_NAME_ORIG, STEP_NAME_FOO);
        createStepExecution(JOB_NAME_FOOBAR, STEP_NAME_ORIG, STEP_NAME_FOO, STEP_NAME_FOOBAR);
        initialized = true;
    }
    for (HttpMessageConverter<?> converter : adapter.getMessageConverters()) {
        if (converter instanceof MappingJackson2HttpMessageConverter) {
            final MappingJackson2HttpMessageConverter jacksonConverter =
                    (MappingJackson2HttpMessageConverter) converter;
            jacksonConverter.getObjectMapper().addMixIn(StepExecution.class, StepExecutionJacksonMixIn.class);
            jacksonConverter.getObjectMapper().addMixIn(ExecutionContext.class, ExecutionContextJacksonMixIn.class);
            jacksonConverter.getObjectMapper().setDateFormat(new ISO8601DateFormatWithMilliSeconds());
        }
    }
}
Example #15
Source File: ExecutionContextDeserializationTests.java From spring-cloud-dataflow with Apache License 2.0
@Test
public void testDeserializationOfBasicExecutionContext() throws IOException {
    final ObjectMapper objectMapper = DataFlowTemplate.prepareObjectMapper(new ObjectMapper());
    final InputStream inputStream = ExecutionContextDeserializationTests.class
            .getResourceAsStream("/BasicExecutionContextJson.txt");
    final String json = new String(StreamUtils.copyToByteArray(inputStream));
    ExecutionContext executionContext = objectMapper.readValue(json,
            new TypeReference<ExecutionContext>() {
            });
    assertEquals(2, executionContext.entrySet().size());
    assertEquals("org.springframework.cloud.task.app.timestamp.batch.TimestampBatchTaskConfiguration$1",
            executionContext.get("batch.taskletType"));
    assertEquals("org.springframework.batch.core.step.tasklet.TaskletStep",
            executionContext.get("batch.stepType"));
    assertFalse(executionContext.isDirty());
    assertFalse(executionContext.isEmpty());
}
Example #16
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbNoSkipJob_Success() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbNoSkipJob", "metrics/flatFileToDbNoSkipJob_Success.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 5L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(2L).withStreamOpenCount(1L).withStreamUpdateCount(3L).withStreamCloseCount(0L)
            .withBeforeReadCount(6L).withReadCount(6L).withAfterReadCount(5L).withReadErrorCount(0L)
            .withBeforeProcessCount(5L).withProcessCount(5L).withAfterProcessCount(5L).withProcessErrorCount(0L)
            .withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(5L).withAfterChunkCount(2L)
            .withChunkErrorCount(0L).withSkipInReadCount(0L).withSkipInProcessCount(0L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbNoSkipJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    // TODO
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #17
Source File: AlarmReaderTest.java From pinpoint with Apache License 2.0
@Test
public void readTest3() {
    StepExecution stepExecution = new StepExecution("alarmStep", null);
    ExecutionContext executionContext = new ExecutionContext();
    stepExecution.setExecutionContext(executionContext);
    AlarmServiceImpl alarmService = new AlarmServiceImpl(mock(AlarmDao.class)) {
        @Override
        public java.util.List<Rule> selectRuleByApplicationId(String applicationId) {
            return new LinkedList<>();
        }
    };
    AlarmReader reader = new AlarmReader(dataCollectorFactory, applicationIndexDao, alarmService);
    reader.beforeStep(stepExecution);
    assertNull(reader.read());
}
Example #18
Source File: AlarmReaderTest.java From pinpoint with Apache License 2.0
@Test
public void readTest() {
    StepExecution stepExecution = new StepExecution("alarmStep", null);
    ExecutionContext executionContext = new ExecutionContext();
    stepExecution.setExecutionContext(executionContext);
    AlarmReader reader = new AlarmReader(dataCollectorFactory, applicationIndexDao, alarmService);
    reader.beforeStep(stepExecution);
    for (int i = 0; i < 7; i++) {
        assertNotNull(reader.read());
    }
    assertNull(reader.read());
}
Example #19
Source File: EmployeeJobPartitioner.java From batchers with Apache License 2.0
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    int partitionCount = gridSize * PARTITIONS_PER_NODE;
    List<Long> employeeIds = taxCalculationRepository.getUnprocessedEmployeeIds(year, month,
            stepExecution.getJobExecutionId());
    int size = employeeIds.size();
    int targetSize = size / partitionCount + 1;
    Map<String, ExecutionContext> result = new HashMap<>();
    for (int index = 0, partitionNumber = 0; index < size; index += targetSize, partitionNumber++) {
        ExecutionContext value = new ExecutionContext();
        value.putLong("minValue", employeeIds.get(index));
        value.putLong("maxValue", employeeIds.get(Math.min(index + targetSize - 1, size - 1)));
        value.putLong("partition", partitionNumber);
        result.put("partition" + partitionNumber, value);
    }
    return result;
}
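On the worker side, the minValue/maxValue entries written by this partitioner would typically be consumed through step-scoped late binding. A minimal sketch, where findIdsBetween is a hypothetical repository method (the #{stepExecutionContext[...]} expressions are standard Spring Batch late binding):

@Bean
@StepScope
public ItemReader<Long> employeeIdReader(
        @Value("#{stepExecutionContext['minValue']}") Long minValue,
        @Value("#{stepExecutionContext['maxValue']}") Long maxValue) {
    // Each worker step execution receives its own partition's
    // ExecutionContext, so every reader instance sees a different range.
    return new ListItemReader<>(taxCalculationRepository.findIdsBetween(minValue, maxValue)); // hypothetical query
}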
Example #20
Source File: StoreInitializer.java From spring-data-examples with Apache License 2.0
/**
 * Reads a file {@code starbucks.csv} from the class path and parses it into
 * {@link Store} instances about to be persisted.
 *
 * @return
 * @throws Exception
 */
public static List<Store> readStores() throws Exception {
    ClassPathResource resource = new ClassPathResource("starbucks.csv");
    Scanner scanner = new Scanner(resource.getInputStream());
    String line = scanner.nextLine();
    scanner.close();
    FlatFileItemReader<Store> itemReader = new FlatFileItemReader<Store>();
    itemReader.setResource(resource);
    // DelimitedLineTokenizer defaults to comma as its delimiter
    DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setNames(line.split(","));
    tokenizer.setStrict(false);
    DefaultLineMapper<Store> lineMapper = new DefaultLineMapper<Store>();
    lineMapper.setFieldSetMapper(fields -> {
        Point location = new Point(fields.readDouble("Longitude"), fields.readDouble("Latitude"));
        Address address = new Address(fields.readString("Street Address"), fields.readString("City"),
                fields.readString("Zip"), location);
        return new Store(UUID.randomUUID(), fields.readString("Name"), address);
    });
    lineMapper.setLineTokenizer(tokenizer);
    itemReader.setLineMapper(lineMapper);
    itemReader.setRecordSeparatorPolicy(new DefaultRecordSeparatorPolicy());
    itemReader.setLinesToSkip(1);
    itemReader.open(new ExecutionContext());
    List<Store> stores = new ArrayList<>();
    Store store = null;
    do {
        store = itemReader.read();
        if (store != null) {
            stores.add(store);
        }
    } while (store != null);
    return stores;
}
Example #21
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbSkipJob_SkipInProcess_ReaderTransactional() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipReaderTransactionalJob",
            "metrics/flatFileToDbSkipJob_SkipInProcess.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 5L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(2L).withStreamOpenCount(1L).withStreamUpdateCount(3L).withStreamCloseCount(0L)
            .withBeforeReadCount(6L).withReadCount(6L).withAfterReadCount(5L).withReadErrorCount(0L)
            .withBeforeProcessCount(5L).withProcessCount(5L).withAfterProcessCount(5L).withProcessErrorCount(1L)
            .withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(5L).withAfterChunkCount(2L)
            .withChunkErrorCount(1L).withSkipInReadCount(0L).withSkipInProcessCount(0L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbSkipReaderTransactionalJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #22
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbSkipJob_SkipInRead() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipJob", "metrics/flatFileToDbSkipJob_SkipInRead.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(3L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(9L).withReadCount(9L).withAfterReadCount(7L).withReadErrorCount(1L)
            .withBeforeProcessCount(7L).withProcessCount(7L).withAfterProcessCount(7L).withProcessErrorCount(0L)
            .withBeforeWriteCount(7L).withWriteCount(writeCount).withAfterWriteCount(7L).withWriteErrorCount(0L)
            .withAfterChunkCount(3L).withChunkErrorCount(0L).withSkipInReadCount(1L).withSkipInProcessCount(0L)
            .withSkipInWriteCount(0L).build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbSkipJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #23
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbSkipJob_SkipInWrite() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipJob", "metrics/flatFileToDbSkipJob_SkipInWrite.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(4L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(9L).withReadCount(9L).withAfterReadCount(8L).withReadErrorCount(0L)
            .withBeforeProcessCount(7L).withProcessCount(7L).withAfterProcessCount(7L).withProcessErrorCount(0L)
            .withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(7L).withWriteErrorCount(4L)
            .withAfterChunkCount(4L).withChunkErrorCount(2L).withSkipInReadCount(0L).withSkipInProcessCount(0L)
            .withSkipInWriteCount(1L).build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbSkipJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #24
Source File: JobConfiguration.java From spring-cloud-task with Apache License 2.0
@Bean
public Partitioner partitioner() {
    return new Partitioner() {
        @Override
        public Map<String, ExecutionContext> partition(int gridSize) {
            Map<String, ExecutionContext> partitions = new HashMap<>(gridSize);
            for (int i = 0; i < GRID_SIZE; i++) {
                ExecutionContext context1 = new ExecutionContext();
                context1.put("partitionNumber", i);
                partitions.put("partition" + i, context1);
            }
            return partitions;
        }
    };
}
Example #25
Source File: JobExecutionDeserializationTests.java From spring-cloud-dataflow with Apache License 2.0
@Test
public void testDeserializationOfSingleJobExecution() throws IOException {
    final ObjectMapper objectMapper = DataFlowTemplate.prepareObjectMapper(new ObjectMapper());
    final InputStream inputStream = JobExecutionDeserializationTests.class
            .getResourceAsStream("/SingleJobExecutionJson.txt");
    final String json = new String(StreamUtils.copyToByteArray(inputStream));
    final JobExecutionResource jobExecutionInfoResource = objectMapper.readValue(json, JobExecutionResource.class);
    assertNotNull(jobExecutionInfoResource);
    assertEquals(Long.valueOf(1), jobExecutionInfoResource.getJobId());
    assertEquals("ff.job", jobExecutionInfoResource.getName());
    assertEquals("COMPLETED", jobExecutionInfoResource.getJobExecution().getStatus().name());
    assertEquals(1, jobExecutionInfoResource.getJobExecution().getStepExecutions().size());
    final StepExecution stepExecution = jobExecutionInfoResource.getJobExecution().getStepExecutions().iterator().next();
    assertNotNull(stepExecution);
    final ExecutionContext stepExecutionExecutionContext = stepExecution.getExecutionContext();
    assertNotNull(stepExecutionExecutionContext);
    assertEquals(2, stepExecutionExecutionContext.size());
}
Example #26
Source File: WebConfiguration.java From spring-cloud-dataflow with Apache License 2.0
@Bean
public Jackson2ObjectMapperBuilderCustomizer dataflowObjectMapperBuilderCustomizer() {
    return (builder) -> {
        builder.dateFormat(new ISO8601DateFormatWithMilliSeconds(TimeZone.getDefault(), Locale.getDefault(), true));
        // apply SCDF Batch Mixins to
        // ignore the JobExecution in StepExecution to prevent infinite loop.
        // https://github.com/spring-projects/spring-hateoas/issues/333
        builder.mixIn(StepExecution.class, StepExecutionJacksonMixIn.class);
        builder.mixIn(ExecutionContext.class, ExecutionContextJacksonMixIn.class);
        builder.modules(new JavaTimeModule(), new Jdk8Module());
    };
}
Example #27
Source File: CalculateTaxProcessor.java From batchers with Apache License 2.0
private String getPartitionIfExists() {
    ExecutionContext executionContext = stepExecution.getExecutionContext();
    if (executionContext != null) {
        Object partitionObject = executionContext.get("partition");
        if (partitionObject != null) {
            Long partition = (Long) partitionObject;
            if (partition != null) {
                return " Partition " + partition;
            }
        }
    }
    return "";
}
Example #28
Source File: BasePageItemReader.java From seed with Apache License 2.0
@Override
protected void doReadPage() {
    if (results == null) {
        results = new CopyOnWriteArrayList<T>();
    } else {
        results.clear();
    }
    JobParameters params = stepExecution.getJobParameters();
    ExecutionContext stepContext = stepExecution.getExecutionContext();
    results.addAll(this.doRead(params, stepContext));
}
Example #29
Source File: BasePageItemReader.java From seed with Apache License 2.0
@Override
protected final void doOpen() throws Exception {
    log.debug("Start processing records");
    super.doOpen();
    // JobParameters params = JobParametersThreadLocal.get();
    JobParameters params = stepExecution.getJobParameters();
    ExecutionContext stepContext = stepExecution.getExecutionContext();
    doOpen(params, stepContext);
}
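For context, a concrete subclass would implement the two hooks that receive the job parameters and step ExecutionContext. A minimal sketch with illustrative names; the hook signatures are inferred from the calls in the two methods above, not taken from the seed project:

public class OrderPageItemReader extends BasePageItemReader<Order> {

    @Override
    protected void doOpen(JobParameters params, ExecutionContext stepContext) {
        // Resume from a previously saved offset, if any.
        long offset = stepContext.getLong("order.offset", 0L);
        // ... position the underlying query at 'offset'
    }

    @Override
    protected List<Order> doRead(JobParameters params, ExecutionContext stepContext) {
        List<Order> page = fetchNextPage(); // hypothetical helper
        // Record progress so a restart can resume from here.
        stepContext.putLong("order.offset", stepContext.getLong("order.offset", 0L) + page.size());
        return page;
    }
}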
Example #30
Source File: JobStepExecutionControllerTests.java From spring-cloud-dataflow with Apache License 2.0
private void createStepExecution(String jobName, String... stepNames) {
    JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
    JobExecution jobExecution = jobRepository.createJobExecution(instance, new JobParameters(), null);
    for (String stepName : stepNames) {
        StepExecution stepExecution = new StepExecution(stepName, jobExecution, 1L);
        stepExecution.setId(null);
        ExecutionContext context = new ExecutionContext();
        context.put("stepval", stepName);
        stepExecution.setExecutionContext(context);
        jobRepository.add(stepExecution);
    }
    TaskExecution taskExecution = dao.createTaskExecution(jobName, new Date(), new ArrayList<String>(), null);
    taskBatchDao.saveRelationship(taskExecution, jobExecution);
}