org.springframework.batch.core.BatchStatus Java Examples
The following examples show how to use org.springframework.batch.core.BatchStatus.
Each example is taken from an open-source project; the source file and license are noted above the code.
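Most of the examples below follow the same pattern: a JobExecutionListener (or a test) reads the BatchStatus from a JobExecution and compares it against an enum constant such as BatchStatus.COMPLETED, BatchStatus.STOPPED, or BatchStatus.FAILED. As a quick orientation, here is a minimal, hypothetical listener sketch in that style; the class and logger names are placeholders and are not taken from any of the projects below.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.listener.JobExecutionListenerSupport;

// Hypothetical sketch: react to the final BatchStatus of a job run.
public class StatusLoggingListener extends JobExecutionListenerSupport {

    private static final Logger log = LoggerFactory.getLogger(StatusLoggingListener.class);

    @Override
    public void afterJob(JobExecution jobExecution) {
        BatchStatus status = jobExecution.getStatus();
        if (status == BatchStatus.COMPLETED) {
            log.info("Job finished successfully");
        } else {
            // e.g. FAILED, STOPPED or ABANDONED
            log.warn("Job ended with status " + status.name());
        }
    }
}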
Example #1
Source File: AlarmJobTest.java From pinpoint with Apache License 2.0
public static void main(String[] args) throws Exception {
    GenericXmlApplicationContext applicationContext = new GenericXmlApplicationContext("/applicationContext-test.xml");
    JobLauncherTestUtils testLauncher = applicationContext.getBean(JobLauncherTestUtils.class);
    JobExecution jobExecution = testLauncher.launchJob(getParameters());
    BatchStatus status = jobExecution.getStatus();
    assertEquals(BatchStatus.COMPLETED, status);
    applicationContext.close();
}
Example #2
Source File: JobCompletionPayRollListener.java From Software-Architecture-with-Spring-5.0 with MIT License
@Override public void afterJob(JobExecution jobExecution) { if (jobExecution.getStatus() == BatchStatus.COMPLETED) { log.info(">>>>> PAY ROLL JOB FINISHED! "); jdbcTemplate.query("SELECT PERSON_IDENTIFICATION, CURRENCY, TX_AMMOUNT, ACCOUNT_TYPE, ACCOUNT_ID, TX_DESCRIPTION, FIRST_LAST_NAME FROM PAYROLL", (rs, row) -> new PayrollTo( rs.getInt(1), rs.getString(2), rs.getDouble(3), rs.getString(4), rs.getString(5), rs.getString(6), rs.getString(7)) ).forEach(payroll -> log.info("Found <" + payroll + "> in the database.")); } }
Example #3
Source File: JobCompleteNotificationListener.java From CogStack-Pipeline with Apache License 2.0
@Override
public synchronized void afterJob(JobExecution jobExecution) {
    if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
        if (columnRangePartitioner.isFirstRun()) {
            columnRangePartitioner.setFirstRun(false);
        }
        log.info("!!! JOB FINISHED! promoting last good record date to JobExecutionContext");
        jobExecution.getExecutionContext().put("last_successful_timestamp_from_this_job", timeOfNextJob);
        jobRepository.updateExecutionContext(jobExecution);

        // Workaround to close ElasticSearch REST properly (the job was stuck before this change)
        if (esRestService != null) {
            try {
                esRestService.destroy();
            } catch (IOException e) {
                log.warn("IOException when destroying ElasticSearch REST service");
            }
            log.debug("Shutting down ElasticSearch REST service");
            esServiceAlreadyClosed = true;
        }
    }
}
Example #4
Source File: DefaultTaskJobServiceTests.java From spring-cloud-dataflow with Apache License 2.0
private void createSampleJob(JobRepository jobRepository, TaskBatchDao taskBatchDao,
        TaskExecutionDao taskExecutionDao, String jobName, int jobExecutionCount, BatchStatus status) {
    JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
    TaskExecution taskExecution = taskExecutionDao.createTaskExecution(jobName, new Date(), new ArrayList<>(), null);
    JobExecution jobExecution;
    for (int i = 0; i < jobExecutionCount; i++) {
        jobExecution = jobRepository.createJobExecution(instance, this.jobParameters, null);
        StepExecution stepExecution = new StepExecution("foo", jobExecution, 1L);
        stepExecution.setId(null);
        jobRepository.add(stepExecution);
        taskBatchDao.saveRelationship(taskExecution, jobExecution);
        jobExecution.setStatus(status);
        jobExecution.setStartTime(new Date());
        jobRepository.update(jobExecution);
    }
}
Example #5
Source File: NotificationListener.java From batch-processing-large-datasets-spring with MIT License
@Override
public void afterJob(final JobExecution jobExecution) {
    if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
        LOGGER.info("!!! JOB FINISHED! Time to verify the results");
        jdbcTemplate.query(
                "SELECT volt, time FROM voltage",
                (rs, row) -> new Voltage(rs.getBigDecimal(1), rs.getDouble(2))
        ).forEach(voltage -> LOGGER.info("Found <" + voltage + "> in the database."));
    }
}
Example #6
Source File: JdbcBatchItemWriterJobListener.java From spring-cloud with Apache License 2.0
@Override public void afterJob(JobExecution jobExecution) { if (jobExecution.getStatus() == BatchStatus.COMPLETED) { log.info("JdbcBatchItemWriter job finished"); } else { log.info("JdbcBatchItemWriter job " + jobExecution.getStatus().name()); } }
Example #7
Source File: SettleJobListeners.java From seed with Apache License 2.0
@Override
public void afterJob(JobExecution jobExecution) {
    if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
        // Log message (Chinese): "Batch job -->[jobId-jobName]--> completed, TotalDuration[...]ms"
        LogUtil.getLogger().info("批量任务Job-->[{}-{}]-->处理完成,TotalDuration[{}]ms",
                jobExecution.getJobId(),
                jobExecution.getJobInstance().getJobName(),
                SystemClockUtil.INSTANCE.now() - jobExecution.getStartTime().getTime());
        LogUtil.getLogger().info("=======================================================================");
    }
}
Example #8
Source File: StopJobIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test public void testRunJob() throws InterruptedException { Long executionId = restTemplate.postForObject("http://localhost:" + port + "/batch/operations/jobs/delayJob", "", Long.class); Thread.sleep(500); String runningExecutions = restTemplate .getForObject("http://localhost:" + port + "/batch/monitoring/jobs/runningexecutions", String.class); assertThat(runningExecutions.contains(executionId.toString()), is(true)); String runningExecutionsForDelayJob = restTemplate.getForObject( "http://localhost:" + port + "/batch/monitoring/jobs/runningexecutions/delayJob", String.class); assertThat(runningExecutionsForDelayJob.contains(executionId.toString()), is(true)); restTemplate.delete("http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}", executionId); Thread.sleep(1500); JobExecution jobExecution = jobExplorer.getJobExecution(executionId); assertThat(jobExecution.getStatus(), is(BatchStatus.STOPPED)); String jobExecutionString = restTemplate.getForObject( "http://localhost:" + port + "/batch/monitoring/jobs/executions/{executionId}", String.class, executionId); assertThat(jobExecutionString.contains("STOPPED"), is(true)); }
Example #9
Source File: JavaConfigIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test public void testRunJob() throws InterruptedException { Long executionId = restTemplate.postForObject("http://localhost:" + port + "/batch/operations/jobs/simpleJob", "", Long.class); while (!restTemplate .getForObject("http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}", String.class, executionId) .equals("COMPLETED")) { Thread.sleep(1000); } String log = restTemplate.getForObject( "http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}/log", String.class, executionId); assertThat(log.length() > 20, is(true)); JobExecution jobExecution = jobExplorer.getJobExecution(executionId); assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED)); String jobExecutionString = restTemplate.getForObject( "http://localhost:" + port + "/batch/monitoring/jobs/executions/{executionId}", String.class, executionId); assertThat(jobExecutionString.contains("COMPLETED"), is(true)); }
Example #10
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbSkipJob_SkipInWrite() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipJob", "metrics/flatFileToDbSkipJob_SkipInWrite.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(4L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(9L).withReadCount(9L).withAfterReadCount(8L).withReadErrorCount(0L)
            .withBeforeProcessCount(7L).withProcessCount(7L).withAfterProcessCount(7L).withProcessErrorCount(0L)
            .withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(7L).withWriteErrorCount(4L)
            .withAfterChunkCount(4L).withChunkErrorCount(2L).withSkipInReadCount(0L).withSkipInProcessCount(0L)
            .withSkipInWriteCount(1L).build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbSkipJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #11
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbNoSkipJob_Success() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbNoSkipJob", "metrics/flatFileToDbNoSkipJob_Success.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 5L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(2L).withStreamOpenCount(1L).withStreamUpdateCount(3L).withStreamCloseCount(0L)
            .withBeforeReadCount(6L).withReadCount(6L).withAfterReadCount(5L).withReadErrorCount(0L)
            .withBeforeProcessCount(5L).withProcessCount(5L).withAfterProcessCount(5L).withProcessErrorCount(0L)
            .withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(5L).withAfterChunkCount(2L)
            .withChunkErrorCount(0L).withSkipInReadCount(0L).withSkipInProcessCount(0L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbNoSkipJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    // TODO assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #12
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbSkipJob_SkipInRead() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipJob", "metrics/flatFileToDbSkipJob_SkipInRead.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(3L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(9L).withReadCount(9L).withAfterReadCount(7L).withReadErrorCount(1L)
            .withBeforeProcessCount(7L).withProcessCount(7L).withAfterProcessCount(7L).withProcessErrorCount(0L)
            .withBeforeWriteCount(7L).withWriteCount(writeCount).withAfterWriteCount(7L).withWriteErrorCount(0L)
            .withAfterChunkCount(3L).withChunkErrorCount(0L).withSkipInReadCount(1L).withSkipInProcessCount(0L)
            .withSkipInWriteCount(0L).build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbSkipJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #13
Source File: XmlIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test public void testRunJob() throws InterruptedException { Long executionId = restTemplate .postForObject("http://localhost:" + port + "/batch/operations/jobs/flatFile2JobXml", "", Long.class); while (!restTemplate .getForObject("http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}", String.class, executionId) .equals("COMPLETED")) { Thread.sleep(1000); } String log = restTemplate.getForObject( "http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}/log", String.class, executionId); assertThat(log.length() > 20, is(true)); JobExecution jobExecution = jobExplorer.getJobExecution(executionId); assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED)); String jobExecutionString = restTemplate.getForObject( "http://localhost:" + port + "/batch/monitoring/jobs/executions/{executionId}", String.class, executionId); assertThat(jobExecutionString.contains("COMPLETED"), is(true)); }
Example #14
Source File: JdbcSearchableStepExecutionDao.java From spring-cloud-dataflow with Apache License 2.0
public StepExecution mapRow(ResultSet rs, int rowNum) throws SQLException {
    StepExecution stepExecution = new StepExecution(rs.getString(2), null);
    stepExecution.setId(rs.getLong(1));
    stepExecution.setStartTime(rs.getTimestamp(3));
    stepExecution.setEndTime(rs.getTimestamp(4));
    stepExecution.setStatus(BatchStatus.valueOf(rs.getString(5)));
    stepExecution.setCommitCount(rs.getInt(6));
    stepExecution.setReadCount(rs.getInt(7));
    stepExecution.setFilterCount(rs.getInt(8));
    stepExecution.setWriteCount(rs.getInt(9));
    stepExecution.setExitStatus(new ExitStatus(rs.getString(10), rs.getString(11)));
    stepExecution.setReadSkipCount(rs.getInt(12));
    stepExecution.setWriteSkipCount(rs.getInt(13));
    stepExecution.setProcessSkipCount(rs.getInt(14));
    stepExecution.setRollbackCount(rs.getInt(15));
    stepExecution.setLastUpdated(rs.getTimestamp(16));
    stepExecution.setVersion(rs.getInt(17));
    return stepExecution;
}
Example #15
Source File: JobExecutionUtils.java From spring-cloud-dataflow with Apache License 2.0
static MockMvc createBaseJobExecutionMockMvc(JobRepository jobRepository, TaskBatchDao taskBatchDao,
        TaskExecutionDao taskExecutionDao, WebApplicationContext wac, RequestMappingHandlerAdapter adapter) {
    MockMvc mockMvc = MockMvcBuilders.webAppContextSetup(wac)
            .defaultRequest(get("/").accept(MediaType.APPLICATION_JSON)).build();
    JobExecutionUtils.createSampleJob(jobRepository, taskBatchDao, taskExecutionDao, JOB_NAME_ORIG, 1);
    JobExecutionUtils.createSampleJob(jobRepository, taskBatchDao, taskExecutionDao, JOB_NAME_FOO, 1);
    JobExecutionUtils.createSampleJob(jobRepository, taskBatchDao, taskExecutionDao, JOB_NAME_FOOBAR, 2);
    JobExecutionUtils.createSampleJob(jobRepository, taskBatchDao, taskExecutionDao, JOB_NAME_COMPLETED, 1, BatchStatus.COMPLETED);
    JobExecutionUtils.createSampleJob(jobRepository, taskBatchDao, taskExecutionDao, JOB_NAME_STARTED, 1, BatchStatus.STARTED);
    JobExecutionUtils.createSampleJob(jobRepository, taskBatchDao, taskExecutionDao, JOB_NAME_STOPPED, 1, BatchStatus.STOPPED);
    JobExecutionUtils.createSampleJob(jobRepository, taskBatchDao, taskExecutionDao, JOB_NAME_FAILED1, 1, BatchStatus.FAILED);
    JobExecutionUtils.createSampleJob(jobRepository, taskBatchDao, taskExecutionDao, JOB_NAME_FAILED2, 1, BatchStatus.FAILED);
    for (HttpMessageConverter<?> converter : adapter.getMessageConverters()) {
        if (converter instanceof MappingJackson2HttpMessageConverter) {
            final MappingJackson2HttpMessageConverter jacksonConverter = (MappingJackson2HttpMessageConverter) converter;
            jacksonConverter.getObjectMapper().addMixIn(StepExecution.class, StepExecutionJacksonMixIn.class);
            jacksonConverter.getObjectMapper().addMixIn(ExecutionContext.class, ExecutionContextJacksonMixIn.class);
            jacksonConverter.getObjectMapper().setDateFormat(new ISO8601DateFormatWithMilliSeconds());
        }
    }
    return mockMvc;
}
Example #16
Source File: JobExecutionUtils.java From spring-cloud-dataflow with Apache License 2.0
private static void createSampleJob(JobRepository jobRepository, TaskBatchDao taskBatchDao,
        TaskExecutionDao taskExecutionDao, String jobName, int jobExecutionCount, BatchStatus status) {
    JobInstance instance = jobRepository.createJobInstance(jobName, new JobParameters());
    TaskExecution taskExecution = taskExecutionDao.createTaskExecution(jobName, new Date(), new ArrayList<>(), null);
    JobExecution jobExecution;
    for (int i = 0; i < jobExecutionCount; i++) {
        jobExecution = jobRepository.createJobExecution(instance, new JobParameters(), null);
        StepExecution stepExecution = new StepExecution("foo", jobExecution, 1L);
        stepExecution.setId(null);
        jobRepository.add(stepExecution);
        taskBatchDao.saveRelationship(taskExecution, jobExecution);
        jobExecution.setStatus(status);
        jobExecution.setStartTime(new Date());
        if (BatchStatus.STOPPED.equals(status)) {
            jobExecution.setEndTime(new Date());
        }
        jobRepository.update(jobExecution);
    }
}
Example #17
Source File: BatchMetricsFlatFileToDbIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void testRunFlatFileToDbSkipJob_SkipInProcess_ReaderTransactional() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipReaderTransactionalJob",
            "metrics/flatFileToDbSkipJob_SkipInProcess.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    long writeCount = 5L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(2L).withStreamOpenCount(1L).withStreamUpdateCount(3L).withStreamCloseCount(0L)
            .withBeforeReadCount(6L).withReadCount(6L).withAfterReadCount(5L).withReadErrorCount(0L)
            .withBeforeProcessCount(5L).withProcessCount(5L).withAfterProcessCount(5L).withProcessErrorCount(1L)
            .withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(5L).withAfterChunkCount(2L)
            .withChunkErrorCount(1L).withSkipInReadCount(0L).withSkipInProcessCount(0L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one is correct, all will be in the metricReader, so I check just one
    Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)//
            .tag("context", "flatFileToDbSkipReaderTransactionalJob.step")//
            .tag("name", MetricNames.PROCESS_COUNT.getName())//
            .gauge();
    assertThat((Double) gauge.value(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
Example #18
Source File: JobExecutionController.java From spring-cloud-dataflow with Apache License 2.0
/**
 * Retrieve all task job executions with the task name specified
 *
 * @param jobName name of the job. SQL server specific wildcards are enabled (eg.: myJob%, m_Job, ...)
 * @param pageable page-able collection of {@code TaskJobExecution}s.
 * @param assembler for the {@link TaskJobExecution}s
 * @return list task/job executions with the specified jobName.
 * @throws NoSuchJobException if the job with the given name does not exist.
 */
@RequestMapping(value = "", method = RequestMethod.GET, produces = "application/json")
@ResponseStatus(HttpStatus.OK)
public PagedModel<JobExecutionResource> retrieveJobsByParameters(
        @RequestParam(value = "name", required = false) String jobName,
        @RequestParam(value = "status", required = false) BatchStatus status, Pageable pageable,
        PagedResourcesAssembler<TaskJobExecution> assembler) throws NoSuchJobException, NoSuchJobExecutionException {
    List<TaskJobExecution> jobExecutions;
    Page<TaskJobExecution> page;
    if (jobName == null && status == null) {
        jobExecutions = taskJobService.listJobExecutions(pageable);
        page = new PageImpl<>(jobExecutions, pageable, taskJobService.countJobExecutions());
    } else {
        jobExecutions = taskJobService.listJobExecutionsForJob(pageable, jobName, status);
        page = new PageImpl<>(jobExecutions, pageable, taskJobService.countJobExecutionsForJob(jobName, status));
    }
    return assembler.toModel(page, jobAssembler);
}
Example #19
Source File: JobStepExecutionsDocumentation.java From spring-cloud-dataflow with Apache License 2.0
@Before
public void setup() throws Exception {
    if (!initialized) {
        registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
        initialize();
        createJobExecution(JOB_NAME, BatchStatus.STARTED);
        documentation.dontDocument(() -> this.mockMvc.perform(
                post("/tasks/definitions")
                        .param("name", "DOCJOB_1")
                        .param("definition", "timestamp --format='YYYY MM DD'"))
                .andExpect(status().isOk()));
        initialized = true;
    }
}
Example #20
Source File: JpaPagingBatchJobListener.java From spring-cloud with Apache License 2.0
@Override
public void afterJob(JobExecution jobExecution) {
    if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
        log.info("JpaPagingBatch job finished");
    } else {
        log.info("JpaPagingBatch job " + jobExecution.getStatus().name());
    }
}
Example #21
Source File: BatchMetricsAspectIntegrationTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test public void testRunJob() throws InterruptedException { Long executionId = restTemplate.postForObject( "http://localhost:" + port + "/batch/operations/jobs/simpleBatchMetricsJob", "", Long.class); while (!restTemplate .getForObject("http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}", String.class, executionId) .equals("COMPLETED")) { Thread.sleep(1000); } String log = restTemplate.getForObject( "http://localhost:" + port + "/batch/operations/jobs/executions/{executionId}/log", String.class, executionId); assertThat(log.length() > 20, is(true)); JobExecution jobExecution = jobExplorer.getJobExecution(executionId); assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED)); String jobExecutionString = restTemplate.getForObject( "http://localhost:" + port + "/batch/monitoring/jobs/executions/{executionId}", String.class, executionId); assertThat(jobExecutionString.contains("COMPLETED"), is(true)); Gauge gauge = meterRegistry.find(MetricsListener.METRIC_NAME)// .tag("context", "simpleBatchMetricsJob.simpleBatchMetricsStep")// .tag("name", "processor")// .gauge(); assertThat(gauge, is(notNullValue())); assertThat((Double) gauge.value(), is(notNullValue())); assertThat((Double) gauge.value(), is(7.0)); Timer timer = meterRegistry.find(MetricsListener.METRIC_NAME)// .tag("context", "simpleBatchMetricsJob.simpleBatchMetricsStep")// .tag("method", "DummyItemReader.read")// .timer(); assertThat(timer, is(notNullValue())); assertThat(timer.totalTime(TimeUnit.SECONDS), greaterThan(0d)); }
Example #22
Source File: DeployerStepExecutionHandlerTests.java From spring-cloud-task with Apache License 2.0
@Test
public void testRuntimeException() throws Exception {
    StepExecution workerStep = new StepExecution("workerStep", new JobExecution(1L), 2L);

    when(this.environment.containsProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_JOB_EXECUTION_ID))
            .thenReturn(true);
    when(this.environment.containsProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_EXECUTION_ID))
            .thenReturn(true);
    when(this.environment.containsProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_NAME))
            .thenReturn(true);
    when(this.environment.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_NAME))
            .thenReturn("workerStep");
    when(this.beanFactory.getBeanNamesForType(Step.class))
            .thenReturn(new String[] { "workerStep", "foo", "bar" });
    when(this.environment.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_EXECUTION_ID))
            .thenReturn("2");
    when(this.environment.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_JOB_EXECUTION_ID))
            .thenReturn("1");
    when(this.jobExplorer.getStepExecution(1L, 2L)).thenReturn(workerStep);
    when(this.environment.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_NAME))
            .thenReturn("workerStep");
    when(this.beanFactory.getBean("workerStep", Step.class)).thenReturn(this.step);
    doThrow(new RuntimeException("expected")).when(this.step).execute(workerStep);

    this.handler.run();

    verify(this.jobRepository).update(this.stepExecutionArgumentCaptor.capture());
    assertThat(this.stepExecutionArgumentCaptor.getValue().getStatus())
            .isEqualTo(BatchStatus.FAILED);
}
Example #23
Source File: HelloWorldBatchTest.java From blog with MIT License
@Test
public void test_That_batch_execution_Should_success_When_launch_with_valid_args() throws Exception {
    // arrange
    JobParameters params = new JobParametersBuilder().addLong("execution.times", 10L).toJobParameters();

    // act
    JobExecution jobExecution = jobLauncherTestUtils.launchJob(params);

    // assert
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());
    assertEquals(1, jobExecution.getStepExecutions().size());
    StepExecution step = jobExecution.getStepExecutions().iterator().next();
    assertEquals(11, step.getCommitCount());
    assertEquals(10, step.getWriteCount());
}
Example #24
Source File: DeployerStepExecutionHandlerTests.java From spring-cloud-task with Apache License 2.0
@Test
public void testJobInterruptedException() throws Exception {
    StepExecution workerStep = new StepExecution("workerStep", new JobExecution(1L), 2L);

    when(this.environment.containsProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_JOB_EXECUTION_ID))
            .thenReturn(true);
    when(this.environment.containsProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_EXECUTION_ID))
            .thenReturn(true);
    when(this.environment.containsProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_NAME))
            .thenReturn(true);
    when(this.environment.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_NAME))
            .thenReturn("workerStep");
    when(this.beanFactory.getBeanNamesForType(Step.class))
            .thenReturn(new String[] { "workerStep", "foo", "bar" });
    when(this.environment.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_EXECUTION_ID))
            .thenReturn("2");
    when(this.environment.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_JOB_EXECUTION_ID))
            .thenReturn("1");
    when(this.jobExplorer.getStepExecution(1L, 2L)).thenReturn(workerStep);
    when(this.environment.getProperty(DeployerPartitionHandler.SPRING_CLOUD_TASK_STEP_NAME))
            .thenReturn("workerStep");
    when(this.beanFactory.getBean("workerStep", Step.class)).thenReturn(this.step);
    doThrow(new JobInterruptedException("expected")).when(this.step).execute(workerStep);

    this.handler.run();

    verify(this.jobRepository).update(this.stepExecutionArgumentCaptor.capture());
    assertThat(this.stepExecutionArgumentCaptor.getValue().getStatus())
            .isEqualTo(BatchStatus.STOPPED);
}
Example #25
Source File: StepExecutionEventTests.java From spring-cloud-task with Apache License 2.0
@Test
public void testBatchStatus() {
    StepExecutionEvent stepExecutionEvent = new StepExecutionEvent(getBasicStepExecution());
    assertThat(stepExecutionEvent.getStatus()).isEqualTo(BatchStatus.STARTING);
    stepExecutionEvent.setStatus(BatchStatus.ABANDONED);
    assertThat(stepExecutionEvent.getStatus()).isEqualTo(BatchStatus.ABANDONED);
}
Example #26
Source File: FlatFileJobTest.java From spring-boot-starter-batch-web with Apache License 2.0
@Test
public void runXmlJob() throws Exception {
    File file = new File("target/out-xmlconfig.txt");
    file.delete();
    jobOperator.start("flatFileJobXml", "");
    while (jobRepository.getLastJobExecution("flatFileJobXml", new JobParameters()).getStatus().isRunning()) {
        Thread.sleep(100);
    }
    assertEquals(BatchStatus.COMPLETED,
            jobRepository.getLastJobExecution("flatFileJobXml", new JobParameters()).getStatus());
    assertEquals(10, FileUtils.readLines(file, StandardCharsets.UTF_8).size());
}
Example #27
Source File: JobExecutionEventTests.java From spring-cloud-task with Apache License 2.0
@Test
public void testBatchStatus() {
    JobExecutionEvent jobExecutionEvent = new JobExecutionEvent();
    assertThat(jobExecutionEvent.getStatus()).isEqualTo(BatchStatus.STARTING);
    jobExecutionEvent.setStatus(BatchStatus.ABANDONED);
    assertThat(jobExecutionEvent.getStatus()).isEqualTo(BatchStatus.ABANDONED);
}
Example #28
Source File: JobExecutionEventTests.java From spring-cloud-task with Apache License 2.0
@Test
public void testUpgradeBatchStatus() {
    JobExecutionEvent jobExecutionEvent = new JobExecutionEvent();
    assertThat(jobExecutionEvent.getStatus()).isEqualTo(BatchStatus.STARTING);
    jobExecutionEvent.upgradeStatus(BatchStatus.FAILED);
    assertThat(jobExecutionEvent.getStatus()).isEqualTo(BatchStatus.FAILED);
    jobExecutionEvent.upgradeStatus(BatchStatus.COMPLETED);
    assertThat(jobExecutionEvent.getStatus()).isEqualTo(BatchStatus.FAILED);
}
Example #29
Source File: ChangeStatusOnFailedStepsJobExecListenerTest.java From batchers with Apache License 2.0
@Test
public void testAfterJobJobExecutionHasBatchStatusFailedWhenWeHaveStepsWithStatusNotCompleted() throws Exception {
    JobExecution jobExecution = new JobExecution(1L);
    StepExecution failedStepExecution = createStepExecution(jobExecution, FAILED);
    StepExecution completedStepExecution = createStepExecution(jobExecution, COMPLETED);
    StepExecution stoppedStepExecution = createStepExecution(jobExecution, STOPPED);
    jobExecution.addStepExecutions(asList(failedStepExecution, completedStepExecution, stoppedStepExecution));

    changeStatusOnFailedStepsJobExecListener.afterJob(jobExecution);

    assertThat(jobExecution.getStatus()).isEqualTo(BatchStatus.FAILED);
}
Example #30
Source File: ChangeStatusOnFailedStepsJobExecListenerTest.java From batchers with Apache License 2.0
@Test
public void testAfterJobJobExecutionHasBatchStatusUnchangedWhenAllStepsCompleted() throws Exception {
    JobExecution jobExecution = new JobExecution(1L);
    StepExecution completedStepExecution = createStepExecution(jobExecution, COMPLETED);
    StepExecution anotherStepExecution = createStepExecution(jobExecution, COMPLETED);
    jobExecution.addStepExecutions(asList(completedStepExecution, anotherStepExecution));
    BatchStatus jobExecutionStatus = jobExecution.getStatus();

    changeStatusOnFailedStepsJobExecListener.afterJob(jobExecution);

    assertThat(jobExecution.getStatus()).isEqualTo(jobExecutionStatus);
}