org.springframework.batch.item.ItemWriter Java Examples
The following examples show how to use org.springframework.batch.item.ItemWriter. Each example notes its source file, originating project, and license above the code.
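ItemWriter is the output side of Spring Batch's chunk-oriented processing: the framework collects the items of each chunk and passes them to the writer in a single call. As a quick orientation before the project examples, here is a minimal sketch of a custom writer. The LoggingItemWriter name is illustrative and not taken from any of the projects below; the List-based write signature matches the Spring Batch 4.x API used throughout these examples (Spring Batch 5 changed the parameter type to Chunk).

import java.util.List;

import org.springframework.batch.item.ItemWriter;

// Minimal illustrative writer: logs every item of the current chunk.
public class LoggingItemWriter implements ItemWriter<String> {

    @Override
    public void write(List<? extends String> items) throws Exception {
        // The framework hands over all items of one chunk at once,
        // so a writer can batch its output (JDBC batch insert, bulk index, etc.).
        for (String item : items) {
            System.out.println("writing item: " + item);
        }
    }
}

Most of the examples below either implement the interface inline (as a lambda or anonymous class) or configure a framework-provided implementation such as JdbcBatchItemWriter, StaxEventItemWriter, or CompositeItemWriter.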
Example #1
Source File: BatchConfiguration.java From Software-Architecture-with-Spring-5.0 with MIT License
@Bean(name = STEP_PROCESS_CSV_FILE)
public Step readCsvFileAndPopulateDbTable(
        StepBuilderFactory stepBuilderFactory,
        PlatformTransactionManager platformTransactionManager,
        @Qualifier(DATA_READER) ItemReader<JavaChampion> itemReader,
        @Qualifier(DATA_PROCESSOR) ItemProcessor<JavaChampion, JavaChampion> itemProcessor,
        @Qualifier(DATA_WRITER) ItemWriter<JavaChampion> itemWriter) {
    StepBuilder builder = stepBuilderFactory.get(STEP_PROCESS_CSV_FILE);
    return builder.<JavaChampion, JavaChampion>chunk(10)
            .reader(itemReader)
            .processor(itemProcessor)
            .writer(itemWriter)
            .transactionManager(platformTransactionManager)
            .build();
}
Example #2
Source File: BudgetVtollConfig.java From SpringBootBucket with MIT License
/**
 * Step definition containing the ItemReader, ItemProcessor and ItemWriter.
 *
 * @param stepBuilderFactory
 * @param reader
 * @param writer
 * @param processor
 * @return
 */
@Bean(name = "vtollStep1")
public Step vtollStep1(StepBuilderFactory stepBuilderFactory,
                       @Qualifier("vtollReader") ItemReader<BudgetVtoll> reader,
                       @Qualifier("vtollWriter") ItemWriter<BudgetVtoll> writer,
                       @Qualifier("vtollProcessor") ItemProcessor<BudgetVtoll, BudgetVtoll> processor) {
    return stepBuilderFactory
            .get("vtollStep1")
            .<BudgetVtoll, BudgetVtoll>chunk(5000)  // commit 5000 records per chunk
            .reader(reader)                         // bind the reader to the step
            .processor(processor)                   // bind the processor to the step
            .writer(writer)                         // bind the writer to the step
            .faultTolerant()
            .retry(Exception.class)                 // retry on exception
            .noRetry(ParseException.class)
            .retryLimit(1)                          // retry each record once
            .skip(Exception.class)
            .skipLimit(200)                         // allow up to 200 skipped exceptions in total
            // .taskExecutor(new SimpleAsyncTaskExecutor()) // run the step concurrently; in general a single job is better run serially
            // .throttleLimit(10)                           // 10 concurrent tasks (default is 4)
            .build();
}
Example #3
Source File: CantonConfig.java From SpringBootBucket with MIT License
/**
 * Step definition containing the ItemReader, ItemProcessor and ItemWriter.
 *
 * @param stepBuilderFactory
 * @param reader
 * @param writer
 * @param processor
 * @return
 */
@Bean(name = "cantonStep1")
public Step cantonStep1(StepBuilderFactory stepBuilderFactory,
                        @Qualifier("cantonReader") ItemReader<Canton> reader,
                        @Qualifier("cantonWriter") ItemWriter<Canton> writer,
                        @Qualifier("cantonProcessor") ItemProcessor<Canton, Canton> processor) {
    return stepBuilderFactory
            .get("cantonStep1")
            .<Canton, Canton>chunk(5000)            // commit 5000 records per chunk
            .reader(reader)                         // bind the reader to the step
            .processor(processor)                   // bind the processor to the step
            .writer(writer)                         // bind the writer to the step
            .faultTolerant()
            .retry(Exception.class)                 // retry on exception
            .noRetry(ParseException.class)
            .retryLimit(1)                          // retry each record once
            .skip(Exception.class)
            .skipLimit(200)                         // allow up to 200 skipped exceptions in total
            // .taskExecutor(new SimpleAsyncTaskExecutor()) // run the step concurrently; in general a single job is better run serially
            // .throttleLimit(10)                           // 10 concurrent tasks (default is 4)
            .build();
}
Example #4
Source File: LogConfig.java From SpringBootBucket with MIT License
/**
 * Step definition containing the ItemReader, ItemProcessor and ItemWriter.
 *
 * @param stepBuilderFactory
 * @param reader
 * @param writer
 * @param processor
 * @return
 */
@Bean(name = "logStep1")
public Step logStep1(StepBuilderFactory stepBuilderFactory,
                     @Qualifier("logReader") ItemReader<Log> reader,
                     @Qualifier("logWriter") ItemWriter<Log> writer,
                     @Qualifier("logProcessor") ItemProcessor<Log, Log> processor) {
    return stepBuilderFactory
            .get("logStep1")
            .<Log, Log>chunk(5000)                  // commit 5000 records per chunk
            .reader(reader)                         // bind the reader to the step
            .processor(processor)                   // bind the processor to the step
            .writer(writer)                         // bind the writer to the step
            .faultTolerant()
            .retry(Exception.class)                 // retry on exception
            .noRetry(ParseException.class)
            .retryLimit(1)                          // retry each record once
            .skip(Exception.class)
            .skipLimit(200)                         // allow up to 200 skipped exceptions in total
            // .taskExecutor(new SimpleAsyncTaskExecutor()) // run the step concurrently; in general a single job is better run serially
            // .throttleLimit(10)                           // 10 concurrent tasks (default is 4)
            .build();
}
Example #5
Source File: AppConfig.java From SpringBootBucket with MIT License
/**
 * Step definition containing the ItemReader, ItemProcessor and ItemWriter.
 *
 * @param stepBuilderFactory
 * @param reader
 * @param writer
 * @param processor
 * @return
 */
@Bean(name = "zappStep1")
public Step zappStep1(StepBuilderFactory stepBuilderFactory,
                      @Qualifier("appReader") ItemReader<App> reader,
                      @Qualifier("appWriter") ItemWriter<App> writer,
                      @Qualifier("appProcessor") ItemProcessor<App, App> processor) {
    return stepBuilderFactory
            .get("zappStep1")
            .<App, App>chunk(5000)                  // commit 5000 records per chunk
            .reader(reader)                         // bind the reader to the step
            .processor(processor)                   // bind the processor to the step
            .writer(writer)                         // bind the writer to the step
            .faultTolerant()
            .retry(Exception.class)                 // retry on exception
            .noRetry(ParseException.class)
            .retryLimit(1)                          // retry each record once
            .skip(Exception.class)
            .skipLimit(200)                         // allow up to 200 skipped exceptions in total
            // .taskExecutor(new SimpleAsyncTaskExecutor()) // run the step concurrently; in general a single job is better run serially
            // .throttleLimit(10)                           // 10 concurrent tasks (default is 4)
            .build();
}
Example #6
Source File: DatabaseItemWriterDemo.java From SpringAll with MIT License
private ItemWriter<TestData> dataSourceItemWriter() {
    // JdbcBatchItemWriter is one ItemWriter implementation, used here to write to a MySQL database;
    // other implementations include MongoItemWriter, Neo4jItemWriter, etc.
    JdbcBatchItemWriter<TestData> writer = new JdbcBatchItemWriter<>();
    writer.setDataSource(dataSource); // set the data source
    String sql = "insert into TEST(id,field1,field2,field3) values (:id,:field1,:field2,:field3)";
    writer.setSql(sql); // set the insert SQL statement
    // map TestData bean properties to the named parameters in the SQL
    BeanPropertyItemSqlParameterSourceProvider<TestData> provider = new BeanPropertyItemSqlParameterSourceProvider<>();
    writer.setItemSqlParameterSourceProvider(provider);
    writer.afterPropertiesSet(); // validate the remaining configuration
    return writer;
}
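The named parameters in the insert statement above (:id, :field1, ...) are resolved from bean properties by BeanPropertyItemSqlParameterSourceProvider. A hypothetical sketch of what the TestData item class could look like; the field names are assumptions chosen to match the placeholders, and the real class lives in the SpringAll project.

// Hypothetical item bean: property names must match the named parameters in the SQL.
public class TestData {

    private Long id;
    private String field1;
    private String field2;
    private String field3;

    public Long getId() { return id; }
    public void setId(Long id) { this.id = id; }
    public String getField1() { return field1; }
    public void setField1(String field1) { this.field1 = field1; }
    public String getField2() { return field2; }
    public void setField2(String field2) { this.field2 = field2; }
    public String getField3() { return field3; }
    public void setField3(String field3) { this.field3 = field3; }
}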
Example #7
Source File: BatchConfig.java From Spring-5.0-Cookbook with MIT License
@Bean("writer2")
public ItemWriter<Permanent> xmlWriter() {
    StaxEventItemWriter<Permanent> xmlFileWriter = new StaxEventItemWriter<>();
    String exportFilePath = "./src/main/resources/emps.xml";
    xmlFileWriter.setResource(new FileSystemResource(exportFilePath));
    xmlFileWriter.setRootTagName("employees");
    Jaxb2Marshaller empMarshaller = new Jaxb2Marshaller();
    empMarshaller.setClassesToBeBound(Permanent.class);
    xmlFileWriter.setMarshaller(empMarshaller);
    System.out.println("marshalling");
    return xmlFileWriter;
}
Example #8
Source File: SingleStepJobAutoConfiguration.java From spring-cloud-task with Apache License 2.0
@Bean
@ConditionalOnMissingBean
@ConditionalOnProperty(prefix = "spring.batch.job", name = "jobName")
public Job job(ItemReader<Map<Object, Object>> itemReader,
        ItemWriter<Map<Object, Object>> itemWriter) {
    SimpleStepBuilder<Map<Object, Object>, Map<Object, Object>> stepBuilder = this.stepBuilderFactory
            .get(this.properties.getStepName())
            .<Map<Object, Object>, Map<Object, Object>>chunk(
                    this.properties.getChunkSize())
            .reader(itemReader);
    stepBuilder.processor(this.itemProcessor);
    Step step = stepBuilder.writer(itemWriter).build();
    return this.jobBuilderFactory.get(this.properties.getJobName()).start(step)
            .build();
}
Example #9
Source File: SpringBatchConfig.java From tutorials with MIT License
@Bean
protected Step step1(@Qualifier("itemProcessor") ItemProcessor<Transaction, Transaction> processor,
        ItemWriter<Transaction> writer) throws ParseException {
    return stepBuilderFactory
            .get("step1")
            .<Transaction, Transaction>chunk(10)
            .reader(itemReader(inputCsv))
            .processor(processor)
            .writer(writer)
            .build();
}
Example #10
Source File: AddressImportJobConfiguration.java From spring-batch-lightmin with Apache License 2.0
@Bean
public Step addressImportStep(final FlatFileItemReader<BatchTaskAddress> fileItemReader,
                              final ItemWriter<BatchTaskAddress> addressDatabaseWriter,
                              final StepBuilderFactory stepBuilderFactory) {
    return stepBuilderFactory
            .get("addressImportStep")
            .<BatchTaskAddress, BatchTaskAddress>chunk(1)
            .reader(fileItemReader)
            .writer(addressDatabaseWriter)
            .build();
}
Example #11
Source File: AddressImportJobConfiguration.java From spring-batch-lightmin with Apache License 2.0
@Bean
public ItemWriter<BatchTaskAddress> addressDatabaseWriter(final BatchTaskAddressDao batchTaskAddressDAO) {
    return items -> {
        for (final BatchTaskAddress item : items) {
            batchTaskAddressDAO.add(item);
        }
    };
}
Example #12
Source File: AddressPrinterJobConfiguration.java From spring-batch-lightmin with Apache License 2.0
@Bean
public Step addressPrinterStep(final StepBuilderFactory stepBuilderFactory,
                               final ItemStreamReader<Long> addressPrinterReader,
                               final ItemProcessor<Long, Address> addressPrinterProcessor,
                               final ItemWriter<Address> addressPrinterWriter) throws Exception {
    return stepBuilderFactory
            .get("addressPrinterStep")
            .<Long, Address>chunk(1)
            .reader(addressPrinterReader)
            .processor(addressPrinterProcessor)
            .writer(addressPrinterWriter)
            .allowStartIfComplete(Boolean.TRUE)
            .build();
}
Example #13
Source File: AddressPrinterJobConfiguration.java From spring-batch-lightmin with Apache License 2.0
@Bean
public Step addressBatchTaskDeletionStep(final StepBuilderFactory stepBuilderFactory,
                                         final ItemStreamReader<Long> addressBatchTaskDeletionReader,
                                         final ItemWriter<Long> addressBatchTaskDeletionWriter) throws Exception {
    return stepBuilderFactory
            .get("addressBatchTaskDeletionStep")
            .<Long, Long>chunk(1)
            .reader(addressBatchTaskDeletionReader)
            .writer(addressBatchTaskDeletionWriter)
            .allowStartIfComplete(Boolean.TRUE)
            .build();
}
Example #14
Source File: AddressPrinterJobConfiguration.java From spring-batch-lightmin with Apache License 2.0
@Bean
public ItemWriter<Address> addressPrinterWriter() {
    return addresses -> {
        for (final Address address : addresses) {
            log.info("Migrated Address: <<<" + address + ">>>");
        }
    };
}
Example #15
Source File: UpdatePostViewsJobConfigurer.java From wallride with Apache License 2.0
public Step updatePostViewsStep() {
    return stepBuilders.get("updatePostViewsStep")
            .chunk(10)
            .reader((ItemReader) updatePostViewsItemReader)
            .writer((ItemWriter) updatePostViewsItemWriter)
            .build();
}
Example #16
Source File: TestBatchConfiguration.java From spring-batch-admin-spring-boot with Apache License 2.0
@Bean
public ItemWriter<? super Object> writer() {
    return new ItemWriter<Object>() {
        @Override
        public void write(List<? extends Object> items) throws Exception {
            System.out.println(items);
        }
    };
}
Example #17
Source File: SpringBatchScheduler.java From tutorials with MIT License
@Bean
public ItemWriter<Book> writer() {
    return new ItemWriter<Book>() {
        @Override
        public void write(List<? extends Book> items) throws Exception {
            logger.debug("writer..." + items.size());
            for (Book item : items) {
                logger.debug(item.toString());
            }
        }
    };
}
Example #18
Source File: ChunksConfig.java From tutorials with MIT License
@Bean
protected Step processLines(ItemReader<Line> reader, ItemProcessor<Line, Line> processor, ItemWriter<Line> writer) {
    return steps.get("processLines").<Line, Line>chunk(2)
            .reader(reader)
            .processor(processor)
            .writer(writer)
            .build();
}
Example #19
Source File: SpringBatchRetryConfig.java From tutorials with MIT License
@Bean
public Step retryStep(@Qualifier("retryItemProcessor") ItemProcessor<Transaction, Transaction> processor,
        ItemWriter<Transaction> writer) throws ParseException {
    return stepBuilderFactory.get("retryStep")
            .<Transaction, Transaction>chunk(10)
            .reader(itemReader(inputCsv))
            .processor(processor)
            .writer(writer)
            .faultTolerant()
            .retryLimit(3)
            .retry(ConnectTimeoutException.class)
            .retry(DeadlockLoserDataAccessException.class)
            .build();
}
Example #20
Source File: JobConfiguration.java From CogStack-Pipeline with Apache License 2.0
@Bean
@StepScope
@Qualifier("mapJdbcItemWriter")
@Profile("jdbc_out_map")
public ItemWriter<Document> mapJdbcItemWriter(
        @Qualifier("targetDataSource") DataSource jdbcDocumentTarget) {
    JdbcBatchItemWriter<Document> writer = new JdbcBatchItemWriter<>();
    writer.setItemSqlParameterSourceProvider(new MapItemSqlParameterSourceProvider<Document>());
    writer.setSql(env.getRequiredProperty("target.Sql"));
    writer.setDataSource(jdbcDocumentTarget);
    return writer;
}
Example #21
Source File: SpringBatchConfig.java From tutorials with MIT License
@Bean
public Step skippingStep(@Qualifier("skippingItemProcessor") ItemProcessor<Transaction, Transaction> processor,
        ItemWriter<Transaction> writer) throws ParseException {
    return stepBuilderFactory
            .get("skippingStep")
            .<Transaction, Transaction>chunk(10)
            .reader(itemReader(invalidInputCsv))
            .processor(processor)
            .writer(writer)
            .faultTolerant()
            .skipLimit(2)
            .skip(MissingUsernameException.class)
            .skip(NegativeAmountException.class)
            .build();
}
Example #22
Source File: SpringBatchConfig.java From tutorials with MIT License
@Bean
public Step skipPolicyStep(@Qualifier("skippingItemProcessor") ItemProcessor<Transaction, Transaction> processor,
        ItemWriter<Transaction> writer) throws ParseException {
    return stepBuilderFactory
            .get("skipPolicyStep")
            .<Transaction, Transaction>chunk(10)
            .reader(itemReader(invalidInputCsv))
            .processor(processor)
            .writer(writer)
            .faultTolerant()
            .skipPolicy(new CustomSkipPolicy())
            .build();
}
Example #23
Source File: SpringBatchConfiguration.java From tutorials with MIT License
@Bean
public Step step1(ItemReader<BookRecord> csvItemReader, ItemWriter<Book> jsonItemWriter) throws IOException {
    // @formatter:off
    return stepBuilderFactory
            .get("step1")
            .<BookRecord, Book>chunk(3)
            .reader(csvItemReader)
            .processor(bookItemProcessor())
            .writer(jsonItemWriter)
            .build();
    // @formatter:on
}
Example #24
Source File: SpringBatchConfiguration.java From tutorials with MIT License
@Bean
public Step step2(ItemReader<BookRecord> csvItemReader, ItemWriter<BookDetails> listItemWriter) {
    // @formatter:off
    return stepBuilderFactory
            .get("step2")
            .<BookRecord, BookDetails>chunk(3)
            .reader(csvItemReader)
            .processor(bookDetailsItemProcessor())
            .writer(listItemWriter)
            .build();
    // @formatter:on
}
Example #25
Source File: BatchConfiguration.java From spring-graalvm-native with Apache License 2.0
@Bean
public Step step1(ItemReader<CustomerCredit> itemReader,
        ItemProcessor<CustomerCredit, CustomerCredit> itemProcessor,
        ItemWriter<CustomerCredit> itemWriter) {
    return this.stepBuilderFactory.get("step1")
            .<CustomerCredit, CustomerCredit>chunk(2)
            .reader(itemReader)
            .processor(itemProcessor)
            .writer(itemWriter)
            .build();
}
Example #26
Source File: BatchConfiguration.java From messaging with Apache License 2.0
@Bean
Job job(JobBuilderFactory jobBuilderFactory, StepBuilderFactory stepBuilderFactory,
        JdbcTemplate template, ItemReader<Contact> fileReader,
        ItemProcessor<Contact, Contact> emailProcessor, ItemWriter<Contact> jdbcWriter) {

    Step setup = stepBuilderFactory.get("clean-contact-table")
            .tasklet((contribution, chunkContext) -> {
                template.update("delete from CONTACT");
                return RepeatStatus.FINISHED;
            }).build();

    Step fileToJdbc = stepBuilderFactory.get("file-to-jdbc-fileToJdbc")
            .<Contact, Contact>chunk(5) // <1>
            .reader(fileReader).processor(emailProcessor).writer(jdbcWriter)
            .faultTolerant().skip(InvalidEmailException.class) // <2>
            .skipPolicy((Throwable t, int skipCount) -> {
                LogFactory.getLog(getClass()).info("skipping ");
                return t.getClass().isAssignableFrom(InvalidEmailException.class);
            }).retry(HttpStatusCodeException.class) // <3>
            .retryLimit(2).build();

    return jobBuilderFactory.get("etl") // <4>
            .start(setup).next(fileToJdbc).build();
}
Example #27
Source File: CsvBatchConfig.java From Demo with Apache License 2.0
@Bean
public Step step1(StepBuilderFactory stepBuilderFactory,
                  ItemReader<Student> reader,
                  ItemWriter<Student> writer,
                  ItemProcessor<Student, Student> processor) {
    return stepBuilderFactory
            .get("step1")
            .<Student, Student>chunk(65000)  // commit 65,000 records per chunk
            .reader(reader)                  // bind the reader to the step
            .processor(processor)            // bind the processor to the step
            .writer(writer)                  // bind the writer to the step
            .build();
}
Example #28
Source File: JobConfiguration.java From CogStack-Pipeline with Apache License 2.0
@Bean
@StepScope
@Qualifier("simpleJdbcItemWriter")
@Profile("jdbc_out")
public ItemWriter<Document> simpleJdbcItemWriter(
        @Qualifier("targetDataSource") DataSource jdbcDocumentTarget) {
    JdbcBatchItemWriter<Document> writer = new JdbcBatchItemWriter<>();
    writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
    writer.setSql(env.getRequiredProperty("target.Sql"));
    writer.setDataSource(jdbcDocumentTarget);
    return writer;
}
Example #29
Source File: JobConfiguration.java From CogStack-Pipeline with Apache License 2.0
@Bean
@Qualifier("compositeSlaveStep")
public Step compositeSlaveStep(
        ItemReader<Document> reader,
        @Qualifier("compositeItemProcessor") ItemProcessor<Document, Document> processor,
        @Qualifier("compositeESandJdbcItemWriter") ItemWriter<Document> writer,
        @Qualifier("slaveTaskExecutor") TaskExecutor taskExecutor,
        @Qualifier("nonFatalExceptionItemProcessorListener") ItemProcessListener nonFatalExceptionItemProcessorListener,
        //@Qualifier("targetDatasourceTransactionManager") PlatformTransactionManager manager,
        StepBuilderFactory stepBuilderFactory) {
    FaultTolerantStepBuilder stepBuilder = stepBuilderFactory.get("compositeSlaveStep")
            .<Document, Document>chunk(chunkSize)
            .reader(reader)
            .processor(processor)
            .writer(writer)
            .faultTolerant()
            .skipLimit(skipLimit)
            .skip(WebserviceProcessingFailedException.class);
    if (env.acceptsProfiles("jdbc_out_map")) {
        stepBuilder = stepBuilder.skip(InvalidDataAccessApiUsageException.class);
    }
    return stepBuilder.noSkip(Exception.class)
            // .listener(nonFatalExceptionItemProcessorListener)
            .listener(new SkipListener())
            .taskExecutor(taskExecutor)
            .build();
}
Example #30
Source File: JobConfiguration.java From CogStack-Pipeline with Apache License 2.0
@Bean
@Qualifier("compositeItemWriter")
public ItemWriter<Document> compositeESandJdbcItemWriter() {
    CompositeItemWriter writer = new CompositeItemWriter<>();
    ArrayList<ItemWriter<Document>> delegates = new ArrayList<>();
    if (esItemWriter != null) delegates.add(esItemWriter);
    if (esRestItemWriter != null) delegates.add(esRestItemWriter);
    if (jdbcItemWriter != null) delegates.add(jdbcItemWriter);
    if (jdbcMapItemWriter != null) delegates.add(jdbcMapItemWriter);
    if (jsonFileItemWriter != null) delegates.add(jsonFileItemWriter);
    writer.setDelegates(delegates);
    return writer;
}