Spring Batch Reader, Processor, Writer
Reader, Processor, Writer
A chunk-oriented step in Spring Batch is built from the ItemReader, ItemProcessor, and ItemWriter interfaces.
| ItemReader | Reads the input data; you can write a custom reader or use the readers Spring Batch provides for files, databases, etc. |
| ItemProcessor | Processes each item that was read (transform, enrich, or filter it). |
| ItemWriter | Writes the processed items to the output target (file, database, etc.). |
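All three are small interfaces, so you can implement them yourself when the built-in components do not fit. The following is only a minimal sketch (not part of the example below) wiring the three contracts around a String item type; the class names are made up for illustration.
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;

public class SimpleStringComponents {

    // ItemReader: return one item per call, or null to signal the end of input
    static class InMemoryReader implements ItemReader<String> {
        private final Iterator<String> items = Arrays.asList("alice", "bob").iterator();

        @Override
        public String read() {
            return items.hasNext() ? items.next() : null;
        }
    }

    // ItemProcessor: transform (or filter) each item between reader and writer
    static class UpperCaseProcessor implements ItemProcessor<String, String> {
        @Override
        public String process(String item) {
            return item.toUpperCase();
        }
    }

    // ItemWriter: receives a whole chunk of processed items at once
    static class ConsoleWriter implements ItemWriter<String> {
        @Override
        public void write(List<? extends String> items) {
            items.forEach(System.out::println);
        }
    }
}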
Example
This example is based on the Spring guide "Creating a Batch Service".
Read the input from a CSV file on the classpath (a sample file is shown below).
Keep the Spring Batch job metadata in an in-memory HSQLDB.
Write the processed data into MySQL.
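The original post does not show the input file itself. Since the reader maps a single "name" column, a plausible data.csv on the classpath (typically src/main/resources) would hold one name per line; the values here are made up:
data.csv
John
Jane
Michael
Emily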
Preparation
Create the target table in MySQL:
CREATE TABLE `people` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `name` varchar(255) NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB;
DataSourceConfiguration.java
import java.sql.SQLException;

import javax.sql.DataSource;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.jdbc.datasource.SimpleDriverDataSource;

@Configuration
public class DataSourceConfiguration {

    // @Primary makes this the default DataSource, so Spring Batch keeps its
    // job metadata in the in-memory HSQLDB rather than in MySQL.
    @Bean
    @Primary
    public DataSource hsqldbDataSource() throws SQLException {
        final SimpleDriverDataSource dataSource = new SimpleDriverDataSource();
        dataSource.setDriver(new org.hsqldb.jdbcDriver());
        dataSource.setUrl("jdbc:hsqldb:mem:mydb");
        dataSource.setUsername("sa");
        dataSource.setPassword("");
        return dataSource;
    }

    // Target database for the batch output; wired explicitly into the writer below.
    @Bean
    public DataSource mysqlDataSource() throws SQLException {
        final SimpleDriverDataSource dataSource = new SimpleDriverDataSource();
        dataSource.setDriver(new com.mysql.jdbc.Driver());
        dataSource.setUrl("jdbc:mysql://localhost/test");
        dataSource.setUsername("root");
        dataSource.setPassword("atmarkplant");
        return dataSource;
    }
}
Person.java (Model)
public class Person {
private String name;
public Person() {
}
public Person(String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Override
public String toString() {
return "name : " + this.name;
}
}
PersonItemProcessor.java (Processor)
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.springframework.batch.item.ItemProcessor;

public class PersonItemProcessor implements ItemProcessor<Person, Person> {

    private static final Logger log = LoggerFactory.getLogger(PersonItemProcessor.class);

    @Override
    public Person process(Person item) throws Exception {
        // Upper-case the name and hand a new Person to the writer
        final String name = item.getName().toUpperCase();
        final Person transformedPerson = new Person(name);
        log.info("Converting (" + item + ") into (" + transformedPerson + ")");
        return transformedPerson;
    }
}
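By contract, an ItemProcessor that returns null filters the item out, so it never reaches the writer. As a variation on the processor above (not part of the original post), blank names could be skipped like this:
import org.springframework.batch.item.ItemProcessor;

public class NonBlankPersonItemProcessor implements ItemProcessor<Person, Person> {

    @Override
    public Person process(Person item) throws Exception {
        // Returning null drops the item from the chunk, so it is not written
        if (item.getName() == null || item.getName().trim().isEmpty()) {
            return null;
        }
        return new Person(item.getName().toUpperCase());
    }
}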
BatchConfiguration.java (Config)
import java.sql.SQLException;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.DefaultBatchConfigurer;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.io.ClassPathResource;

@Configuration
@EnableBatchProcessing
// Registers DefaultBatchConfigurer, which builds the batch infrastructure on the @Primary (HSQLDB) DataSource
@ComponentScan(basePackageClasses = DefaultBatchConfigurer.class)
@EnableAutoConfiguration
// Pulls in the HSQLDB and MySQL DataSource beans defined above
@Import({DataSourceConfiguration.class})
public class BatchConfiguration {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private DataSourceConfiguration dataSourceConfig;

    // tag::readerwriterprocessor[]
    @Bean
    public FlatFileItemReader<Person> reader() {
        // FlatFileItemReader parses the CSV one line at a time
        FlatFileItemReader<Person> reader = new FlatFileItemReader<Person>();
        reader.setResource(new ClassPathResource("data.csv"));
        reader.setLineMapper(new DefaultLineMapper<Person>() {{
            setLineTokenizer(new DelimitedLineTokenizer() {{
                // Single column mapped onto Person.name
                setNames(new String[] { "name" });
            }});
            setFieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
                setTargetType(Person.class);
            }});
        }});
        return reader;
    }

    @Bean
    public PersonItemProcessor processor() {
        return new PersonItemProcessor();
    }

    @Bean
    public JdbcBatchItemWriter<Person> writer() {
        JdbcBatchItemWriter<Person> writer = new JdbcBatchItemWriter<Person>();
        // Maps the :name parameter in the SQL onto the Person.name property
        writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<Person>());
        writer.setSql("INSERT INTO people (name) VALUES (:name)");
        try {
            // Write to MySQL, not to the primary (HSQLDB) DataSource
            writer.setDataSource(dataSourceConfig.mysqlDataSource());
        }
        catch (SQLException e) {
            // Fail fast instead of silently swallowing the exception
            throw new IllegalStateException("Could not configure the MySQL DataSource", e);
        }
        return writer;
    }
    // end::readerwriterprocessor[]

    // tag::jobstep[]
    @Bean
    public Job importUserJob() {
        return jobBuilderFactory.get("importUserJob")
                .incrementer(new RunIdIncrementer())
                .flow(step1())
                .end()
                .build();
    }

    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1")
                .<Person, Person> chunk(10) // <input, output> item types; items are written in chunks of 10
                .reader(reader())
                .processor(processor())
                .writer(writer())
                .build();
    }
    // end::jobstep[]
}
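The original post ends with the configuration. To actually launch the job you still need an entry point; assuming Spring Boot is on the classpath (as in the referenced guide), a minimal launcher could look like the sketch below. Because BatchConfiguration already carries @EnableAutoConfiguration and @ComponentScan, it can serve as the source class directly; the class name Application is arbitrary.
import org.springframework.boot.SpringApplication;

public class Application {

    public static void main(String[] args) {
        // Starts the context; with @EnableBatchProcessing and a Job bean present,
        // Spring Boot launches importUserJob on startup by default
        SpringApplication.run(BatchConfiguration.class, args);
    }
}
After the run, the result can be checked in MySQL with a query such as SELECT id, name FROM people; the names should appear in upper case.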
