Add CompositeItemReader feature
fmbenhassine committed Nov 7, 2023
1 parent 678b8bb commit f176d65
Showing 8 changed files with 290 additions and 1 deletion.
50 changes: 49 additions & 1 deletion README.md
@@ -3,6 +3,12 @@
This repository contains experimental features in Spring Batch.
Experimental features are *not* intended to be used in production.
They are shared here to be explored by the community and to gather feedback.
Please refer to the [Enabling experimental features](#enabling-experimental-features) section for details on how to enable them.

The currently available experimental features are the following:

* [MongoDB job repository](#mongodb-job-repository)
* [Composite item reader](#composite-item-reader)

**Important note:** The versioning in this repository follows the [semantic versioning specification](https://semver.org/#spec-item-4).
Public APIs as well as the implementations should not be considered stable and may change at any time :exclamation:
@@ -44,7 +50,7 @@ To test experimental features, you need to add the following dependency in your

Depending on the feature you are testing, other dependencies might be required.

- # MongoDB as data store for batch meta-data
+ # MongoDB job repository

*Original issue:* https://github.com/spring-projects/spring-batch/issues/877

@@ -90,6 +96,48 @@ Those can be defined as Spring beans in the application context as described in

You can find a complete example in the [MongoDBJobRepositoryIntegrationTests](./src/test/java/org/springframework/batch/experimental/core/repository/support/MongoDBJobRepositoryIntegrationTests.java) file.
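
The exact infrastructure beans depend on your setup. For illustration only, here is a minimal sketch of how such beans could be defined, assuming the job repository is backed by Spring Data MongoDB's `MongoTemplate` and `MongoTransactionManager` (the connection string, database name, and bean wiring below are assumptions for this sketch, not part of this commit):

```java
@Bean
public MongoDatabaseFactory mongoDatabaseFactory() {
	// placeholder connection string and database name for this sketch
	MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017");
	return new SimpleMongoClientDatabaseFactory(mongoClient, "test");
}

@Bean
public MongoTemplate mongoTemplate(MongoDatabaseFactory mongoDatabaseFactory) {
	return new MongoTemplate(mongoDatabaseFactory);
}

@Bean
public MongoTransactionManager transactionManager(MongoDatabaseFactory mongoDatabaseFactory) {
	return new MongoTransactionManager(mongoDatabaseFactory);
}
```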

# Composite item reader

*Original issue:* https://github.com/spring-projects/spring-batch/issues/757

This feature introduces a composite `ItemReader` implementation. Similar to `CompositeItemProcessor` and `CompositeItemWriter`, the idea is to delegate reading to a list of item readers in order.
This is useful when data having the same format must be read from different sources (files, databases, etc.). Here is an example:

```java
record Person(int id, String name) {}

@Bean
public FlatFileItemReader<Person> fileItemReader() {
	return new FlatFileItemReaderBuilder<Person>()
			.name("fileItemReader")
			.resource(new ClassPathResource("persons.csv"))
			.delimited()
			.names("id", "name")
			.targetType(Person.class)
			.build();
}

@Bean
public JdbcCursorItemReader<Person> databaseItemReader() {
	String sql = "select * from persons";
	return new JdbcCursorItemReaderBuilder<Person>()
			.name("databaseItemReader")
			.dataSource(dataSource())
			.sql(sql)
			.rowMapper(new DataClassRowMapper<>(Person.class))
			.build();
}

@Bean
public CompositeItemReader<Person> itemReader() {
	return new CompositeItemReader<>(Arrays.asList(fileItemReader(), databaseItemReader()));
}
```

This snippet configures a `CompositeItemReader` with two delegates to read the same data from a flat file and a database table.
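
For completeness, here is a minimal sketch of how such a composite reader could be plugged into a chunk-oriented step (the `itemWriter()` bean, the chunk size, and the infrastructure beans are assumptions here; the integration test linked below shows a complete configuration):

```java
@Bean
public Job job(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
	return new JobBuilder("job", jobRepository)
			.start(new StepBuilder("step", jobRepository)
					.<Person, Person>chunk(5, transactionManager)
					.reader(itemReader())
					.writer(itemWriter())
					.build())
			.build();
}
```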

You can find a complete example in the [CompositeItemReaderIntegrationTests](./src/test/java/org/springframework/batch/experimental/item/support/CompositeItemReaderIntegrationTests.java) file.

# Contribute

The best way to contribute to this project is by trying out the experimental features and sharing your feedback!
7 changes: 7 additions & 0 deletions pom.xml
@@ -53,6 +53,7 @@
		<junit-jupiter.version>5.10.0</junit-jupiter.version>
		<spring-test.version>6.0.13</spring-test.version>
		<testcontainers.version>1.19.1</testcontainers.version>
		<h2.version>2.2.224</h2.version>
		<slf4j.version>2.0.9</slf4j.version>
	</properties>

@@ -99,6 +100,12 @@
			<version>${testcontainers.version}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>com.h2database</groupId>
			<artifactId>h2</artifactId>
			<version>${h2.version}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.slf4j</groupId>
			<artifactId>slf4j-simple</artifactId>
72 changes: 72 additions & 0 deletions src/main/java/org/springframework/batch/experimental/item/support/CompositeItemReader.java
@@ -0,0 +1,72 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.batch.experimental.item.support;

import java.util.Iterator;
import java.util.List;

import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.batch.item.ItemStreamReader;

/**
* Composite reader that delegates reading to a list of {@link ItemStreamReader}s.
* This implementation is not thread-safe and not restartable.
*
* @author Mahmoud Ben Hassine
* @param <T> type of objects to read
*/
public class CompositeItemReader<T> implements ItemStreamReader<T> {

	private final List<ItemStreamReader<T>> delegates;

	private final Iterator<ItemStreamReader<T>> delegatesIterator;

	private ItemStreamReader<T> currentDelegate;

	public CompositeItemReader(List<ItemStreamReader<T>> delegates) {
		this.delegates = delegates;
		this.delegatesIterator = this.delegates.iterator();
		this.currentDelegate = this.delegatesIterator.hasNext() ? this.delegatesIterator.next() : null;
	}

	@Override
	public void open(ExecutionContext executionContext) throws ItemStreamException {
		for (ItemStreamReader<T> delegate : delegates) {
			delegate.open(executionContext);
		}
	}

	@Override
	public T read() throws Exception {
		if (this.currentDelegate == null) {
			return null;
		}
		T item = currentDelegate.read();
		if (item == null) {
			// current delegate is exhausted: move to the next one (if any) and keep reading
			currentDelegate = this.delegatesIterator.hasNext() ? this.delegatesIterator.next() : null;
			return read();
		}
		return item;
	}

	@Override
	public void close() throws ItemStreamException {
		for (ItemStreamReader<T> delegate : delegates) {
			delegate.close();
		}
	}
}
154 changes: 154 additions & 0 deletions src/test/java/org/springframework/batch/experimental/item/support/CompositeItemReaderIntegrationTests.java
@@ -0,0 +1,154 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.batch.experimental.item.support;

import java.util.Arrays;

import javax.sql.DataSource;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.JdbcCursorItemReader;
import org.springframework.batch.item.database.builder.JdbcBatchItemWriterBuilder;
import org.springframework.batch.item.database.builder.JdbcCursorItemReaderBuilder;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.builder.FlatFileItemReaderBuilder;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.jdbc.core.DataClassRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.jdbc.support.JdbcTransactionManager;
import org.springframework.test.jdbc.JdbcTestUtils;

public class CompositeItemReaderIntegrationTests {

	record Person(int id, String name) {
	}

	@Test
	void testCompositeItemReader() throws Exception {
		// given
		ApplicationContext context = new AnnotationConfigApplicationContext(JobConfiguration.class);
		JobLauncher jobLauncher = context.getBean(JobLauncher.class);
		Job job = context.getBean(Job.class);

		// when
		JobExecution jobExecution = jobLauncher.run(job, new JobParameters());

		// then
		Assertions.assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
		JdbcTemplate jdbcTemplate = new JdbcTemplate(context.getBean(DataSource.class));
		int personsCount = JdbcTestUtils.countRowsInTable(jdbcTemplate, "person_target");
		Assertions.assertEquals(6, personsCount);
	}

	@Configuration
	@EnableBatchProcessing
	static class JobConfiguration {

		@Bean
		public FlatFileItemReader<Person> itemReader1() {
			return new FlatFileItemReaderBuilder<Person>()
					.name("personItemReader1")
					.resource(new ClassPathResource("persons1.csv"))
					.delimited()
					.names("id", "name")
					.targetType(Person.class)
					.build();
		}

		@Bean
		public FlatFileItemReader<Person> itemReader2() {
			return new FlatFileItemReaderBuilder<Person>()
					.name("personItemReader2")
					.resource(new ClassPathResource("persons2.csv"))
					.delimited()
					.names("id", "name")
					.targetType(Person.class)
					.build();
		}

		@Bean
		public JdbcCursorItemReader<Person> itemReader3() {
			String sql = "select * from person_source";
			return new JdbcCursorItemReaderBuilder<Person>()
					.name("personItemReader3")
					.dataSource(dataSource())
					.sql(sql)
					.rowMapper(new DataClassRowMapper<>(Person.class))
					.build();
		}

		@Bean
		public CompositeItemReader<Person> itemReader() {
			return new CompositeItemReader<>(Arrays.asList(itemReader1(), itemReader2(), itemReader3()));
		}

		@Bean
		public JdbcBatchItemWriter<Person> itemWriter() {
			String sql = "insert into person_target (id, name) values (:id, :name)";
			return new JdbcBatchItemWriterBuilder<Person>()
					.dataSource(dataSource())
					.sql(sql)
					.beanMapped()
					.build();
		}

		@Bean
		public Job job(JobRepository jobRepository, JdbcTransactionManager transactionManager) {
			return new JobBuilder("job", jobRepository)
					.start(new StepBuilder("step", jobRepository)
							.<Person, Person>chunk(5, transactionManager)
							.reader(itemReader())
							.writer(itemWriter())
							.build())
					.build();
		}

		@Bean
		public DataSource dataSource() {
			return new EmbeddedDatabaseBuilder()
					.setType(EmbeddedDatabaseType.H2)
					.addScript("/org/springframework/batch/core/schema-drop-h2.sql")
					.addScript("/org/springframework/batch/core/schema-h2.sql")
					.addScript("schema.sql")
					.addScript("data.sql")
					.build();
		}

		@Bean
		public JdbcTransactionManager transactionManager(DataSource dataSource) {
			return new JdbcTransactionManager(dataSource);
		}

	}
}
2 changes: 2 additions & 0 deletions src/test/resources/data.sql
@@ -0,0 +1,2 @@
insert into person_source values (5, 'baz1');
insert into person_source values (6, 'baz2');
2 changes: 2 additions & 0 deletions src/test/resources/persons1.csv
@@ -0,0 +1,2 @@
1,foo1
2,foo2
2 changes: 2 additions & 0 deletions src/test/resources/persons2.csv
@@ -0,0 +1,2 @@
3,bar1
4,bar2
2 changes: 2 additions & 0 deletions src/test/resources/schema.sql
@@ -0,0 +1,2 @@
create table person_source (id int primary key, name varchar(20));
create table person_target (id int primary key, name varchar(20));
