Error: Field job in com.example.partioner.DemoApplication required a bean of type 'org.springframework.batch.core.Job' that could not be found - spring-batch

I am trying a Spring Batch database partitioning program, but I get this message when I try to run the batch:
APPLICATION FAILED TO START
Description: Field job in com.example.partioner.DemoApplication required a bean of type 'org.springframework.batch.core.Job' that could not be found.
The injection point has the following annotations: - @org.springframework.beans.factory.annotation.Autowired(required=true)
Action: Consider defining a bean of type 'org.springframework.batch.core.Job' in your configuration.
This is my main class:
package com.example.partioner;
import java.util.Date;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
@EnableBatchProcessing
public class DemoApplication implements CommandLineRunner {

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private Job job;

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }

    @Override
    public void run(String... args) throws Exception {
        System.out.println("STATUS STARTED===================");
        JobParameters jobParameters = new JobParametersBuilder()
                .addString("JobId", String.valueOf(System.currentTimeMillis()))
                .addDate("date", new Date())
                .addLong("time", System.currentTimeMillis())
                .toJobParameters();
        JobExecution execution = jobLauncher.run(job, jobParameters);
        System.out.println("STATUS :: " + execution.getStatus());
    }
}
This is my JobConfiguration class:
package com.example.config;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.JdbcPagingItemReader;
import org.springframework.batch.item.database.Order;
import org.springframework.batch.item.database.support.MySqlPagingQueryProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import com.example.mapper.CustomerRowMapper;
import com.example.model.Customer;
import com.example.partitioner.ColumnRangePartitioner;
@Configuration
public class JobConfiguration {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private DataSource dataSource;

    @Bean
    public ColumnRangePartitioner partitioner() {
        ColumnRangePartitioner columnRangePartitioner = new ColumnRangePartitioner();
        columnRangePartitioner.setColumn("id");
        columnRangePartitioner.setDataSource(dataSource);
        columnRangePartitioner.setTable("customer");
        return columnRangePartitioner;
    }

    @Bean
    @StepScope
    public JdbcPagingItemReader<Customer> pagingItemReader(
            @Value("#{stepExecutionContext['minValue']}") Long minValue,
            @Value("#{stepExecutionContext['maxValue']}") Long maxValue) {
        System.out.println("reading " + minValue + " to " + maxValue);
        Map<String, Order> sortKeys = new HashMap<>();
        sortKeys.put("id", Order.ASCENDING);
        MySqlPagingQueryProvider queryProvider = new MySqlPagingQueryProvider();
        queryProvider.setSelectClause("id, firstName, lastName, birthdate");
        queryProvider.setFromClause("from customer");
        queryProvider.setWhereClause("where id >= " + minValue + " and id < " + maxValue);
        queryProvider.setSortKeys(sortKeys);
        JdbcPagingItemReader<Customer> reader = new JdbcPagingItemReader<>();
        reader.setDataSource(this.dataSource);
        reader.setFetchSize(10);
        reader.setRowMapper(new CustomerRowMapper());
        reader.setQueryProvider(queryProvider);
        return reader;
    }

    @Bean
    @StepScope
    public JdbcBatchItemWriter<Customer> customerItemWriter() {
        JdbcBatchItemWriter<Customer> itemWriter = new JdbcBatchItemWriter<>();
        itemWriter.setDataSource(dataSource);
        itemWriter.setSql("INSERT INTO NEW_CUSTOMER VALUES (:id, :firstName, :lastName, :birthdate)");
        itemWriter.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
        itemWriter.afterPropertiesSet();
        return itemWriter;
    }

    @Bean
    public Step slaveStep() {
        return stepBuilderFactory.get("slaveStep")
                .<Customer, Customer>chunk(10)
                .reader(pagingItemReader(null, null))
                .writer(customerItemWriter())
                .build();
    }

    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1")
                .partitioner(slaveStep().getName(), partitioner())
                .step(slaveStep())
                .gridSize(4)
                .taskExecutor(new SimpleAsyncTaskExecutor())
                .build();
    }

    @Bean
    public Job job() {
        return jobBuilderFactory.get("job")
                .start(step1())
                .build();
    }
}
This is my partitioner class:
package com.example.partitioner;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.JdbcTemplate;
public class ColumnRangePartitioner implements Partitioner {

    private JdbcOperations jdbcTemplate;
    private String table;
    private String column;

    public void setTable(String table) {
        this.table = table;
    }

    public void setColumn(String column) {
        this.column = column;
    }

    public void setDataSource(DataSource dataSource) {
        jdbcTemplate = new JdbcTemplate(dataSource);
    }

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        int min = jdbcTemplate.queryForObject("SELECT MIN(" + column + ") FROM " + table, Integer.class);
        int max = jdbcTemplate.queryForObject("SELECT MAX(" + column + ") FROM " + table, Integer.class);
        int targetSize = (max - min) / gridSize + 1;
        Map<String, ExecutionContext> result = new HashMap<>();
        int number = 0;
        int start = min;
        int end = start + targetSize - 1;
        while (start <= max) {
            ExecutionContext value = new ExecutionContext();
            result.put("partition" + number, value);
            if (end >= max) {
                end = max;
            }
            value.putInt("minValue", start);
            value.putInt("maxValue", end);
            start += targetSize;
            end += targetSize;
            number++;
        }
        return result;
    }
}
I don't understand the reason for this message and can't find a solution. I think I have added all the necessary annotations. I am a beginner and I hope you can help me.

Your DemoApplication is in package com.example.partioner, while your job configuration class JobConfiguration is in package com.example.config.
For Spring Boot to find your job, you need to move your JobConfiguration class to the same package as your main class DemoApplication, or to a package underneath it.
Please refer to the Structuring Your Code section of the reference documentation.
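Alternatively, if you prefer to keep JobConfiguration in com.example.config, you can widen the component scan from the main class instead. A minimal sketch, assuming com.example is the common parent package:

// Sketch: scanBasePackages widens component scanning so that both
// com.example.partioner and com.example.config are picked up.
@SpringBootApplication(scanBasePackages = "com.example")
@EnableBatchProcessing
public class DemoApplication implements CommandLineRunner {
    // ... rest unchanged
}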

Related

Spring Batch process different types in one step

I have a batch job that reads records from a file. I want to convert said records to PojoA (all strings). I want to run each record through a validator to ensure all fields are present. I then want to transform PojoA to PojoB. The issue I have is that I am unable to change the type of object mid-step.
return getStepBuilder("downloadData")
.<PojoA, PojoA>chunk(1000)
.reader(pojoAReader())
.processor(pojoAValidator)
.writer(pojoAWriter)
.processor(pojoAToPojoBTransformer) <- issue here, <PojoA, PojoB>
.write(pojoBWriter)
.build();
The reason PojoB exists is that PojoA is all strings; I want to persist all records regardless of whether they're invalid. PojoB has the accurate data types, e.g. dates and numbers.
I think I need another step that deals with this, but how do I pass the PojoA instances to step 2?
You cannot declare two processors/writers like:
.processor(pojoAValidator)
.writer(pojoAWriter)
.processor(pojoAToPojoBTransformer) <- issue here, <PojoA, PojoB>
.write(pojoBWriter)
You need to use a composite processor/writer for that.
Here is a quick example for a composite processor with processor1 (Integer -> Integer) then processor2 (Integer -> String):
import java.util.Arrays;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.support.CompositeItemProcessor;
import org.springframework.batch.item.support.ListItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@EnableBatchProcessing
public class MyJob {

    @Autowired
    private JobBuilderFactory jobs;

    @Autowired
    private StepBuilderFactory steps;

    @Bean
    public ItemReader<Integer> itemReader() {
        return new ListItemReader<>(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10));
    }

    @Bean
    public ItemWriter<String> itemWriter() {
        return items -> {
            for (String item : items) {
                System.out.println("item = " + item);
            }
        };
    }

    @Bean
    public ItemProcessor<Integer, Integer> itemProcessor1() {
        return item -> item + 1;
    }

    @Bean
    public ItemProcessor<Integer, String> itemProcessor2() {
        return String::valueOf;
    }

    @Bean
    public ItemProcessor<Integer, String> compositeItemProcessor() {
        CompositeItemProcessor<Integer, String> compositeItemProcessor = new CompositeItemProcessor<>();
        compositeItemProcessor.setDelegates(Arrays.asList(itemProcessor1(), itemProcessor2()));
        return compositeItemProcessor;
    }

    @Bean
    public Step step() {
        return steps.get("step")
                .<Integer, String>chunk(5)
                .reader(itemReader())
                .processor(compositeItemProcessor())
                .writer(itemWriter())
                .build();
    }

    @Bean
    public Job job() {
        return jobs.get("job")
                .start(step())
                .build();
    }

    public static void main(String[] args) throws Exception {
        ApplicationContext context = new AnnotationConfigApplicationContext(MyJob.class);
        JobLauncher jobLauncher = context.getBean(JobLauncher.class);
        Job job = context.getBean(Job.class);
        jobLauncher.run(job, new JobParameters());
    }
}
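The writer side works the same way. Note that a CompositeItemWriter hands the same chunk of items to every delegate, so all delegates must accept the same item type. A minimal sketch with two hypothetical ItemWriter<String> beans (databaseWriter and auditWriter are placeholder names, not part of the example above):

import java.util.Arrays;
import org.springframework.batch.item.support.CompositeItemWriter;

@Bean
public CompositeItemWriter<String> compositeItemWriter() {
    // Each chunk is written by databaseWriter first, then auditWriter.
    CompositeItemWriter<String> compositeItemWriter = new CompositeItemWriter<>();
    compositeItemWriter.setDelegates(Arrays.asList(databaseWriter(), auditWriter()));
    return compositeItemWriter;
}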

Field vehicleRepository required a bean of type ..VehicleInterface that could not be found

I'm trying to @Autowired a repository reference from the service implementation class, but I'm still getting a bean-not-found error.
I've already tried moving the App.java file to the parent package.
App.java
package com.trucker;
import org.springframework.context.ApplicationContext;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class App {

    public static void main(String[] args) {
        ApplicationContext applicationContext = SpringApplication.run(App.class, args);
    }
}
VehicleService.java
package com.trucker.service;
import com.trucker.entity.Vehicle;
import com.trucker.repository.VehicleRepositoryInterface;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
public class VehicleService implements VehicleServiceI {

    @Autowired
    VehicleRepositoryInterface vehicleRepository;

    @Override
    public Vehicle addVehicles(Vehicle vehicle) {
        System.out.println(vehicle);
        return vehicleRepository.save(vehicle);
    }
}
VehicleRepositoryInterface.java
package com.trucker.repository;
import com.trucker.entity.Vehicle;
import org.springframework.context.annotation.Bean;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
public interface VehicleRepositoryInterface extends CrudRepository<Vehicle,String> {
}
VehicleController.java
package com.trucker.controller;
import com.trucker.entity.Vehicle;
import com.trucker.service.VehicleServiceI;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMethod;
@Controller
public class VehicleController {

    @Autowired
    VehicleServiceI vehicleService;

    @RequestMapping(path = "/vehicles", method = RequestMethod.PUT)
    public String addVehicles(@RequestBody Vehicle vehicle) {
        System.out.println("************IN THE CONTROLLER************" + vehicle);
        vehicleService.addVehicles(vehicle);
        return "";
    }
}
Vehicle.java
package com.trucker.entity;
import javax.persistence.Entity;
import java.sql.Timestamp;
import javax.persistence.Id;
@Entity
public class Vehicle {

    @Id
    String vin;
    String make;
    String model;
    int year;
    int redlineRpm;
    int maxFuelVolume;
    Timestamp lastServiceDate;

    public String getVin() {
        return vin;
    }

    public void setVin(String vin) {
        this.vin = vin;
    }

    public String getMake() {
        return make;
    }

    public void setMake(String make) {
        this.make = make;
    }

    public String getModel() {
        return model;
    }

    public void setModel(String model) {
        this.model = model;
    }

    public int getYear() {
        return year;
    }

    public void setYear(int year) {
        this.year = year;
    }

    public int getRedlineRpm() {
        return redlineRpm;
    }

    public void setRedlineRpm(int redlineRpm) {
        this.redlineRpm = redlineRpm;
    }

    public int getMaxFuelVolume() {
        return maxFuelVolume;
    }

    public void setMaxFuelVolume(int maxFuelVolume) {
        this.maxFuelVolume = maxFuelVolume;
    }

    public Timestamp getLastServiceDate() {
        return lastServiceDate;
    }

    public void setLastServiceDate(Timestamp lastServiceDate) {
        this.lastServiceDate = lastServiceDate;
    }

    @Override
    public String toString() {
        return "Vehicle{" +
                "vin='" + vin + '\'' +
                ", make='" + make + '\'' +
                ", model='" + model + '\'' +
                ", year=" + year +
                ", redlineRpm=" + redlineRpm +
                ", maxFuelVolume=" + maxFuelVolume +
                ", lastServiceDate=" + lastServiceDate +
                '}';
    }
}
application.properties
server.port=8084
spring.datasource.url=jdbc:oracle:thin:@localhost:1521:xe
spring.jpa.database-platform=org.hibernate.dialect.Oracle10gDialect
spring.datasource.driver-class=oracle.jdbc.driver.OracleDriver
spring.datasource.username=system
spring.datasource.password=system
spring.jpa.hibernate.ddl-auto=create
spring.jpa.show-sql=true
Directory Structure:
src
src/main
src/main/resources/application.properties
src/main/java
src/main/java/com.trucker
src/main/java/com.trucker/App.java
src/main/java/com.trucker.controller
src/main/java/com.trucker.controller/VehicleController.java
src/main/java/com.trucker.entity/Vehicle.java
src/main/java/com.trucker.repository
src/main/java/com.trucker.repository/VehicleRepositoryInterface.java
src/main/java/com.trucker.service
src/main/java/com.trucker.service/VehicleService.java
Take a look at these posts; they have clear answers:
Spring boot autowiring an interface with multiple implementations
or
Spring autowire interface
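If the links do not resolve it, one more thing worth checking is sketched below (this goes beyond the answer above, and the explicit base packages are normally unnecessary when App sits in the root package com.trucker): declare the repository and entity scan targets on the main class, which rules out any package-scanning problem:

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.domain.EntityScan;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;

// Sketch: make the scan targets unambiguous instead of relying on defaults.
@SpringBootApplication
@EnableJpaRepositories(basePackages = "com.trucker.repository")
@EntityScan(basePackages = "com.trucker.entity")
public class App {

    public static void main(String[] args) {
        SpringApplication.run(App.class, args);
    }
}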

How to archive processed files in a multi-threaded step in Spring Batch?

I'm using a multi-threaded step while reading files from the resources. Let's say I have several files to be processed and multiple threads are processing the same file, so I'm not sure at which point in time all my files are done being processed.
Once a file is successfully processed, I need to archive/delete it. Can someone guide me on what I should use?
Here is my sample code.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.stream.Stream;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.AfterStep;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.core.configuration.annotation.BatchConfigurer;
import org.springframework.batch.core.configuration.annotation.DefaultBatchConfigurer;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.NonTransientResourceException;
import org.springframework.batch.item.ParseException;
import org.springframework.batch.item.UnexpectedInputException;
import org.springframework.batch.support.transaction.ResourcelessTransactionManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.core.task.TaskExecutor;
import com.iana.spring.batch.dao.GenericDAO;
import com.iana.spring.batch.listener.BatchJobCompletionListener;
@Configuration
public class BatchConfig {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private Job processJob;

    @Value("classpath*:/final/HYUMER_SI_*.txt")
    private Resource[] inputFiles;

    @Autowired
    @Qualifier("test2DataSource")
    private DataSource test2DataSource;

    public void saveFileLog(String fileLog) throws Exception {
        String query = "INSERT INTO FILE_LOG(LOG_INFO) VALUES (?)";
        new GenericDAO().saveOrUpdate(test2DataSource, query, false, fileLog);
    }

    // This job runs every 5 seconds
    //@Scheduled(fixedRate = 150000000)
    public void fixedRatedCallingMethod() {
        try {
            JobParameters jobParameters = new JobParametersBuilder()
                    .addLong("time", System.currentTimeMillis())
                    .toJobParameters();
            jobLauncher.run(processJob, jobParameters);
            System.out.println("I have been scheduled with Spring scheduler");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /* In case of a multiple-DataSource configuration, we need to add the following code.
     * - It is good practice to mark the Spring Batch database as @Primary to get the benefits of all the default
     *   functionality implemented by Spring Batch statistics.
     * - All insert and update batch job running statistics will be maintained by Spring Batch itself.
     * - No need to write any extra lines of code.
     * Error: To use the default BatchConfigurer the context must contain no more than one DataSource, found 2
     */
    @Bean
    BatchConfigurer configurer(@Qualifier("testDataSource") DataSource dataSource) {
        return new DefaultBatchConfigurer(dataSource);
    }

    @Bean
    public Job processJob() throws Exception {
        return jobBuilderFactory.get("processJob")
                .incrementer(new RunIdIncrementer())
                .listener(listener())
                .flow(orderStep1())
                .end()
                .build();
    }

    @Bean
    public TaskExecutor taskExecutor() {
        SimpleAsyncTaskExecutor asyncTaskExecutor = new SimpleAsyncTaskExecutor("spring_batch");
        asyncTaskExecutor.setConcurrencyLimit(20);
        return asyncTaskExecutor;
    }

    @Bean
    public ItemReader<String> batchItemReader() {
        Queue<String> dataList = new LinkedList<String>();
        return new ItemReader<String>() {
            @BeforeStep
            public void beforeStep(StepExecution stepExecution) {
                System.err.println("in before step...");
                try {
                    if (inputFiles != null) {
                        for (int i = 0; i < inputFiles.length; i++) {
                            String fileName = inputFiles[i].getFile().getAbsolutePath();
                            try (Stream<String> stream = Files.lines(Paths.get(fileName))) {
                                stream.forEach(s -> dataList.add(s));
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
                System.out.println("fileList Size::" + dataList.size());
            }

            @Override
            public synchronized String read() throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException {
                System.out.println("--> in item reader.........");
                String fileName = null;
                if (dataList.size() > 0) {
                    fileName = dataList.remove();
                    file_reading_cnt++;
                }
                return fileName;
            }

            @AfterStep
            public void afterStep(StepExecution stepExecution) {
                System.err.println("in after step..." + file_reading_cnt);
            }
        };
    }

    volatile int file_reading_cnt = 0;

    @Bean
    public ItemWriter<String> batchItemWriter() {
        return new ItemWriter<String>() {
            @Override
            public void write(List<? extends String> fileList) throws Exception {
                System.out.println("----- in item writer.........");
                fileList.forEach(data -> {
                    try {
                        saveFileLog(data);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                });
            }
        };
    }

    /**
     * To create a step, the reader, processor and writer are passed serially.
     *
     * @return
     */
    @Bean
    public Step orderStep1() throws Exception {
        return stepBuilderFactory.get("orderStep1").<String, String>chunk(20)
                .reader(batchItemReader())
                .writer(batchItemWriter())
                .taskExecutor(taskExecutor())
                .throttleLimit(20)
                .build();
    }

    @Bean
    public JobExecutionListener listener() {
        return new BatchJobCompletionListener();
    }

    @Bean
    public ResourcelessTransactionManager transactionManager() {
        return new ResourcelessTransactionManager();
    }
}
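One possible approach, sketched below as an assumption rather than an answer from the original thread: because this reader drains every file into the in-memory queue before the step starts, the full file list is known up front, so the files can safely be archived in a JobExecutionListener once the whole job has completed and no thread can still be reading them. The "archive" target directory is hypothetical:

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.listener.JobExecutionListenerSupport;
import org.springframework.core.io.Resource;

public class FileArchivingListener extends JobExecutionListenerSupport {

    private final Resource[] inputFiles;

    public FileArchivingListener(Resource[] inputFiles) {
        this.inputFiles = inputFiles;
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        // Only archive when the whole job succeeded; by then all threads are done.
        if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
            try {
                for (Resource file : inputFiles) {
                    Path source = file.getFile().toPath();
                    Path target = Paths.get("archive").resolve(source.getFileName()); // hypothetical directory
                    Files.createDirectories(target.getParent());
                    Files.move(source, target, StandardCopyOption.REPLACE_EXISTING);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}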

Spring JPA java.lang.IllegalStateException: No persistence units parsed from {classpath*:META-INF/persistence.xml}

I am facing the issue below with Spring JPA. I have done Java configuration with no XML.
I am getting "java.lang.IllegalStateException: No persistence units parsed from {classpath*:META-INF/persistence.xml}".
Below is my application config class.
package co.in.desertlamp.configuration;
import java.util.Properties;
import javax.annotation.Resource;
import javax.sql.DataSource;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.jdbc.datasource.init.DataSourceInitializer;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
import org.springframework.orm.hibernate4.HibernateTransactionManager;
import org.springframework.orm.hibernate4.LocalSessionFactoryBean;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(basePackages = "co.in.desertlamp")
@ComponentScan({ "co.in.desertlamp" })
@PropertySource(value = { "classpath:/co/in/desertlamp/resources/desertlamp.properties" })
public class DesertLampApplicationConfig {

    private static final String DATABASE_DRIVER = "db.driver";
    private static final String DATABASE_URL = "db.url";
    private static final String DATABASE_USERNAME = "db.username";
    private static final String DATABASE_PASSWORD = "db.password";
    private static final String HIBERNATE_DIALECT = "hibernate.dialect";
    private static final String HIBERNATE_SHOWSQL = "hibernate.show_sql";
    private static final String HIBERNATE_FORMATSQL = "hibernate.format_sql";
    private static final String HIBERNATE_HBM2DDL = "hibernate.hbm2ddl.auto";

    @Resource
    private Environment environment;

    @Bean
    public LocalSessionFactoryBean sessionFactory() {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(dataSource());
        sessionFactory.setPackagesToScan(new String[] { "co.in.desertlamp" });
        sessionFactory.setHibernateProperties(hibernateProperties());
        return sessionFactory;
    }

    @Bean
    public DataSource dataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(environment.getRequiredProperty(DATABASE_DRIVER));
        dataSource.setUrl(environment.getRequiredProperty(DATABASE_URL));
        dataSource.setUsername(environment.getRequiredProperty(DATABASE_USERNAME));
        dataSource.setPassword(environment.getRequiredProperty(DATABASE_PASSWORD));
        return dataSource;
    }

    @Bean
    @Conditional(DataSourceCondition.class)
    public DataSourceInitializer dataSourceInitializer(DataSource dataSource) {
        ResourceDatabasePopulator resourceDatabasePopulator = new ResourceDatabasePopulator();
        // Put logger here
        resourceDatabasePopulator.addScript(new ClassPathResource("/co/in/desertlamp/scripts/loadUserAndRoleData.sql"));
        DataSourceInitializer dataSourceInitializer = new DataSourceInitializer();
        dataSourceInitializer.setDataSource(dataSource);
        dataSourceInitializer.setDatabasePopulator(resourceDatabasePopulator);
        return dataSourceInitializer;
    }

    private Properties hibernateProperties() {
        Properties hibernateProperties = new Properties();
        hibernateProperties.put(HIBERNATE_DIALECT, environment.getRequiredProperty(HIBERNATE_DIALECT));
        hibernateProperties.put(HIBERNATE_SHOWSQL, environment.getRequiredProperty(HIBERNATE_SHOWSQL));
        hibernateProperties.put(HIBERNATE_FORMATSQL, environment.getRequiredProperty(HIBERNATE_FORMATSQL));
        hibernateProperties.put(HIBERNATE_HBM2DDL, environment.getRequiredProperty(HIBERNATE_HBM2DDL));
        return hibernateProperties;
    }

    @Bean
    @Autowired
    public HibernateTransactionManager transactionManager(SessionFactory sessionFactory) {
        HibernateTransactionManager hibernateTransactionManager = new HibernateTransactionManager();
        hibernateTransactionManager.setSessionFactory(sessionFactory);
        return hibernateTransactionManager;
    }

    @Bean
    public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
        LocalContainerEntityManagerFactoryBean localContainerEntityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
        localContainerEntityManagerFactoryBean.setDataSource(dataSource());
        return localContainerEntityManagerFactoryBean;
    }
}
Thanks in advance.
Your LocalContainerEntityManagerFactoryBean needs to know the location of your @Entity classes.
So adding the properties below should fix it:
localContainerEntityManagerFactoryBean.setPackagesToScan("path.to.your.entities");
localContainerEntityManagerFactoryBean.setPersistenceUnitName("name");
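Put together, the bean might look like the sketch below. The persistence unit name is arbitrary, and the HibernateJpaVendorAdapter line is an assumption beyond the answer above, but some JPA vendor must be configured when there is no persistence.xml to declare one:

import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;

@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
    LocalContainerEntityManagerFactoryBean emf = new LocalContainerEntityManagerFactoryBean();
    emf.setDataSource(dataSource());
    // Scan for @Entity classes instead of relying on META-INF/persistence.xml.
    emf.setPackagesToScan("co.in.desertlamp");
    emf.setPersistenceUnitName("desertlampPU"); // hypothetical name
    emf.setJpaVendorAdapter(new HibernateJpaVendorAdapter());
    emf.setJpaProperties(hibernateProperties());
    return emf;
}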

Reading Multiple Excel Files Using Spring Batch Extension

I am trying to read multiple Excel files using Spring-Batch-Excel. In my scenario I don't know in advance how many files the client will process, i.e. if the data is very large, the Excel file will be split into multiple files like records1.xls, records2.xls, records3.xls..
Is there any kind of MultiResourceItemReader available in Spring-Batch-Excel? I tried to set multiple resources at runtime and also tried to use the pattern records*.xls, but PoiItemReader didn't allow me to do that. I am using PoiItemReader for that.
To read multiple Excel files:
package com.abc.ingestion.job.dci;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.extensions.excel.RowMapper;
import org.springframework.batch.extensions.excel.streaming.StreamingXlsxItemReader;
import org.springframework.batch.extensions.excel.support.rowset.DefaultRowSetFactory;
import org.springframework.batch.extensions.excel.support.rowset.StaticColumnNameExtractor;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.file.MultiResourceItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
@Configuration
@EnableBatchProcessing
public class BatchConfig {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    // Create an input folder in resources
    @Value("input/DCI*.xlsx")
    private Resource[] inputResources;

    @Bean
    public MultiResourceItemReader<CouncilMapper> multiResourceItemReader() {
        MultiResourceItemReader<CouncilMapper> resourceItemReader = new MultiResourceItemReader<>();
        resourceItemReader.setResources(inputResources);
        resourceItemReader.setDelegate(reader());
        return resourceItemReader;
    }

    private RowMapper<CouncilMapper> excelRowMapper() {
        return new Mapper();
    }

    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Bean
    public StreamingXlsxItemReader<CouncilMapper> reader() {
        final String[] COLUMNS = {"Reg_Type","RegUnder","registration_no","registration_date","course","Other_Course","LRegDate","council_name","full_name","CatName","Other_Category","father_name","mother_name","gender","nationality","date_of_birth","place_of_birth","permanent_address","business_address","current_city","current_state","permanent_city","mobile_number","OfficialTelephone","email","aadhar_number","PanNo","IsDeleted","CreatedDate","UpdatedDate","speciality_name"};
        var factory = new DefaultRowSetFactory();
        factory.setColumnNameExtractor(new StaticColumnNameExtractor(COLUMNS));
        StreamingXlsxItemReader<CouncilMapper> reader = new StreamingXlsxItemReader<>();
        reader.setLinesToSkip(1);
        reader.setRowSetFactory(factory);
        reader.setRowMapper(excelRowMapper());
        return reader;
    }

    @Bean
    ItemWriter<CouncilMapper> writer() {
        return new Writer();
    }

    @Bean
    public Job readFilesJob() {
        return jobBuilderFactory
                .get("readFilesJob")
                .incrementer(new RunIdIncrementer())
                .start(excelFileStep())
                .build();
    }

    @Bean
    public Step excelFileStep() {
        return stepBuilderFactory.get("excelFileStep")
                .<CouncilMapper, CouncilMapper>chunk(5)
                .reader(multiResourceItemReader())
                .writer(writer())
                .build();
    }
}
Mapper Class
package com.abc.ingestion.job.dci;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.IntStream;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.batch.extensions.excel.RowMapper;
import org.springframework.batch.extensions.excel.support.rowset.RowSet;
public class Mapper implements RowMapper<CouncilMapper> {

    @Override
    public CouncilMapper mapRow(RowSet rowSet) throws Exception {
        var rowSetMetaData = rowSet.getMetaData();
        String[] columnNames = rowSetMetaData.getColumnNames();
        String[] rowData = rowSet.getCurrentRow();
        var mapper = new ObjectMapper();
        Map<String, String> excelData = new HashMap<>();
        IntStream.range(0, columnNames.length).forEach(index -> excelData.put(columnNames[index], rowData[index]));
        return mapper.convertValue(excelData, CouncilMapper.class);
    }
}