Is there a jakarta equivalent to javax.sql.DataSource and javax.persistence.EntityManagerFactory - spring-data-jpa

We just upgraded to Spring 3.0.1 and replaced javax.persistence-api with jakarta.persistence-api.
REMOVED:
<dependency>
<groupId>javax.persistence</groupId>
<artifactId>javax.persistence-api</artifactId>
<version>${javax.persistence.version}</version>
</dependency>
ADDED:
<dependency>
<groupId>jakarta.persistence</groupId>
<artifactId>jakarta.persistence-api</artifactId>
<version>${jakarta.persistence.version}</version>
</dependency>
Question is this: Is there a jakarta replacement for these?
javax.sql.DataSource
javax.persistence.EntityManagerFactory
I'm not finding a replacement for them in jakarta.persistence-api.
The error comes from the class below. It might be legacy code and an old way of managing JPA, transactions, the datasource, etc. Might we be able to get rid of it, or update it? Hoping to find some helpful suggestions.
package com.orders.persistence.jpa;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.dao.annotation.PersistenceExceptionTranslationPostProcessor;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.orm.hibernate5.HibernateExceptionTranslator;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.Database;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.sql.DataSource;
import java.util.Properties;
@Configuration
@EnableJpaRepositories(
        basePackages = "com.orders.persistence.jpa.repository",
        entityManagerFactoryRef = "entityManager",
        transactionManagerRef = "transactionManager")
@EnableTransactionManagement
public class PersistenceJPAConfig {

    @Value("${driverClassName}")
    private String driverClassName;

    @Value("${url}")
    private String url;

    @Value("${username}")
    private String jdbcUsername;

    @Value("${password}")
    private String jdbcPassword;

    @Value("${hibernateDialect}")
    private String hibernateDialect;

    @Value("${hibernateShowSql}")
    boolean hibernateShowSql;

    @Value("${hibernateHbm2ddlAuto}")
    private String hibernateHbm2ddlAuto;

    @Value("${jpaGenerateDdl}")
    boolean jpaGenerateDdl;

    @Value("${useQueryCache}")
    boolean useQueryCache;

    public PersistenceJPAConfig() {
        super();
    }

    @Bean
    public HibernateExceptionTranslator hibernateExceptionTranslator() {
        return new HibernateExceptionTranslator();
    }

    @Bean
    @Primary
    public LocalContainerEntityManagerFactoryBean entityManager() {
        final LocalContainerEntityManagerFactoryBean entityManager = new LocalContainerEntityManagerFactoryBean();
        entityManager.setDataSource(dataSource());
        entityManager.setPackagesToScan(new String[] {
                "com.orders.persistence.jpa",
                "com.orders.persistence.model",
                "com.orders.persistence.service",
        });
        final JpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter() {
            {
                setDatabase(Database.MYSQL);
                setDatabasePlatform(hibernateDialect);
                setShowSql(hibernateShowSql);
                setGenerateDdl(jpaGenerateDdl);
            }
        };
        entityManager.setJpaVendorAdapter(vendorAdapter);
        entityManager.setJpaProperties(additionalProperties());
        return entityManager;
    }

    @Bean
    @Primary
    public DataSource dataSource() {
        final DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(driverClassName);
        dataSource.setUrl(url);
        dataSource.setUsername(jdbcUsername);
        dataSource.setPassword(jdbcPassword);
        return dataSource;
    }

    @Bean
    @Primary
    public JpaTransactionManager transactionManager() {
        final JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setEntityManagerFactory(entityManager().getObject());
        return transactionManager;
    }

    @Bean
    public PersistenceExceptionTranslationPostProcessor persistenceExceptionTranslationPostProcessor() {
        return new PersistenceExceptionTranslationPostProcessor();
    }

    final Properties additionalProperties() {
        return new Properties() {
            { // use this to inject additional properties into the EntityManager
                setProperty("useSecondLevelCache", "org.hibernate.cache.RegionFactory");
                setProperty("useQueryCache", "org.hibernate.cache.RegionFactory");
            }
        };
    }
}
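A note on the actual question: javax.sql.DataSource is part of Java SE (JDBC), not Java EE, so it was never moved to the jakarta namespace; there is no replacement, and you keep using it as-is. Only the JPA types moved: jakarta.persistence-api 3.x provides jakarta.persistence.EntityManagerFactory in place of javax.persistence.EntityManagerFactory. A minimal sketch of the import changes (note that the org.springframework.orm.hibernate5 import above likely also needs attention, since Spring Framework 6 no longer ships that package):

// javax.sql is JDBC (Java SE) and is unchanged under Jakarta EE
import javax.sql.DataSource;

// JPA types move from javax.persistence to jakarta.persistence
// (jakarta.persistence-api 3.x, as used by Spring Framework 6 / Spring Boot 3)
import jakarta.persistence.EntityManagerFactory;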

Related

Error : Field job in com.example.partioner.DemoApplication required a bean of type 'org.springframework.batch.core.Job' that could not be found

I'm trying a Spring Batch partitioning database program, but I get this message when I try to run the batch:
APPLICATION FAILED TO START
Description: Field job in com.example.partioner.DemoApplication required a bean of type
'org.springframework.batch.core.Job' that could not be found.
The injection point has the following annotations: - @org.springframework.beans.factory.annotation.Autowired(required=true)
Action: Consider defining a bean of type
'org.springframework.batch.core.Job' in your configuration.
This is my main class:
package com.example.partioner;
import java.util.Date;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
@EnableBatchProcessing
public class DemoApplication implements CommandLineRunner {

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private Job job;

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }

    @Override
    public void run(String... args) throws Exception {
        System.out.println("STATUS STARTED===================");
        JobParameters jobParameters = new JobParametersBuilder()
                .addString("JobId", String.valueOf(System.currentTimeMillis()))
                .addDate("date", new Date())
                .addLong("time", System.currentTimeMillis())
                .toJobParameters();
        JobExecution execution = jobLauncher.run(job, jobParameters);
        System.out.println("STATUS :: " + execution.getStatus());
    }
}
This is my JobConfiguration class:
package com.example.config;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.JdbcPagingItemReader;
import org.springframework.batch.item.database.Order;
import org.springframework.batch.item.database.support.MySqlPagingQueryProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import com.example.mapper.CustomerRowMapper;
import com.example.model.Customer;
import com.example.partitioner.ColumnRangePartitioner;
@Configuration
public class JobConfiguration {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private DataSource dataSource;

    @Bean
    public ColumnRangePartitioner partitioner() {
        ColumnRangePartitioner columnRangePartitioner = new ColumnRangePartitioner();
        columnRangePartitioner.setColumn("id");
        columnRangePartitioner.setDataSource(dataSource);
        columnRangePartitioner.setTable("customer");
        return columnRangePartitioner;
    }

    @Bean
    @StepScope
    public JdbcPagingItemReader<Customer> pagingItemReader(
            @Value("#{stepExecutionContext['minValue']}") Long minValue,
            @Value("#{stepExecutionContext['maxValue']}") Long maxValue) {
        System.out.println("reading " + minValue + " to " + maxValue);
        Map<String, Order> sortKeys = new HashMap<>();
        sortKeys.put("id", Order.ASCENDING);
        MySqlPagingQueryProvider queryProvider = new MySqlPagingQueryProvider();
        queryProvider.setSelectClause("id, firstName, lastName, birthdate");
        queryProvider.setFromClause("from customer");
        queryProvider.setWhereClause("where id >= " + minValue + " and id < " + maxValue);
        queryProvider.setSortKeys(sortKeys);
        JdbcPagingItemReader<Customer> reader = new JdbcPagingItemReader<>();
        reader.setDataSource(this.dataSource);
        reader.setFetchSize(10);
        reader.setRowMapper(new CustomerRowMapper());
        reader.setQueryProvider(queryProvider);
        return reader;
    }

    @Bean
    @StepScope
    public JdbcBatchItemWriter<Customer> customerItemWriter() {
        JdbcBatchItemWriter<Customer> itemWriter = new JdbcBatchItemWriter<>();
        itemWriter.setDataSource(dataSource);
        itemWriter.setSql("INSERT INTO NEW_CUSTOMER VALUES (:id, :firstName, :lastName, :birthdate)");
        itemWriter.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
        itemWriter.afterPropertiesSet();
        return itemWriter;
    }

    @Bean
    public Step slaveStep() {
        return stepBuilderFactory.get("slaveStep")
                .<Customer, Customer>chunk(10)
                .reader(pagingItemReader(null, null))
                .writer(customerItemWriter())
                .build();
    }

    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1")
                .partitioner(slaveStep().getName(), partitioner())
                .step(slaveStep())
                .gridSize(4)
                .taskExecutor(new SimpleAsyncTaskExecutor())
                .build();
    }

    @Bean
    public Job job() {
        return jobBuilderFactory.get("job")
                .start(step1())
                .build();
    }
}
This is my partitioner class:
package com.example.partitioner;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.JdbcTemplate;
public class ColumnRangePartitioner implements Partitioner {

    private JdbcOperations jdbcTemplate;
    private String table;
    private String column;

    public void setTable(String table) {
        this.table = table;
    }

    public void setColumn(String column) {
        this.column = column;
    }

    public void setDataSource(DataSource dataSource) {
        jdbcTemplate = new JdbcTemplate(dataSource);
    }

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        int min = jdbcTemplate.queryForObject("SELECT MIN(" + column + ") FROM " + table, Integer.class);
        int max = jdbcTemplate.queryForObject("SELECT MAX(" + column + ") FROM " + table, Integer.class);
        int targetSize = (max - min) / gridSize + 1;
        Map<String, ExecutionContext> result = new HashMap<>();
        int number = 0;
        int start = min;
        int end = start + targetSize - 1;
        while (start <= max) {
            ExecutionContext value = new ExecutionContext();
            result.put("partition" + number, value);
            if (end >= max) {
                end = max;
            }
            value.putInt("minValue", start);
            value.putInt("maxValue", end);
            start += targetSize;
            end += targetSize;
            number++;
        }
        return result;
    }
}
I don't understand the reason for this message and can't find a solution. I think I have added all the necessary annotations. I am a beginner and hope you can help me.
Your DemoApplication is in the package com.example.partioner, while your job configuration class JobConfiguration is in the package com.example.config.
In order for Spring Boot to find your job, you need to move your JobConfiguration class to the same package as your main class DemoApplication or a package underneath it.
Please refer to the Structuring Your Code section of the reference documentation.
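Alternatively (a sketch, assuming you want to keep the current package layout), you can broaden component scanning from the main class so that com.example.config is picked up as well:

// Hypothetical alternative: scan the shared parent package so that both
// com.example.config and com.example.partioner are covered.
@SpringBootApplication(scanBasePackages = "com.example")
@EnableBatchProcessing
public class DemoApplication implements CommandLineRunner {
    // ... rest of the class unchanged from the question ...
}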

Supporting Multiple data sources of Mongo using Spring data

I am trying to configure two different Mongo data sources using Spring Data. According to my requirements, I need to change the base package being scanned dynamically at runtime. The user can select the data source for a particular entity via a property file by providing a datasource-to-entity mapping.
E.g.: entity1:dataSource1, entity2:dataSource2. Depending on this mapping, I need to pick the package names and replace them in @EnableMongoRepositories(basePackages="xxx.xxx.xxxx") at runtime. I tried a lot using both XML configuration and Java configuration, but I could not find a possible way. Could someone please provide a solution if there is one? I am pasting the entity classes, repositories, and configuration, both XML and Java config.
package com.account.entity;
import java.io.Serializable;
import java.util.Date;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;
@Document(collection = "SampleAccount")
public class Account {

    @Id
    private String acntName;

    @Indexed
    private Date startDate;

    @Indexed
    private String accntType;

    private Long acntId;
    private Byte byteField;

    public String getAcntName() {
        return this.acntName;
    }

    public void setAcntName(String acntName) {
        this.acntName = acntName;
    }

    public Date getStartDate() {
        return this.startDate;
    }

    public void setStartDate(Date startDate) {
        this.startDate = startDate;
    }

    public String getAccntType() {
        return this.accntType;
    }

    public void setAccntType(String accntType) {
        this.accntType = accntType;
    }

    public Long getAcntId() {
        return this.acntId;
    }

    public void setAcntId(Long acntId) {
        this.acntId = acntId;
    }

    public Byte getByteField() {
        return this.byteField;
    }

    public void setByteField(Byte byteField) {
        this.byteField = byteField;
    }
}
public interface AccountRepository extends MongoRepository<Account, String> {
    public Page<Account> findByAcntNameOrStartDate(String acntName, Date startDate, Pageable pageable);
    public Page<Account> findByAccntTypeOrStartDate(String accntType, Date startDate, Pageable pageable);
    public Account findByAcntName(String acntName);
    public Page<Account> findByAcntNameIn(List<String> pkIdList, Pageable pageable);
}
package com.user.entity;
import java.io.Serializable;
import java.util.Date;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import org.springframework.data.mongodb.core.mapping.Document;
@Document(collection = "SampleUser")
public class User {

    @Id
    private String acntName;

    @Indexed
    private Date startDate;

    @Indexed
    private String accntType;

    private Long acntId;
    private Byte byteField;

    public String getAcntName() {
        return this.acntName;
    }

    public void setAcntName(String acntName) {
        this.acntName = acntName;
    }

    public Date getStartDate() {
        return this.startDate;
    }

    public void setStartDate(Date startDate) {
        this.startDate = startDate;
    }

    public String getAccntType() {
        return this.accntType;
    }

    public void setAccntType(String accntType) {
        this.accntType = accntType;
    }

    public Long getAcntId() {
        return this.acntId;
    }

    public void setAcntId(Long acntId) {
        this.acntId = acntId;
    }

    public Byte getByteField() {
        return this.byteField;
    }

    public void setByteField(Byte byteField) {
        this.byteField = byteField;
    }
}
package com.user.repo;
import java.util.Date;
import java.util.List;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.querydsl.QueryDslPredicateExecutor;
import com.account.entity.Account;
import com.user.entity.User;
public interface UserRepository extends MongoRepository<User, String> {
    public Page<Account> findByAcntNameOrStartDate(String acntName, Date startDate, Pageable pageable);
    public Page<Account> findByAccntTypeOrStartDate(String accntType, Date startDate, Pageable pageable);
    public Account findByAcntName(String acntName);
    public Page<Account> findByAcntNameIn(List<String> pkIdList, Pageable pageable);
}
JavaConfig Files:
MongoConfig.class
package com.Config;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import javax.annotation.Resource;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.context.annotation.Primary;
import org.springframework.core.env.Environment;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.CustomConversions;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
@Configuration
@EnableMongoRepositories(basePackages = {"xxx.xxx.xxx"}, mongoTemplateRef = "primaryTemplate")
public class MongoConfig {

    @Resource
    Environment environment;

    public @Bean(name = "primarydb")
    MongoDbFactory mongoDbFactory() throws Exception {
        MongoCredential credential = MongoCredential.createCredential(
                environment.getProperty("xxx.datastore.mongo.server.userName"),
                environment.getProperty("xxx.datastore.mongo.server.database"),
                environment.getProperty("xxx.datastore.mongo.server.password").toCharArray());
        List<MongoCredential> mongoCredentials = new ArrayList<>();
        mongoCredentials.add(credential);
        MongoClientOptions options = MongoClientOptions.builder()
                .connectionsPerHost(
                        Integer.parseInt(environment.getProperty("xxx.datastore.mongo.server.connectionsPerHost")))
                .build();
        List<ServerAddress> serverAddress = new ArrayList<>();
        prepareServerAddress(serverAddress);
        MongoClient mongoClient = new MongoClient(serverAddress, mongoCredentials, options);
        return new SimpleMongoDbFactory(mongoClient,
                environment.getProperty("xxx.datastore.mongo.server.database"));
    }

    private void prepareServerAddress(List<ServerAddress> serverAddress) throws UnknownHostException {
        String serverAddressList[] = environment.getProperty("XDM.datastore.mongo.server.address")
                .split("\\s*,\\s*");
        for (String svrAddress : serverAddressList) {
            String address[] = svrAddress.split("\\s*:\\s*");
            String host = Objects.nonNull(address[0]) ? address[0] : "127.0.0.1";
            Integer port = Objects.nonNull(address[1]) ? Integer.valueOf(address[1]) : 27017;
            serverAddress.add(new ServerAddress(host, port));
        }
    }

    @Primary
    public @Bean(name = "primaryTemplate")
    MongoTemplate mongoTemplate(@Qualifier(value = "primarydb") MongoDbFactory factory) throws Exception {
        MongoTemplate mongoTemplate = new MongoTemplate(factory);
        return mongoTemplate;
    }
}
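As a side note: annotation attributes such as basePackages are compile-time constants, so they can't be rewritten at runtime. The usual pattern is to split the repositories into separate packages and register one configuration per data source, routing each repository package to its own template via mongoTemplateRef. A sketch under that assumption (the bean names, host, and database name are hypothetical), using the same API style as the question:

@Configuration
@EnableMongoRepositories(basePackages = "com.user.repo", mongoTemplateRef = "secondaryTemplate")
public class SecondaryMongoConfig {

    // Hypothetical second server/database; wire credentials and options
    // the same way as in the primary MongoConfig above.
    public @Bean(name = "secondarydb")
    MongoDbFactory secondaryMongoDbFactory() throws Exception {
        return new SimpleMongoDbFactory(new MongoClient("localhost", 27017), "secondDatabase");
    }

    public @Bean(name = "secondaryTemplate")
    MongoTemplate secondaryMongoTemplate(@Qualifier("secondarydb") MongoDbFactory factory) {
        return new MongoTemplate(factory);
    }
}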

Spring JPA java.lang.IllegalStateException: No persistence units parsed from {classpath*:META-INF/persistence.xml}

I am facing the issue below with Spring JPA. I have done Java configuration with no XML.
I am getting "java.lang.IllegalStateException: No persistence units parsed from {classpath*:META-INF/persistence.xml}"
Below is my application config class.
package co.in.desertlamp.configuration;
import java.util.Properties;
import javax.annotation.Resource;
import javax.sql.DataSource;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.jdbc.datasource.init.DataSourceInitializer;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
import org.springframework.orm.hibernate4.HibernateTransactionManager;
import org.springframework.orm.hibernate4.LocalSessionFactoryBean;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(basePackages = "co.in.desertlamp")
@ComponentScan({ "co.in.desertlamp" })
@PropertySource(value = { "classpath:/co/in/desertlamp/resources/desertlamp.properties" })
public class DesertLampApplicationConfig {

    private static final String DATABASE_DRIVER = "db.driver";
    private static final String DATABASE_URL = "db.url";
    private static final String DATABASE_USERNAME = "db.username";
    private static final String DATABASE_PASSWORD = "db.password";

    private static final String HIBERNATE_DIALECT = "hibernate.dialect";
    private static final String HIBERNATE_SHOWSQL = "hibernate.show_sql";
    private static final String HIBERNATE_FORMATSQL = "hibernate.format_sql";
    private static final String HIBERNATE_HBM2DDL = "hibernate.hbm2ddl.auto";

    @Resource
    private Environment environment;

    @Bean
    public LocalSessionFactoryBean sessionFactory() {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(dataSource());
        sessionFactory.setPackagesToScan(new String[] { "co.in.desertlamp" });
        sessionFactory.setHibernateProperties(hibernateProperties());
        return sessionFactory;
    }

    @Bean
    public DataSource dataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(environment.getRequiredProperty(DATABASE_DRIVER));
        dataSource.setUrl(environment.getRequiredProperty(DATABASE_URL));
        dataSource.setUsername(environment.getRequiredProperty(DATABASE_USERNAME));
        dataSource.setPassword(environment.getRequiredProperty(DATABASE_PASSWORD));
        return dataSource;
    }

    @Bean
    @Conditional(DataSourceCondition.class)
    public DataSourceInitializer dataSourceInitializer(DataSource dataSource) {
        ResourceDatabasePopulator resourceDatabasePopulator = new ResourceDatabasePopulator();
        // Put Logger here
        resourceDatabasePopulator.addScript(new ClassPathResource("/co/in/desertlamp/scripts/loadUserAndRoleData.sql"));
        DataSourceInitializer dataSourceInitializer = new DataSourceInitializer();
        dataSourceInitializer.setDataSource(dataSource);
        dataSourceInitializer.setDatabasePopulator(resourceDatabasePopulator);
        return dataSourceInitializer;
    }

    private Properties hibernateProperties() {
        Properties hibernateProperties = new Properties();
        hibernateProperties.put(HIBERNATE_DIALECT, environment.getRequiredProperty(HIBERNATE_DIALECT));
        hibernateProperties.put(HIBERNATE_SHOWSQL, environment.getRequiredProperty(HIBERNATE_SHOWSQL));
        hibernateProperties.put(HIBERNATE_FORMATSQL, environment.getRequiredProperty(HIBERNATE_FORMATSQL));
        hibernateProperties.put(HIBERNATE_HBM2DDL, environment.getRequiredProperty(HIBERNATE_HBM2DDL));
        return hibernateProperties;
    }

    @Bean
    @Autowired
    public HibernateTransactionManager transactionManager(SessionFactory sessionFactory) {
        HibernateTransactionManager hibernateTransactionManager = new HibernateTransactionManager();
        hibernateTransactionManager.setSessionFactory(sessionFactory);
        return hibernateTransactionManager;
    }

    @Bean
    public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
        LocalContainerEntityManagerFactoryBean localContainerEntityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
        localContainerEntityManagerFactoryBean.setDataSource(dataSource());
        return localContainerEntityManagerFactoryBean;
    }
}
Thanks in advance.
Your LocalContainerEntityManagerFactoryBean needs to know the location of your @Entity classes.
So adding the properties below should fix it:
localContainerEntityManagerFactoryBean.setPackagesToScan("path.to.your.entities");
localContainerEntityManagerFactoryBean.setPersistenceUnitName("name");
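For completeness, a minimal sketch of the full bean definition (the persistence unit name is a placeholder, and the vendor adapter assumes Hibernate, matching the rest of the config; it needs an import for org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter):

@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
    LocalContainerEntityManagerFactoryBean emf = new LocalContainerEntityManagerFactoryBean();
    emf.setDataSource(dataSource());
    // Scanning packages for @Entity classes removes the need for META-INF/persistence.xml
    emf.setPackagesToScan("co.in.desertlamp");
    emf.setPersistenceUnitName("desertlamp");
    emf.setJpaVendorAdapter(new HibernateJpaVendorAdapter());
    emf.setJpaProperties(hibernateProperties());
    return emf;
}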

Reading Multiple excel File Using Spring Batch Extension

I am trying to read multiple Excel files using Spring-Batch-Excel. In my scenario I don't know in advance how many files the client will process; i.e., if the data is very large, the Excel file will be split into multiple files like records1.xls, records2.xls, records3.xls...
Is there any kind of MultiResourceItemReader available in Spring-Batch-Excel? I tried to set multiple resources at runtime and also tried to use the pattern records*.xls, but PoiItemReader didn't allow me to do that.
I am using PoiItemReader for that.
To read multiple Excel files:
package com.abc.ingestion.job.dci;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.extensions.excel.RowMapper;
import org.springframework.batch.extensions.excel.streaming.StreamingXlsxItemReader;
import org.springframework.batch.extensions.excel.support.rowset.DefaultRowSetFactory;
import org.springframework.batch.extensions.excel.support.rowset.StaticColumnNameExtractor;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.file.MultiResourceItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
@Configuration
@EnableBatchProcessing
public class BatchConfig {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    // Create input folder in resources
    @Value("input/DCI*.xlsx")
    private Resource[] inputResources;

    @Bean
    public MultiResourceItemReader<CouncilMapper> multiResourceItemReader() {
        MultiResourceItemReader<CouncilMapper> resourceItemReader = new MultiResourceItemReader<>();
        resourceItemReader.setResources(inputResources);
        resourceItemReader.setDelegate(reader());
        return resourceItemReader;
    }

    private RowMapper<CouncilMapper> excelRowMapper() {
        return new Mapper();
    }

    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Bean
    public StreamingXlsxItemReader<CouncilMapper> reader() {
        final String[] COLUMNS = {"Reg_Type","RegUnder","registration_no","registration_date","course","Other_Course","LRegDate","council_name","full_name","CatName","Other_Category","father_name","mother_name","gender","nationality","date_of_birth","place_of_birth","permanent_address","business_address","current_city","current_state","permanent_city","mobile_number","OfficialTelephone","email","aadhar_number","PanNo","IsDeleted","CreatedDate","UpdatedDate","speciality_name"};
        var factory = new DefaultRowSetFactory();
        factory.setColumnNameExtractor(new StaticColumnNameExtractor(COLUMNS));
        StreamingXlsxItemReader<CouncilMapper> reader = new StreamingXlsxItemReader<>();
        reader.setLinesToSkip(1);
        reader.setRowSetFactory(factory);
        reader.setRowMapper(excelRowMapper());
        return reader;
    }

    @Bean
    ItemWriter<CouncilMapper> writer() {
        return new Writer();
    }

    @Bean
    public Job readFilesJob() {
        return jobBuilderFactory
                .get("readFilesJob")
                .incrementer(new RunIdIncrementer())
                .start(excelFileStep())
                .build();
    }

    @Bean
    public Step excelFileStep() {
        return stepBuilderFactory.get("excelFileStep")
                .<CouncilMapper, CouncilMapper>chunk(5)
                .reader(multiResourceItemReader())
                .writer(writer())
                .build();
    }
}
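One detail worth flagging (an assumption about the resource injection above, not part of the original post): for the wildcard to be expanded into a Resource[], the @Value pattern generally needs a classpath*: (or classpath:) prefix so Spring's resource pattern resolver kicks in:

// Hypothetical variant of the @Value above: the classpath*: prefix lets
// Spring expand the wildcard into all matching resources.
@Value("classpath*:input/DCI*.xlsx")
private Resource[] inputResources;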
Mapper Class
package com.abc.ingestion.job.dci;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.IntStream;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.batch.extensions.excel.RowMapper;
import org.springframework.batch.extensions.excel.support.rowset.RowSet;
public class Mapper implements RowMapper<CouncilMapper> {

    @Override
    public CouncilMapper mapRow(RowSet rowSet) throws Exception {
        var rowSetMetaData = rowSet.getMetaData();
        String[] columnNames = rowSetMetaData.getColumnNames();
        String[] rowData = rowSet.getCurrentRow();
        var mapper = new ObjectMapper();
        Map<String, String> excelData = new HashMap<>();
        IntStream.range(0, columnNames.length).forEach(index -> excelData.put(columnNames[index], rowData[index]));
        return mapper.convertValue(excelData, CouncilMapper.class);
    }
}

Spring Boot & MongoDB how to remove the '_class' column?

When inserting data into MongoDB, Spring Data adds a custom "_class" column. Is there a way to eliminate the "_class" column when using Spring Boot & MongoDB?
Or do I need to create a custom type mapper?
Any hints or advice?
Dave's answer is correct. However, we generally recommend not doing this (that's why it's enabled by default in the first place), as you effectively throw away the ability to persist type hierarchies, or even a simple property set to e.g. Object. Assume the following type:
@Document
class MyDocument {
    private Object object;
}
If you now set object to a value, it will be happily persisted, but there's no way you can read the value back into its original type.
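A small sketch of what that failure looks like in practice (the accessors and the GeoLocation type are hypothetical; without the _class hint, the nested value typically comes back as a raw Document/Map rather than its original type):

// Hypothetical illustration of the round-trip problem described above.
MyDocument doc = new MyDocument();
doc.setObject(new GeoLocation(48.13, 11.57)); // persists fine
mongoTemplate.save(doc);

// Reading it back without a _class type hint:
MyDocument loaded = mongoTemplate.findById(doc.getId(), MyDocument.class);
// loaded.getObject() is now a raw Document/Map, not a GeoLocation,
// so this cast fails at runtime with a ClassCastException:
GeoLocation location = (GeoLocation) loaded.getObject();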
A more up-to-date answer to that question, working with embedded MongoDB for test cases. I quote from http://mwakram.blogspot.fr/2017/01/remove-class-from-mongo-documents.html:
Spring Data MongoDB adds _class in the mongo documents to handle
polymorphic behavior of java inheritance. If you want to remove _class
just drop following Config class in your code.
package com.waseem.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
@Configuration
public class MongoConfig {

    @Autowired MongoDbFactory mongoDbFactory;
    @Autowired MongoMappingContext mongoMappingContext;

    @Bean
    public MappingMongoConverter mappingMongoConverter() {
        DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory);
        MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext);
        converter.setTypeMapper(new DefaultMongoTypeMapper(null));
        return converter;
    }
}
Here is a slightly simpler approach:
@Configuration
public class MongoDBConfig implements InitializingBean {

    @Autowired
    @Lazy
    private MappingMongoConverter mappingMongoConverter;

    @Override
    public void afterPropertiesSet() throws Exception {
        mappingMongoConverter.setTypeMapper(new DefaultMongoTypeMapper(null));
    }
}
You can remove the _class with the following code, which you can use in your Mongo configuration class.
@Bean
public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) {
    converter.setTypeMapper(new DefaultMongoTypeMapper(null));
    return new MongoTemplate(databaseFactory, converter);
}
I think you need to create a @Bean of type MongoTemplate and set the type converter explicitly. Details (non-Boot, but just extract the template config): http://www.mkyong.com/mongodb/spring-data-mongodb-remove-_class-column/
Similar to RZet but avoids inheritance:
@Configuration
public class MongoConfiguration {

    @Autowired
    private MappingMongoConverter mappingMongoConverter;

    // remove _class
    @PostConstruct
    public void setUpMongoEscapeCharacterConversion() {
        mappingMongoConverter.setTypeMapper(new DefaultMongoTypeMapper(null));
    }
}
A simple way (+ for ReactiveMongoTemplate):
@Configuration
public class MongoDBConfig {

    @Autowired
    private MongoClient mongoClient;

    @Value("${spring.data.mongodb.database}")
    private String dbName;

    @Bean
    public ReactiveMongoTemplate reactiveMongoTemplate() {
        ReactiveMongoTemplate template = new ReactiveMongoTemplate(mongoClient, dbName);
        MappingMongoConverter converter = (MappingMongoConverter) template.getConverter();
        converter.setTypeMapper(new DefaultMongoTypeMapper(null));
        converter.afterPropertiesSet();
        return template;
    }
}
Add a converter to remove the _class field:
MappingMongoConverter converter =
        new MappingMongoConverter(mongoDbFactory(), new MongoMappingContext());
converter.setTypeMapper(new DefaultMongoTypeMapper(null));
MongoTemplate mongoTemplate = new MongoTemplate(mongoDbFactory(), converter);
return mongoTemplate;
The correct answer above uses a number of deprecated dependencies. For example, it mentions MongoDbFactory, which is deprecated in the latest Spring release. If you happen to be using MongoDB with Spring Data in 2020, this solution is older. For instant results, check this snippet of code. Works 100%.
Just create a new AppConfig.java file and paste this block of code. You'll see the "_class" property disappear from the MongoDB document.
package com.reddit.redditmain.Configuration;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

@Configuration
public class AppConfig {

    @Autowired
    MongoDatabaseFactory mongoDbFactory;

    @Autowired
    MongoMappingContext mongoMappingContext;

    @Bean
    public MappingMongoConverter mappingMongoConverter() {
        DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory);
        MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext);
        converter.setTypeMapper(new DefaultMongoTypeMapper(null));
        return converter;
    }
}
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

@Configuration
public class MongoConfigWithAuditing {

    @Bean
    @Primary
    public MongoTemplate mongoTemplate(MongoDatabaseFactory mongoDatabaseFactory, MappingMongoConverter mappingMongoConverter) {
        // this is to avoid saving _class to db
        mappingMongoConverter.setTypeMapper(new DefaultMongoTypeMapper(null));
        MongoTemplate mongoTemplate = new MongoTemplate(mongoDatabaseFactory, mappingMongoConverter);
        return mongoTemplate;
    }
}
Spring Boot 3 with reactive Mongo:
package es.dmunozfer.trading.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.mongo.MongoProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration;
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Configuration
@EnableReactiveMongoRepositories("es.dmunozfer.trading.repository")
public class MongoConfig extends AbstractReactiveMongoConfiguration {

    @Autowired
    private MongoProperties mongoProperties;

    @Override
    protected String getDatabaseName() {
        return mongoProperties.getDatabase();
    }

    @Bean
    @Override
    public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory,
            MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
        MappingMongoConverter converter = super.mappingMongoConverter(databaseFactory, customConversions, mappingContext);
        converter.setTypeMapper(new DefaultMongoTypeMapper(null));
        return converter;
    }
}
I'm leaving this answer here in case someone wants to remove the _class from Kotlin; it's also updated a bit, since the previous answers have several deprecated dependencies.
import org.springframework.beans.factory.BeanFactory
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.data.mongodb.MongoDatabaseFactory
import org.springframework.data.mongodb.core.convert.DbRefResolver
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper
import org.springframework.data.mongodb.core.convert.MappingMongoConverter
import org.springframework.data.mongodb.core.mapping.MongoMappingContext
@Configuration
internal class SpringMongoConfig {

    @Bean
    fun mappingMongoConverter(
        factory: MongoDatabaseFactory, context: MongoMappingContext,
        beanFactory: BeanFactory
    ): MappingMongoConverter {
        val dbRefResolver: DbRefResolver = DefaultDbRefResolver(factory)
        val mappingConverter = MappingMongoConverter(dbRefResolver, context)
        mappingConverter.setTypeMapper(DefaultMongoTypeMapper(null))
        return mappingConverter
    }
}