How to skip a batch step when a condition is false - spring-batch

I have one basic job with one basic step. The job executes every x seconds (I am using Quartz for this). In my config class I also have a variable "runStep". Where should I add this attribute so that my step runs only when runStep is true?
<batch:job id="export1" parent="baseJob">
<batch:step id="registruj" parent="baseStep">
<tasklet>
<chunk reader="registrujReader" processor="registrujProcessor" writer="registrujWriter"
commit-interval="1" />
</tasklet>
</batch:step>
</batch:job>
<bean id="baseJob" class="org.springframework.batch.core.job.SimpleJob" abstract="true">
<property name="jobRepository" ref="jobRepository" />
</bean>
<bean id="baseStep" class="org.springframework.batch.core.step.factory.SimpleStepFactoryBean" abstract="true">
<property name="transactionManager" ref="transactionManager" />
<property name="jobRepository" ref="jobRepository" />
<property name="startLimit" value="100" />
<property name="commitInterval" value="1" />
</bean>
<bean id="jobRepository" class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean"
p:dataSource-ref="expDataSource" p:transactionManager-ref="transactionManager" />
<bean id="jobLauncher" class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository" />
</bean>
<bean class="org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor">
<property name="jobRegistry" ref="jobRegistry" />
</bean>
<bean id="jobRegistry" class="org.springframework.batch.core.configuration.support.MapJobRegistry" />
<bean id="registrujWriter" class="cz.isvs.reg.rob.util.export.batch.RegistrujItemWriter" scope="step" />
<bean id="registrujReader" class="cz.isvs.reg.rob.util.export.batch.RegistrujItemReader" scope="step" />
<bean id="registrujProcessor" class="cz.isvs.reg.rob.util.export.batch.RegistrujItemProcessor" scope="step" />
<!-- run every 10 seconds -->
<bean class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
<property name="triggers">
<bean id="cronTrigger" class="org.springframework.scheduling.quartz.CronTriggerBean">
<property name="jobDetail" ref="jobDetail" />
<property name="cronExpression" value="*/10 * * * * ?" />
</bean>
</property>
</bean>
<bean id="jobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass" value="cz.isvs.reg.rob.util.export.batch.JobLauncherDetails" />
<property name="group" value="quartz-batch" />
<property name="jobDataAsMap">
<map>
<entry key="jobName" value="export1" />
<entry key="jobLocator" value-ref="jobRegistry" />
<entry key="jobLauncher" value-ref="jobLauncher" />
</map>
</property>
</bean>

Use a JobExecutionDecider
public class RunStepDecider implements JobExecutionDecider {

    @Override
    public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) {
        final String runStep = jobExecution.getJobParameters().getString("runStep");
        // Depending on the value of runStep you can return COMPLETED or FAILED.
        if ("true".equals(runStep)) {
            return FlowExecutionStatus.COMPLETED;
        }
        return FlowExecutionStatus.FAILED;
    }
}
<batch:job id="export1" parent="baseJob">
<batch:decision id="decision" decider="decider">
<batch:next on="COMPLETED" to="registruj" />
</batch:decision>
<batch:step id="registruj" parent="baseStep">
<tasklet>
<chunk reader="registrujReader" processor="registrujProcessor" writer="registrujWriter" commit-interval="1" />
</tasklet>
</batch:step>
</batch:job>
<bean id="decider" class="RunStepDecider" />
and pass runStep as a JobParameter.
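For example, assuming your JobLauncherDetails copies the Quartz JobDataMap entries into the JobParameters (as the version in the Spring Batch samples does), you can add a runStep entry to jobDataAsMap, or launch the job manually. A minimal sketch of the manual launch:
// jobRegistry and jobLauncher as defined in the configuration above
Job job = jobRegistry.getJob("export1");
JobParameters params = new JobParametersBuilder()
        .addString("runStep", "true") // read by RunStepDecider
        .addLong("time", System.currentTimeMillis()) // forces a new JobInstance on each run
        .toJobParameters();
jobLauncher.run(job, params);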
Hope this helps solve your problem.

Related

Using a job parameter in an ItemProcessor in a Spring Batch application

I am trying to get job parameters into an ItemProcessor using Spring Batch annotations.
I implemented it by referring to the link below, but for me the variable (batchRunName) comes out as null when I try to access it in my Processor class.
Can anyone please take a look? I am sure I am missing some small thing.
How to get job parameters into an item processor using Spring Batch annotations
public static void main(String[] args) {
    contextObj = new ClassPathXmlApplicationContext(springConfig);
    jobObj = (Job) contextObj.getBean("XYZ-1001-DD-01");
    JobParametersBuilder jobBuilder = new JobParametersBuilder();
    System.out.println("args[0] is " + args[0]);
    jobBuilder.addString("batchRunName", args[0]); // note: the key here is "batchRunName", while the processor reads "batchRunNumber"
public class TimeProcessor implements ItemProcessor<Time, TimeMetric> {

    private DataSource dataSource;

    @Value("#{jobParameters['batchRunNumber']}")
    private String batchRunNumber;

    public void setBatchRunNumber(String batchRunNumber) {
        this.batchRunNumber = batchRunNumber;
    }
<bean id="timeProcessor"
class="com.xyz.processor.TimeProcessor" scope="step">
<property name="dataSource" ref="oracledataSource" />
</bean>
=================FULL XML CONFIGURATION========================
<import resource="classpath:/batch/utility/skip/batch_skip.xml" />
<import resource="classpath:/batch/config/context-postgres.xml" />
<import resource="classpath:/batch/config/oracle-database.xml" />
<context:property-placeholder
location="classpath:/batch/jobs/TPF-1001-DD-01/TPF-1001-DD-01.properties" />
<bean id="gridSizePartitioner"
class="com.tpf.partitioner.GridSizePartitioner" />
<task:executor id="taskExecutor" pool-size="${pool.size}" />
<batch:job id="XYZJob" job-repository="jobRepository"
restartable="true">
<batch:step id="XYZSTEP">
<batch:description>Convert TIF files to PDF</batch:description>
<batch:partition partitioner="gridSizePartitioner">
<batch:handler task-executor="taskExecutor"
grid-size="${pool.size}" />
<batch:step>
<batch:tasklet allow-start-if-complete="true">
<batch:chunk commit-interval="${commit.interval}"
skip-limit="${job.skip.limit}">
<batch:reader>
<bean id="timeReader"
class="org.springframework.batch.item.database.JdbcCursorItemReader"
scope="step">
<property name="dataSource" ref="oracledataSource" />
<property name="sql">
<value>
select TIME_ID as timesheetId,count(*),max(CREATION_DATETIME) as creationDateTime , ILN_NUMBER as ilnNumber
from TS_FAKE_NAME
where creation_datetime >= '#{jobParameters['creation_start_date1']} 12.00.00.000000000 AM'
and creation_datetime < '#{jobParameters['creation_start_date2']} 11.59.59.999999999 PM'
and mod(time_id,${pool.size})=#{stepExecutionContext['partition.id']}
group by time_id ,ILN_NUMBER
</value>
</property>
<property name="rowMapper">
<bean
class="org.springframework.jdbc.core.BeanPropertyRowMapper">
<property name="mappedClass"
value="com.tpf.model.Time" />
</bean>
</property>
</bean>
</batch:reader>
<batch:processor>
<bean id="compositeItemProcessor"
class="org.springframework.batch.item.support.CompositeItemProcessor">
<property name="delegates">
<list>
<ref bean="timeProcessor" />
</list>
</property>
</bean>
</batch:processor>
<batch:writer>
<bean id="compositeItemWriter"
class="org.springframework.batch.item.support.CompositeItemWriter">
<property name="delegates">
<list>
<ref bean="timeWriter" />
</list>
</property>
</bean>
</batch:writer>
<batch:skippable-exception-classes>
<batch:include
class="com.utility.skip.BatchSkipException" />
</batch:skippable-exception-classes>
<batch:listeners>
<batch:listener ref="batchSkipListener" />
</batch:listeners>
</batch:chunk>
</batch:tasklet>
</batch:step>
</batch:partition>
</batch:step>
<batch:validator>
<bean
class="org.springframework.batch.core.job.DefaultJobParametersValidator">
<property name="requiredKeys">
<list>
<value>batchRunNumber</value>
<value>creation_start_date1</value>
<value>creation_start_date2</value>
</list>
</property>
</bean>
</batch:validator>
</batch:job>
<bean id="timesheetWriter" class="com.tpf.writer.TimeWriter"
scope="step">
<property name="dataSource" ref="dataSource" />
</bean>
<bean id="timeProcessor"
class="com.tpf.processor.TimeProcessor" scope="step">
<property name="dataSource" ref="oracledataSource" />
</bean>
I think you are facing the issue reported in BATCH-2351.
You can try to provide the job parameter via XML instead of the annotation (since the majority of your config is XML-based):
<bean id="timeProcessor" class="com.xyz.processor.TimeProcessor" scope="step">
<property name="dataSource" ref="oracledataSource" />
<property name="batchRunNumber" value="#{jobParameters['batchRunNumber']}" />
</bean>
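As a side note, the job's DefaultJobParametersValidator lists batchRunNumber, creation_start_date1 and creation_start_date2 as required keys, so any launch has to supply all three. A minimal sketch (the date values and the jobLauncher lookup are hypothetical):
JobLauncher jobLauncher = (JobLauncher) contextObj.getBean("jobLauncher");
JobParameters params = new JobParametersBuilder()
        .addString("batchRunNumber", args[0])
        .addString("creation_start_date1", "2016-01-01") // hypothetical value
        .addString("creation_start_date2", "2016-01-02") // hypothetical value
        .toJobParameters();
jobLauncher.run(jobObj, params);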
Hope this helps.

Spring Batch: ItemProcessor is not getting all the data read by the reader

I am reading from MongoDB using a custom ItemReader bean. My reader returns data according to the pageSize (50) defined on the reader, but the processor receives only the first 31 of those 50 rows. I tried various chunk sizes, but somehow the processor still gets only the first 31 rows.
Please help me find the bug. I tried listeners but was not able to find the issue.
---- config XML ----
<?xml version="1.0" encoding="UTF-8"?>
<context:property-placeholder location="classpath:application.properties" />
<context:component-scan base-package="com.xxx.yyy.batch.kernel" />
<context:component-scan base-package="com.xxx.yyy.batch.dao" />
<context:annotation-config />
<!-- Enable Annotation based Declarative Transaction Management -->
<tx:annotation-driven proxy-target-class="true"
transaction-manager="transactionManager" />
<!-- Creating TransactionManager Bean, since JDBC we are creating of type
DataSourceTransactionManager -->
<bean id="transactionManager"
class="org.springframework.jdbc.datasource.DataSourceTransactionManager">
<property name="dataSource" ref="dataSource" />
</bean>
<batch:job id="txnLogJob" job-repository="jobRepository"
restartable="true">
<batch:step id="txnload">
<tasklet allow-start-if-complete="true">
<chunk reader="txnLogItemReader" writer="txnLogItemWriter"
processor="txnLogProcessor" commit-interval="20" />
</tasklet>
</batch:step>
<batch:listeners>
<batch:listener ref="completionListener" />
</batch:listeners>
</batch:job>
<bean id="completionListener"
class="com.xxx.yyy.batch.listeners.JobCompletionNotificationListener" />
<bean id="jobParametersDAOImpl" class="com.xxx.yyy.batch.dao.JobParametersDAOImpl" />
<bean id="batchLoader" class="com.xxx.yyy.batch.kernel.BatchLoader" />
<bean id="batchjobParameter" class="com.xxx.yyy.batch.dao.Batch_Job_Parameters" />
<bean id="txnLogItemWriter" class="org.springframework.batch.item.file.FlatFileItemWriter"
scope="step">
<property name="shouldDeleteIfExists" value="true" />
<property name="resource" value="file:target/test-outputs/output.txt" />
<property name="lineAggregator">
<bean
class="org.springframework.batch.item.file.transform.PassThroughLineAggregator" />
</property>
</bean>
<bean id="txnLogProcessor"
class="com.xxx.yyy.batch.processor.MessageContextItemProcessor" />
<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository" />
</bean>
<bean id="jobRepository"
class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean">
<property name="databaseType" value="MYSQL" />
<property name="dataSource" ref="dataSource" />
<property name="transactionManager" ref="transactionManager" />
</bean>
<bean id="dataSource" class="com.xxx.yyy.common.DataSource"
destroy-method="close">
<property name="driverClassName" value="${jdbc.driverClassName}" />
<property name="url" value="${jdbc.url}" />
<property name="username" value="${jdbc.username}" />
<property name="password" value="${jdbc.password}" />
<property name="connectionProperties" value="${jdbc.connectionProperties}" />
<property name="initialSize" value="${jdbc.initialSize}" />
<property name="maxTotal" value="${jdbc.maxTotal}" />
<property name="maxIdle" value="${jdbc.maxIdle}" />
<property name="minIdle" value="${jdbc.minIdle}" />
<property name="maxWaitMillis" value="${jdbc.maxWaitMillis}" />
<property name="testOnBorrow" value="${jdbc.testOnBorrow}" />
<property name="testWhileIdle" value="${jdbc.testWhileIdle}" />
<property name="testOnReturn" value="${jdbc.testOnReturn}" />
<property name="validationQuery" value="${jdbc.validationQuery}" />
</bean>
</beans>
custom reader bean:
@Bean
public MongoItemReader<MessageContext> txnLogItemReader() {
    MongoItemReader<MessageContext> reader = new MongoItemReader<MessageContext>();
    reader.setPageSize(50);
    reader.setCollection("txnlog");
    reader.setTemplate(mongoTemplate);
    String query = "{ \"audit_info.created_on\": { $gt: { \"$date\" : ?0 }, $lte: { \"$date\" : ?1 } }, "
            + "$and: [ { \"processing_status\": { $in: [?2] } } ] }";
    reader.setQuery(query);
    //Timestamp to_date_timestamp = jobParametersDAOImpl.getCurrentTimeStamp();
    Batch_Job_Parameters job_param = jobParametersDAOImpl.getBatchJobParameters();
    // convert the "yyyy-MM-dd hh:mm:ss..." timestamps into the ISO-like form the Mongo query expects
    String from_date = job_param.getFrom_date().toString();
    String[] splitstr = from_date.split(" ");
    from_date = splitstr[0] + "T" + splitstr[1] + "00Z";
    String to_date = job_param.getTo_date().toString();
    splitstr = to_date.split(" ");
    to_date = splitstr[0] + "T" + splitstr[1] + "00Z";
    List<Object> parameterValues = new ArrayList<Object>();
    parameterValues.add(from_date);
    parameterValues.add(to_date);
    parameterValues.add(job_param.getTxnlog_status_list());
    reader.setParameterValues(parameterValues);
    reader.setTargetType(com....MessageContext.class);
    Map<String, Direction> sorts = new HashMap<String, Direction>();
    sorts.put("audit_info.created_on", org.springframework.data.domain.Sort.Direction.ASC);
    reader.setSort(sorts);
    return reader;
}
I have implemented MessageContextReadConverter implements Converter, and I return null in case the conversion cannot be done. But when it returns null, the read() method stops passing elements on to the Processor/Writer. The issue is that the Converter does not allow an exception to be thrown. I am looking at how to resolve this part.
Updated answer, since the question shifted focus:
I would have the MessageContextReadConverter not return null, and instead do the validation in the Processor. If the Processor returns null, it just increments the filter count, as opposed to confusing the Step into thinking there are no more rows to read.
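A minimal sketch of that pattern, with the validation check as a hypothetical placeholder:
public class MessageContextItemProcessor implements ItemProcessor<MessageContext, MessageContext> {
    @Override
    public MessageContext process(MessageContext item) throws Exception {
        // Returning null from a processor filters the item: Spring Batch
        // increments the step's filterCount and keeps reading. A null from
        // the reader, by contrast, signals end of input.
        if (!isValid(item)) { // isValid is a hypothetical validation check
            return null;
        }
        return item;
    }
}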

Processing a large file using Spring Batch

I have a large file which may contain 100K to 500K records. I am planning to use chunk-oriented processing, and my thought is:
1) Split the large file into smaller ones based on a count, say 10K records in each file.
2) If there are 100K records, then I will get 10 files, each containing 10K records.
3) I would like to partition these 10 files and process them using 5 threads. I am thinking of using a custom MultiResourcePartitioner.
4) The 5 threads should process all 10 files created in the split process.
5) I don't want to create as many threads as there are files, since in that case I may face memory issues. What I am looking for is to process the files using only 5 threads, whatever their number (I can increase it based on my requirements).
Experts, could you let me know whether this can be achieved using Spring Batch? If yes, could you please share pointers or reference implementations?
Thanks in advance
The working job-config XML:
<description>Spring Batch File Chunk Processing</description>
<import resource="../config/batch-context.xml" />
<batch:job id="file-partition-batch" job-repository="jobRepository" restartable="false">
<batch:step id="master">
<batch:partition partitioner="partitioner" handler="partitionHandler" />
</batch:step>
</batch:job>
<batch:step id="slave">
<batch:tasklet>
<batch:chunk reader="reader" processor="compositeProcessor"
writer="compositeWriter" commit-interval="5">
</batch:chunk>
</batch:tasklet>
</batch:step>
<bean id="partitionHandler" class="org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler">
<property name="taskExecutor" ref="taskExecutor"/>
<property name="step" ref="slave" />
<property name="gridSize" value="5" />
</bean>
<bean id="partitioner" class="com.poc.partitioner.FileMultiResourcePartitioner">
<property name="resources" value="file:/Users/anupghosh/Documents/Spring_Batch/FilePartitionBatch/*.txt" />
<property name="threadName" value="feed-processor" />
</bean>
<bean id="taskExecutor" class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="5" />
<property name="maxPoolSize" value="5" />
</bean>
<bean id="reader" class="org.springframework.batch.item.file.FlatFileItemReader" scope="step">
<property name="resource" value="#{stepExecutionContext['fileName']}" />
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="delimiter" value="|"/>
<property name="names" value="key,docName,docTypCD,itemType,itemNum,launchDate,status" />
</bean>
</property>
<property name="fieldSetMapper">
<bean class="com.poc.mapper.FileRowMapper" />
</property>
</bean>
</property>
</bean>
<bean id="validatingProcessor" class="org.springframework.batch.item.validator.ValidatingItemProcessor">
<constructor-arg ref="feedRowValidator" />
</bean>
<bean id="feedProcesor" class="com.poc.processor.FeedProcessor" />
<bean id="compositeProcessor" class="org.springframework.batch.item.support.CompositeItemProcessor" scope="step">
<property name="delegates">
<list>
<ref bean="validatingProcessor" />
<ref bean="feedProcesor" />
</list>
</property>
</bean>
<bean id="recordDecWriter" class="com.poc.writer.RecordDecWriter" />
<bean id="reconFlatFileCustomWriter" class="com.poc.writer.ReconFileWriter">
<property name="reconFlatFileWriter" ref="reconFlatFileWriter" />
</bean>
<bean id="reconFlatFileWriter" class="org.springframework.batch.item.file.FlatFileItemWriter" scope="step">
<property name="resource" value="file:/Users/anupghosh/Documents/Spring_Batch/recon-#{stepExecutionContext[threadName]}.txt" />
<property name="shouldDeleteIfExists" value="true" />
<property name="lineAggregator">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineAggregator">
<property name="delimiter" value="|" />
<property name="fieldExtractor">
<bean class="org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor">
<property name="names" value="validationError" />
</bean>
</property>
</bean>
</property>
</bean>
<bean id="compositeWriter" class="org.springframework.batch.item.support.CompositeItemWriter">
<property name="delegates">
<list>
<ref bean="recordDecWriter" />
<ref bean="reconFlatFileCustomWriter" />
</list>
</property>
</bean>
<bean id="feedRowValidator" class="org.springframework.batch.item.validator.SpringValidator">
<property name="validator">
<bean class="com.poc.validator.FeedRowValidator"/>
</property>
</bean>
I was able to solve this using MultiResourcePartitioner. Below is the Java config:
@Bean
public Partitioner partitioner() throws IOException {
    MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
    ClassLoader cl = this.getClass().getClassLoader();
    ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(cl);
    Resource[] resources = resolver.getResources("file:" + filePath + "/" + "*.csv");
    partitioner.setResources(resources);
    // no need to call partition() here; the framework invokes it with the grid size
    return partitioner;
}

@Bean
public TaskExecutor taskExecutor() {
    ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
    taskExecutor.setMaxPoolSize(4);
    taskExecutor.afterPropertiesSet();
    return taskExecutor;
}

@Bean
@Qualifier("masterStep")
public Step masterStep() throws IOException {
    return stepBuilderFactory.get("masterStep")
            .partitioner("processData", partitioner())
            .step(processData())
            .taskExecutor(taskExecutor())
            .listener(pcStressStepListener)
            .build();
}

@Bean
@Qualifier("processData")
public Step processData() {
    return stepBuilderFactory.get("processData")
            .<pojo, pojo>chunk(5000)
            .reader(reader)
            .processor(processor())
            .writer(writer)
            .build();
}

@Bean(name = "reader")
@StepScope
public FlatFileItemReader<pojo> reader(@Value("#{stepExecutionContext['fileName']}") String filename) throws MalformedURLException {
    FlatFileItemReader<pojo> reader = new FlatFileItemReader<>();
    reader.setResource(new UrlResource(filename));
    reader.setLineMapper(new DefaultLineMapper<pojo>() {
        {
            setLineTokenizer(new DelimitedLineTokenizer() {
                {
                    setNames(FILE_HEADER); // placeholder for the actual column-name array
                }
            });
            setFieldSetMapper(new BeanWrapperFieldSetMapper<pojo>() {
                {
                    setTargetType(pojo.class);
                }
            });
        }
    });
    return reader;
}
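For completeness, a minimal sketch of how the master step could be wired into a job; jobBuilderFactory and the job name are assumptions, not part of the original config:
@Bean
public Job filePartitionJob() throws IOException {
    // MultiResourcePartitioner puts each file's URL into the worker step's
    // execution context under the key "fileName", which is exactly what the
    // @Value expression on reader() resolves.
    return jobBuilderFactory.get("filePartitionJob") // hypothetical job name
            .start(masterStep())
            .build();
}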

How to disable Quartz load balancing?

I have this scheduler:
<bean id="odilQuartzScheduler"
class="org.springframework.scheduling.quartz.SchedulerFactoryBean"
lazy-init="false">
<property name="jobFactory">
<bean class="org.springframework.scheduling.quartz.SpringBeanJobFactory"/>
</property>
<property name="schedulerName" value="OdilScheduler"/>
<property name="applicationContextSchedulerContextKey" value="applicationContext"/>
<property name="autoStartup" value="${popo.scheduler}"/>
<property name="startupDelay" value="60"/>
<property name="overwriteExistingJobs" value="true"/>
<property name="configLocation" value="classpath:quartz-odil.properties"/>
<property name="transactionManager" ref="applicationTransactionManager"/>
<property name="schedulerContextAsMap">
<map>
<entry key="globalConfiguration" value-ref="globalConfiguration"/>
<entry key="odilFileImporter" value-ref="odilFileImporter"/>
<entry key="odilRemoteDispatcher" value-ref="odilRemoteDispatcher"/>
<entry key="odilService" value-ref="odilService"/>
<entry key="centreReferenceService" value-ref="centreReferenceService"/>
<entry key="envoiParserFactory" value-ref="envoiParserFactory"/>
<entry key="versionReferenceService" value-ref="versionReferenceService"/>
<entry key="delPublisherService" value-ref="delPublisherService"/>
<entry key="capteurPublisherService" value-ref="capteurPublisherService"/>
</map>
</property>
<property name="triggers">
<list>
<ref bean="piloteOdilTrigger-1"/>
<ref bean="piloteOdilTrigger-2"/>
<ref bean="piloteOdilTrigger-3"/>
<ref bean="piloteOdilTrigger-4"/>
<ref bean="piloteOdilTrigger-5"/>
<ref bean="piloteOdilTrigger-6"/>
</list>
</property>
</bean>
<bean id="piloteOdilTrigger-1" class="org.springframework.scheduling.quartz.SimpleTriggerBean">
<property name="name" value="Trigger 1"/>
<property name="jobDetail" ref="piloteOdilJobBean1"/>
<property name="repeatInterval" value="60000"/>
<property name="group" value="POPO"/>
</bean>
<bean id="piloteOdilTrigger-2" class="org.springframework.scheduling.quartz.CronTriggerBean">
<property name="name" value="Trigger 2"/>
<property name="jobDetail" ref="piloteOdilJobBean2"/>
<property name="cronExpression" value="${popo.odil.rushhour.cronexpression}" />
<property name="group" value="POPO"/>
</bean>
<bean id="piloteOdilTrigger-3" class="org.springframework.scheduling.quartz.CronTriggerBean">
<property name="name" value="Trigger 3"/>
<property name="jobDetail" ref="piloteOdilJobBean3"/>
<property name="cronExpression" value="${popo.odil.rushhour.cronexpression}" />
<property name="group" value="POPO"/>
</bean>
<bean id="piloteOdilTrigger-4" class="org.springframework.scheduling.quartz.CronTriggerBean">
<property name="name" value="Trigger 4"/>
<property name="jobDetail" ref="piloteOdilJobBean4"/>
<property name="cronExpression" value="${popo.odil.rushhour.cronexpression}" />
<property name="group" value="POPO"/>
</bean>
<bean id="piloteOdilTrigger-5" class="org.springframework.scheduling.quartz.CronTriggerBean">
<property name="name" value="Trigger 5"/>
<property name="jobDetail" ref="piloteOdilJobBean5"/>
<property name="cronExpression" value="${popo.odil.rushhour.cronexpression}" />
<property name="group" value="POPO"/>
</bean>
<bean id="piloteOdilTrigger-6" class="org.springframework.scheduling.quartz.CronTriggerBean">
<property name="name" value="Trigger 6"/>
<property name="jobDetail" ref="piloteOdilJobBean6"/>
<property name="cronExpression" value="${popo.odil.rushhour.cronexpression}" />
<property name="group" value="POPO"/>
</bean>
<bean id="piloteOdilJobBean-template" abstract="true"
class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="name" value="Pilote ODIL"/>
<property name="jobClass" value="fr.xxxx.popo.batch.PiloteOdilJob"/> <!-- implements StatefulJob -->
<property name="requestsRecovery" value="true"/>
<property name="group" value="POPO"/>
</bean>
<bean id="piloteOdilJobBean1" parent="piloteOdilJobBean-template" />
<bean id="piloteOdilJobBean2" parent="piloteOdilJobBean-template" />
<bean id="piloteOdilJobBean3" parent="piloteOdilJobBean-template" />
<bean id="piloteOdilJobBean4" parent="piloteOdilJobBean-template" />
<bean id="piloteOdilJobBean5" parent="piloteOdilJobBean-template" />
<bean id="piloteOdilJobBean6" parent="piloteOdilJobBean-template" />
This is its configuration file:
#============================================================================
# Main Scheduler Properties
#============================================================================
org.quartz.scheduler.instanceName = OdilScheduler
org.quartz.scheduler.instanceId = AUTO
org.quartz.scheduler.rmi.export = false
org.quartz.scheduler.rmi.proxy = false
#============================================================================
# ThreadPool
#============================================================================
org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool
org.quartz.threadPool.threadCount = 6
org.quartz.threadPool.threadPriority = 5
#============================================================================
# JobStore
#============================================================================
org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore
org.quartz.jobStore.misfireThreshold = 3
#============================================================================
# Plugins
#============================================================================
org.quartz.plugin.shutdownhook.class = org.quartz.plugins.management.ShutdownHookPlugin
org.quartz.plugin.shutdownhook.cleanShutdown = true
From what I understood, using a RAMJobStore makes the scheduler non-clusterable, and thus load balancing should not work.
But in reality, when I have 2 servers running, each one has its own OdilScheduler with 6 triggers each.
However, a total of only 6 triggers fire at the same time,
3 on instance 1 and 3 on instance 2, for example. I want all 12 triggers to work simultaneously.
What am I missing?
Thanks
PS: Quartz version: 1.8.5
I was misled; in fact I had only tried with 3 triggers on each instance:
1 simple trigger and 2 cron triggers.
The cron triggers were using the RAMJobStore, but because of the template, every jobDetail had the same name ("Pilote ODIL"), so each instance could only use 1 cron trigger (jobs are identified by their name).
That is why it looked like load balancing was happening; in fact, since they had the same name, only one trigger fired per instance.
If I had set 5 cron triggers on each instance, I would still have had only 1 trigger per instance. Giving each concrete jobDetail bean its own name fixes this.
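To illustrate: in the Quartz 1.x API a job is identified by its name and group, so each JobDetail needs a distinct name; a minimal sketch (the names are hypothetical):
// Two distinct jobs; with identical name+group the second would overwrite the first.
JobDetail detail1 = new JobDetail("piloteOdilJob-1", "POPO", PiloteOdilJob.class);
JobDetail detail2 = new JobDetail("piloteOdilJob-2", "POPO", PiloteOdilJob.class);
In the Spring XML above, the equivalent fix is to override the name property on each concrete piloteOdilJobBean instead of inheriting "Pilote ODIL" from the abstract template.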

Parallel step execution in Spring Batch

Right now I got to know that we can run concurrent steps in spring batch using Parallel Steps (http://docs.spring.io/spring-batch/trunk/reference/html/scalability.html 7.2).I did and got success with it.
But When I see the database what has happened is person record in the table is updated with others person information, though I have made my ItemProcessor as synchronized.
Background about what job is doing is simple, just process person records from person table across department (200,400) and writes in to a flat file.
When I saw flat file I could see records a person from department 200 is written with person information from department 400.Kindly help is any thing i need to take care of?
<batch:job id="dept">
<batch:step id="dojStep1" parent="dojMainStep1" next="parallelProcessMatch">
</batch:step>
<batch:split id="parallelProcessMatch" task-executor="taskExecutor">
<batch:flow>
<batch:step id="step200" parent="dojMainStep200" >
</batch:step>
</batch:flow>
<batch:flow>
<batch:step id="step400" parent="dojMainStep400" >
</batch:step>
</batch:flow>
</batch:split>
</batch:job>
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor"/>
<!-- Start parallelProcessMatch -->
<!-- Start dojMainStep200 -->
<batch:step id="dojMainStep200" abstract="true">
<batch:tasklet>
<batch:chunk commit-interval="1000" reader="dojDbReader200"
processor="dojMatchItemProcessor200" writer="dojClassifierMatchReportWriter200">
<batch:streams>
<batch:stream ref="itemWriterMatch200" />
<batch:stream ref="itemWriterUnMatch200" />
</batch:streams>
</batch:chunk>
</batch:tasklet>
<batch:listeners>
<batch:listener ref="dojMatch200PageHeaderCallback" />
<batch:listener ref="dojUnMatch200PageHeaderCallback" />
<batch:listener ref="dojInPageFooterCallback" />
</batch:listeners>
</batch:step>
<bean id="dojMatchItemProcessor200"
class="com.batchinterface.dept.recordresultsvr.DojMatchItemProccesor"
p:holdingTankDao-ref="holdingTankDao" p:rsdProvider-ref="rsdProvider" p:searchProvider-ref="searchProvider" />
<bean id="dojDbReader200"
class="org.springframework.batch.item.database.StoredProcedureItemReader"
p:dataSource-ref="oracleDataSource" p:rowMapper-ref="dojMatchRowMapper200"
scope="step" p:function="false" p:procedureName="PKG_JOIN.PRC_SELECT"
p:preparedStatementSetter-ref="dojmatchpropertySetter200"
p:refCursorPosition="1">
<property name="parameters">
<list>
<bean class="org.springframework.jdbc.core.SqlOutParameter">
<constructor-arg index="0" value="c1" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.CURSOR" />
</constructor-arg>
</bean>
<bean class="org.springframework.jdbc.core.SqlParameter">
<constructor-arg index="0" value="dept" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.VARCHAR" />
</constructor-arg>
</bean>
</list>
</property>
</bean>
<bean id="dojmatchpropertySetter200"
class="com.batchinterface.dept.recordresultsvr.DojPreparedStateSetter">
<property name="dept" value="200" />
</bean>
<bean id="dojMatchRowMapper200"
class="com.batchinterface.dept.recordresultsvr.DojMatchRowMapper" />
<bean id="dojClassifierMatchReportWriter200"
class="org.springframework.batch.item.support.ClassifierCompositeItemWriter"
p:classifier-ref="dojMatchClassifier200">
</bean>
<bean id="dojMatchClassifier200"
class="com.batchinterface.dept.recordresultsvr.DojMatchReportClassifier"
p:itemWriterMatch200-ref="itemWriterMatch200"
p:itemWriterUnMatch200-ref="itemWriterUnMatch200"
p:lastRunDate-ref="LastSuccessfulRunDate200">
</bean>
<!-- End dojMainStep200 -->
<!-- Start dojMainStep400 -->
<batch:step id="dojMainStep400" abstract="true">
<batch:tasklet>
<batch:chunk commit-interval="1000" reader="dojDbReader400"
processor="dojMatchItemProcessor400" writer="dojClassifierMatchReportWriter400">
<batch:streams>
<batch:stream ref="itemWriterMatch400" />
<batch:stream ref="itemWriterUnMatch400" />
</batch:streams>
</batch:chunk>
</batch:tasklet>
<batch:listeners>
<batch:listener ref="dojMatch400PageHeaderCallback" />
<batch:listener ref="dojUnMatch400PageHeaderCallback" />
<batch:listener ref="dojInPageFooterCallback" />
</batch:listeners>
</batch:step>
<bean id="dojMatchItemProcessor400"
class="com.batchinterface.dept.recordresultsvr.DojMatchItemProccesor"
p:holdingTankDao-ref="holdingTankDao" p:rsdProvider-ref="rsdProvider" p:searchProvider-ref="searchProvider" />
<bean id="dojDbReader400"
class="org.springframework.batch.item.database.StoredProcedureItemReader"
p:dataSource-ref="oracleDataSource" p:rowMapper-ref="dojMatchRowMapper400"
scope="step" p:function="false" p:procedureName="PKG_JOIN.PRC_SELECT"
p:preparedStatementSetter-ref="dojmatchpropertySetter400"
p:refCursorPosition="1">
<property name="parameters">
<list>
<bean class="org.springframework.jdbc.core.SqlOutParameter">
<constructor-arg index="0" value="c1" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.CURSOR" />
</constructor-arg>
</bean>
<bean class="org.springframework.jdbc.core.SqlParameter">
<constructor-arg index="0" value="dept" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.VARCHAR" />
</constructor-arg>
</bean>
</list>
</property>
</bean>
<bean id="dojmatchpropertySetter400"
class="com.batchinterface.dept.recordresultsvr.DojPreparedStateSetter">
<property name="dept" value="400" />
</bean>
<bean id="dojMatchRowMapper400"
class="com.batchinterface.dept.recordresultsvr.DojMatchRowMapper" />
<bean id="dojClassifierMatchReportWriter400"
class="org.springframework.batch.item.support.ClassifierCompositeItemWriter"
p:classifier-ref="dojMatchClassifier400">
</bean>
<bean id="dojMatchClassifier400"
class="com.batchinterface.dept.recordresultsvr.DojMatchReportClassifier"
p:itemWriterMatch400-ref="itemWriterMatch400"
p:itemWriterUnMatch400-ref="itemWriterUnMatch400"
p:lastRunDate-ref="LastSuccessfulRunDate400">
</bean>
<!-- End dojMainStep400 -->
<!-- End parallelProcessMatch -->