Spring Batch Partitioning - partition step not executing

I am using batch partitioning to fetch and process records based on a range of values in one of the columns of a DB table. The partition step stagingPartitionStep comes after a few other steps in the job configuration, as follows:
<batch:job id="myBatchJob">
<batch:step id="mainStep">
<batch:tasklet task-executor="myTaskExecutor" transaction-manager="batchTransactionManager">
<batch:chunk reader="myDataReader"
processor="MyDataProcessor" writer="MyDataWriter" commit-interval="50">
</batch:chunk>
<batch:listeners>
<batch:listener ref="myItemListener" />
</batch:listeners>
</batch:tasklet>
<batch:next on="COMPLETED" to="checkConditionStep" />
</batch:step>
<batch:step id="checkConditionStep">
<batch:tasklet task-executor="checkConditionExecutor"
transaction-manager="batchTransactionManager" ref="checkConditionTasklet">
</batch:tasklet>
<batch:next on="FAILED" to="updateStagingTableStep" />
<batch:next on="COMPLETED" to="stagingPartitionStep" />
</batch:step>
<batch:step id="updateStagingTableStep">
<batch:tasklet task-executor="checkConditionExecutor"
transaction-manager="batchTransactionManager" ref="updateStagingTasklet">
</batch:tasklet>
</batch:step>
<batch:step id="stagingPartitionStep">
<batch:partition step="processStagingStep" partitioner="stagingProcessPartitioner">
<batch:handler grid-size="10" task-executor="stagingProcessTaskExecutor" />
</batch:partition>
</batch:step>
</batch:job>
The partitioner and the partition step:
<bean id="stagingProcessPartitioner"
class="com.mycom.batch.partitioner.StagingProcessPartitioner"
scope="step">
</bean>
<batch:step id="processStagingStep">
<batch:tasklet transaction-manager="batchTransactionManager">
<batch:chunk reader="stagingProcessorDataReader" writer="stagingProcessorDataWriter"
commit-interval="50">
</batch:chunk>
</batch:tasklet>
</batch:step>
The task executors:
<bean id="myTaskExecutor"
class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="20" />
<property name="maxPoolSize" value="20" />
</bean>
<bean id="stagingProcessTaskExecutor"
class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="10" />
<property name="maxPoolSize" value="10" />
<property name="allowCoreThreadTimeOut" value="true" />
</bean>
<bean id="checkConditionStep"
class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="1" />
<property name="maxPoolSize" value="1" />
</bean>
The partitioner implementation: the partitioner creates an ExecutionContext for each partitioned step and puts in a voucherSuffix value, which is used in the JDBC query to partition the data by voucher number.
public class StagingProcessPartitioner implements Partitioner {

    private static final Logger LOGGER = LoggerFactory.getLogger(StagingProcessPartitioner.class);

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        Map<String, ExecutionContext> partitionMap = new HashMap<String, ExecutionContext>();
        for (int threadId = 0; threadId < gridSize; threadId++) {
            ExecutionContext context = new ExecutionContext();
            String stepName = "step" + threadId;
            context.put("voucherSuffix", threadId);
            partitionMap.put(stepName, context);
            LOGGER.info("Created ExecutionContext for partitioned step : " + stepName);
        }
        return partitionMap;
    }
}
The data reader: the voucherSuffix from the step context is used in the JDBC query to partition the data. Ten partitions should therefore be created, on voucher numbers ending in 0, 1, 2, ..., 9.
<bean id="stagingProcessorDataReader"
class="org.springframework.batch.item.database.JdbcPagingItemReader"
scope="step">
<property name="dataSource" ref="dataSource" />
<property name="queryProvider" ref="stagingDataQueryProvider" />
<property name="parameterValues">
<map>
<entry key="department" value="#{jobParameters[department]}" />
<entry key="joiningDate" value="#{jobParameters[joiningDate]}" />
<entry key="voucherSuffix" value="#{stepExecutionContext[voucherSuffix]}" />
</map>
</property>
<property name="pageSize" value="1000" />
<property name="rowMapper" ref="myDataRowMapper"/>
</bean>
<bean id="stagingDataQueryProvider"
class="org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="selectClause" value="SELECT EMP_ID, EMP_NAME, DOB, ADDRESS1, ADDRESS2" />
<property name="fromClause" value="EMP_STG_TBL" />
<property name="whereClause" value="WHERE DEPT_ID=:department AND DOJ=:joiningDate AND VCHR LIKE '%:voucherSuffix'" />
<property name="sortKeys">
<map>
<entry key="EMP_ID" value="ASCENDING"></entry>
</map>
</property>
</bean>
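One thing worth noting about the query above (an observation, not necessarily the cause of the missing step executions): named SQL parameters are not expanded inside quoted string literals, so VCHR LIKE '%:voucherSuffix' sends the literal text %:voucherSuffix to the database instead of binding the suffix. A sketch of one way to make the pattern bindable is to build the wildcard in the partitioner and bind the whole pattern (a hypothetical change, not the configuration as posted):

// In StagingProcessPartitioner: store the complete LIKE pattern
context.putString("voucherSuffix", "%" + threadId);

<!-- In the query provider, bind the pattern directly: -->
<property name="whereClause" value="WHERE DEPT_ID=:department AND DOJ=:joiningDate AND VCHR LIKE :voucherSuffix" />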
The problem is that when the job is executed, every step runs fine up to the partition step. The partitioner creates the execution contexts, which can be confirmed from the log statements, but the step processStagingStep is never executed and the job finishes with status COMPLETED. Is this job and partition step configuration correct?
Here are the log statements:
2015-02-23 03:03:04 INFO myTaskScheduler-3 SimpleStepHandler:146 - Executing step: [checkConditionStep]
2015-02-23 03:03:04 INFO myTaskScheduler-3 SimpleStepHandler:146 - Executing step: [stagingPartitionStep]
2015-02-23 03:03:04 INFO myTaskScheduler-3 StagingProcessPartitioner:29 - Created ExecutionContext for partitioned step : step0
2015-02-23 03:03:04 INFO myTaskScheduler-3 StagingProcessPartitioner:29 - Created ExecutionContext for partitioned step : step1
2015-02-23 03:03:04 INFO myTaskScheduler-3 StagingProcessPartitioner:29 - Created ExecutionContext for partitioned step : step2
2015-02-23 03:03:04 INFO myTaskScheduler-3 StagingProcessPartitioner:29 - Created ExecutionContext for partitioned step : step3
2015-02-23 03:03:04 INFO myTaskScheduler-3 StagingProcessPartitioner:29 - Created ExecutionContext for partitioned step : step4
2015-02-23 03:03:04 INFO myTaskScheduler-3 StagingProcessPartitioner:29 - Created ExecutionContext for partitioned step : step5
2015-02-23 03:03:04 INFO myTaskScheduler-3 StagingProcessPartitioner:29 - Created ExecutionContext for partitioned step : step6
2015-02-23 03:03:04 INFO myTaskScheduler-3 StagingProcessPartitioner:29 - Created ExecutionContext for partitioned step : step7
2015-02-23 03:03:04 INFO myTaskScheduler-3 StagingProcessPartitioner:29 - Created ExecutionContext for partitioned step : step8
2015-02-23 03:03:04 INFO myTaskScheduler-3 StagingProcessPartitioner:29 - Created ExecutionContext for partitioned step : step9
2015-02-23 03:03:05 INFO myTaskScheduler-3 SimpleJobLauncher:136 - Job: [FlowJob: [name=myBatchJob]] completed with the following parameters: [] and the following status: [COMPLETED]

Related

NonTransientFlatFileException in Spring Batch

I am trying to read a CSV file with 100 records and process it in batches of 10 records at a time. Everything works fine, but after all the records have been processed I get
org.springframework.batch.item.file.NonTransientFlatFileException:
Unable to read from resource: [class path resource
[csv/input/VMwareImport.csv]] and the root cause is
org.springframework.batch.core.JobExecutionException: Partition
handler returned an unsuccessful step.
Below is my job XML:
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:batch="http://www.springframework.org/schema/batch" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/batch
http://www.springframework.org/schema/batch/spring-batch-2.2.xsd
http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.2.xsd">
<import resource="../config/context.xml" />
<bean id="report" class="com.abc.model.VMware" scope="prototype" />
<bean id="itemProcessor" class="com.abc.CustomItemProcessor" scope="step">
<property name="threadName" value="#{stepExecutionContext[name]}" />
</bean>
<batch:job id="vmImport">
<batch:step id="step1">
<batch:partition step="slave" partitioner="rangePartitioner">
<batch:handler grid-size="10" task-executor="taskExecutor" />
</batch:partition>
</batch:step>
</batch:job>
<batch:step id="slave">
<batch:tasklet>
<batch:chunk reader="cvsFileItemReader" writer="xmlItemWriter"
processor="itemProcessor" commit-interval="10" />
</batch:tasklet>
</batch:step>
<bean id="rangePartitioner" class="com.abc.partition.RangePartitioner" />
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor" />
<bean id="cvsFileItemReader" class="org.springframework.batch.item.file.FlatFileItemReader">
<property name="resource" value="classpath:csv/input/VMwareImport.csv" />
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean
class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="names" value="SubscriptionId,TemplateName,ResourcePool,CpuCount,MemorySize,Network,DiskSize,
StorageCluster,DHCP,StaticIP,Subnet,Gateway,DNS1,DNS2,
DtapTag,FinTag,TechTag,TshirtTag,BackupTag " />
</bean>
</property>
<property name="fieldSetMapper">
<bean class="com.abc.VMwareFieldSetMapper" />
<!-- if no data type conversion, use BeanWrapperFieldSetMapper to map
by name <bean class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<property name="prototypeBeanName" value="report" /> </bean> -->
</property>
</bean>
</property>
</bean>
<!-- <bean id="cvsFileItemReader" class="com.abc.VMwareReaderFromFile" scope="step">
<constructor-arg value="classpath:csv/input/VMwareImport.csv" />
</bean> -->
<bean id="xmlItemWriter" class="com.abc.DummyWriter">
<!-- <property name="resource" value="file:xml/outputs/report.xml" /> <property
name="marshaller" ref="reportMarshaller" /> <property name="rootTagName"
value="report" /> -->
</bean>
<!-- <bean id="reportMarshaller" class="org.springframework.oxm.jaxb.Jaxb2Marshaller">
<property name="classesToBeBound"> <list> <value>com.abc.model.VMware</value>
</list> </property> </bean> -->
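The com.abc.partition.RangePartitioner source is not shown in the question. For reference, a minimal partitioner of this shape (a hypothetical sketch; only the name key is implied by the #{stepExecutionContext[name]} reference above) would look like:

import java.util.HashMap;
import java.util.Map;

import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;

public class RangePartitioner implements Partitioner {
    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        Map<String, ExecutionContext> partitions = new HashMap<String, ExecutionContext>();
        for (int i = 0; i < gridSize; i++) {
            ExecutionContext context = new ExecutionContext();
            // "name" is read by the step-scoped itemProcessor above
            context.putString("name", "Thread" + i);
            partitions.put("partition" + i, context);
        }
        return partitions;
    }
}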
Complete exception stack trace:
org.springframework.batch.item.file.NonTransientFlatFileException: Unable to read from resource: [class path resource [csv/input/VMwareImport.csv]]
at org.springframework.batch.item.file.FlatFileItemReader.readLine(FlatFileItemReader.java:220)
at org.springframework.batch.item.file.FlatFileItemReader.doRead(FlatFileItemReader.java:173)
at org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader.read(AbstractItemCountingItemStreamItemReader.java:83)
at org.springframework.batch.core.step.item.SimpleChunkProvider.doRead(SimpleChunkProvider.java:91)
at org.springframework.batch.core.step.item.SimpleChunkProvider.read(SimpleChunkProvider.java:155)
at org.springframework.batch.core.step.item.SimpleChunkProvider$1.doInIteration(SimpleChunkProvider.java:114)
at org.springframework.batch.repeat.support.RepeatTemplate.getNextResult(RepeatTemplate.java:368)
at org.springframework.batch.repeat.support.RepeatTemplate.executeInternal(RepeatTemplate.java:215)
at org.springframework.batch.repeat.support.RepeatTemplate.iterate(RepeatTemplate.java:144)
at org.springframework.batch.core.step.item.SimpleChunkProvider.provide(SimpleChunkProvider.java:108)
at org.springframework.batch.core.step.item.ChunkOrientedTasklet.execute(ChunkOrientedTasklet.java:69)
at org.springframework.batch.core.step.tasklet.TaskletStep$ChunkTransactionCallback.doInTransaction(TaskletStep.java:395)
at org.springframework.transaction.support.TransactionTemplate.execute(TransactionTemplate.java:131)
at org.springframework.batch.core.step.tasklet.TaskletStep$2.doInChunkContext(TaskletStep.java:267)
at org.springframework.batch.core.scope.context.StepContextRepeatCallback.doInIteration(StepContextRepeatCallback.java:77)
at org.springframework.batch.repeat.support.RepeatTemplate.getNextResult(RepeatTemplate.java:368)
at org.springframework.batch.repeat.support.RepeatTemplate.executeInternal(RepeatTemplate.java:215)
at org.springframework.batch.repeat.support.RepeatTemplate.iterate(RepeatTemplate.java:144)
at org.springframework.batch.core.step.tasklet.TaskletStep.doExecute(TaskletStep.java:253)
at org.springframework.batch.core.step.AbstractStep.execute(AbstractStep.java:195)
at org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler$1.call(TaskExecutorPartitionHandler.java:139)
at org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler$1.call(TaskExecutorPartitionHandler.java:136)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.lang.Thread.run(Thread.java:744)
Caused by: java.io.IOException: Stream closed
at java.io.BufferedReader.ensureOpen(BufferedReader.java:115)
at java.io.BufferedReader.readLine(BufferedReader.java:310)
at java.io.BufferedReader.readLine(BufferedReader.java:382)
at org.springframework.batch.item.file.FlatFileItemReader.readLine(FlatFileItemReader.java:201)
... 23 more
SEVERE: Encountered an error executing the step
org.springframework.batch.core.JobExecutionException: Partition handler returned an unsuccessful step
at org.springframework.batch.core.partition.support.PartitionStep.doExecute(PartitionStep.java:111)
at org.springframework.batch.core.step.AbstractStep.execute(AbstractStep.java:195)
at org.springframework.batch.core.job.SimpleStepHandler.handleStep(SimpleStepHandler.java:137)
at org.springframework.batch.core.job.flow.JobFlowExecutor.executeStep(JobFlowExecutor.java:64)
at org.springframework.batch.core.job.flow.support.state.StepState.handle(StepState.java:60)
at org.springframework.batch.core.job.flow.support.SimpleFlow.resume(SimpleFlow.java:152)
at org.springframework.batch.core.job.flow.support.SimpleFlow.start(SimpleFlow.java:131)
at org.springframework.batch.core.job.flow.FlowJob.doExecute(FlowJob.java:135)
at org.springframework.batch.core.job.AbstractJob.execute(AbstractJob.java:301)
at org.springframework.batch.core.launch.support.SimpleJobLauncher$1.run(SimpleJobLauncher.java:134)
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
at org.springframework.batch.core.launch.support.SimpleJobLauncher.run(SimpleJobLauncher.java:127)
at com.fujitsu.App.main(App.java:27)
Your cvsFileItemReader needs to be step-scoped. Whichever partition finishes first closes the stream, so the other partitions are unable to continue reading.
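For illustration, a minimal sketch of the same bean declared with step scope (only the scope attribute is new; the nested lineMapper stays exactly as in the question):

<bean id="cvsFileItemReader" class="org.springframework.batch.item.file.FlatFileItemReader" scope="step">
<property name="resource" value="classpath:csv/input/VMwareImport.csv" />
<!-- lineMapper as in the question -->
</bean>

With scope="step", each partition gets its own reader instance and its own underlying stream, so one partition finishing first can no longer close the stream the other partitions are still reading.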

Spring Batch with Scheduler

I am new to Spring Batch with a scheduler. My task is to read data from one table and write it into another table.
I have been going through various blogs and tutorials.
I don't know whether there is a direct approach to read from a database and write into a database. I took this approach:
Job 1: Reads the data from the DB using JdbcCursorItemReader and writes it to a txt file using FlatFileItemWriter.
Job 2: Reads the data from the txt file using FlatFileItemReader and multiResourceItemReader, and writes it into another table using HibernateItemWriter.
I am using a scheduler that runs the batch every 20 seconds.
With this approach the first run works fine. For the second run (after 20 seconds) I update the data in the database (base table), but the job does not write the updated data to the file or the database.
Here is my configuration and code:
package com.cg.schedulers;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.beans.factory.annotation.Autowired;

public class UserScheduler {

    @Autowired
    private JobLauncher launcher;
    @Autowired
    private Job userJob;
    @Autowired
    private Job userJob2;

    private JobExecution execution1, execution2;

    public void run() {
        try {
            execution1 = launcher.run(userJob, new JobParameters());
            execution2 = launcher.run(userJob2, new JobParameters());
            System.out.println("Execution status: " + execution1.getStatus());
            System.out.println("Execution status: " + execution2.getStatus());
        } catch (JobExecutionAlreadyRunningException e) {
            e.printStackTrace();
        } catch (JobRestartException e) {
            e.printStackTrace();
        } catch (JobInstanceAlreadyCompleteException e) {
            e.printStackTrace();
        } catch (JobParametersInvalidException e) {
            e.printStackTrace();
        }
    }
}
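A side note on the launch code above: both jobs are launched with new JobParameters() on every run, i.e. with identical empty parameters, so every scheduled run targets the same JobInstance. A sketch of one common workaround (a hypothetical tweak, not the posted code) is to add a timestamp parameter so each run becomes a new instance:

// Hypothetical tweak: unique parameters per scheduled run
JobParameters params = new JobParametersBuilder()
        .addLong("time", System.currentTimeMillis())
        .toJobParameters();
execution1 = launcher.run(userJob, params);
execution2 = launcher.run(userJob2, params);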
XML configuration:
<import resource="spring-batch1.xml" />
<import resource="springbatch-database.xml" />
<context:annotation-config/>
<context:component-scan base-package="com.cg"/>
<!-- Reading data from -->
<bean id="itemReader"
class="org.springframework.batch.item.database.JdbcCursorItemReader"
scope="step">
<property name="dataSource" ref="dataSource" />
<property name="sql" value="select UserId, UserName, Password from USER" />
<property name="rowMapper">
<bean class="com.cg.mapper.UserRowMapper" />
</property>
</bean>
<!-- ItemWriter writes a line into output flat file -->
<bean id="flatFileItemWriter" class="org.springframework.batch.item.file.FlatFileItemWriter"
scope="step">
<property name="resource" value="file:csv/User.txt" />
<property name="lineAggregator">
<!-- An Aggregator which converts an object into delimited list of strings -->
<bean
class="org.springframework.batch.item.file.transform.DelimitedLineAggregator">
<property name="delimiter" value="," />
<property name="fieldExtractor">
<!-- Extractor which returns the value of beans property through reflection -->
<bean
class="org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor">
<property name="names" value="userId, username, password" />
</bean>
</property>
</bean>
</property>
</bean>
<!-- ItemReader reads a complete line one by one from input file -->
<bean id="flatFileItemReader" class="org.springframework.batch.item.file.FlatFileItemReader"
scope="step">
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="fieldSetMapper">
<!-- Mapper which maps each individual items in a record to properties
in POJO -->
<bean class="com.cg.mapper.UserFieldSetMapper" />
</property>
<property name="lineTokenizer">
<!-- A tokenizer class to be used when items in input record are separated
by specific characters -->
<bean
class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="delimiter" value="," />
</bean>
</property>
</bean>
</property>
</bean>
<bean id="multiResourceItemReader"
class="org.springframework.batch.item.file.MultiResourceItemReader">
<property name="resources" value="classpath:csv/User.txt" />
<property name="delegate" ref="flatFileItemReader" />
</bean>
<!-- Optional JobExecutionListener to perform business logic before and
after the job -->
<bean id="jobListener" class="com.cg.support.UserItemListener" />
<!-- Optional ItemProcessor to perform business logic/filtering on the input
records -->
<bean id="itemProcessor1" class="com.cg.support.UserItemProcessor" />
<bean id="itemProcessor2" class="com.cg.support.UserItemProcessor2" />
<!-- ItemWriter which writes data to database -->
<bean id="databaseItemWriter"
class="org.springframework.batch.item.database.HibernateItemWriter">
<property name="sessionFactory" ref="sessionFactory" />
</bean>
<!-- Actual Job -->
<batch:job id="userJob">
<batch:step id="step1">
<batch:tasklet transaction-manager="transactionManager">
<batch:chunk reader="itemReader" writer="flatFileItemWriter"
processor="itemProcessor1" commit-interval="10" />
</batch:tasklet>
</batch:step>
<batch:listeners>
<batch:listener ref="jobListener" />
</batch:listeners>
</batch:job>
<batch:job id="userJob2">
<batch:step id="step2">
<batch:tasklet transaction-manager="transactionManager">
<batch:chunk reader="multiResourceItemReader" writer="databaseItemWriter"
processor="itemProcessor2" commit-interval="10" />
</batch:tasklet>
</batch:step>
</batch:job>
<bean id="myScheduler" class="com.cg.schedulers.UserScheduler"/>
<task:scheduled-tasks>
<task:scheduled ref="myScheduler" method="run" cron="*/20 * * * * *" />
</task:scheduled-tasks>
Please suggest a direct approach, if possible using Hibernate.
Execution status: COMPLETED
Thanks,
Vamshi.
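Two hedged observations on the configuration above. First, flatFileItemWriter writes to file:csv/User.txt while multiResourceItemReader reads classpath:csv/User.txt; those are two different locations, which alone could explain why the second job never sees updated data. Second, on the "direct approach" question: a single chunk step can read from the source table and write to the target table with no intermediate file. A minimal sketch, reusing the itemReader, itemProcessor1 and databaseItemWriter beans already defined in the question (the job and step ids here are made up):

<batch:job id="userDirectJob">
<batch:step id="directStep">
<batch:tasklet transaction-manager="transactionManager">
<batch:chunk reader="itemReader" processor="itemProcessor1"
writer="databaseItemWriter" commit-interval="10" />
</batch:tasklet>
</batch:step>
</batch:job>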

Parallel step execution in Spring Batch

I recently learned that we can run concurrent steps in Spring Batch using parallel steps (see section 7.2 of http://docs.spring.io/spring-batch/trunk/reference/html/scalability.html). I tried it and it worked.
But when I look at the database, a person's record in the table has been updated with another person's information, even though I made my ItemProcessor synchronized.
Some background: what the job does is simple. It just processes person records from the person table for two departments (200 and 400) and writes them to a flat file.
When I inspected the flat file I could see that a person from department 200 was written with person information from department 400. Kindly help: is there anything I need to take care of?
<batch:job id="dept">
<batch:step id="dojStep1" parent="dojMainStep1" next="parallelProcessMatch">
</batch:step>
<batch:split id="parallelProcessMatch" task-executor="taskExecutor">
<batch:flow>
<batch:step id="step200" parent="dojMainStep200" >
</batch:step>
</batch:flow>
<batch:flow>
<batch:step id="step400" parent="dojMainStep400" >
</batch:step>
</batch:flow>
</batch:split>
</batch:job>
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor"/>
<!-- Start parallelProcessMatch -->
<!-- Start dojMainStep200 -->
<batch:step id="dojMainStep200" abstract="true">
<batch:tasklet>
<batch:chunk commit-interval="1000" reader="dojDbReader200"
processor="dojMatchItemProcessor200" writer="dojClassifierMatchReportWriter200">
<batch:streams>
<batch:stream ref="itemWriterMatch200" />
<batch:stream ref="itemWriterUnMatch200" />
</batch:streams>
</batch:chunk>
</batch:tasklet>
<batch:listeners>
<batch:listener ref="dojMatch200PageHeaderCallback" />
<batch:listener ref="dojUnMatch200PageHeaderCallback" />
<batch:listener ref="dojInPageFooterCallback" />
</batch:listeners>
</batch:step>
<bean id="dojMatchItemProcessor200"
class="com.batchinterface.dept.recordresultsvr.DojMatchItemProccesor"
p:holdingTankDao-ref="holdingTankDao" p:rsdProvider-ref="rsdProvider" p:searchProvider-ref="searchProvider" />
<bean id="dojDbReader200"
class="org.springframework.batch.item.database.StoredProcedureItemReader"
p:dataSource-ref="oracleDataSource" p:rowMapper-ref="dojMatchRowMapper200"
scope="step" p:function="false" p:procedureName="PKG_JOIN.PRC_SELECT"
p:preparedStatementSetter-ref="dojmatchpropertySetter200"
p:refCursorPosition="1">
<property name="parameters">
<list>
<bean class="org.springframework.jdbc.core.SqlOutParameter">
<constructor-arg index="0" value="c1" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.CURSOR" />
</constructor-arg>
</bean>
<bean class="org.springframework.jdbc.core.SqlParameter">
<constructor-arg index="0" value="dept" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.VARCHAR" />
</constructor-arg>
</bean>
</list>
</property>
</bean>
<bean id="dojmatchpropertySetter200"
class="com.batchinterface.dept.recordresultsvr.DojPreparedStateSetter">
<property name="dept" value="200" />
</bean>
<bean id="dojMatchRowMapper200"
class="com.batchinterface.dept.recordresultsvr.DojMatchRowMapper" />
<bean id="dojClassifierMatchReportWriter200"
class="org.springframework.batch.item.support.ClassifierCompositeItemWriter"
p:classifier-ref="dojMatchClassifier200">
</bean>
<bean id="dojMatchClassifier200"
class="com.batchinterface.dept.recordresultsvr.DojMatchReportClassifier"
p:itemWriterMatch200-ref="itemWriterMatch200"
p:itemWriterUnMatch200-ref="itemWriterUnMatch200"
p:lastRunDate-ref="LastSuccessfulRunDate200">
</bean>
<!-- End dojMainStep200 -->
<!-- Start dojMainStep400 -->
<batch:step id="dojMainStep400" abstract="true">
<batch:tasklet>
<batch:chunk commit-interval="1000" reader="dojDbReader400"
processor="dojMatchItemProcessor400" writer="dojClassifierMatchReportWriter400">
<batch:streams>
<batch:stream ref="itemWriterMatch400" />
<batch:stream ref="itemWriterUnMatch400" />
</batch:streams>
</batch:chunk>
</batch:tasklet>
<batch:listeners>
<batch:listener ref="dojMatch400PageHeaderCallback" />
<batch:listener ref="dojUnMatch400PageHeaderCallback" />
<batch:listener ref="dojInPageFooterCallback" />
</batch:listeners>
</batch:step>
<bean id="dojMatchItemProcessor400"
class="com.batchinterface.dept.recordresultsvr.DojMatchItemProccesor"
p:holdingTankDao-ref="holdingTankDao" p:rsdProvider-ref="rsdProvider" p:searchProvider-ref="searchProvider" />
<bean id="dojDbReader400"
class="org.springframework.batch.item.database.StoredProcedureItemReader"
p:dataSource-ref="oracleDataSource" p:rowMapper-ref="dojMatchRowMapper400"
scope="step" p:function="false" p:procedureName="PKG_JOIN.PRC_SELECT"
p:preparedStatementSetter-ref="dojmatchpropertySetter400"
p:refCursorPosition="1">
<property name="parameters">
<list>
<bean class="org.springframework.jdbc.core.SqlOutParameter">
<constructor-arg index="0" value="c1" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.CURSOR" />
</constructor-arg>
</bean>
<bean class="org.springframework.jdbc.core.SqlParameter">
<constructor-arg index="0" value="dept" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.VARCHAR" />
</constructor-arg>
</bean>
</list>
</property>
</bean>
<bean id="dojmatchpropertySetter400"
class="com.batchinterface.dept.recordresultsvr.DojPreparedStateSetter">
<property name="dept" value="400" />
</bean>
<bean id="dojMatchRowMapper400"
class="com.batchinterface.dept.recordresultsvr.DojMatchRowMapper" />
<bean id="dojClassifierMatchReportWriter400"
class="org.springframework.batch.item.support.ClassifierCompositeItemWriter"
p:classifier-ref="dojMatchClassifier400">
</bean>
<bean id="dojMatchClassifier400"
class="com.batchinterface.dept.recordresultsvr.DojMatchReportClassifier"
p:itemWriterMatch400-ref="itemWriterMatch400"
p:itemWriterUnMatch400-ref="itemWriterUnMatch400"
p:lastRunDate-ref="LastSuccessfulRunDate400">
</bean>
<!-- End dojMainStep400 -->
<!-- End parallelProcessMatch -->
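No accepted answer is recorded here, but one hedged observation: making process() synchronized does not protect data that lives in shared singletons. Both flows share holdingTankDao, rsdProvider and searchProvider; if any of these (or the processor itself) keeps per-item data in instance fields, two threads can interleave and mix person records. A sketch of the stateless style that avoids this (Person and its accessors are hypothetical stand-ins, not the question's classes):

import org.springframework.batch.item.ItemProcessor;

public class StatelessMatchProcessor implements ItemProcessor<Person, Person> {
    @Override
    public Person process(Person item) throws Exception {
        // Keep every piece of per-item data in local variables, never in
        // instance fields, so parallel flows cannot see each other's records.
        Person result = new Person();
        result.setDept(item.getDept());
        result.setName(item.getName());
        return result;
    }
}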

Spring Batch error (A Job Instance Already Exists) and RunIdIncrementer generates only once

I'm using Spring Batch and Quartz to read from a database table and write into another table. The database is Oracle and the connection pool is c3p0.
The problem is that each job run must have unique parameters. I tried RunIdIncrementer and I tried this code:
public class JobRerunner implements JobParametersIncrementer {

    @Override
    public JobParameters getNext(JobParameters parameters) {
        System.out.println("got job parameters: " + parameters);
        if (parameters == null || parameters.isEmpty()) {
            return new JobParametersBuilder().addLong("run.id", System.currentTimeMillis()).toJobParameters();
        }
        long currentTime = parameters.getLong("run.id", System.currentTimeMillis()) + 1;
        return new JobParametersBuilder().addLong("run.id", currentTime).toJobParameters();
    }
}
But I get the same problem: run.id is generated only once, and when the job runs for the second time it has no parameters at all, and the same for the third time (for the second and third runs JobParameters = null, so "A Job Instance Already Exists").
Job context:
<batch:job id="readyReqPoolJob" restartable="true">
<batch:step id="readyReqPoolStep">
<batch:tasklet>
<batch:chunk reader="readyReqPoolReader" writer="readyReqPoolWrtiter"
commit-interval="100" />
</batch:tasklet>
</batch:step>
</batch:job>
<!-- ======================================================= -->
<!-- 6) READER -->
<!-- ======================================================= -->
<bean id="readyReqPoolReader"
class="org.springframework.batch.item.database.JdbcCursorItemReader">
<property name="dataSource" ref="dataSource" />
<property name="sql" value="select * from SF_ILA_Ready_Request_Pool" />
<property name="rowMapper" ref="ReadyReqPoolRowMapper" />
</bean>
<bean id="readyReqPoolWrtiter"
class="com.housekeepingservice.readyrequestpoolarchive.ReadyReqPoolArchiveWriter" />
<bean id="jobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass"
value="org.springframework.batch.sample.quartz.JobLauncherDetails" />
<property name="jobDataAsMap">
<map>
<entry key="jobName" value="readyReqPoolJob" />
<entry key="jobLocator" value-ref="jobRegistry" />
<entry key="jobLauncher" value-ref="jobLauncher" />
</map>
</property>
</bean>
<bean class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
<property name="triggers">
<bean id="cronTrigger"
class="org.springframework.scheduling.quartz.CronTriggerFactoryBean">
<property name="jobDetail" ref="jobDetail" />
<property name="cronExpression" value="0 0/5 * * * ?" />
</bean>
</property>
</bean>
Main context:
<import resource="classpath:spring/batch/config/readyReqPoolContext.xml" />
<import resource="classpath:spring/batch/config/jdbc.commons.xml" />
<!-- 1) USE ANNOTATIONS TO CONFIGURE SPRING BEANS -->
<context:component-scan base-package="com.housekeepingservice" />
<bean id="transactionManager"
class="org.springframework.jdbc.datasource.DataSourceTransactionManager">
<property name="dataSource" ref="dataSource" />
</bean>
<tx:annotation-driven transaction-manager="transactionManager" />
<bean id="jdbcTemplate" class="org.springframework.jdbc.core.JdbcTemplate">
<property name="dataSource" ref="dataSource" />
</bean>
<bean
class="org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor">
<property name="jobRegistry" ref="jobRegistry" />
</bean>
<bean id="jobRegistry"
class="org.springframework.batch.core.configuration.support.MapJobRegistry" />
<!-- 3) JOB REPOSITORY -->
<bean id="jobRepository"
class="org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean">
<property name="transactionManager" ref="transactionManager" />
</bean>
<!-- 4) LAUNCH JOBS FROM A REPOSITORY -->
<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository" />
<property name="taskExecutor" ref="taskExecutor" />
</bean>
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor" />
<bean id="jobExplorer"
class="org.springframework.batch.core.explore.support.JobExplorerFactoryBean">
<property name="dataSource" ref="dataSource" />
</bean>
<bean name="jobParamatersIncrementer" class="org.springframework.batch.core.launch.support.RunIdIncrementer">
</bean>
Test.java
public class Test {
public static void main(String[] args) {
String[] springConfig = { "spring/batch/config/mainContext.xml" };
ApplicationContext context = new ClassPathXmlApplicationContext(
springConfig);
JobRerunner rerun = new JobRerunner();
JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher");
Job readyRequestPoolJob = (Job) context.getBean("readyReqPoolJob");
try {
JobParameters jobParameters = new JobParameters();
JobExecution execution2 = jobLauncher.run(readyRequestPoolJob, rerun.getNext(jobParameters));
System.out.println("Exit Status : " + execution2.getStatus());
} catch (Exception e) {
e.printStackTrace();
}
System.out.println("Done");
}
}
Log (note the job instance parameters in the first run versus the second run):
17:00:27,053 INFO SimpleJobLauncher:132 - Job: [FlowJob: [name=readyReqPoolJob]] launched with the following parameters: **[{run.id=1393855226339}]**
17:00:27.085 [Timer-0] DEBUG org.quartz.utils.UpdateChecker - Checking for available updated version of Quartz...
17:00:27,272 INFO SimpleStepHandler:135 - Executing step: [readyReqPoolStep]
17:02:08,791 INFO SimpleJobLauncher:135 - Job: [FlowJob: [name=readyReqPoolJob]] completed with the following parameters: [{run.id=1393855226339}] and the following status: [COMPLETED]
17:10:00.005 [org.springframework.scheduling.quartz.SchedulerFactoryBean#0_Worker-1] DEBUG org.quartz.core.JobRunShell - Calling execute on job DEFAULT.jobDetail
17:10:00,008 INFO JobLauncherDetails:69 - Quartz trigger firing with Spring Batch jobName=readyReqPoolJob
17:10:00,036 INFO SimpleJobLauncher:132 - Job: [FlowJob: [name=readyReqPoolJob]] launched with the following parameters: **[{}]**
17:10:00,059 INFO SimpleStepHandler:135 - Executing step: [readyReqPoolStep]
To launch a job with a JobParametersIncrementer you need two things:
Attach the RunIdIncrementer to your job.
Use a launcher that is aware of the incrementer.
I do not see any need for your own implementation; just use the existing one.
Attach the RunIdIncrementer to your job:
<batch:job id="readyReqPoolJob" incrementer="runIdIncrementer" restartable="true">
</batch:job>
<bean id="runIdIncrementer"
class="org.springframework.batch.core.launch.support.RunIdIncrementer"/>
Use a launcher
To launch it you should use one of the following:
Option 1: CommandLineJobRunner with the -next option (see the API).
Option 2: Use JobOperator:
<bean id="jobOperator"
class="org.springframework.batch.core.launch.support.SimpleJobOperator">
<property name="jobRepository" ref="jobRepository" />
<property name="jobLauncher" ref="jobLauncher" />
<property name="jobRegistry" ref="jobRegistry" />
<property name="jobExplorer" ref="jobExplorer" />
</bean>
In the code:
jobOperator.startNextInstance(jobName)
Option 3: In JUnit you can use JobLauncherTestUtils.
Note that it has its own incrementer and will ignore the one you use;
see also the following answer: SpringBatch: Test a JobExecutionListener
Set the step's allowStartIfComplete flag to true.
Add a parameter called 'timestamp', for example, or, if you want to use run.id, set the job's jobParametersIncrementer to your jobParamatersIncrementer bean definition.
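A one-line sketch of the 'timestamp' parameter suggestion above (JobParametersBuilder is the standard API; job and jobLauncher refer to the beans from the question):

jobLauncher.run(job, new JobParametersBuilder()
        .addLong("timestamp", System.currentTimeMillis())
        .toJobParameters());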

How to skip batch step when condition is false

I have one basic job with one basic step. The job executes every x seconds (I am using Quartz for this). In my config class I also have a variable "runStep". Where should I add this attribute so that my step runs only if runStep is true?
<batch:job id="export1" parent="baseJob">
<batch:step id="registruj" parent="baseStep">
<batch:tasklet>
<batch:chunk reader="registrujReader" processor="registrujProcessor" writer="registrujWriter"
commit-interval="1" />
</batch:tasklet>
</batch:step>
</batch:job>
<bean id="baseJob" class="org.springframework.batch.core.job.SimpleJob" abstract="true">
<property name="jobRepository" ref="jobRepository" />
</bean>
<bean id="baseStep" class="org.springframework.batch.core.step.factory.SimpleStepFactoryBean" abstract="true">
<property name="transactionManager" ref="transactionManager" />
<property name="jobRepository" ref="jobRepository" />
<property name="startLimit" value="100" />
<property name="commitInterval" value="1" />
</bean>
<bean id="jobRepository" class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean"
p:dataSource-ref="expDataSource" p:transactionManager-ref="transactionManager" />
<bean id="jobLauncher" class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository" />
</bean>
<bean class="org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor">
<property name="jobRegistry" ref="jobRegistry" />
</bean>
<bean id="jobRegistry" class="org.springframework.batch.core.configuration.support.MapJobRegistry" />
<bean id="registrujWriter" class="cz.isvs.reg.rob.util.export.batch.RegistrujItemWriter" scope="step" />
<bean id="registrujReader" class="cz.isvs.reg.rob.util.export.batch.RegistrujItemReader" scope="step" />
<bean id="registrujProcessor" class="cz.isvs.reg.rob.util.export.batch.RegistrujItemProcessor" scope="step" />
<!-- run every 10 seconds -->
<bean class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
<property name="triggers">
<bean id="cronTrigger" class="org.springframework.scheduling.quartz.CronTriggerBean">
<property name="jobDetail" ref="jobDetail" />
<property name="cronExpression" value="*/10 * * * * ?" />
</bean>
</property>
</bean>
<bean id="jobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass" value="cz.isvs.reg.rob.util.export.batch.JobLauncherDetails" />
<property name="group" value="quartz-batch" />
<property name="jobDataAsMap">
<map>
<entry key="jobName" value="export1" />
<entry key="jobLocator" value-ref="jobRegistry" />
<entry key="jobLauncher" value-ref="jobLauncher" />
</map>
</property>
</bean>
Use a JobExecutionDecider
public class RunStepDecider implements JobExecutionDecider {

    @Override
    public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) {
        final String runStep = jobExecution.getJobParameters().getString("runStep");
        // Depending on the condition, return COMPLETED to run the step or FAILED to skip it.
        return "true".equals(runStep) ? FlowExecutionStatus.COMPLETED : FlowExecutionStatus.FAILED;
    }
}
<batch:job id="export1" parent="baseJob">
<batch:decision id="decision" decider="decider">
<batch:next on="COMPLETED" to="registruj" />
</batch:decision>
<batch:step id="registruj" parent="baseStep">
<batch:tasklet>
<batch:chunk reader="registrujReader" processor="registrujProcessor" writer="registrujWriter" commit-interval="1" />
</batch:tasklet>
</batch:step>
</batch:job>
<bean id="decider" class="RunStepDecider" />
and pass runStep as a JobParameter.
Hope this helps solve your problem.
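For completeness, a sketch of launching the job with the flag (the export1 and jobLauncher bean names are taken from the question's configuration; reading runStep from jobExecution.getJobParameters() matches the decider above):

Job job = (Job) context.getBean("export1");
JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher");
JobParameters params = new JobParametersBuilder()
        .addString("runStep", "true")
        .toJobParameters();
jobLauncher.run(job, params);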