I recently learned that we can run concurrent steps in Spring Batch using parallel steps (section 7.2 of http://docs.spring.io/spring-batch/trunk/reference/html/scalability.html). I tried it and the job ran successfully.
But when I look at the database, a person record in the table has been updated with another person's information, even though I made my ItemProcessor synchronized.
Background: the job is simple; it just processes person records from the person table for two departments (200 and 400) and writes them to a flat file.
When I looked at the flat file, I could see that a person from department 200 had been written with information belonging to a person from department 400. Kindly help: is there anything else I need to take care of?
<batch:job id="dept">
<batch:step id="dojStep1" parent="dojMainStep1" next="parallelProcessMatch">
</batch:step>
<batch:split id="parallelProcessMatch" task-executor="taskExecutor">
<batch:flow>
<batch:step id="step200" parent="dojMainStep200" >
</batch:step>
</batch:flow>
<batch:flow>
<batch:step id="step400" parent="dojMainStep400" >
</batch:step>
</batch:flow>
</batch:split>
</batch:job>
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor"/>
<!-- Start parallelProcessMatch -->
<!-- Start dojMainStep200 -->
<batch:step id="dojMainStep200" abstract="true">
<batch:tasklet>
<batch:chunk commit-interval="1000" reader="dojDbReader200"
processor="dojMatchItemProcessor200" writer="dojClassifierMatchReportWriter200">
<batch:streams>
<batch:stream ref="itemWriterMatch200" />
<batch:stream ref="itemWriterUnMatch200" />
</batch:streams>
</batch:chunk>
</batch:tasklet>
<batch:listeners>
<batch:listener ref="dojMatch200PageHeaderCallback" />
<batch:listener ref="dojUnMatch200PageHeaderCallback" />
<batch:listener ref="dojInPageFooterCallback" />
</batch:listeners>
</batch:step>
<bean id="dojMatchItemProcessor200"
class="com.batchinterface.dept.recordresultsvr.DojMatchItemProccesor"
p:holdingTankDao-ref="holdingTankDao" p:rsdProvider-ref="rsdProvider" p:searchProvider-ref="searchProvider" />
<bean id="dojDbReader200"
class="org.springframework.batch.item.database.StoredProcedureItemReader"
p:dataSource-ref="oracleDataSource" p:rowMapper-ref="dojMatchRowMapper200"
scope="step" p:function="false" p:procedureName="PKG_JOIN.PRC_SELECT"
p:preparedStatementSetter-ref="dojmatchpropertySetter200"
p:refCursorPosition="1">
<property name="parameters">
<list>
<bean class="org.springframework.jdbc.core.SqlOutParameter">
<constructor-arg index="0" value="c1" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.CURSOR" />
</constructor-arg>
</bean>
<bean class="org.springframework.jdbc.core.SqlParameter">
<constructor-arg index="0" value="dept" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.VARCHAR" />
</constructor-arg>
</bean>
</list>
</property>
</bean>
<bean id="dojmatchpropertySetter200"
class="com.batchinterface.dept.recordresultsvr.DojPreparedStateSetter">
<property name="dept" value="200" />
</bean>
<bean id="dojMatchRowMapper200"
class="com.batchinterface.dept.recordresultsvr.DojMatchRowMapper" />
<bean id="dojClassifierMatchReportWriter200"
class="org.springframework.batch.item.support.ClassifierCompositeItemWriter"
p:classifier-ref="dojMatchClassifier200">
</bean>
<bean id="dojMatchClassifier200"
class="com.batchinterface.dept.recordresultsvr.DojMatchReportClassifier"
p:itemWriterMatch200-ref="itemWriterMatch200"
p:itemWriterUnMatch200-ref="itemWriterUnMatch200"
p:lastRunDate-ref="LastSuccessfulRunDate200">
</bean>
<!-- End dojMainStep200 -->
<!-- Start dojMainStep400 -->
<batch:step id="dojMainStep400" abstract="true">
<batch:tasklet>
<batch:chunk commit-interval="1000" reader="dojDbReader400"
processor="dojMatchItemProcessor400" writer="dojClassifierMatchReportWriter400">
<batch:streams>
<batch:stream ref="itemWriterMatch400" />
<batch:stream ref="itemWriterUnMatch400" />
</batch:streams>
</batch:chunk>
</batch:tasklet>
<batch:listeners>
<batch:listener ref="dojMatch400PageHeaderCallback" />
<batch:listener ref="dojUnMatch400PageHeaderCallback" />
<batch:listener ref="dojInPageFooterCallback" />
</batch:listeners>
</batch:step>
<bean id="dojMatchItemProcessor400"
class="com.batchinterface.dept.recordresultsvr.DojMatchItemProccesor"
p:holdingTankDao-ref="holdingTankDao" p:rsdProvider-ref="rsdProvider" p:searchProvider-ref="searchProvider" />
<bean id="dojDbReader400"
class="org.springframework.batch.item.database.StoredProcedureItemReader"
p:dataSource-ref="oracleDataSource" p:rowMapper-ref="dojMatchRowMapper400"
scope="step" p:function="false" p:procedureName="PKG_JOIN.PRC_SELECT"
p:preparedStatementSetter-ref="dojmatchpropertySetter400"
p:refCursorPosition="1">
<property name="parameters">
<list>
<bean class="org.springframework.jdbc.core.SqlOutParameter">
<constructor-arg index="0" value="c1" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.CURSOR" />
</constructor-arg>
</bean>
<bean class="org.springframework.jdbc.core.SqlParameter">
<constructor-arg index="0" value="dept" />
<constructor-arg index="1">
<util:constant static-field="oracle.jdbc.OracleTypes.VARCHAR" />
</constructor-arg>
</bean>
</list>
</property>
</bean>
<bean id="dojmatchpropertySetter400"
class="com.batchinterface.dept.recordresultsvr.DojPreparedStateSetter">
<property name="dept" value="400" />
</bean>
<bean id="dojMatchRowMapper400"
class="com.batchinterface.dept.recordresultsvr.DojMatchRowMapper" />
<bean id="dojClassifierMatchReportWriter400"
class="org.springframework.batch.item.support.ClassifierCompositeItemWriter"
p:classifier-ref="dojMatchClassifier400">
</bean>
<bean id="dojMatchClassifier400"
class="com.batchinterface.dept.recordresultsvr.DojMatchReportClassifier"
p:itemWriterMatch400-ref="itemWriterMatch400"
p:itemWriterUnMatch400-ref="itemWriterUnMatch400"
p:lastRunDate-ref="LastSuccessfulRunDate400">
</bean>
<!-- End dojMainStep400 -->
<!-- End parallelProcessMatch -->
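Synchronizing the processor only serializes calls to that one instance; it does not protect you if records are staged in instance fields of the processor, of the row mapper, or of the singleton collaborators (holdingTankDao, rsdProvider, searchProvider) that both flows share. As a rough illustration only (Person and PersonReport are placeholders, since the real item types are not shown above), a processor that keeps everything in local variables is safe to call from parallel steps:

public class StatelessMatchItemProcessor implements ItemProcessor<Person, PersonReport> {

    @Override
    public PersonReport process(Person person) throws Exception {
        // Everything lives in local variables, so two parallel steps can run this
        // processor concurrently without one person's data leaking into another's.
        PersonReport report = new PersonReport();
        report.setDepartment(person.getDepartment());
        report.setName(person.getName());
        return report;
    }
}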
I have a file to read, like the sample below, where a record is split across multiple lines.
A record can have any number of lines; the only way to recognize a new record is a line that starts with "ABC", and the record can also contain a sub-record whose line starts with the identifier ABC_SUB. Each line of a record needs its own mapper, identified by the pattern at the start of the line (e.g., ABC, line2, line3, line4, ABC_SUB, line3, line4).
The same line3 and line4 patterns can appear in both places, but they need different mappers depending on the preceding identifier line.
In this example,
the line3 pattern (with mapper1) appears after a line that starts with ABC, and
the line3 pattern (with mapper2) appears after a line that starts with ABC_SUB.
How do I distinguish a line3 pattern under ABC from a line3 pattern under ABC_SUB?
I tried PatternMatchingCompositeLineTokenizer, but it always gives the first matching mapper.
Is there a way to look back a few lines to identify the subtype (ABC or ABC_SUB) and apply the corresponding mapper?
HDR
ABCline1goesonforrecord1 //record starts
line2goesonForRecord1
line3goesonForRecord1 //this requires ABC_line3_mapper
line4goesonForRecord1
ABC_SUBline1goesonforrecord1 //sub-record where it can have same pattern in below lines
line3goesonForRecord1 //this requires ABC_SUB_line3_mapper
line4goesonForRecord1
ABCline2goesOnForRecord2 //record 2 begins
line2goesonForRecord2
line3goesonForRecord2
line4goesonForRecord2
line5goesonForRecord2
ABCline2goesOnForRecord3
line2goesonForRecord3
line3goesonForRecord3
line4goesonForRecord3
TRL
Below is the XML config
<batch:job id="importFileData">
<batch:step id="parseAndLoadData">
<batch:tasklet>
<batch:chunk reader="multiLineReader" writer="writer"
commit-interval="5" skip-limit="100">
<batch:streams>
<batch:stream ref="fileItemReader" />
</batch:streams>
</batch:chunk>
</batch:tasklet>
</batch:step>
</batch:job>
<bean id="fileItemReader"
class="org.springframework.batch.item.file.FlatFileItemReader"
scope="step">
<property name="resource" value="classpath:input/input.txt"></property>
<property name="linesToSkip" value="2" />
<property name="lineMapper">
<bean
class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer" ref="customLineTokenizer">
</property>
<property name="fieldSetMapper">
<bean
class="org.springframework.batch.item.file.mapping.PassThroughFieldSetMapper">
</bean>
</property>
</bean>
</property>
</bean>
<bean id="reader" class="sample.MultiLineReader">
<property name="fieldSetReader" ref="fileItemReader" />
<property name="abcMapper" ref="abcMapper" />
<property name="123Mapper" ref="sample123Mapper" />
</bean>
<bean id="orderFileTokenizer"
class="org.springframework.batch.item.file.transform.PatternMatchingCompositeLineTokenizer">
<property name="tokenizers">
<map>
<entry key="ABC*" value-ref="abcLineTokenizer" />
<entry key="123*" value-ref="123LineTokenizer" />
</map>
</property>
</bean>
<bean id="abcLineTokenizer"
class="org.springframework.batch.item.file.transform.FixedLengthTokenizer">
<property name="names" value="NAME, AGE, GENDER" />
<property name="columns" value="1-10,11-15,16-20" />
<property name="strict" value="false" />
</bean>
<bean id="123LineTokenizer"
class="org.springframework.batch.item.file.transform.FixedLengthTokenizer">
<property name="names" value="CONTACT, ALT_CONTACT" />
<property name="columns" value="1-15,16-30" />
<property name="strict" value="false" />
</bean>
<bean id="abcMapper" class="sample.ABCMapper" />
<bean id="sample123Mapper" class="sample.123Mapper" />
</beans>
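One way to handle this (a sketch under my own assumptions, not the only option) is a small stateful tokenizer that remembers whether the last header line started with ABC or ABC_SUB and routes the detail lines to the matching delegate. Because it keeps state between lines, it must not be shared across threads or partitions:

public class ContextAwareLineTokenizer implements LineTokenizer {

    // The delegate fields are assumptions; wire in whichever concrete tokenizers you need.
    private LineTokenizer headerTokenizer;        // for the ABC / ABC_SUB lines themselves
    private LineTokenizer abcDetailTokenizer;     // for line3/line4 that follow an ABC header
    private LineTokenizer abcSubDetailTokenizer;  // for line3/line4 that follow an ABC_SUB header

    private boolean underSubRecord = false;

    @Override
    public FieldSet tokenize(String line) {
        if (line.startsWith("ABC_SUB")) {  // check ABC_SUB first, because "ABC" would also match it
            underSubRecord = true;
            return headerTokenizer.tokenize(line);
        }
        if (line.startsWith("ABC")) {
            underSubRecord = false;
            return headerTokenizer.tokenize(line);
        }
        // Detail line: pick the delegate based on the last header seen.
        return underSubRecord ? abcSubDetailTokenizer.tokenize(line) : abcDetailTokenizer.tokenize(line);
    }

    // Setters omitted, as elsewhere in this post.
}

The same idea works one level up: a custom LineMapper can keep the "last header" flag and choose between two FieldSetMappers, if it is the mappers rather than the tokenizers that have to differ.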
I am trying to get job parameters into an item processor using Spring Batch annotations.
I implemented it by referring to the link below, but the variable (batchRunName) comes back as null when I try to access it in my processor class.
Can anyone please take a look? I am sure I am missing something small.
How to get Job parameteres in to item processor using spring Batch annotation
public static void main(String[] args) {
contextObj = new ClassPathXmlApplicationContext(springConfig);
jobObj = (Job) contextObj.getBean("XYZ-1001-DD-01");
JobParametersBuilder jobBuilder = new JobParametersBuilder();
System.out.println("args[0] is " + args[0] );
jobBuilder.addString("batchRunName", args[0]);
public class TimeProcessor implements ItemProcessor<Time, TimeMetric> {
private DataSource dataSource;
@Value("#{jobParameters['batchRunNumber']}")
private String batchRunNumber;
public void setBatchRunNumber(String batchRunNumber) {
this.batchRunNumber = batchRunNumber;
}
<bean id="timeProcessor"
class="com.xyz.processor.TimeProcessor" scope="step">
<property name="dataSource" ref="oracledataSource" />
</bean>
=================FULL XML CONFIGURATION========================
<import resource="classpath:/batch/utility/skip/batch_skip.xml" />
<import resource="classpath:/batch/config/context-postgres.xml" />
<import resource="classpath:/batch/config/oracle-database.xml" />
<context:property-placeholder
location="classpath:/batch/jobs/TPF-1001-DD-01/TPF-1001-DD-01.properties" />
<bean id="gridSizePartitioner"
class="com.tpf.partitioner.GridSizePartitioner" />
<task:executor id="taskExecutor" pool-size="${pool.size}" />
<batch:job id="XYZJob" job-repository="jobRepository"
restartable="true">
<batch:step id="XYZSTEP">
<batch:description>Convert TIF files to PDF</batch:description>
<batch:partition partitioner="gridSizePartitioner">
<batch:handler task-executor="taskExecutor"
grid-size="${pool.size}" />
<batch:step>
<batch:tasklet allow-start-if-complete="true">
<batch:chunk commit-interval="${commit.interval}"
skip-limit="${job.skip.limit}">
<batch:reader>
<bean id="timeReader"
class="org.springframework.batch.item.database.JdbcCursorItemReader"
scope="step">
<property name="dataSource" ref="oracledataSource" />
<property name="sql">
<value>
select TIME_ID as timesheetId,count(*),max(CREATION_DATETIME) as creationDateTime , ILN_NUMBER as ilnNumber
from TS_FAKE_NAME
where creation_datetime >= '#{jobParameters['creation_start_date1']} 12.00.00.000000000 AM'
and creation_datetime < '#{jobParameters['creation_start_date2']} 11.59.59.999999999 PM'
and mod(time_id,${pool.size})=#{stepExecutionContext['partition.id']}
group by time_id ,ILN_NUMBER
</value>
</property>
<property name="rowMapper">
<bean
class="org.springframework.jdbc.core.BeanPropertyRowMapper">
<property name="mappedClass"
value="com.tpf.model.Time" />
</bean>
</property>
</bean>
</batch:reader>
<batch:processor>
<bean id="compositeItemProcessor"
class="org.springframework.batch.item.support.CompositeItemProcessor">
<property name="delegates">
<list>
<ref bean="timeProcessor" />
</list>
</property>
</bean>
</batch:processor>
<batch:writer>
<bean id="compositeItemWriter"
class="org.springframework.batch.item.support.CompositeItemWriter">
<property name="delegates">
<list>
<ref bean="timeWriter" />
</list>
</property>
</bean>
</batch:writer>
<batch:skippable-exception-classes>
<batch:include
class="com.utility.skip.BatchSkipException" />
</batch:skippable-exception-classes>
<batch:listeners>
<batch:listener ref="batchSkipListener" />
</batch:listeners>
</batch:chunk>
</batch:tasklet>
</batch:step>
</batch:partition>
</batch:step>
<batch:validator>
<bean
class="org.springframework.batch.core.job.DefaultJobParametersValidator">
<property name="requiredKeys">
<list>
<value>batchRunNumber</value>
<value>creation_start_date1</value>
<value>creation_start_date2</value>
</list>
</property>
</bean>
</batch:validator>
</batch:job>
<bean id="timesheetWriter" class="com.tpf.writer.TimeWriter"
scope="step">
<property name="dataSource" ref="dataSource" />
</bean>
<bean id="timeProcessor"
class="com.tpf.processor.TimeProcessor" scope="step">
<property name="dataSource" ref="oracledataSource" />
</bean>
I think you are facing the issue reported in BATCH-2351.
You can try to provide the job parameter via XML instead of the annotation (since the majority of your config is XML-based):
<bean id="timeProcessor" class="com.xyz.processor.TimeProcessor" scope="step">
<property name="dataSource" ref="oracledataSource" />
<property name="batchRunNumber" value="#{jobParameters['batchRunNumber']}" />
</bean>
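Alternatively (just a sketch, not something from the linked answer, and not tested against your setup), you can pull the parameter out of the StepExecution in a @BeforeStep callback, which does not rely on late binding at all; the chunk's processor is normally picked up automatically as a step listener:

public class TimeProcessor implements ItemProcessor<Time, TimeMetric> {

    private DataSource dataSource;
    private String batchRunNumber;

    @BeforeStep
    public void retrieveJobParameters(StepExecution stepExecution) {
        // Job parameters are always reachable through the step execution.
        this.batchRunNumber = stepExecution.getJobParameters().getString("batchRunNumber");
    }

    @Override
    public TimeMetric process(Time item) throws Exception {
        // Use batchRunNumber while building the TimeMetric; the real mapping is not shown
        // in the question, so this placeholder returns null (which would filter the item).
        return null;
    }

    public void setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
    }
}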
Hope this helps.
I am trying to read a CSV file containing 100 records and process them in batches of 10. Everything works fine, but after all the records have been processed I am getting
org.springframework.batch.item.file.NonTransientFlatFileException:
Unable to read from resource: [class path resource
[csv/input/VMwareImport.csv]] and the root cause is
org.springframework.batch.core.JobExecutionException: Partition
handler returned an unsuccessful step.
Below is my job XML:
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:batch="http://www.springframework.org/schema/batch" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/batch
http://www.springframework.org/schema/batch/spring-batch-2.2.xsd
http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.2.xsd">
<import resource="../config/context.xml" />
<bean id="report" class="com.abc.model.VMware" scope="prototype" />
<bean id="itemProcessor" class="com.abc.CustomItemProcessor" scope="step">
<property name="threadName" value="#{stepExecutionContext[name]}" />
</bean>
<batch:job id="vmImport">
<batch:step id="step1">
<partition step="slave" partitioner="rangePartitioner">
<handler grid-size="10" task-executor="taskExecutor" />
</partition>
</batch:step>
</batch:job>
<batch:step id="slave">
<batch:tasklet>
<batch:chunk reader="cvsFileItemReader" writer="xmlItemWriter"
processor="itemProcessor" commit-interval="10" />
</batch:tasklet>
</batch:step>
<bean id="rangePartitioner" class="com.abc.partition.RangePartitioner" />
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor" />
<bean id="cvsFileItemReader" class="org.springframework.batch.item.file.FlatFileItemReader">
<property name="resource" value="classpath:csv/input/VMwareImport.csv" />
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean
class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="names" value="SubscriptionId,TemplateName,ResourcePool,CpuCount,MemorySize,Network,DiskSize,
StorageCluster,DHCP,StaticIP,Subnet,Gateway,DNS1,DNS2,
DtapTag,FinTag,TechTag,TshirtTag,BackupTag " />
</bean>
</property>
<property name="fieldSetMapper">
<bean class="com.abc.VMwareFieldSetMapper" />
<!-- if no data type conversion, use BeanWrapperFieldSetMapper to map
by name <bean class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<property name="prototypeBeanName" value="report" /> </bean> -->
</property>
</bean>
</property>
</bean>
<!-- <bean id="cvsFileItemReader" class="com.abc.VMwareReaderFromFile" scope="step">
<constructor-arg value="classpath:csv/input/VMwareImport.csv" />
</bean> -->
<bean id="xmlItemWriter" class="com.abc.DummyWriter">
<!-- <property name="resource" value="file:xml/outputs/report.xml" /> <property
name="marshaller" ref="reportMarshaller" /> <property name="rootTagName"
value="report" /> -->
</bean>
<!-- <bean id="reportMarshaller" class="org.springframework.oxm.jaxb.Jaxb2Marshaller">
<property name="classesToBeBound"> <list> <value>com.abc.model.VMware</value>
</list> </property> </bean> -->
Complete Exception StackTrace:
org.springframework.batch.item.file.NonTransientFlatFileException: Unable to read from resource: [class path resource [csv/input/VMwareImport.csv]]
at org.springframework.batch.item.file.FlatFileItemReader.readLine(FlatFileItemReader.java:220)
at org.springframework.batch.item.file.FlatFileItemReader.doRead(FlatFileItemReader.java:173)
at org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader.read(AbstractItemCountingItemStreamItemReader.java:83)
at org.springframework.batch.core.step.item.SimpleChunkProvider.doRead(SimpleChunkProvider.java:91)
at org.springframework.batch.core.step.item.SimpleChunkProvider.read(SimpleChunkProvider.java:155)
at org.springframework.batch.core.step.item.SimpleChunkProvider$1.doInIteration(SimpleChunkProvider.java:114)
at org.springframework.batch.repeat.support.RepeatTemplate.getNextResult(RepeatTemplate.java:368)
at org.springframework.batch.repeat.support.RepeatTemplate.executeInternal(RepeatTemplate.java:215)
at org.springframework.batch.repeat.support.RepeatTemplate.iterate(RepeatTemplate.java:144)
at org.springframework.batch.core.step.item.SimpleChunkProvider.provide(SimpleChunkProvider.java:108)
at org.springframework.batch.core.step.item.ChunkOrientedTasklet.execute(ChunkOrientedTasklet.java:69)
at org.springframework.batch.core.step.tasklet.TaskletStep$ChunkTransactionCallback.doInTransaction(TaskletStep.java:395)
at org.springframework.transaction.support.TransactionTemplate.execute(TransactionTemplate.java:131)
at org.springframework.batch.core.step.tasklet.TaskletStep$2.doInChunkContext(TaskletStep.java:267)
at org.springframework.batch.core.scope.context.StepContextRepeatCallback.doInIteration(StepContextRepeatCallback.java:77)
at org.springframework.batch.repeat.support.RepeatTemplate.getNextResult(RepeatTemplate.java:368)
at org.springframework.batch.repeat.support.RepeatTemplate.executeInternal(RepeatTemplate.java:215)
at org.springframework.batch.repeat.support.RepeatTemplate.iterate(RepeatTemplate.java:144)
at org.springframework.batch.core.step.tasklet.TaskletStep.doExecute(TaskletStep.java:253)
at org.springframework.batch.core.step.AbstractStep.execute(AbstractStep.java:195)
at org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler$1.call(TaskExecutorPartitionHandler.java:139)
at org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler$1.call(TaskExecutorPartitionHandler.java:136)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.lang.Thread.run(Thread.java:744)
Caused by: java.io.IOException: Stream closed
at java.io.BufferedReader.ensureOpen(BufferedReader.java:115)
at java.io.BufferedReader.readLine(BufferedReader.java:310)
at java.io.BufferedReader.readLine(BufferedReader.java:382)
at org.springframework.batch.item.file.FlatFileItemReader.readLine(FlatFileItemReader.java:201)
... 23 more
SEVERE: Encountered an error executing the step
org.springframework.batch.core.JobExecutionException: Partition handler returned an unsuccessful step
at org.springframework.batch.core.partition.support.PartitionStep.doExecute(PartitionStep.java:111)
at org.springframework.batch.core.step.AbstractStep.execute(AbstractStep.java:195)
at org.springframework.batch.core.job.SimpleStepHandler.handleStep(SimpleStepHandler.java:137)
at org.springframework.batch.core.job.flow.JobFlowExecutor.executeStep(JobFlowExecutor.java:64)
at org.springframework.batch.core.job.flow.support.state.StepState.handle(StepState.java:60)
at org.springframework.batch.core.job.flow.support.SimpleFlow.resume(SimpleFlow.java:152)
at org.springframework.batch.core.job.flow.support.SimpleFlow.start(SimpleFlow.java:131)
at org.springframework.batch.core.job.flow.FlowJob.doExecute(FlowJob.java:135)
at org.springframework.batch.core.job.AbstractJob.execute(AbstractJob.java:301)
at org.springframework.batch.core.launch.support.SimpleJobLauncher$1.run(SimpleJobLauncher.java:134)
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
at org.springframework.batch.core.launch.support.SimpleJobLauncher.run(SimpleJobLauncher.java:127)
at com.fujitsu.App.main(App.java:27)
Your cvsFileItemReader needs to be step-scoped. Whichever partition finishes first closes the stream, so the other partitions are unable to continue reading.
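For example (only the scope attribute is new; the lineMapper stays exactly as in your configuration):

<bean id="cvsFileItemReader" class="org.springframework.batch.item.file.FlatFileItemReader"
    scope="step">
    <property name="resource" value="classpath:csv/input/VMwareImport.csv" />
    <!-- lineMapper as in the original definition -->
</bean>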
I would like to invoke an sftp:outbound-gateway from a batch tasklet in order to download a file from an SFTP server.
I've seen other posts on this subject, but I'm not sure what I am doing wrong. Could anybody give me a hint based on my configuration? My batch job works, so the problem is just invoking the SFTP component in the batch step. I've marked the Spring Integration section with a comment so it is easier to read just the relevant configuration.
I can see in my logs: DEBUG [o.s.i.e.SourcePollingChannelAdapter] Received no Message during the poll, returning 'false'. So I am not receiving a file, but why?
Thanks in advance for your time spent on the analysis!
<bean id="ftsSftpClientFactory" class="org.springframework.integration.sftp.session.DefaultSftpSessionFactory">
<property name="host" value="${my.import.sftp.localhost}"/>
<property name="user" value="${my.import.sftp.username}"/>
<property name="password" value="${my.import.sftp.passwort}"/>
</bean>
<!-- Start: Spring Integration -->
<int:channel id="replyChannel" >
<int:queue/>
</int:channel>
<int:channel id="requestChannel" />
<int-sftp:outbound-gateway id="sftpGateway"
session-factory="ftsSftpClientFactory"
request-channel="requestChannel"
reply-channel="replyChannel"
auto-startup="true"
command="get"
command-options="-P"
expression="payload"
remote-directory="."
local-directory="${my.import.sftp.copy.file.destinationpath}">
</int-sftp:outbound-gateway>
<bean name="copyFileTasklet" class="com.mydomain.CopyFileTasklet">
<property name="channel" ref="replyChannel" />
<property name="pollableChannel" ref="requestChannel" />
</bean>
<!-- Start: Spring Batch -->
<bean name="myImportTask" class="com.mydomain.MyImportTask">
<property name="job" ref="unternehmungImportJob"/>
<property name="jobLauncher" ref="jobLauncher"/>
</bean>
<bean id="jobDetail"
class="com.mydomain.MyImportJob">
<property name="myImportTask" ref="myImportTask" />
</bean>
<!--suppress SpringBatchModel -->
<batch:job id="myImportJob">
<batch:step id="copy-file-step" next="my-import-step">
<batch:tasklet ref="copyFileTasklet"/>
</batch:step>
<batch:step id="my-import-step">
<batch:tasklet>
<batch:chunk reader="myItemReader"
writer="myItemWriter"
commit-interval="10000">
<!--
skip-limit="10000"
<batch:skippable-exception-classes>
<batch:include class="java.lang.Exception"/>
<batch:exclude class="java.io.FileNotFoundException"/>
</batch:skippable-exception-classes> -->
</batch:chunk>
<batch:transaction-attributes isolation="DEFAULT" propagation="REQUIRED"/>
</batch:tasklet>
</batch:step>
</batch:job>
<bean id="myItemReader" scope="step" class="org.springframework.batch.item.file.FlatFileItemReader">
<property name="linesToSkip" value="1"/>
<property name="encoding" value="${my.import.batch.encoding}" />
<property name="resource" value="${my.import.batch.input.resource}"/>
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer" ref="lineTokenizer"/>
<property name="fieldSetMapper">
<bean class="com.mydomain.MyImportMapper"/>
</property>
</bean>
</property>
</bean>
<bean id="myItemWriter" class="com.mydomain.MyItemWriter">
<property name="myApplicationService" ref="defaultmyApplicationService" />
</bean>
<bean id="lineTokenizer" class="com.mydomain.DelimitedLineTokenizerWithEOF">
<property name="delimiter" value="${my.import.batch.delimiter}" />
<property name="eofMarker" value="${my.import.batch.eof.marker}" />
</bean>
public class CopyFileTasklet implements Tasklet {
private MessageChannel requestChannel;
private PollableChannel replyChannel;
public void setRequestChannel(MessageChannel requestChannel) {
this.requestChannel = requestChannel;
}
public void setReplyChannel(PollableChannel replyChannel) {
this.replyChannel = replyChannel;
}
@Override
public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) throws Exception {
Message<?> result = replyChannel.receive(10000);
Object file = result.getPayload();
return RepeatStatus.FINISHED;
}
}
Your issue is that you don't initiate the integration flow from your custom Tasklet. Of course you can't receive anything from the replyChannel if you haven't sent a request first.
If you just need to run the integration flow and get a result from it, it would be better to use a POJI <gateway> from that Tasklet:
public interface SftpGateway {
File download(String fileName);
}
<gateway id="sftpGateway" service-interface="com.my.proj.SftpGateway"
default-request-channel="requestChannel"/>
<bean name="copyFileTasklet" class="com.mydomain.CopyFileTasklet">
<property name="sftpGateway" ref="sftpGateway" />
</bean>
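The tasklet itself could then look roughly like this (a sketch; the remote file name is a placeholder and error handling is omitted):

public class CopyFileTasklet implements Tasklet {

    private SftpGateway sftpGateway;

    public void setSftpGateway(SftpGateway sftpGateway) {
        this.sftpGateway = sftpGateway;
    }

    @Override
    public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) throws Exception {
        // Calling the gateway initiates the integration flow and blocks until the reply arrives.
        File file = sftpGateway.download("remote-file.txt"); // placeholder file name
        // ... do whatever the import step needs with the downloaded file
        return RepeatStatus.FINISHED;
    }
}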
Something like that.
I have one basic job with one basic step. This job executes every x seconds (I am using Quartz for this). In my config class I also have a variable "runStep". Where should I add this attribute so that the step runs only if runStep is true?
<batch:job id="export1" parent="baseJob">
<batch:step id="registruj" parent="baseStep">
<tasklet>
<chunk reader="registrujReader" processor="registrujProcessor" writer="registrujWriter"
commit-interval="1" />
</tasklet>
</batch:step>
</batch:job>
<bean id="baseJob" class="org.springframework.batch.core.job.SimpleJob" abstract="true">
<property name="jobRepository" ref="jobRepository" />
</bean>
<bean id="baseStep" class="org.springframework.batch.core.step.factory.SimpleStepFactoryBean" abstract="true">
<property name="transactionManager" ref="transactionManager" />
<property name="jobRepository" ref="jobRepository" />
<property name="startLimit" value="100" />
<property name="commitInterval" value="1" />
</bean>
<bean id="jobRepository" class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean"
p:dataSource-ref="expDataSource" p:transactionManager-ref="transactionManager" />
<bean id="jobLauncher" class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository" />
</bean>
<bean class="org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor">
<property name="jobRegistry" ref="jobRegistry" />
</bean>
<bean id="jobRegistry" class="org.springframework.batch.core.configuration.support.MapJobRegistry" />
<bean id="registrujWriter" class="cz.isvs.reg.rob.util.export.batch.RegistrujItemWriter" scope="step" />
<bean id="registrujReader" class="cz.isvs.reg.rob.util.export.batch.RegistrujItemReader" scope="step" />
<bean id="registrujProcessor" class="cz.isvs.reg.rob.util.export.batch.RegistrujItemProcessor" scope="step" />
<!-- run every 10 seconds -->
<bean class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
<property name="triggers">
<bean id="cronTrigger" class="org.springframework.scheduling.quartz.CronTriggerBean">
<property name="jobDetail" ref="jobDetail" />
<property name="cronExpression" value="*/10 * * * * ?" />
</bean>
</property>
</bean>
<bean id="jobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass" value="cz.isvs.reg.rob.util.export.batch.JobLauncherDetails" />
<property name="group" value="quartz-batch" />
<property name="jobDataAsMap">
<map>
<entry key="jobName" value="export1" />
<entry key="jobLocator" value-ref="jobRegistry" />
<entry key="jobLauncher" value-ref="jobLauncher" />
</map>
</property>
</bean>
Use a JobExecutionDecider
public class RunStepDecider implements JobExecutionDecider {

    @Override
    public FlowExecutionStatus decide(JobExecution jobExecution, StepExecution stepExecution) {
        final String runStep = jobExecution.getJobParameters().getString("runStep");
        // Depending on the value of runStep you can return COMPLETED or FAILED.
        return "true".equals(runStep) ? FlowExecutionStatus.COMPLETED : FlowExecutionStatus.FAILED;
    }
}
<batch:job id="export1" parent="baseJob">
<decision id="decision" decider="decider">
<next on="COMPLETED" to="registruj" />
<!-- ends the job without running the step when runStep is not "true" -->
<end on="FAILED" />
</decision>
<batch:step id="registruj" parent="baseStep">
<tasklet>
<chunk reader="registrujReader" processor="registrujProcessor" writer="registrujWriter" commit-interval="1" />
</tasklet>
</batch:step>
</batch:job>
<bean id="decider" class="RunStepDecider" />
and pass runStep as a JobParameter.
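If your JobLauncherDetails copies the jobDataAsMap entries into JobParameters (implementations based on the Spring Batch sample class of that name do), adding <entry key="runStep" value="true" /> to jobDataAsMap is enough. When launching manually it would look roughly like this (a sketch; run.ts is just an assumed extra key to keep each run unique):

JobParameters params = new JobParametersBuilder()
        .addString("runStep", "true")
        .addLong("run.ts", System.currentTimeMillis())
        .toJobParameters();
jobLauncher.run(job, params);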
Hope this helps to solve your problem.