Standalone Spring Batch reading from DB - spring-batch

I'm very new to Spring Batch, trying to make a standalone Spring Batch application (without Spring Boot) that reads from a MySQL DB and writes the results to CSV.
I tried googling and nothing worked out. I also can't understand how the ItemWriter/ItemReader are created: are they just methods, or classes that inherit from Writer/Reader? And what are the inputs and outputs of the read/write methods? I can't find any clear rules about this.
Can Spring Batch never be implemented without Spring Boot? If it can, how?
The reason I want standalone Spring Batch is that I just want a main method to trigger the job; I don't want a web app that calls a certain endpoint to trigger it.
Does injection work without Spring Boot, or is Spring Boot mandatory for injection to work?
Also, is the JobLauncher injected or configured by XML files? I ask because I sometimes get an error that the jobLauncher is null after injecting it.
My configuration file is:
import com.techtalk.debu.batch.entity.Employee;
import org.springframework.batch.core.*;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.database.JdbcCursorItemReader;
import org.springframework.batch.item.database.builder.JdbcCursorItemReaderBuilder;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.builder.FlatFileItemWriterBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.jdbc.support.JdbcTransactionManager;
import javax.sql.DataSource;
@EnableBatchProcessing(dataSourceRef = "batchDataSource", transactionManagerRef = "batchTransactionManager")
public class LoadController {
@Bean
public DataSource batchDataSource() {
return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2)
.addScript("/org/springframework/batch/core/schema-h2.sql")
.generateUniqueName(false).build();
}
@Bean
public DataSource dataSource() {
DataSourceBuilder dataSourceBuilder = DataSourceBuilder.create();
dataSourceBuilder.url("jdbc:mysql://localhost:3311/new");
dataSourceBuilder.username("sa");
dataSourceBuilder.password("12345678908");
return dataSourceBuilder.build();
}
@Bean
public JdbcTransactionManager batchTransactionManager(DataSource dataSource) {
return new JdbcTransactionManager(dataSource);
}
public static void main(String e []) throws Exception {
JobParameters jobParameters = new JobParametersBuilder().addLong("time", System.currentTimeMillis()).toJobParameters();
ApplicationContext context = new AnnotationConfigApplicationContext(LoadController.class);
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
JobExecution jobExecution = jobLauncher.run(context.getBean(Job.class), jobParameters);
System.out.println("Job Exit Status : " + jobExecution.getStatus());
}
@Bean
public Step step(JobRepository jobRepository, JdbcTransactionManager transactionManager) {
return new StepBuilder("step", jobRepository)
.<Employee, Employee>chunk(5, transactionManager)
.reader(itemReader())
.writer(itemWriter())
.processor(new CSVItemProcessor())
.build();
}
@Bean
public JdbcTransactionManager transactionManager(DataSource dataSource) {
return new JdbcTransactionManager(dataSource);
}
@Bean
public Job job(JobRepository jobRepository, JdbcTransactionManager transactionManager) {
return new JobBuilder("job")
.start(step(jobRepository, transactionManager))
.build();
}
@Bean
public JdbcCursorItemReader<Employee> itemReader() {
String sql = "select * from person";
return new JdbcCursorItemReaderBuilder<Employee>()
.name("personItemReader")
.dataSource(dataSource())
.sql(sql)
.beanRowMapper(Employee.class)
.build();
}
@Bean
public FlatFileItemWriter<Employee> itemWriter() {
return new FlatFileItemWriterBuilder<Employee>()
.resource(new FileSystemResource("persons.csv"))
.name("personItemWriter")
.delimited()
.names("id", "name")
.build();
}
}
and my pom file is:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>3.0.2</version>
<relativePath /> <!-- lookup parent from repository -->
</parent>
<groupId>com.techtalk.debu</groupId>
<artifactId>Spring-Batch-CSV-to-DB-Project</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>spring-batch-CSV-To_DB-Write-Example-For-Beginners</name>
<description>Demo project for Beginners to understand Spring Boot</description>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>17</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
<version>3.0.2</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-core</artifactId>
<version>5.0.0</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.21</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-infrastructure</artifactId>
<version>5.0.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
When I run my code I get this error:
Failed to instantiate [org.springframework.batch.core.Job]: Factory
method 'job' threw exception with message:
java.lang.IllegalStateException: JobRepository must be set
Also I have some questions:
What should the reader()/writer() functions return? And what should I do if I want the reader to read from another DataSource?
Is the data source defined in my application.properties used by default to store the job meta-data, or to do the actual Spring Batch work?

You can run a standalone Spring Batch application without Spring Boot. The reference documentation contains a code sample for that, and the javadoc of EnableBatchProcessing contains an example as well. You also have the samples, which are not based on Spring Boot.
Here is a quick example of a non-Boot standalone Spring Batch app with a main method:
package org.springframework.batch.sample;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.database.JdbcCursorItemReader;
import org.springframework.batch.item.database.builder.JdbcCursorItemReaderBuilder;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.builder.FlatFileItemWriterBuilder;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.jdbc.support.JdbcTransactionManager;
@Configuration
@EnableBatchProcessing
public class MyJobConfiguration {
@Bean
public JdbcCursorItemReader<Person> itemReader() {
String sql = "select * from person";
return new JdbcCursorItemReaderBuilder<Person>()
.name("personItemReader")
.dataSource(dataSource())
.sql(sql)
.beanRowMapper(Person.class)
.build();
}
@Bean
public FlatFileItemWriter<Person> itemWriter() {
return new FlatFileItemWriterBuilder<Person>()
.resource(new FileSystemResource("persons.csv"))
.name("personItemWriter")
.delimited()
.names("id", "name")
.build();
}
@Bean
public Job job(JobRepository jobRepository, JdbcTransactionManager transactionManager) {
return new JobBuilder("job", jobRepository)
.start(new StepBuilder("step", jobRepository)
.<Person, Person>chunk(5, transactionManager)
.reader(itemReader())
.writer(itemWriter())
.build())
.build();
}
@Bean
public DataSource dataSource() {
EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder()
.setType(EmbeddedDatabaseType.HSQL)
.addScript("/org/springframework/batch/core/schema-hsqldb.sql")
.build();
JdbcTemplate jdbcTemplate = new JdbcTemplate(embeddedDatabase);
jdbcTemplate.execute("create table person (id int primary key, name varchar(20));");
for (int i = 1; i <= 10; i++) {
jdbcTemplate.execute(String.format("insert into person values (%s, 'foo%s');", i, i));
}
return embeddedDatabase;
}
@Bean
public JdbcTransactionManager transactionManager(DataSource dataSource) {
return new JdbcTransactionManager(dataSource);
}
static class Person {
private int id;
private String name;
public Person() {
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String toString() {
return "Person{id=" + id + ", name='" + name + '\'' + '}';
}
}
public static void main(String[] args) throws Exception {
ApplicationContext context = new AnnotationConfigApplicationContext(MyJobConfiguration.class);
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
Job job = context.getBean(Job.class);
jobLauncher.run(job, new JobParameters());
}
}
You just need to make sure to use Spring Batch 5.0.0 and have HSQLDB on the classpath.
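For reference, the Maven coordinates would look something like this (the HSQLDB version below is just an example):
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-core</artifactId>
<version>5.0.0</version>
</dependency>
<!-- embedded database used for the job repository in this sample; the version is an example -->
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>2.7.1</version>
<scope>runtime</scope>
</dependency>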
EDIT: based on question update
trying to make a standalone Spring Batch application (without Spring Boot) that reads from a MySQL DB and writes the results to CSV.
I updated the sample to something like what you are trying to do. You just need to update the datasource to point to your MySQL database.
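For example, a plain Spring (non-Boot) MySQL DataSource bean could look like this; it's a minimal sketch, and the URL and credentials are placeholders for your own settings:

// import org.springframework.jdbc.datasource.DriverManagerDataSource;
@Bean
public DataSource dataSource() {
    DriverManagerDataSource dataSource = new DriverManagerDataSource();
    dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
    dataSource.setUrl("jdbc:mysql://localhost:3306/mydb"); // placeholder
    dataSource.setUsername("user");                        // placeholder
    dataSource.setPassword("password");                    // placeholder
    return dataSource;
}

Note that DataSourceBuilder, which you used in your configuration, is a Spring Boot class; DriverManagerDataSource comes from spring-jdbc and works without Boot.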
reader()/writer(): what should these functions return?
Those should return a type implementing ItemReader and ItemWriter respectively.
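If you want to implement them yourself instead of using the provided builders, a minimal sketch (for Spring Batch 5, with illustrative bodies) looks like this:

import org.springframework.batch.item.Chunk;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;

class EmployeeReader implements ItemReader<Employee> {
    @Override
    public Employee read() {
        // Return the next item, or null to signal that the input is exhausted
        // (null is how Spring Batch knows the step is done reading).
        return null; // placeholder: fetch the next Employee here
    }
}

class EmployeeWriter implements ItemWriter<Employee> {
    @Override
    public void write(Chunk<? extends Employee> chunk) {
        // Called once per chunk (5 items at a time with chunk(5, ...)).
        for (Employee employee : chunk) {
            System.out.println(employee); // placeholder: write the item somewhere
        }
    }
}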
What if I want the reader to read from another DataSource, what should I do?
In this case, you can configure your reader to read from that datasource, as shown in the example above. That datasource can be different from the one storing the batch meta-data.
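Concretely, with the two datasources from your configuration, you can inject the business one into the reader definition explicitly; a sketch (bean names follow your config):

// import org.springframework.beans.factory.annotation.Qualifier;
@Bean
public JdbcCursorItemReader<Employee> itemReader(@Qualifier("dataSource") DataSource mysqlDataSource) {
    return new JdbcCursorItemReaderBuilder<Employee>()
            .name("personItemReader")
            .dataSource(mysqlDataSource) // business data, not the "batchDataSource" used for meta-data
            .sql("select * from person")
            .beanRowMapper(Employee.class)
            .build();
}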
CSVItemProcessor is a class implementing ItemProcessor<Employee, Employee> and has the process method, but the reader did not accept this code
Your CSVItemProcessor should implement ItemProcessor<Employee, Employee>, and your step should be declared as follows:
@Bean
public Step step() {
return new StepBuilder("MY Step")
.<Employee, Employee>chunk(10) // note the explicit input and output type definition here, which is missing in your code
.reader(reader())
.processor(new CSVItemProcessor())
.writer(writer())
.build();
}
Moreover, you need to pass the job repository and transaction manager to the step as shown in the complete example above, something like:
@Bean
public Step step(JobRepository jobRepository, JdbcTransactionManager transactionManager) {
return new StepBuilder("MY Step", jobRepository)
.<Employee, Employee>chunk(10, transactionManager) // note the explicit input and output type definition here, which is missing in your code
.reader(reader())
.processor(new CSVItemProcessor())
.writer(writer())
.build();
}
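For completeness, a minimal CSVItemProcessor could look like the following sketch; the actual transformation logic is up to you:

import org.springframework.batch.item.ItemProcessor;

public class CSVItemProcessor implements ItemProcessor<Employee, Employee> {
    @Override
    public Employee process(Employee employee) {
        // Transform the item here; returning null filters it out of the chunk.
        return employee;
    }
}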

Related

How to fix this error in spring-batch? JobRepository must be set

I'm new to Spring Batch and have a very simple Spring Batch POC (without Spring Boot) that moves data from MySQL to CSV, and it shows me this error:
Failed to instantiate [org.springframework.batch.core.Job]: Factory
method 'job' threw exception with message:
java.lang.IllegalStateException: JobRepository must be set
I tried to google it but can't find any working example.
Can someone please explain what I'm missing?
Also, I want to use a different datasource for the jobRepository than the one I'm batching from.
My main class:
import com.techtalk.debu.batch.entity.Employee;
import org.springframework.batch.core.*;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.database.JdbcCursorItemReader;
import org.springframework.batch.item.database.builder.JdbcCursorItemReaderBuilder;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.builder.FlatFileItemWriterBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.jdbc.support.JdbcTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import javax.sql.DataSource;
@EnableBatchProcessing(dataSourceRef = "batchDataSource", transactionManagerRef = "batchTransactionManager")
public class LoadController {
@Bean
public DataSource batchDataSource() {
return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2)
.addScript("/org/springframework/batch/core/schema-h2.sql")
.generateUniqueName(false).build();
}
@Bean
public DataSource dataSource() {
DataSourceBuilder dataSourceBuilder = DataSourceBuilder.create();
dataSourceBuilder.url("jdbc:mysql://localhost:3306/Test");
dataSourceBuilder.username("sa");
dataSourceBuilder.password("1234567890");
return dataSourceBuilder.build();
}
@Bean
public JdbcTransactionManager batchTransactionManager(DataSource batchDataSource) {
return new JdbcTransactionManager(batchDataSource);
}
public static void main(String e []) throws Exception {
ApplicationContext context = new AnnotationConfigApplicationContext(LoadController.class);
JobParameters jobParameters = new JobParametersBuilder().addLong("time", System.currentTimeMillis()).toJobParameters();
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
JobExecution jobExecution = jobLauncher.run(context.getBean(Job.class), jobParameters);
System.out.println("Job Exit Status : " + jobExecution.getStatus());
}
@Bean
public Step step(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
return new StepBuilder("step", jobRepository)
.<Employee, Employee>chunk(5, transactionManager)
.reader(itemReader())
.writer(itemWriter())
.build();
}
@Bean
public JdbcTransactionManager transactionManager(DataSource dataSource) {
return new JdbcTransactionManager(dataSource);
}
@Bean
public Job job(JobRepository jobRepository, JdbcTransactionManager transactionManager) {
return new JobBuilder("job")
.start(step(jobRepository, transactionManager))
.build();
}
@Bean
public JdbcCursorItemReader<Employee> itemReader() {
String sql = "select * from person";
return new JdbcCursorItemReaderBuilder<Employee>()
.name("personItemReader")
.dataSource(dataSource())
.sql(sql)
.beanRowMapper(Employee.class)
.build();
}
@Bean
public FlatFileItemWriter<Employee> itemWriter() {
return new FlatFileItemWriterBuilder<Employee>()
.resource(new FileSystemResource("persons.csv"))
.name("personItemWriter")
.delimited()
.names("id", "name")
.build();
}
}
My Pom file:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>3.0.2</version>
<relativePath /> <!-- lookup parent from repository -->
</parent>
<groupId>com.techtalk.debu</groupId>
<artifactId>Spring-Batch-CSV-to-DB-Project</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>spring-batch-CSV-To_DB-Write-Example-For-Beginners</name>
<description>Demo project for Beginners to understand Spring Boot</description>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>17</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
<version>3.0.2</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-core</artifactId>
<version>5.0.0</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.21</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-infrastructure</artifactId>
<version>5.0.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
The Employee class:
import jakarta.persistence.*;
import lombok.Data;
import java.util.Date;
@Entity
@Table(name = "user_analytics_table")
@Data
public class Employee {
@Id
private String id;
private String name;
private String email;
private String phone_number;
}
You need to set the jobRepository in the JobBuilder:
@Bean
public Job job(JobRepository jobRepository, JdbcTransactionManager transactionManager) {
return new JobBuilder("job", jobRepository)
.start(step(jobRepository, transactionManager))
.build();
}
The complete, corrected configuration would then look like this:
import org.springframework.batch.core.*;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.item.database.JdbcCursorItemReader;
import org.springframework.batch.item.database.builder.JdbcCursorItemReaderBuilder;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.builder.FlatFileItemWriterBuilder;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.jdbc.support.JdbcTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;
import javax.sql.DataSource;
import java.util.Properties;
@EnableAutoConfiguration
@EnableBatchProcessing(dataSourceRef = "batchDataSource", transactionManagerRef = "batchTransactionManager")
public class LoadController {
@Bean
public DataSource batchDataSource() {
return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2)
.addScript("/org/springframework/batch/core/schema-h2.sql")
.generateUniqueName(false).build();
}
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean em = new LocalContainerEntityManagerFactoryBean();
em.setDataSource(dataSource());
em.setPackagesToScan(new String[] {"com.example.demo"});
JpaVendorAdapter jpaAdapter = new HibernateJpaVendorAdapter();
em.setJpaVendorAdapter(jpaAdapter);
em.setJpaProperties(jpaProperties());
return em;
}
private final Properties jpaProperties() {
Properties properties = new Properties();
properties.setProperty("hibernate.dialect", "org.hibernate.dialect.MySQL5Dialect");
return properties;
}
@Bean
public DataSource dataSource() {
DataSourceBuilder dataSourceBuilder = DataSourceBuilder.create();
dataSourceBuilder.url("jdbc:mysql://localhost:3306/Test");
dataSourceBuilder.username("root");
dataSourceBuilder.password("root-password");
return dataSourceBuilder.build();
}
@Bean
public JdbcTransactionManager batchTransactionManager(DataSource batchDataSource) {
return new JdbcTransactionManager(batchDataSource);
}
public static void main(String e []) throws Exception {
ApplicationContext context = new AnnotationConfigApplicationContext(LoadController.class);
JobParameters jobParameters = new JobParametersBuilder().addLong("time", System.currentTimeMillis()).toJobParameters();
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
JobExecution jobExecution = jobLauncher.run(context.getBean(Job.class), jobParameters);
System.out.println("Job Exit Status : " + jobExecution.getStatus());
}
@Bean
public Step step(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
return new StepBuilder("step", jobRepository)
.<Employee, Employee>chunk(5, transactionManager)
.reader(itemReader())
.writer(itemWriter())
.build();
}
@Bean
public JdbcTransactionManager transactionManager(DataSource dataSource) {
return new JdbcTransactionManager(dataSource);
}
@Bean
public Job job(JobRepository jobRepository, JdbcTransactionManager transactionManager) {
return new JobBuilder("job", jobRepository)
.start(step(jobRepository, transactionManager))
.build();
}
@Bean
public JdbcCursorItemReader<Employee> itemReader() {
String sql = "select * from person";
return new JdbcCursorItemReaderBuilder<Employee>()
.name("personItemReader")
.dataSource(dataSource())
.sql(sql)
.beanRowMapper(Employee.class)
.build();
}
@Bean
public FlatFileItemWriter<Employee> itemWriter() {
return new FlatFileItemWriterBuilder<Employee>()
.resource(new FileSystemResource("persons.csv"))
.name("personItemWriter")
.delimited()
.names("id", "name")
.build();
}
}

Spring Batch remote-partitioning with Kafka - master always continues the oldest job in the JobListener

I'm using spring-batch along with spring-boot 2.5.6. I decided to use remote partitioning with Kafka as the middleware. I have one manager and three workers; accordingly, one partition has been assigned to the manager's input topic and three partitions have been assigned to the workers' input topic.
The manager takes a file, creates multiple ExecutionContexts and sends those over Kafka. The workers start processing their respective steps and send a message at the end of their process. The manager aggregates the workers' results and decides to complete the job once all workers are done. So far so good.
Now assume I first run a long-running job that requires lots of time to finish, and then I run a small job that finishes quickly. Not surprisingly, the second job finishes sooner and sends a completed signal, and the manager consumes this message and continues the process. I even checked AggregatingMessageHandler: the completed message is related to the second job (the short-running one) only; I checked the jobExecutionId.
Now the problem happens. I have a JobListener with an afterJob method. This method is run against the first job (the long-running one that is still being processed by the workers), not the second one (the short-running one for which the completed signal was sent)! I can tell this by looking at the jobExecutionId. It's really weird, because I never saw a completion signal for the first job in the logs.
After some time, when the first long-running job is finished, the final worker sends a completed message and the manager decides to finish the job; now the JobListener is run against the second job (the short-running one)!
I can't understand what goes wrong. I would like to assume it's a misconfiguration, but by debugging the code and checking AggregatingMessageHandler and the TRACE logs in the workers and the manager, I can clearly see that the messages are being sent fine and there's nothing wrong with them. Any suggestions/ideas are welcome.
UPDATE
Here is a sample implementation. Let's say we have a Customer table.
The job takes minId and maxId (the ID column in the Customer table is a simple number), then the manager creates multiple ExecutionContexts based on the id range.
manager config
package com.example.batchdemo.job;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.integration.partition.RemotePartitioningManagerStepBuilderFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.kafka.dsl.Kafka;
import org.springframework.integration.scheduling.PollerMetadata;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.ConsumerProperties;
import org.springframework.scheduling.support.PeriodicTrigger;
@Profile("!worker")
@Configuration
public class JobConfiguration {
private final JobBuilderFactory jobBuilderFactory;
private final JobExplorer jobExplorer;
private final RemotePartitioningManagerStepBuilderFactory managerStepBuilderFactory;
private final JobListener jobListener;
public JobConfiguration(JobBuilderFactory jobBuilderFactory, JobExplorer jobExplorer, RemotePartitioningManagerStepBuilderFactory managerStepBuilderFactory, JobListener jobListener) {
this.jobBuilderFactory = jobBuilderFactory;
this.jobExplorer = jobExplorer;
this.managerStepBuilderFactory = managerStepBuilderFactory;
this.jobListener = jobListener;
}
@Bean
public Job job() {
return jobBuilderFactory.get("job")
.incrementer(new RunIdIncrementer())
.start(managerStep())
.listener(jobListener)
.build();
}
@Bean
public Step managerStep() {
return managerStepBuilderFactory.get("managerStep")
.partitioner("workerStep", rangePartitioner(null, null))
.outputChannel(requestForWorkers())
.inputChannel(repliesFromWorkers())
.jobExplorer(jobExplorer)
.build();
}
@Bean
@StepScope
public Partitioner rangePartitioner(@Value("#{jobParameters['minId']}") Integer minId, @Value("#{jobParameters['maxId']}") Integer maxId) {
return new CustomerIdRangePartitioner(minId, maxId);
}
////////////////////////////////////////////////////////////////////////////////////////////////
@Bean
public DirectChannel requestForWorkers() {
return new DirectChannel();
}
@Bean
public IntegrationFlow outboundFlow(KafkaTemplate kafkaTemplate) {
return IntegrationFlows
.from(requestForWorkers())
.handle(Kafka.outboundChannelAdapter(kafkaTemplate).topic("requestForWorkers"))
.route("requestForWorkers")
.get();
}
@Bean
public DirectChannel repliesFromWorkers() {
return new DirectChannel();
}
@Bean
public IntegrationFlow inboundFlow(ConsumerFactory consumerFactory) {
return IntegrationFlows
.from(Kafka.inboundChannelAdapter(consumerFactory, new ConsumerProperties("repliesFromWorkers")))
.channel(repliesFromWorkers())
.get();
}
@Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata defaultPoller() {
PollerMetadata pollerMetadata = new PollerMetadata();
pollerMetadata.setTrigger(new PeriodicTrigger(10));
return pollerMetadata;
}
}
worker config
package com.example.batchdemo.job;
import com.example.batchdemo.domain.Customer;
import com.example.batchdemo.domain.CustomerRowMapper;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.step.builder.SimpleStepBuilder;
import org.springframework.batch.integration.partition.RemotePartitioningWorkerStepBuilderFactory;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.JdbcPagingItemReader;
import org.springframework.batch.item.database.Order;
import org.springframework.batch.item.database.support.MySqlPagingQueryProvider;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.kafka.dsl.Kafka;
import org.springframework.integration.scheduling.PollerMetadata;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.ConsumerProperties;
import org.springframework.scheduling.support.PeriodicTrigger;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Configuration
@Profile("worker")
public class WorkerConfiguration {
private static final int CHUNK_SIZE = 10;
private static final int WAITING_TIME = 3000;
public final DataSource dataSource;
private final RemotePartitioningWorkerStepBuilderFactory workerStepBuilderFactory;
public WorkerConfiguration(DataSource dataSource, RemotePartitioningWorkerStepBuilderFactory workerStepBuilderFactory) {
this.dataSource = dataSource;
this.workerStepBuilderFactory = workerStepBuilderFactory;
}
@Bean
public DirectChannel repliesFromWorkers() {
return new DirectChannel();
}
@Bean
public IntegrationFlow outboundFlow(KafkaTemplate kafkaTemplate) {
return IntegrationFlows
.from(repliesFromWorkers())
.handle(Kafka.outboundChannelAdapter(kafkaTemplate).topic("repliesFromWorkers"))
.route("repliesFromWorkers")
.get();
}
@Bean
public DirectChannel requestForWorkers() {
return new DirectChannel();
}
@Bean
public IntegrationFlow inboundFlow(ConsumerFactory consumerFactory) {
return IntegrationFlows
.from(Kafka.inboundChannelAdapter(consumerFactory, new ConsumerProperties("requestForWorkers")))
.channel(requestForWorkers())
.get();
}
@Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata defaultPoller() {
PollerMetadata pollerMetadata = new PollerMetadata();
pollerMetadata.setTrigger(new PeriodicTrigger(10));
return pollerMetadata;
}
/////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////
@Bean
public Step workerStep() {
SimpleStepBuilder workerStepBuilder = workerStepBuilderFactory.get("workerStep")
.inputChannel(requestForWorkers())
.outputChannel(repliesFromWorkers())
.<Customer, Customer>chunk(CHUNK_SIZE)
.reader(pagingItemReader(null, null))
.processor(itemProcessor())
.writer(customerItemWriter());
return workerStepBuilder.build();
}
@Bean
@StepScope
public JdbcPagingItemReader<Customer> pagingItemReader(@Value("#{stepExecutionContext['minValue']}") Long minValue,
@Value("#{stepExecutionContext['maxValue']}") Long maxValue) {
System.out.println("reading " + minValue + " to " + maxValue);
JdbcPagingItemReader<Customer> reader = new JdbcPagingItemReader<>();
reader.setDataSource(this.dataSource);
reader.setFetchSize(1000);
reader.setRowMapper(new CustomerRowMapper());
MySqlPagingQueryProvider queryProvider = new MySqlPagingQueryProvider();
queryProvider.setSelectClause("id, firstName, lastName, birthdate");
queryProvider.setFromClause("from CUSTOMER");
queryProvider.setWhereClause("where id >= " + minValue + " and id < " + maxValue);
Map<String, Order> sortKeys = new HashMap<>(1);
sortKeys.put("id", Order.ASCENDING);
queryProvider.setSortKeys(sortKeys);
reader.setQueryProvider(queryProvider);
return reader;
}
@Bean
@StepScope
public ItemProcessor<Customer, Customer> itemProcessor() {
return item -> {
Thread.sleep(WAITING_TIME);
System.out.println(item);
return item;
};
}
@Bean
@StepScope
public ItemWriter<Customer> customerItemWriter() {
return items -> {
System.out.printf("%d items were written%n", items.size());
};
}
}
Partitioner:
package com.example.batchdemo.job;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;
import java.util.HashMap;
import java.util.Map;
public class CustomerIdRangePartitioner implements Partitioner {
private final int minId;
private final int maxId;
private final int gridSize;
public CustomerIdRangePartitioner(int minId, int maxId, int gridSize) {
this.minId = minId;
this.maxId = maxId;
this.gridSize = gridSize;
}
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
int number = (maxId - minId) / this.gridSize + 1;
Map<String, ExecutionContext> result = new HashMap<>();
for (int i = 0; i < number; i++) {
ExecutionContext executionContext = new ExecutionContext();
int start = minId + (this.gridSize * i);
int end = start + (this.gridSize * (i + 1));
executionContext.putInt("minValue", start);
executionContext.putInt("maxValue", Math.min(end, maxId));
result.put("partition" + i, executionContext);
}
return result;
}
}
JobListener
package com.example.batchdemo.job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.stereotype.Component;
@Component
@JobScope
public class JobListener implements JobExecutionListener {
@Override
public void beforeJob(JobExecution jobExecution) {
}
@Override
public void afterJob(JobExecution jobExecution) {
System.out.println(jobExecution.getJobId() + " was finished: " + jobExecution.getStatus());
}
}
AppConfiguration
package com.example.batchdemo.controller;
import org.springframework.batch.core.configuration.JobRegistry;
import org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor;
import org.springframework.batch.core.converter.DefaultJobParametersConverter;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.JobOperator;
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.batch.core.launch.support.SimpleJobOperator;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
@Configuration
public class AppConfiguration {
private final JobExplorer jobExplorer;
private final JobRepository jobRepository;
private final JobRegistry jobRegistry;
private final ApplicationContext applicationContext;
public AppConfiguration(JobExplorer jobExplorer, JobRepository jobRepository, JobRegistry jobRegistry, ApplicationContext applicationContext) {
this.jobExplorer = jobExplorer;
this.jobRepository = jobRepository;
this.jobRegistry = jobRegistry;
this.applicationContext = applicationContext;
}
@Bean
public synchronized JobRegistryBeanPostProcessor jobRegistrar() throws Exception {
JobRegistryBeanPostProcessor registrar = new JobRegistryBeanPostProcessor();
registrar.setJobRegistry(jobRegistry);
registrar.setBeanFactory(applicationContext.getAutowireCapableBeanFactory());
registrar.afterPropertiesSet();
return registrar;
}
@Bean
public JobOperator jobOperator() throws Exception {
SimpleJobOperator simpleJobOperator = new SimpleJobOperator();
simpleJobOperator.setJobLauncher(getJobLauncher());
simpleJobOperator.setJobParametersConverter(new DefaultJobParametersConverter());
simpleJobOperator.setJobRepository(this.jobRepository);
simpleJobOperator.setJobExplorer(this.jobExplorer);
simpleJobOperator.setJobRegistry(this.jobRegistry);
simpleJobOperator.afterPropertiesSet();
return simpleJobOperator;
}
@Bean
public JobLauncher getJobLauncher() throws Exception {
SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
jobLauncher.setJobRepository(jobRepository);
jobLauncher.setTaskExecutor(jobOperatorExecutor());
jobLauncher.afterPropertiesSet();
return jobLauncher;
}
@Bean
public ThreadPoolTaskExecutor jobOperatorExecutor() {
ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
threadPoolTaskExecutor.setCorePoolSize(64);
threadPoolTaskExecutor.setMaxPoolSize(256);
threadPoolTaskExecutor.setWaitForTasksToCompleteOnShutdown(true);
return threadPoolTaskExecutor;
}
}
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.5.6</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.example</groupId>
<artifactId>batch-demo</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>batch-demo</name>
<description>Demo project for Spring Boot</description>
<properties>
<java.version>11</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-batch</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-integration</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-kafka</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
This is a bug in Spring Batch. The listener is indeed called for the job that finishes earlier, but with the wrong JobExecution instance. Making the JobExecutionListener job-scoped does not solve the issue.
I will re-open the issue on GitHub for further investigation.

How to create custom retry logic for Spring Datasource?

I'm connecting to an Azure SQL database, and my next task is to create custom retry logic for when a connection has failed. I would like the retry logic to run both on startup (if needed) and any time there's a connection failure while the app is running. I did a test where I removed the IP restrictions from my app, and that then caused an exception in my application (as expected). I'd like to handle the case where that exception is thrown so that I can trigger a job that verifies both the app and the server are configured correctly. I'm looking for a solution where I can handle these exceptions and retry the DB transaction.
DataSource Config
@Bean
@Primary
public DataSource dataSource() {
return DataSourceBuilder
.create()
.username("username")
.password("password")
.url("jdbc:sqlserver://contoso.database.windows.net:1433;database=*********;user=******#*******;password=*****;encrypt=true;trustServerCertificate=false;hostNameInCertificate=*.database.windows.net;loginTimeout=30;")
.driverClassName("com.microsoft.sqlserver.jdbc.SQLServerDriver")
.build();
}
application.properties
spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.SQLServerDialect
spring.jpa.show-sql=true
logging.level.org.springframework.web: ERROR
logging.level.org.hibernate: ERROR
spring.datasource.tomcat.max-wait=10000
spring.datasource.tomcat.max-active=1
spring.datasource.tomcat.test-on-borrow=true
spring.jpa.hibernate.ddl-auto=update
The following code may help you create your retry logic for a data source on Spring Boot:
package com.example.demo;
import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.jdbc.datasource.AbstractDataSource;
import org.springframework.retry.annotation.Backoff;
import org.springframework.retry.annotation.EnableRetry;
import org.springframework.retry.annotation.Retryable;
@SpringBootApplication
@EnableRetry
public class DemoApplication {
@Order(Ordered.HIGHEST_PRECEDENCE)
private class RetryableDataSourceBeanPostProcessor implements BeanPostProcessor {
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName)
throws BeansException {
if (bean instanceof DataSource) {
bean = new RetryableDataSource((DataSource)bean);
}
return bean;
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName)
throws BeansException {
return bean;
}
}
public static void main(String[] args) {
SpringApplication.run(DemoApplication.class, args);
}
@Bean
public BeanPostProcessor dataSouceWrapper() {
return new RetryableDataSourceBeanPostProcessor();
}
}
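// A delegating DataSource: getConnection() calls are retried with exponential backoff when they fail.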
class RetryableDataSource extends AbstractDataSource {
private DataSource delegate;
public RetryableDataSource(DataSource delegate) {
this.delegate = delegate;
}
@Override
@Retryable(maxAttempts=10, backoff=@Backoff(multiplier=2.3, maxDelay=30000))
public Connection getConnection() throws SQLException {
return delegate.getConnection();
}
@Override
@Retryable(maxAttempts=10, backoff=@Backoff(multiplier=2.3, maxDelay=30000))
public Connection getConnection(String username, String password)
throws SQLException {
return delegate.getConnection(username, password);
}
}
Not sure what you deem custom, but there is an out-of-the-box option with Spring Boot and AspectJ by leveraging the pom.xml in the Maven project, as spring-retry depends on AspectJ:
Add the following dependencies to your project pom.xml:
<dependency>
<groupId>org.springframework.retry</groupId>
<artifactId>spring-retry</artifactId>
<version>${version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
<version>${version}</version>
</dependency>
And then add the @EnableRetry annotation to your code:
package com.example.springretry;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.retry.annotation.EnableRetry;
@EnableRetry
@SpringBootApplication
public class SpringRetryApplication {
public static void main(String[] args) {
SpringApplication.run(SpringRetryApplication.class, args);
}
}
The Spring retry module example can be found here: https://howtodoinjava.com/spring-boot2/spring-retry-module/
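Once retry is enabled, you can also annotate your own data-access methods; here is a minimal sketch (the service class and query are illustrative, not from the original question):

import org.springframework.dao.TransientDataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.retry.annotation.Backoff;
import org.springframework.retry.annotation.Recover;
import org.springframework.retry.annotation.Retryable;
import org.springframework.stereotype.Service;

@Service
public class CustomerService {
    private final JdbcTemplate jdbcTemplate;

    public CustomerService(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    // Retry transient failures (e.g. dropped connections) with exponential backoff.
    @Retryable(value = TransientDataAccessException.class, maxAttempts = 5,
            backoff = @Backoff(delay = 1000, multiplier = 2))
    public int countCustomers() {
        return jdbcTemplate.queryForObject("select count(*) from customer", Integer.class);
    }

    // Invoked once all retry attempts are exhausted.
    @Recover
    public int recover(TransientDataAccessException e) {
        return -1;
    }
}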

NullPointerException in Selenium WebDriver using TestNg and Maven

I am currently building a Maven project which also includes the TestNG library within Eclipse. I am running on Java 8 (having switched from Java 9, as I've heard it has caused issues for others) and using the latest Selenium release, 3.8.1. My project was working smoothly with no issues and tests were running great, and then it began to throw NullPointerExceptions. I have tried rebuilding the project, to no avail.
Here is my set up:
Here is the TestBase class where I use @BeforeSuite and @AfterSuite to instantiate the WebDriver. This launches with no issues.
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeSuite;
public class TestBaseMaven {
public WebDriver driver;
public static FileInputStream fis;
public static FileInputStream fis1;
public static Properties config = new Properties();
public static Properties OR = new Properties();
public static Logger log;
@BeforeSuite
public void setUp() throws IOException, InterruptedException {
fis = new FileInputStream("C:\\Users\\Kohout\\eclipse-workspace\\MavenProjects\\MavenProject2\\src\\test\\resources\\properties\\Config.properties");
config.load(fis);
fis1 = new FileInputStream("C:\\Users\\Kohout\\eclipse-workspace\\MavenProjects\\MavenProject2\\src\\test\\resources\\properties\\OR.properties");
OR.load(fis1);
if(driver==null) {
if(config.getProperty("browser").equals("chrome")) {
System.setProperty("webdriver.chrome.driver", "C:\\Users\\Kohout\\eclipse-workspace\\MavenProjects\\MavenProject2\\src\\test\\resources\\executables\\chromedriver.exe");
driver = new ChromeDriver();
} else if(config.getProperty("browser").equals("firefox")) {
System.setProperty("webdriver.gecko.driver", "C:\\Users\\Kohout\\eclipse-workspace\\MavenProjects\\MavenProject2\\src\\test\\resources\\executables\\geckodriver.exe");
DesiredCapabilities dc = DesiredCapabilities.firefox();
dc.setCapability("marionette", true);
driver=new FirefoxDriver();
} else if(config.getProperty("browser").equals("ie")) {
System.setProperty("webdriver.ie.driver", "C:\\Users\\Kohout\\eclipse-workspace\\MavenProjects\\MavenProject2\\src\\test\\resources\\executables\\IEDriverServer.exe");
driver = new InternetExplorerDriver();
}
}
driver.get(config.getProperty("testUrl"));
driver.manage().window().maximize();
Thread.sleep(2000);
}
public boolean isElementPresent(By by) {
try {
driver.findElement(by);
return true;
} catch(Throwable t) {
return false;
}
}
@AfterSuite
public void tearDown() {
if(driver!=null) {
driver.quit();
}
}
}
Here is my first test, which also runs fine and is passing, though at times this will also throw a NullPointerException where I use the WebDriver, such as in driver.findElement...
import org.openqa.selenium.By;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.kohout.base.TestBaseMaven;
public class ManagerLoginTest extends TestBaseMaven {
@Test
public void managerLoginTest() throws InterruptedException {
driver.findElement(By.cssSelector(config.getProperty("managerLogin"))).click();
Thread.sleep(2000);
Assert.assertTrue(isElementPresent(By.cssSelector(OR.getProperty("addCust"))));
}
}
Here is my second test. For whatever reason, this one ALWAYS throws a NullPointerException whenever I use my instantiated driver variable (instantiated in @BeforeSuite).
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.testng.annotations.Test;
import com.kohout.base.TestBaseMaven;
public class AddCustomerTest extends TestBaseMaven {
@Test
public void addCustomerTest() {
driver.findElement(By.xpath("/html/body/div[3]/div/div[2]/div/div[1]/button[1]"));
}
}
Here is my stack trace:
java.lang.NullPointerException
at com.kohout.testcases.AddCustomerTest.addCustomerTest(AddCustomerTest.java:14)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.testng.internal.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:124)
at org.testng.internal.Invoker.invokeMethod(Invoker.java:571)
at org.testng.internal.Invoker.invokeTestMethod(Invoker.java:707)
at org.testng.internal.Invoker.invokeTestMethods(Invoker.java:979)
at org.testng.internal.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:125)
at org.testng.internal.TestMethodWorker.run(TestMethodWorker.java:109)
at org.testng.TestRunner.privateRun(TestRunner.java:648)
at org.testng.TestRunner.run(TestRunner.java:505)
at org.testng.SuiteRunner.runTest(SuiteRunner.java:455)
at org.testng.SuiteRunner.runSequentially(SuiteRunner.java:450)
at org.testng.SuiteRunner.privateRun(SuiteRunner.java:415)
at org.testng.SuiteRunner.run(SuiteRunner.java:364)
at org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
at org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:84)
at org.testng.TestNG.runSuitesSequentially(TestNG.java:1187)
at org.testng.TestNG.runSuitesLocally(TestNG.java:1116)
at org.testng.TestNG.runSuites(TestNG.java:1028)
at org.testng.TestNG.run(TestNG.java:996)
at org.testng.remote.AbstractRemoteTestNG.run(AbstractRemoteTestNG.java:114)
at org.testng.remote.RemoteTestNG.initAndRun(RemoteTestNG.java:251)
at org.testng.remote.RemoteTestNG.main(RemoteTestNG.java:77)
I also want to include my testng.xml, which I am also calling in my pom.xml:
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd">
<suite name="Bank Suite">
<test name="Login Test">
<classes>
<class name="com.kohout.testcases.ManagerLoginTest"/>
<class name="com.kohout.testcases.AddCustomerTest"/>
</classes>
</test> <!-- Test -->
</suite> <!-- Suite -->
Here is my POM.xml too:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.kohout</groupId>
<artifactId>MavenProject2</artifactId>
<version>0.0.1-SNAPSHOT</version>
<dependencies>
<!-- https://mvnrepository.com/artifact/org.seleniumhq.selenium/selenium-java -->
<dependency>
<groupId>org.seleniumhq.selenium</groupId>
<artifactId>selenium-java</artifactId>
<version>3.8.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.testng/testng -->
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.13.1</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.poi/poi -->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>3.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.poi/poi-ooxml -->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>3.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.poi/poi-ooxml-schemas -->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-schemas</artifactId>
<version>3.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.xmlbeans/xmlbeans -->
<dependency>
<groupId>org.apache.xmlbeans</groupId>
<artifactId>xmlbeans</artifactId>
<version>2.6.0</version>
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.20.1</version>
<configuration>
<suiteXmlFiles>
<suitXmlFile>C:\Users\Kohout\eclipse-workspace\MavenProjects\MavenProject2\testng.xml</suitXmlFile>
</suiteXmlFiles>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
</project>
Thank you for the time and the help.
The problem is in your test code. You are making use of a @BeforeSuite annotation in your base class to initialise the WebDriver instance.
TestNG executes a @BeforeSuite annotated method only once per <suite> tag.
Both your test classes have the same base class, so the initialising method setUp() is called only for your test class com.kohout.testcases.ManagerLoginTest, and it won't get called for com.kohout.testcases.AddCustomerTest.
Please change it to use the @BeforeClass (or @BeforeMethod) annotation if your test class has exactly one @Test method.
If your test classes have more than one @Test method, then you should make use of a ThreadLocal variant, or use a TestNG listener to initialise the WebDriver and then query the WebDriver instance.
You can refer to my blog post here to learn how to work with TestNG listeners to facilitate parallel execution.
Here's a sample that shows how the ThreadLocal variant would look:
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
import java.util.logging.Logger;
public class TestBaseMavenRefactored {
private static final String PREFIX = "C:\\Users\\Kohout\\eclipse-workspace\\MavenProjects\\MavenProject2";
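// One WebDriver instance per thread, so test methods running in parallel never share a driver.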
private static final ThreadLocal<WebDriver> driver = new ThreadLocal<>();
private Properties config = new Properties();
private Properties OR = new Properties();
public static Logger log;
protected WebDriver getDriver() {
return driver.get();
}
protected Properties getConfig() {
return config;
}
public Properties getOR() {
return OR;
}
private void cleanupDriver() {
driver.get().quit();
driver.remove();
}
@BeforeMethod
public void setUp() throws IOException, InterruptedException {
FileInputStream fis = new FileInputStream(PREFIX + "\\src\\test\\resources\\properties\\Config.properties");
config.load(fis);
FileInputStream fis1 = new FileInputStream(PREFIX + "\\src\\test\\resources\\properties\\OR.properties");
OR.load(fis1);
switch (config.getProperty("browser")) {
case "chrome":
System.setProperty("webdriver.chrome.driver", PREFIX + "\\src\\test\\resources\\executables\\chromedriver.exe");
driver.set(new ChromeDriver());
break;
case "firefox":
System.setProperty("webdriver.gecko.driver", PREFIX + "\\src\\test\\resources\\executables\\geckodriver.exe");
DesiredCapabilities dc = DesiredCapabilities.firefox();
dc.setCapability("marionette", true);
driver.set(new FirefoxDriver());
break;
case "ie":
System.setProperty("webdriver.ie.driver", PREFIX + "\\src\\test\\resources\\executables\\IEDriverServer.exe");
driver.set(new InternetExplorerDriver());
break;
}
getDriver().get(config.getProperty("testUrl"));
getDriver().manage().window().maximize();
Thread.sleep(2000);
}
public boolean isElementPresent(By by) {
try {
getDriver().findElement(by);
return true;
} catch (Throwable t) {
return false;
}
}
@AfterMethod
public void tearDown() {
cleanupDriver();
}
}
Here's how the refactored test class would now look:
import org.openqa.selenium.By;
import org.testng.Assert;
import org.testng.annotations.Test;
public class ManagerLoginTest extends TestBaseMavenRefactored {
@Test
public void managerLoginTest() throws InterruptedException {
getDriver().findElement(By.cssSelector(getConfig().getProperty("managerLogin"))).click();
Thread.sleep(2000);
Assert.assertTrue(isElementPresent(By.cssSelector(getOR().getProperty("addCust"))));
}
}

JdbcTemplate object is null in my springboot application - using postgres

Here are my spring.datasource properties in application.properties:
spring.datasource.url=jdbc:postgresql://<hostname goes here>
spring.datasource.username=<username goes here>
spring.datasource.password=password
spring.datasource.driverClassName=org.postgresql.Driver
My main class is as follows:
@PropertySources(value = {@PropertySource("classpath:application.properties")})
@PropertySource(value = "classpath:sql.properties")
@SpringBootApplication
public class MyApp implements CommandLineRunner{
public static void main(String[] args) {
SpringApplication.run(MyApp.class, args);
}
@Override
public void run(String... strings) throws Exception {
Execute execute = new Execute();
execute.executeCleanUp();
}
}
The Execute class is as follows:
import com.here.oat.repository.CleanUpEntries;
import com.here.oat.repository.CleanUpEntriesImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import java.io.IOException;
/***
*
*/
public class Execute {
@Autowired
private CleanUpEntries cleanUpEntries;
public void executeCleanUp() throws IOException {
cleanUpEntries = new CleanUpEntriesImpl();
cleanUpEntries.delete();
}
}
Here is the implementation class - CleanupEntriesImpl:
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
@Component
public class CleanUpEntriesImpl implements CleanUpEntries {
@Autowired
private JdbcTemplate jdbcTemplate;
@Value(value = "${delete.query}")
private String deleteQuery;
@Override
public int delete() {
int id= jdbcTemplate.queryForObject(deleteQuery, Integer.class);
return id;
}
}
pom.xml has the following dependencies:
<!--jdbc driver-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.4-1201-jdbc41</version>
<scope>runtime</scope>
</dependency>
Not sure why the jdbcTemplate object is null when the delete() method is called. Any ideas?
The issue was resolved by removing all new operators from my classes, autowiring everything, and making Execute a @Component class. Objects created with new are not managed by Spring, so their @Autowired fields never get injected.
Thanks!
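A sketch of what the fixed Execute class might look like under that approach (the post does not show the final code, so this is an assumption):

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.io.IOException;

@Component
public class Execute {
    // Injected by Spring because Execute is now a managed component;
    // no "new CleanUpEntriesImpl()" anywhere, so the JdbcTemplate inside
    // the injected bean is properly wired.
    @Autowired
    private CleanUpEntries cleanUpEntries;

    public void executeCleanUp() throws IOException {
        cleanUpEntries.delete();
    }
}

The runner would then autowire Execute as well, instead of instantiating it with new.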