Need access to job launcher parameters when creating job - spring-batch

I am invoking my batch service via the command line and supply a few parameters for the job. I need access to these parameters when creating the job, because I have to look up data from the db for a 'site name' that is supplied as a parameter and dynamically create multiple steps. The issue is in the 'createJob' method. I've hard-coded the site id for now, but there is an exception for the itemizedReader method:
Error creating bean with name 'scopedTarget.itemizedReader' defined in billing.BillingConfig: Unsatisfied dependency expressed through method 'itemizedReader' parameter 1: No qualifying bean of type [java.lang.String]
Spring configuration
package billing;
import billing.components.BillingFieldSetter;
import billing.components.BillingPrepStatementSetter;
import billing.components.SummaryProcessor;
import billing.mapper.ItemizedCostingMapper;
import billing.model.BillingItem;
import billing.model.ItemizedCosting;
import billing.tasklet.SummaryOutputTasklet;
import billing.batch.common.AppProps;
import billing.batch.common.SqlConst;
import billing.batch.common.model.ItemizedPartner;
import billing.batch.common.repo.PartnerBillingRepo;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.job.builder.SimpleJobBuilder;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.JdbcCursorItemReader;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineAggregator;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.file.transform.FieldExtractor;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.annotation.Profile;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
@ComponentScan(basePackages = {"billing", "billing.batch.common"})
@Configuration
@EnableBatchProcessing
@EnableAspectJAutoProxy
@PropertySource("classpath:/app.properties")
public class BillingConfig {
private static final Logger LOG = LogManager.getLogger();
@Autowired
private AppProps appProps;
@Autowired
private PartnerBillingRepo billingRepo;
@Bean
@Profile("prod")
public DataSource datasource() {
final HikariConfig cfg = new HikariConfig();
cfg.setJdbcUrl(appProps.getPartnerBillingUrl());
cfg.setUsername(appProps.getPartnerBillingUsername());
cfg.setPassword(appProps.getPartnerBillingPassword());
cfg.addDataSourceProperty("cachePrepStmts", appProps.getCachePrepStatements());
cfg.addDataSourceProperty("prepStmtCacheSize", appProps.getPrepStatementCacheSize());
cfg.addDataSourceProperty("prepStmtCacheSqlLimit", appProps.getPrepStatementCacheSqlLimit());
HikariDataSource ds = new HikariDataSource(cfg);
return ds;
}
@Bean
public JdbcTemplate template(DataSource ds) {
return new JdbcTemplate(ds);
}
@Bean
@StepScope
public FlatFileItemReader billingFileReader(@Value("#{jobParameters['input.file']}") String inputFile) {
DefaultLineMapper lineMapper = new DefaultLineMapper();
lineMapper.setFieldSetMapper(new BillingFieldSetter());
lineMapper.setLineTokenizer(new DelimitedLineTokenizer());
FlatFileItemReader reader = new FlatFileItemReader();
reader.setLineMapper(lineMapper);
reader.setResource(new FileSystemResource(inputFile));
return reader;
}
@Bean
@StepScope
public JdbcBatchItemWriter billingWriter(DataSource ds, BillingPrepStatementSetter setter) {
JdbcBatchItemWriter writer = new JdbcBatchItemWriter();
writer.setDataSource(ds);
writer.setItemPreparedStatementSetter(setter);
writer.setSql(SqlConst.INSERT_INTO_BILLING);
return writer;
}
@Bean
@StepScope
public BillingPrepStatementSetter prepStatementSetter() {
return new BillingPrepStatementSetter();
}
@Bean
@StepScope
public SummaryProcessor summaryProc() {
return new SummaryProcessor();
}
@Bean
@StepScope
public SummaryOutputTasklet summaryTask() {
return new SummaryOutputTasklet();
}
@Bean
@StepScope
public ItemReader<ItemizedCosting> itemizedReader(@Value("#{jobParameters['site.id']}") Integer siteId, String accountCodes,
@Value("#{jobParameters['start.date']}") String startDate, @Value("#{jobParameters['end.date']}") String endDate) {
JdbcCursorItemReader reader = new JdbcCursorItemReader();
reader.setDataSource(datasource());
reader.setSql(SqlConst.SELECT_ITEMIZED_BILLING_FOR_ACCOUNT_CODES);
reader.setRowMapper(new ItemizedCostingMapper());
reader.setPreparedStatementSetter((ps) -> {
try {
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
ps.setTimestamp(1, new Timestamp(formatter.parse(startDate).getTime()));
ps.setTimestamp(2, new Timestamp(formatter.parse(endDate).getTime()));
} catch (Exception err) {
LOG.error("Unable to parse dates, start: {} end: {}", startDate, endDate);
}
ps.setString(3, accountCodes);
ps.setInt(4, siteId);
});
return reader;
}
@Bean
@StepScope
public ItemWriter<ItemizedCosting> itemizedWriter(@Value("#{jobParameters['start.date']}") String startDate,
String partnerName) {
DelimitedLineAggregator lineAgg = new DelimitedLineAggregator();
FieldExtractor<ItemizedCosting> extractor = (f) -> {
Object[] output = new Object[9];
output[0] = f.getExtension();
output[1] = f.getPbxCallTime();
output[2] = f.getDuration();
output[3] = f.getAccountCode();
output[4] = f.getDigits();
output[5] = f.getCost();
output[6] = f.getDestination();
output[7] = f.getCarrier();
output[8] = f.getAttribute();
return output;
};
lineAgg.setFieldExtractor(extractor);
Timestamp start = null;
try {
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
start = new Timestamp(formatter.parse(startDate).getTime());
} catch (Exception e) {
LOG.error("Unable to parse date: {}", startDate);
}
FlatFileItemWriter<ItemizedCosting> writer = new FlatFileItemWriter<>();
writer.setEncoding("UTF-8");
writer.setLineAggregator(lineAgg);
writer.setResource(new FileSystemResource(String.format("%s/%2$tY-%2$tm_%s_",
appProps.getItemizedBillingOutputPath(), start, partnerName)));
return writer;
}
@Bean
public Job createJob(JobBuilderFactory jobBuilder, StepBuilderFactory stepBuilders, DataSource ds, FlatFileItemReader reader)
throws Exception {
Step findSiteIdStep = stepBuilders.get("find.site.id").tasklet((contribution, chunkContext) -> {
String siteName
= (String) chunkContext.getStepContext().getJobParameters().get(BillingConst.PARAM_SITE);
Integer siteId = billingRepo.findSiteIdByName(siteName);
chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext().put(
BillingConst.SITE_ID, siteId);
return RepeatStatus.FINISHED;
}).build();
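// Values put into the JobExecution's ExecutionContext (SITE_ID above) can later be
// injected into @StepScope beans via @Value("#{jobExecutionContext[...]}") with the
// same key, but they are still not available here at job-construction time.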
Step processFileStep = stepBuilders.get("process.file").<BillingItem, BillingItem>chunk(appProps.getChunkSize())
.reader(reader)
.processor(summaryProc())
.writer(billingWriter(ds, prepStatementSetter())).build();
Step outputSummary = stepBuilders.get("output.summary").tasklet(summaryTask()).build();
SimpleJobBuilder builder = jobBuilder.get("process.aspivia").incrementer(new RunIdIncrementer())
.start(findSiteIdStep)
.next(processFileStep)
.next(outputSummary);
List<ItemizedPartner> partners = billingRepo.findPartnersForSite("CPT");
Integer siteId = billingRepo.findSiteIdByName("CPT");
Map<String, String> partnerAccCodes = new HashMap<>();
partners.stream().forEach(i -> {
if (!partnerAccCodes.containsKey(i.getPartnerName())) {
partnerAccCodes.put(i.getPartnerName(), "");
}
String accCodes = partnerAccCodes.get(i.getPartnerName());
accCodes += i.getAccountCode().toString() + ", ";
partnerAccCodes.put(i.getPartnerName(), accCodes);
});
partnerAccCodes.forEach((k, v) -> {
Step itemizedReport = stepBuilders.get("itemized." + k).<ItemizedCosting, ItemizedCosting>chunk(appProps.getChunkSize())
.reader(itemizedReader(siteId, v, null, null))
.writer(itemizedWriter(null, k)).build();
builder.next(itemizedReport);
});
return builder.build();
}
@Bean
public static PropertySourcesPlaceholderConfigurer propCfg() {
return new PropertySourcesPlaceholderConfigurer();
}
@Bean
public DataSourceTransactionManager transactionManager(DataSource datasource) {
return new DataSourceTransactionManager(datasource);
}
}

The issue is due to the lifecycle of how Spring Batch works. If a bean is decorated with @StepScope, the job parameters are only available once the job is launched.
final Job loadAspiviaDataJob = context.getBean(Job.class);
final JobLauncher launcher = context.getBean(JobLauncher.class);
JobParametersBuilder paramBuilder = new JobParametersBuilder();
paramBuilder.addString(BillingConst.PARAM_INPUT_FILE, inputFile);
paramBuilder.addString(BillingConst.PARAM_SITE, site);
paramBuilder.addString(BillingConst.PARAM_OUTPUT_FILE_PATH, summaryFile);
JobExecution runStatus = launcher.run(loadAspiviaDataJob, paramBuilder.toJobParameters());
In the above code sample we retrieve the Job that is set up via the createJob bean method in my configuration. At this point the job parameters are not yet available.
What I have done to get access to the values I need is as follows:
Added an extra @PropertySource("classpath:cli-runtime.properties")
The Application.java that launches the Spring Batch job saves the properties we need to cli-runtime.properties. When the Job is created in the @Configuration class, the values are loaded from the property file and I can create the additional steps in the job that I require.
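A rough sketch of that launcher step, assuming the file is written somewhere on the classpath before the context is created (the property keys, argument order and output location are placeholders, not the actual project layout):
import java.io.FileOutputStream;
import java.util.Properties;
public class Application {
    public static void main(String[] args) throws Exception {
        // Save the CLI parameters before the Spring context is refreshed so that
        // @PropertySource("classpath:cli-runtime.properties") can resolve them
        // when createJob() runs. Keys and argument order are placeholders.
        Properties props = new Properties();
        props.setProperty("site.name", args[0]);
        props.setProperty("input.file", args[1]);
        try (FileOutputStream out = new FileOutputStream("target/classes/cli-runtime.properties")) {
            props.store(out, "parameters for job construction");
        }
        // ... then create the application context (which triggers createJob)
        // and launch the job as in the snippet above.
    }
}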

Related

how to verify http connection pool can improve performance

I want to use http connection pool with Spring RestTemplate, but before using it, I need to verify whether it can improve performance.
I did a little programming here:
@Configuration
public class RestTemplateConfig {
@Bean
public RestTemplate restTemplate() {
return new RestTemplate();
}
}
and test code here
@SpringBootTest
class RestnopoolApplicationTests {
String url = "https://www.baidu.com/";
// String url = "http://localhost:8080/actuator/";
@Autowired
RestTemplate restTemplate;
@Test
void contextLoads() {
}
@Test
void verify_health() {
Instant start = Instant.now();
for(int i=0; i < 100; i ++) {
restTemplate.getForObject(url, String.class);
}
Instant end = Instant.now();
Duration d = Duration.between(start,end );
System.out.println("time span " + d.getSeconds());
}
}
Also, I wrote the http connection pool below:
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.util.concurrent.TimeUnit;
import org.apache.http.HeaderElement;
import org.apache.http.HeaderElementIterator;
import org.apache.http.HttpResponse;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeaderElementIterator;
import org.apache.http.protocol.HTTP;
import org.apache.http.protocol.HttpContext;
import org.apache.http.ssl.SSLContextBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
/**
* - Supports both HTTP and HTTPS
* - Uses a connection pool to re-use connections and save overhead of creating connections.
* - Has a custom connection keep-alive strategy (to apply a default keep-alive if one isn't specified)
* - Starts an idle connection monitor to continuously clean up stale connections.
*/
@Configuration
@EnableScheduling
public class HttpClientConfig {
private static final Logger LOGGER = LoggerFactory.getLogger(HttpClientConfig.class);
// Determines the timeout in milliseconds until a connection is established.
private static final int CONNECT_TIMEOUT = 30000;
// The timeout when requesting a connection from the connection manager.
private static final int REQUEST_TIMEOUT = 30000;
// The timeout for waiting for data
private static final int SOCKET_TIMEOUT = 60000;
private static final int MAX_TOTAL_CONNECTIONS = 50;
private static final int DEFAULT_KEEP_ALIVE_TIME_MILLIS = 20 * 1000;
private static final int CLOSE_IDLE_CONNECTION_WAIT_TIME_SECS = 30;
@Bean
public PoolingHttpClientConnectionManager poolingConnectionManager() {
SSLContextBuilder builder = new SSLContextBuilder();
try {
builder.loadTrustMaterial(null, new TrustSelfSignedStrategy());
} catch (NoSuchAlgorithmException | KeyStoreException e) {
LOGGER.error("Pooling Connection Manager Initialisation failure because of " + e.getMessage(), e);
}
SSLConnectionSocketFactory sslsf = null;
try {
sslsf = new SSLConnectionSocketFactory(builder.build());
} catch (KeyManagementException | NoSuchAlgorithmException e) {
LOGGER.error("Pooling Connection Manager Initialisation failure because of " + e.getMessage(), e);
}
Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder
.<ConnectionSocketFactory>create().register("https", sslsf)
.register("http", new PlainConnectionSocketFactory())
.build();
PoolingHttpClientConnectionManager poolingConnectionManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
poolingConnectionManager.setMaxTotal(MAX_TOTAL_CONNECTIONS);
return poolingConnectionManager;
}
@Bean
public ConnectionKeepAliveStrategy connectionKeepAliveStrategy() {
return new ConnectionKeepAliveStrategy() {
@Override
public long getKeepAliveDuration(HttpResponse response, HttpContext context) {
HeaderElementIterator it = new BasicHeaderElementIterator
(response.headerIterator(HTTP.CONN_KEEP_ALIVE));
while (it.hasNext()) {
HeaderElement he = it.nextElement();
String param = he.getName();
String value = he.getValue();
if (value != null && param.equalsIgnoreCase("timeout")) {
return Long.parseLong(value) * 1000;
}
}
return DEFAULT_KEEP_ALIVE_TIME_MILLIS;
}
};
}
@Bean
public CloseableHttpClient httpClient() {
RequestConfig requestConfig = RequestConfig.custom()
.setConnectionRequestTimeout(REQUEST_TIMEOUT)
.setConnectTimeout(CONNECT_TIMEOUT)
.setSocketTimeout(SOCKET_TIMEOUT).build();
return HttpClients.custom()
.setDefaultRequestConfig(requestConfig)
.setConnectionManager(poolingConnectionManager())
.setKeepAliveStrategy(connectionKeepAliveStrategy())
.build();
}
@Bean
public Runnable idleConnectionMonitor(final PoolingHttpClientConnectionManager connectionManager) {
return new Runnable() {
@Override
@Scheduled(fixedDelay = 10000)
public void run() {
try {
if (connectionManager != null) {
LOGGER.trace("run IdleConnectionMonitor - Closing expired and idle connections...");
connectionManager.closeExpiredConnections();
connectionManager.closeIdleConnections(CLOSE_IDLE_CONNECTION_WAIT_TIME_SECS, TimeUnit.SECONDS);
} else {
LOGGER.trace("run IdleConnectionMonitor - Http Client Connection manager is not initialised");
}
} catch (Exception e) {
LOGGER.error("run IdleConnectionMonitor - Exception occurred. msg={}, e={}", e.getMessage(), e);
}
}
};
}
}
and RestTemplateConfig below
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.web.client.RestTemplate;
import org.apache.http.impl.client.CloseableHttpClient;
@Configuration
public class RestTemplateConfig {
@Autowired
CloseableHttpClient httpClient;
@Bean
public RestTemplate restTemplate() {
RestTemplate restTemplate = new RestTemplate(clientHttpRequestFactory());
return restTemplate;
}
@Bean
public HttpComponentsClientHttpRequestFactory clientHttpRequestFactory() {
HttpComponentsClientHttpRequestFactory clientHttpRequestFactory = new HttpComponentsClientHttpRequestFactory();
clientHttpRequestFactory.setHttpClient(httpClient);
return clientHttpRequestFactory;
}
@Bean
public TaskScheduler taskScheduler() {
ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
scheduler.setThreadNamePrefix("poolScheduler");
scheduler.setPoolSize(50);
return scheduler;
}
}
The test results cannot prove that the connection pool improves performance.
You have not used your new implementation. You are still using the default client rather than the pooled Apache one. Use your httpClient() method to get the CloseableHttpClient.
Please also note that your test is synchronous: no matter how many connections you have in the pool, you will use them sequentially. Use threads to execute the GET requests.
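For example, a concurrent version of the test could look like the sketch below (the thread count is arbitrary, and it assumes the pooled RestTemplate from the second RestTemplateConfig is the one injected):
// additional imports needed: java.util.ArrayList, java.util.List,
// java.util.concurrent.ExecutorService, Executors and Future
@Test
void verify_health_concurrent() throws Exception {
    ExecutorService executor = Executors.newFixedThreadPool(10);
    List<Future<String>> futures = new ArrayList<>();
    Instant start = Instant.now();
    for (int i = 0; i < 100; i++) {
        // Each task runs in parallel and borrows a connection from the pool.
        futures.add(executor.submit(() -> restTemplate.getForObject(url, String.class)));
    }
    for (Future<String> f : futures) {
        f.get(); // wait for completion and surface any request failure
    }
    Duration d = Duration.between(start, Instant.now());
    System.out.println("concurrent time span " + d.getSeconds());
    executor.shutdown();
}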

Run MyBatis migrations' 'up' command on startup of application

I have a MyBatis migrations setup for my project. This is done by using the migrate command on the command line (in Jenkins). Now I want to integrate this with the application itself (Spring Boot). Currently I have different sql files with @UNDO and 'up' sql code.
So when I start the Spring Boot application, I want to run the migrate 'up' command without changing the sql files that I already have. Is this possible with MyBatis and Spring?
This is about MyBatis-Migrations, right?
Spring Boot does not provide out-of-the-box support; however, it seems to be possible to write a custom DatabasePopulator.
Here is a simple implementation.
It uses Migrations' Runtime Migration feature.
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.TreeSet;
import java.util.stream.Collectors;
import javax.sql.DataSource;
import org.apache.ibatis.migration.Change;
import org.apache.ibatis.migration.DataSourceConnectionProvider;
import org.apache.ibatis.migration.MigrationException;
import org.apache.ibatis.migration.MigrationLoader;
import org.apache.ibatis.migration.MigrationReader;
import org.apache.ibatis.migration.operations.UpOperation;
import org.apache.ibatis.migration.options.DatabaseOperationOption;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.jdbc.datasource.init.DataSourceInitializer;
import org.springframework.jdbc.datasource.init.DatabasePopulator;
import org.springframework.jdbc.datasource.init.ScriptException;
import org.springframework.jdbc.datasource.init.UncategorizedScriptException;
@Configuration
public class MyBatisMigrationsConfig {
private static final String scriptsDir = "scripts";
private static final String changelogTable = "changelog";
@Bean
public DataSourceInitializer dataSourceInitializer(DataSource dataSource) {
Properties properties = new Properties();
properties.setProperty("changelog", changelogTable);
DatabaseOperationOption options = new DatabaseOperationOption();
options.setChangelogTable(changelogTable);
MyBatisMigrationsPopulator populator = new MyBatisMigrationsPopulator(dataSource, scriptsDir, properties, options,
new PathMatchingResourcePatternResolver());
DataSourceInitializer dataSourceInitializer = new DataSourceInitializer();
dataSourceInitializer.setDataSource(dataSource);
dataSourceInitializer.setDatabasePopulator(populator);
return dataSourceInitializer;
}
private static class MyBatisMigrationsPopulator implements DatabasePopulator {
private final DataSource dataSource;
private final String scriptsDir;
private final Properties properties;
private final DatabaseOperationOption options;
private final ResourcePatternResolver resourcePatternResolver;
public MyBatisMigrationsPopulator(DataSource dataSource, String scriptsDir,
Properties properties, DatabaseOperationOption options, ResourcePatternResolver resourcePatternResolver) {
super();
this.dataSource = dataSource;
this.scriptsDir = scriptsDir;
this.properties = properties;
this.options = options;
this.resourcePatternResolver = resourcePatternResolver;
}
public void populate(Connection connection) throws SQLException, ScriptException {
try {
new UpOperation().operate(new DataSourceConnectionProvider(dataSource),
createMigrationsLoader(), options, System.out);
} catch (MigrationException e) {
throw new UncategorizedScriptException("Migration failed.", e.getCause());
}
}
protected MigrationLoader createMigrationsLoader() {
return new SpringMigrationLoader(resourcePatternResolver, scriptsDir, "utf-8", properties);
}
}
private static class SpringMigrationLoader implements MigrationLoader {
protected static final String BOOTSTRAP_SQL = "bootstrap.sql";
protected static final String ONABORT_SQL = "onabort.sql";
private ResourcePatternResolver resourcePatternResolver;
private String path;
private String charset;
private Properties properties;
public SpringMigrationLoader(
ResourcePatternResolver resourcePatternResolver,
String path,
String charset,
Properties properties) {
this.resourcePatternResolver = resourcePatternResolver;
this.path = path;
this.charset = charset;
this.properties = properties;
}
@Override
public List<Change> getMigrations() {
Collection<String> filenames = new TreeSet<>();
for (Resource res : getResources("/*.sql")) {
filenames.add(res.getFilename());
}
filenames.remove(BOOTSTRAP_SQL);
filenames.remove(ONABORT_SQL);
return filenames.stream()
.map(this::parseChangeFromFilename)
.collect(Collectors.toList());
}
@Override
public Reader getScriptReader(Change change, boolean undo) {
try {
return getReader(change.getFilename(), undo);
} catch (IOException e) {
throw new MigrationException("Failed to read bootstrap script.", e);
}
}
@Override
public Reader getBootstrapReader() {
try {
return getReader(BOOTSTRAP_SQL, false);
} catch (FileNotFoundException e) {
// ignore
} catch (IOException e) {
throw new MigrationException("Failed to read bootstrap script.", e);
}
return null;
}
@Override
public Reader getOnAbortReader() {
try {
return getReader(ONABORT_SQL, false);
} catch (FileNotFoundException e) {
// ignore
} catch (IOException e) {
throw new MigrationException("Failed to read onabort script.", e);
}
return null;
}
protected Resource getResource(String pattern) {
return this.resourcePatternResolver.getResource(this.path + "/" + pattern);
}
protected Resource[] getResources(String pattern) {
try {
return this.resourcePatternResolver.getResources(this.path + pattern);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
protected Change parseChangeFromFilename(String filename) {
try {
String name = filename.substring(0, filename.lastIndexOf("."));
int separator = name.indexOf("_");
BigDecimal id = new BigDecimal(name.substring(0, separator));
String description = name.substring(separator + 1).replace('_', ' ');
Change change = new Change(id);
change.setFilename(filename);
change.setDescription(description);
return change;
} catch (Exception e) {
throw new MigrationException("Error parsing change from file. Cause: " + e, e);
}
}
protected Reader getReader(String fileName, boolean undo) throws IOException {
InputStream inputStream = getResource(fileName).getURL().openStream();
return new MigrationReader(inputStream, charset, undo, properties);
}
}
}
Here is an executable demo project.
You may need to modify the datasource settings in application.properties.
Hope this helps!
For Spring:
import java.io.File;
import java.net.URL;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import javax.sql.DataSource;
import org.apache.ibatis.migration.ConnectionProvider;
import org.apache.ibatis.migration.FileMigrationLoader;
import org.apache.ibatis.migration.operations.UpOperation;
import org.apache.ibatis.migration.options.DatabaseOperationOption;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.init.DataSourceInitializer;
import org.springframework.jdbc.datasource.init.DatabasePopulator;
import org.springframework.jdbc.datasource.init.ScriptException;
@Configuration
public class MyBatisMigrationRuntimeConfiguration {
private static final String CHANGELOG_TABLE = "changelog";
private static final String MIGRATION_SCRIPTS = "migration/scripts";
@Bean
public DataSourceInitializer dataSourceInitializer(DataSource dataSource) {
DataSourceInitializer dataSourceInitializer = new DataSourceInitializer();
dataSourceInitializer.setDataSource(dataSource);
dataSourceInitializer.setDatabasePopulator(new Populator());
return dataSourceInitializer;
}
private DatabaseOperationOption getOption() {
DatabaseOperationOption options = new DatabaseOperationOption();
options.setChangelogTable(CHANGELOG_TABLE);
return options;
}
private Properties getProperties() {
Properties properties = new Properties();
properties.setProperty("changelog", CHANGELOG_TABLE);
return properties;
}
private File getScriptDir() {
URL url = getClass().getClassLoader().getResource(MIGRATION_SCRIPTS);
if (url == null) {
throw new IllegalArgumentException("file is not found!");
} else {
return new File(url.getFile());
}
}
private class Populator implements DatabasePopulator {
@Override
public void populate(Connection connection) throws SQLException, ScriptException {
new UpOperation().operate(
new SimplyConnectionProvider(connection),
new FileMigrationLoader(getScriptDir(), "utf-8", getProperties()),
getOption(),
System.out
);
}
}
private static class SimplyConnectionProvider implements ConnectionProvider {
private final Connection connection;
public SimplyConnectionProvider(Connection connection) {
this.connection = connection;
}
public Connection getConnection() {
return connection;
}
}
}

Reading data from kafka topic via golden gate

I have Spring Boot code which reads data from a Kafka topic. The code works as expected when data is fed to the topic via the Kafka Producer Console. When I try to push data into the kafka topic via Golden Gate, the code doesn't read the data from the topic, although I can see that Golden Gate is able to write the data into the kafka topic. Can anyone suggest why this change in behavior?
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.bson.Document;
import org.json.JSONArray;
import org.json.JSONObject;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.BasicDBObject;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
public class VideoConsumer implements Runnable {
private ObjectMapper objectMapper;
private KafkaStream<byte[], byte[]> kafkaStream;
private int threadNumber;
public VideoConsumer(KafkaStream<byte[], byte[]> kafkaStream, int threadNumber) {
this.threadNumber = threadNumber;
this.kafkaStream = kafkaStream;
this.objectMapper = new ObjectMapper();
}
@Override
public void run() {
ConsumerIterator<byte[], byte[]> it = kafkaStream.iterator();
while (it.hasNext()) {
byte[] messageData = it.next().message();
try {
//String videoFromMessage = objectMapper.readValue(messageData, String.class);
//byte[] videoFromMessage = it.next().message();
//System.out.print("got message");
String streamData = new String(messageData);
System.out.print("Thread:" + threadNumber + ".Consuming video: " + streamData + "\n");
String changed=streamData.toString();
int pos=changed.lastIndexOf("}}");
String change=changed.substring(0,pos );
change=change.replace("}}", "}},");
String res=change.concat("}}");
String result="[" +res+ "]";
System.out.println(result);
JSONArray json;
json = new JSONArray(result);
Map<String, List<JSONObject>> orderMongo = new HashMap<>();
Map<String, List<JSONObject>> orderItemMongo = new HashMap<>();
MongoClient mongoClient = new MongoClient( "localhost" , 27017 );
MongoDatabase db = mongoClient.getDatabase("Mongotest");
MongoCollection<Document> table = db.getCollection("test1");
Document doc1=new Document();
//Gson gson=new Gson();
BasicDBObject document = new BasicDBObject();
for (int i = 0; i < json.length(); i++) {
JSONObject obj = json.getJSONObject(i);
if(obj.getString("table").equals("TEST.S_ORDER_MONGO1")){
List<JSONObject> list = orderMongo.getOrDefault(obj.getString("table").equals("TEST.S_ORDER_MONGO1"),new ArrayList<>());
list.add(obj);
orderMongo.put(obj.getJSONObject("after").getString("ROW_ID"),list);
}
else if(obj.getString("table").equals("TEST.S_ORDER_ITEM_MONGO1")){
List<JSONObject> nextlist = orderItemMongo.getOrDefault(obj.getString("table").equals("TEST.S_ORDER_ITEM_MONGO1"),new ArrayList<>());
nextlist.add(obj);
orderItemMongo.put(obj.getJSONObject("after").getString("ORDER_ID"),nextlist);
}
}
System.out.println(orderMongo);
System.out.println(orderItemMongo);
// System.out.println(orderItemMongo);
for (Entry<String, List<JSONObject>> entry : orderMongo.entrySet()) {
for(Entry<String, List<JSONObject>> entry1 : orderItemMongo.entrySet()){
if(entry.getKey().equals(entry1.getKey())){
//String gsonString=gson.toJson(entry.getValue());
//System.out.println(gsonString);
List<JSONObject> listnext = entry.getValue();
List <JSONObject> orderlineList=entry1.getValue();
for(JSONObject obj:listnext){
Document doc = new Document("STATUS_CD", obj.getJSONObject("after").getString("STATUS_CD"));
if(obj.getJSONObject("after").isNull("INTEGRATION_ID")==true){
doc.append("INTEGRATION_ID", null);}
doc.append("X_CUST_REF", obj.getJSONObject("after").getString("X_CUST_REF"));
doc.append("REQ_SHIP_DT",obj.getJSONObject("after").getString("REQ_SHIP_DT"));
if(obj.getJSONObject("after").isNull("QUOTE_ID")==true){
doc.append("QUOTE_ID",null);}
doc.append("ACCNT_ID",obj.getJSONObject("after").getString("ACCNT_ID"));
doc.append("ACTIVE_FLG",obj.getJSONObject("after").getString("ACTIVE_FLG"));
doc.append("PROCESS_TIMESTAMP",obj.getJSONObject("after").getString("PROCESS_TIMESTAMP"));
doc.append("CONTACT_ID",obj.getJSONObject("after").getString("CONTACT_ID"));
doc.append("BU_ID", obj.getJSONObject("after").getString("BU_ID"));
doc.append("SHIP_CON_ID",obj.getJSONObject("after").getString("SHIP_CON_ID"));
doc.append("LAST_UPD", obj.getJSONObject("after").getString("LAST_UPD"));
if(obj.getJSONObject("after").isNull("X_CLOSE_DT")==true){
doc.append("X_CLOSE_DT", null);}
doc.append("X_SUB_STAT", obj.getJSONObject("after").getString("X_SUB_STAT"));
doc.append("ORDER_NUM", obj.getJSONObject("after").getString("ORDER_NUM"));
doc.append("SOFT_DELETE", obj.getJSONObject("after").getString("SOFT_DELETE"));
doc.append("ROW_ID", obj.getJSONObject("after").getString("ROW_ID"));
doc.append("LAST_UPD_BY",obj.getJSONObject("after").getString("LAST_UPD_BY"));
doc.append("REV_NUM",obj.getJSONObject("after").getString("REV_NUM"));
doc.append("ORDER_DT", obj.getJSONObject("after").getString("ORDER_DT"));
for(JSONObject object:orderlineList){
if(object.getJSONObject("after").isNull("ASSET_ID")==true){
doc1.append("ASSET_ID", null);}
if(object.getJSONObject("after").isNull("SERV_ACCNT_ID")==true){
doc1.append("SERV_ACCNT_ID", null);}
doc1.append("REQ_SHIP_DT",object.getJSONObject("after").getString("REQ_SHIP_DT"));
if(object.getJSONObject("after").isNull("X_PROD_DESC")==true){
doc1.append("X_PROD_DESC",null);}
if(object.getJSONObject("after").isNull("SHIP_CON_ID")==true){
doc1.append("SHIP_CON_ID",null);}
doc1.append("X_BES_STATUS",object.getJSONObject("after").getString("X_BES_STATUS"));
doc1.append("ROW_ID",object.getJSONObject("after").getString("ROW_ID"));
doc1.append("STATUS_CD",object.getJSONObject("after").getString("STATUS_CD"));
doc1.append("ORDER_ID",object.getJSONObject("after").getString("ORDER_ID"));
if(object.getJSONObject("after").isNull("COMPLETED_DT")==true){
doc1.append("COMPLETED_DT",null);}
doc1.append("LAST_UPD",object.getJSONObject("after").getString("LAST_UPD"));
doc1.append("SOFT_DELETE",object.getJSONObject("after").getString("SOFT_DELETE"));
doc1.append("INTEGRATION_ID",object.getJSONObject("after").getString("INTEGRATION_ID"));
doc1.append("X_CDD",object.getJSONObject("after").getString("X_CDD"));
doc1.append("ACTION_CD",object.getJSONObject("after").getString("ACTION_CD"));
doc1.append("X_ORDER_ITEM_SUBSTATUS",object.getJSONObject("after").getString("X_ORDER_ITEM_SUBSTATUS"));
if(object.getJSONObject("after").isNull("X_APPT_REF")==true){
doc1.append("X_APPT_REF",null);}
if(object.getJSONObject("after").isNull("X_CANCELLED_DT")==true){
doc1.append("X_CANCELLED_DT",null);}
doc1.append("PROD_ID",object.getJSONObject("after").getString("PROD_ID"));
if(object.getJSONObject("after").isNull("SERVICE_NUM")==true){
doc1.append("SERVICE_NUM",null);}
if(object.getJSONObject("after").isNull("MUST_DLVR_BY_DT")==true){
doc1.append("MUST_DLVR_BY_DT",null);}
doc1.append("ROLLUP_FLG",object.getJSONObject("after").getString("ROLLUP_FLG"));
doc1.append("ROOT_ORDER_ITEM_ID",object.getJSONObject("after").getString("ROOT_ORDER_ITEM_ID"));
doc1.append("BILL_ACCNT_ID",object.getJSONObject("after").getString("BILL_ACCNT_ID"));
doc1.append("PROCESS_TIMESTAMP",object.getJSONObject("after").getString("PROCESS_TIMESTAMP"));
doc1.append("QTY_REQ",object.getJSONObject("after").getString("QTY_REQ"));
}
doc.append("ORDERLINE", doc1);
table.insertOne(doc);
}
}
}
}
}
catch (Exception e) {
e.printStackTrace();
}
System.out.println("Shutting down Thread: " + kafkaStream);
}
}
}

How to replicate a page and all its children using replicator API?

Here is the code in which I am replicating the page
final String pagePath = blogEntryPage.getPath();
final Resource jcrContent= blogEntryPage.getContentResource();
final Node jcrNode = jcrContent.adaptTo(Node.class);
adminSession = jcrNode.getSession();
// REPLICATE THE DATE NODE
replicator.replicate(adminSession, ReplicationActionType.ACTIVATE, pagePath);
The problem here is that only the parent page is getting replicated; I want to replicate the child pages as well.
How about just iterating over the child pages and replicating them as well:
Iterator<Page> childPages = blogEntryPage.listChildren();
while (childPages.hasNext()) {
Page childPage = childPages.next();
replicator.replicate(adminSession, ReplicationActionType.ACTIVATE, childPage.getPath());
}
You could even put this in a method and call it recursively.
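For example, a minimal recursive sketch (untested, assuming the replicator and adminSession from the question are in scope; replicateTree is just a hypothetical helper name):
private void replicateTree(Session session, Page page) throws ReplicationException {
    // Activate this page, then recurse into all of its children.
    replicator.replicate(session, ReplicationActionType.ACTIVATE, page.getPath());
    Iterator<Page> children = page.listChildren();
    while (children.hasNext()) {
        replicateTree(session, children.next());
    }
}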
Try this (I have not tested it):
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.jcr.Session;
import com.day.cq.replication.Agent;
import com.day.cq.replication.AgentManager;
import com.day.cq.replication.ReplicationAction;
import com.day.cq.replication.ReplicationActionType;
import com.day.cq.replication.ReplicationContent;
import com.day.cq.replication.ReplicationException;
import com.day.cq.replication.ReplicationOptions;
import com.day.cq.replication.Replicator;
import com.day.cq.wcm.api.WCMException;
import com.day.cq.wcm.msm.api.LiveRelationship;
import com.day.cq.wcm.msm.api.LiveRelationshipManager;
import com.day.cq.wcm.msm.api.RolloutManager;
import org.osgi.framework.ServiceReference;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.apache.sling.api.resource.LoginException;
public class Activator implements BundleActivator {
/*
* (non-Javadoc)
* @see org.osgi.framework.BundleActivator#start(org.osgi.framework.BundleContext)
*/
public void start(BundleContext context) throws Exception {
AgentManager agentManager = getService(context,AgentManager.class);
Replicator replicator = getService(context,Replicator.class);
ResourceResolverFactory resourceFactory = getService(context,ResourceResolverFactory.class);
ResourceResolver resourceResolver = null;
Session session = null;
String path = "/content/geometrixx-gov";
try {
resourceResolver = resourceFactory.getAdministrativeResourceResolver(null);
session = resourceResolver.adaptTo(Session.class);
for (Map.Entry<String, Agent> e : agentManager.getAgents().entrySet()) {
if (e.getValue().getConfiguration().getTransportURI().contains("/bin/receive?sling:authRequestLogin=1")) {
Agent a = e.getValue();
try {
ReplicationAction ra = new ReplicationAction(ReplicationActionType.ACTIVATE, path);
ReplicationContent rc = a.buildContent(session, ra);
a.replicate(ra, rc, new ReplicationOptions());
System.out.println("Activator cache flush requested check queue");
} catch (ReplicationException ex) {
ex.printStackTrace();
}
}
}
} catch (LoginException e) {
e.printStackTrace();
}
}
public <T> T getService(BundleContext bc, Class<T> c)
{
ServiceReference sr = bc.getServiceReference(c.getName());
if (sr != null)
{
return (T) bc.getService(sr);
}
return null;
}
/*
* (non-Javadoc)
* @see org.osgi.framework.BundleActivator#stop(org.osgi.framework.BundleContext)
*/
public void stop(BundleContext context) throws Exception {
// TODO add cleanup code
}
}

Only one of the two AutoCompleteTextView are showing suggestions

I have two AutoCompleteTextView controls on the same page: ACTV1 and ACTV2, and only one (ACTV1) is showing suggestions from my database. For each databinding action I've made a separate java class: ACTV1.java and ACTV2.java.
But if I add an intent filter (MAIN, LAUNCHER) in my manifest file for the ACTV2.java class and set ACTV2.java as the Launch Action in the run configuration, then I no longer get suggestions for the ACTV1 control, but this time I get suggestions for the ACTV2 control.
The two java classes are identical except that the names of some constants/controls differ.
package com.fishing2;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONArray;
import org.json.JSONObject;
import android.app.Activity;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
public class CompleteBalti extends Activity {
//private CustomAutoCompleteView CompleteBalti;
private ArrayAdapter<String> adaperbalti;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_partida);
}
final TextWatcher textChecker = new TextWatcher() {
public void afterTextChanged(Editable s) {}
public void beforeTextChanged(CharSequence s, int start, int count, int after) { }
public void onTextChanged(CharSequence s, int start, int before, int count)
{
adaperbalti.clear();
callPHP1();
}
};
private void callPHP1(){
String result = "";
InputStream is=null;
AutoCompleteTextView CompleteBalti = (AutoCompleteTextView) findViewById(R.id.nume_localitate);
ArrayList<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
nameValuePairs.add(new BasicNameValuePair("st",CompleteBalti.getText().toString()));
{
try{
HttpClient httpclient = new DefaultHttpClient();
HttpPost httppost = new HttpPost("http://192.168.3.159/wtf/balti.php");
httppost.setEntity(new UrlEncodedFormEntity(nameValuePairs,"utf-8"));
HttpResponse response = httpclient.execute(httppost);
HttpEntity entity = response.getEntity();
is = entity.getContent();
BufferedReader reader = new BufferedReader(new InputStreamReader(is,"utf-8"),8);
StringBuilder sb = new StringBuilder();
String line = null;
while ((line = reader.readLine()) != null) {
sb.append(line + "\n");
}
is.close();
result=sb.toString();
result = result.substring(1);
}catch(Exception e){
Log.e("log_tag", "Error in http connection "+e.toString());
}
try{
JSONArray jArray = new JSONArray(result);
JSONObject json_data = null;
for (int i=0;i<jArray.length(); i++)
{
json_data = jArray.getJSONObject(i);
adaperbalti.add(json_data.getString("nume_balta"));
}
} catch(Exception e1){
Log.e("log_tag", "Error converting result "+e1.toString());
}
}
}
}