I have Spring Boot code that reads data from a Kafka topic. It works as expected when data is fed to the topic via the Kafka console producer. When I push data into the Kafka topic via Golden Gate, the code does not read the data from the topic, although I can see that Golden Gate is able to write the data into the Kafka topic. Can anyone suggest why the behavior changes?
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.bson.Document;
import org.json.JSONArray;
import org.json.JSONObject;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mongodb.BasicDBObject;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
public class VideoConsumer implements Runnable {
private ObjectMapper objectMapper;
private KafkaStream<byte[], byte[]> kafkaStream;
private int threadNumber;
public VideoConsumer(KafkaStream<byte[], byte[]> kafkaStream, int threadNumber) {
this.threadNumber = threadNumber;
this.kafkaStream = kafkaStream;
this.objectMapper = new ObjectMapper();
}
@Override
public void run() {
ConsumerIterator<byte[], byte[]> it = kafkaStream.iterator();
while (it.hasNext()) {
byte[] messageData = it.next().message();
try {
//String videoFromMessage = objectMapper.readValue(messageData, String.class);
//byte[] videoFromMessage = it.next().message();
//System.out.print("got message");
String streamData = new String(messageData);
System.out.print("Thread:" + threadNumber + ".Consuming video: " + streamData + "\n");
String changed=streamData.toString();
int pos=changed.lastIndexOf("}}");
String change=changed.substring(0,pos );
change=change.replace("}}", "}},");
String res=change.concat("}}");
String result="[" +res+ "]";
System.out.println(result);
JSONArray json;
json = new JSONArray(result);
Map<String, List<JSONObject>> orderMongo = new HashMap<>();
Map<String, List<JSONObject>> orderItemMongo = new HashMap<>();
MongoClient mongoClient = new MongoClient( "localhost" , 27017 );
MongoDatabase db = mongoClient.getDatabase("Mongotest");
MongoCollection<Document> table = db.getCollection("test1");
Document doc1=new Document();
//Gson gson=new Gson();
BasicDBObject document = new BasicDBObject();
for (int i = 0; i < json.length(); i++) {
JSONObject obj = json.getJSONObject(i);
if(obj.getString("table").equals("TEST.S_ORDER_MONGO1")){
List<JSONObject> list = orderMongo.getOrDefault(obj.getString("table").equals("TEST.S_ORDER_MONGO1"),new ArrayList<>());
list.add(obj);
orderMongo.put(obj.getJSONObject("after").getString("ROW_ID"),list);
}
else if(obj.getString("table").equals("TEST.S_ORDER_ITEM_MONGO1")){
List<JSONObject> nextlist = orderItemMongo.getOrDefault(obj.getString("table").equals("TEST.S_ORDER_ITEM_MONGO1"),new ArrayList<>());
nextlist.add(obj);
orderItemMongo.put(obj.getJSONObject("after").getString("ORDER_ID"),nextlist);
}
}
System.out.println(orderMongo);
System.out.println(orderItemMongo);
// System.out.println(orderItemMongo);
for (Entry<String, List<JSONObject>> entry : orderMongo.entrySet()) {
for(Entry<String, List<JSONObject>> entry1 : orderItemMongo.entrySet()){
if(entry.getKey().equals(entry1.getKey())){
//String gsonString=gson.toJson(entry.getValue());
//System.out.println(gsonString);
List<JSONObject> listnext = entry.getValue();
List <JSONObject> orderlineList=entry1.getValue();
for(JSONObject obj:listnext){
Document doc = new Document("STATUS_CD", obj.getJSONObject("after").getString("STATUS_CD"));
if(obj.getJSONObject("after").isNull("INTEGRATION_ID")==true){
doc.append("INTEGRATION_ID", null);}
doc.append("X_CUST_REF", obj.getJSONObject("after").getString("X_CUST_REF"));
doc.append("REQ_SHIP_DT",obj.getJSONObject("after").getString("REQ_SHIP_DT"));
if(obj.getJSONObject("after").isNull("QUOTE_ID")==true){
doc.append("QUOTE_ID",null);}
doc.append("ACCNT_ID",obj.getJSONObject("after").getString("ACCNT_ID"));
doc.append("ACTIVE_FLG",obj.getJSONObject("after").getString("ACTIVE_FLG"));
doc.append("PROCESS_TIMESTAMP",obj.getJSONObject("after").getString("PROCESS_TIMESTAMP"));
doc.append("CONTACT_ID",obj.getJSONObject("after").getString("CONTACT_ID"));
doc.append("BU_ID", obj.getJSONObject("after").getString("BU_ID"));
doc.append("SHIP_CON_ID",obj.getJSONObject("after").getString("SHIP_CON_ID"));
doc.append("LAST_UPD", obj.getJSONObject("after").getString("LAST_UPD"));
if(obj.getJSONObject("after").isNull("X_CLOSE_DT")==true){
doc.append("X_CLOSE_DT", null);}
doc.append("X_SUB_STAT", obj.getJSONObject("after").getString("X_SUB_STAT"));
doc.append("ORDER_NUM", obj.getJSONObject("after").getString("ORDER_NUM"));
doc.append("SOFT_DELETE", obj.getJSONObject("after").getString("SOFT_DELETE"));
doc.append("ROW_ID", obj.getJSONObject("after").getString("ROW_ID"));
doc.append("LAST_UPD_BY",obj.getJSONObject("after").getString("LAST_UPD_BY"));
doc.append("REV_NUM",obj.getJSONObject("after").getString("REV_NUM"));
doc.append("ORDER_DT", obj.getJSONObject("after").getString("ORDER_DT"));
for(JSONObject object:orderlineList){
if(object.getJSONObject("after").isNull("ASSET_ID")==true){
doc1.append("ASSET_ID", null);}
if(object.getJSONObject("after").isNull("SERV_ACCNT_ID")==true){
doc1.append("SERV_ACCNT_ID", null);}
doc1.append("REQ_SHIP_DT",object.getJSONObject("after").getString("REQ_SHIP_DT"));
if(object.getJSONObject("after").isNull("X_PROD_DESC")==true){
doc1.append("X_PROD_DESC",null);}
if(object.getJSONObject("after").isNull("SHIP_CON_ID")==true){
doc1.append("SHIP_CON_ID",null);}
doc1.append("X_BES_STATUS",object.getJSONObject("after").getString("X_BES_STATUS"));
doc1.append("ROW_ID",object.getJSONObject("after").getString("ROW_ID"));
doc1.append("STATUS_CD",object.getJSONObject("after").getString("STATUS_CD"));
doc1.append("ORDER_ID",object.getJSONObject("after").getString("ORDER_ID"));
if(object.getJSONObject("after").isNull("COMPLETED_DT")==true){
doc1.append("COMPLETED_DT",null);}
doc1.append("LAST_UPD",object.getJSONObject("after").getString("LAST_UPD"));
doc1.append("SOFT_DELETE",object.getJSONObject("after").getString("SOFT_DELETE"));
doc1.append("INTEGRATION_ID",object.getJSONObject("after").getString("INTEGRATION_ID"));
doc1.append("X_CDD",object.getJSONObject("after").getString("X_CDD"));
doc1.append("ACTION_CD",object.getJSONObject("after").getString("ACTION_CD"));
doc1.append("X_ORDER_ITEM_SUBSTATUS",object.getJSONObject("after").getString("X_ORDER_ITEM_SUBSTATUS"));
if(object.getJSONObject("after").isNull("X_APPT_REF")==true){
doc1.append("X_APPT_REF",null);}
if(object.getJSONObject("after").isNull("X_CANCELLED_DT")==true){
doc1.append("X_CANCELLED_DT",null);}
doc1.append("PROD_ID",object.getJSONObject("after").getString("PROD_ID"));
if(object.getJSONObject("after").isNull("SERVICE_NUM")==true){
doc1.append("SERVICE_NUM",null);}
if(object.getJSONObject("after").isNull("MUST_DLVR_BY_DT")==true){
doc1.append("MUST_DLVR_BY_DT",null);}
doc1.append("ROLLUP_FLG",object.getJSONObject("after").getString("ROLLUP_FLG"));
doc1.append("ROOT_ORDER_ITEM_ID",object.getJSONObject("after").getString("ROOT_ORDER_ITEM_ID"));
doc1.append("BILL_ACCNT_ID",object.getJSONObject("after").getString("BILL_ACCNT_ID"));
doc1.append("PROCESS_TIMESTAMP",object.getJSONObject("after").getString("PROCESS_TIMESTAMP"));
doc1.append("QTY_REQ",object.getJSONObject("after").getString("QTY_REQ"));
}
doc.append("ORDERLINE", doc1);
table.insertOne(doc);
}
}
}
}
}
catch (Exception e) {
e.printStackTrace();
}
System.out.println("Shutting down Thread: " + kafkaStream);
}
}
}
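A few things are worth ruling out before suspecting the consumer code itself: the Golden Gate Kafka handler may be writing to a different topic than the one this consumer group reads (for example a per-table topic), it may be producing a different payload format than the console producer (Avro rather than plain JSON text), and a previously committed consumer-group offset can make newly produced records look as if they are being skipped. A minimal stand-alone consumer with a fresh group id can confirm whether the Golden Gate records actually reach the topic this code reads; this is only a sketch, and the broker address and topic name are assumptions:
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
public class TopicCheck {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // assumed broker address
        props.put("group.id", "topic-check-" + System.currentTimeMillis()); // fresh group so nothing is already committed
        props.put("auto.offset.reset", "earliest"); // read the topic from the beginning
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("your-topic")); // assumed: the topic Golden Gate writes to
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(1000);
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.key() + " -> " + record.value());
            }
        }
    }
}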
Related
First I started ZooKeeper, then the Kafka server, and created a topic. Then I read tweets and try to store them in MongoDB, but nothing is written to my collection although the server is establishing a connection.
Here is the java code:
package kafka;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.LinkedBlockingQueue;
import twitter4j.*;
import twitter4j.StallWarning;
import twitter4j.Status;
import twitter4j.StatusDeletionNotice;
import twitter4j.StatusListener;
import twitter4j.TwitterStream;
import twitter4j.TwitterStreamFactory;
import twitter4j.conf.ConfigurationBuilder;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.log4j.Logger;
import org.bson.Document;
import org.mortbay.util.ajax.JSON;
import com.mongodb.*;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
public class KafkaTwitterProducer {
@SuppressWarnings("resource")
public static void main(String[] args) throws Exception {
final LinkedBlockingQueue<Status> queue = new LinkedBlockingQueue<Status>(1000);
if (args.length < 4) {
System.out.println(
"Usage: KafkaTwitterProducer <twitter-consumer-key> <twitter-consumer-secret> <twitter-access-token> <twitter-access-token-secret> <topic-name> <twitter-search-keywords>");
return;
}
String consumerKey = args[0].toString();
String consumerSecret = args[1].toString();
String accessToken = args[2].toString();
String accessTokenSecret = args[3].toString();
String topicName = args[4].toString();
String[] arguments = args.clone();
String[] keyWords = Arrays.copyOfRange(arguments, 5, arguments.length);
// Set twitter oAuth tokens in the configuration
ConfigurationBuilder cb = new ConfigurationBuilder();
cb.setDebugEnabled(true).setOAuthConsumerKey(consumerKey).setOAuthConsumerSecret(consumerSecret)
.setOAuthAccessToken(accessToken).setOAuthAccessTokenSecret(accessTokenSecret).setJSONStoreEnabled(true);
// Create twitterstream using the configuration
TwitterStream twitterStream = new TwitterStreamFactory(cb.build()).getInstance();
StatusListener listener = new StatusListener() {
public void onStatus(Status status) {
queue.offer(status);
}
public void onDeletionNotice(StatusDeletionNotice statusDeletionNotice) {
System.out.println("Got a status deletion notice id:" + statusDeletionNotice.getStatusId());
}
public void onTrackLimitationNotice(int numberOfLimitedStatuses) {
System.out.println("Got track limitation notice:" + numberOfLimitedStatuses);
}
public void onScrubGeo(long userId, long upToStatusId) {
System.out.println("Got scrub_geo event userId:" + userId + "upToStatusId:" + upToStatusId);
}
public void onStallWarning(StallWarning warning) {
System.out.println("Got stall warning:" + warning);
}
public void onException(Exception ex) {
ex.printStackTrace();
}
};
twitterStream.addListener(listener);
// Filter keywords
FilterQuery query = new FilterQuery().track(keyWords);
twitterStream.filter(query);
// Thread.sleep(5000);
// Add Kafka producer config settings
Properties props = new Properties();
props.put("metadata.broker.list", "localhost:9092");
props.put("bootstrap.servers", "localhost:9092");
props.put("acks", "all");
props.put("retries", 0);
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
Producer<String, String> producer = new KafkaProducer<String, String>(props);
int i = 0;
int j = 0;
// Mongodb initialization parameters.
int port_no = 27017;
String host_name = "localhost", db_name = "bigdata", db_coll_name = "tweets";
// Mongodb connection string.
String client_url = "mongodb://" + host_name + ":" + port_no + "/" + db_name;
MongoClientURI uri = new MongoClientURI(client_url);
// Connecting to the mongodb server using the given client uri.
MongoClient mongo_client = new MongoClient(uri);
// Fetching the database from the mongodb.
MongoDatabase db = mongo_client.getDatabase(db_name);
// Fetching the collection from the mongodb.
MongoCollection<Document> coll = db.getCollection(db_coll_name);
//List<Document> docs = new ArrayList<Document>();
// poll for new tweets in the queue. If new tweets are added, send them
// to the topic
for(Status status:queue) {
String json = TwitterObjectFactory.getRawJSON(status);
BasicDBObject dbObject = (BasicDBObject) JSON.parse(json);
coll.insertOne(new Document(dbObject));
}
}
}
This is what the mongod command prompt is showing me:
2019-05-26T19:26:29.181+0300 I NETWORK [initandlisten] waiting for connections on port 27017
2019-05-26T19:28:05.466+0300 I NETWORK [listener] connection accepted from 127.0.0.1:54232 #1 (1 connection now open)
2019-05-26T19:28:05.475+0300 I NETWORK [conn1] received client metadata from 127.0.0.1:54232 conn1: { driver: { name: "mongo-java-driver", version: "3.5.0" }, os: { type: "Windows", name: "Windows 10", architecture: "amd64", version: "10.0" }, platform: "Java/Oracle Corporation/1.8.0_152-b16" }
but no tweets are written to the database. Can someone please explain where the error is?
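One thing that stands out in the code: the for-each loop at the end runs over whatever happens to be in the LinkedBlockingQueue at that instant, which is almost certainly nothing because the stream has only just been opened, and then main() simply falls through, so tweets that arrive later are never read. The KafkaProducer is also created but never used, and JSON.parse here is Jetty's org.mortbay.util.ajax.JSON, whose parse() does not return a BasicDBObject. A blocking poll loop is one way to address this; the sketch below builds the Document directly from the Status fields rather than from raw JSON, which is my own choice, not something from the original post:
while (true) {
    // wait up to 10 seconds for the next tweet pushed by the StatusListener
    Status status = queue.poll(10, java.util.concurrent.TimeUnit.SECONDS);
    if (status == null) {
        continue; // nothing arrived yet, keep waiting
    }
    // forward the tweet text to the Kafka topic using the producer created above
    producer.send(new ProducerRecord<String, String>(topicName, status.getText()));
    // build the Mongo document from the Status fields and insert it
    Document tweet = new Document("id", status.getId())
            .append("user", status.getUser().getScreenName())
            .append("text", status.getText())
            .append("createdAt", status.getCreatedAt());
    coll.insertOne(tweet);
}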
I have MyBatis migrations set up for my account, run via the migrate command on the command line (in Jenkins). Now I want to integrate this with the application itself (Spring Boot). Currently I have a set of SQL files, each with its up SQL and an @UNDO section.
So when I start the Spring Boot application, I want to run the migrate up command without changing the SQL files that I already have. Is this possible with MyBatis and Spring?
This is about MyBatis-Migrations, right?
Spring Boot does not provide out-of-the-box support; however, it seems to be possible to write a custom DatabasePopulator.
Here is a simple implementation.
It uses Migrations' Runtime Migration feature.
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.TreeSet;
import java.util.stream.Collectors;
import javax.sql.DataSource;
import org.apache.ibatis.migration.Change;
import org.apache.ibatis.migration.DataSourceConnectionProvider;
import org.apache.ibatis.migration.MigrationException;
import org.apache.ibatis.migration.MigrationLoader;
import org.apache.ibatis.migration.MigrationReader;
import org.apache.ibatis.migration.operations.UpOperation;
import org.apache.ibatis.migration.options.DatabaseOperationOption;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.jdbc.datasource.init.DataSourceInitializer;
import org.springframework.jdbc.datasource.init.DatabasePopulator;
import org.springframework.jdbc.datasource.init.ScriptException;
import org.springframework.jdbc.datasource.init.UncategorizedScriptException;
@Configuration
public class MyBatisMigrationsConfig {
private static final String scriptsDir = "scripts";
private static final String changelogTable = "changelog";
@Bean
public DataSourceInitializer dataSourceInitializer(DataSource dataSource) {
Properties properties = new Properties();
properties.setProperty("changelog", changelogTable);
DatabaseOperationOption options = new DatabaseOperationOption();
options.setChangelogTable(changelogTable);
MyBatisMigrationsPopulator populator = new MyBatisMigrationsPopulator(dataSource, scriptsDir, properties, options,
new PathMatchingResourcePatternResolver());
DataSourceInitializer dataSourceInitializer = new DataSourceInitializer();
dataSourceInitializer.setDataSource(dataSource);
dataSourceInitializer.setDatabasePopulator(populator);
return dataSourceInitializer;
}
private static class MyBatisMigrationsPopulator implements DatabasePopulator {
private final DataSource dataSource;
private final String scriptsDir;
private final Properties properties;
private final DatabaseOperationOption options;
private final ResourcePatternResolver resourcePatternResolver;
public MyBatisMigrationsPopulator(DataSource dataSource, String scriptsDir,
Properties properties, DatabaseOperationOption options, ResourcePatternResolver resourcePatternResolver) {
super();
this.dataSource = dataSource;
this.scriptsDir = scriptsDir;
this.properties = properties;
this.options = options;
this.resourcePatternResolver = resourcePatternResolver;
}
public void populate(Connection connection) throws SQLException, ScriptException {
try {
new UpOperation().operate(new DataSourceConnectionProvider(dataSource),
createMigrationsLoader(), options, System.out);
} catch (MigrationException e) {
throw new UncategorizedScriptException("Migration failed.", e.getCause());
}
}
protected MigrationLoader createMigrationsLoader() {
return new SpringMigrationLoader(resourcePatternResolver, scriptsDir, "utf-8", properties);
}
}
private static class SpringMigrationLoader implements MigrationLoader {
protected static final String BOOTSTRAP_SQL = "bootstrap.sql";
protected static final String ONABORT_SQL = "onabort.sql";
private ResourcePatternResolver resourcePatternResolver;
private String path;
private String charset;
private Properties properties;
public SpringMigrationLoader(
ResourcePatternResolver resourcePatternResolver,
String path,
String charset,
Properties properties) {
this.resourcePatternResolver = resourcePatternResolver;
this.path = path;
this.charset = charset;
this.properties = properties;
}
@Override
public List<Change> getMigrations() {
Collection<String> filenames = new TreeSet<>();
for (Resource res : getResources("/*.sql")) {
filenames.add(res.getFilename());
}
filenames.remove(BOOTSTRAP_SQL);
filenames.remove(ONABORT_SQL);
return filenames.stream()
.map(this::parseChangeFromFilename)
.collect(Collectors.toList());
}
@Override
public Reader getScriptReader(Change change, boolean undo) {
try {
return getReader(change.getFilename(), undo);
} catch (IOException e) {
throw new MigrationException("Failed to read bootstrap script.", e);
}
}
@Override
public Reader getBootstrapReader() {
try {
return getReader(BOOTSTRAP_SQL, false);
} catch (FileNotFoundException e) {
// ignore
} catch (IOException e) {
throw new MigrationException("Failed to read bootstrap script.", e);
}
return null;
}
@Override
public Reader getOnAbortReader() {
try {
return getReader(ONABORT_SQL, false);
} catch (FileNotFoundException e) {
// ignore
} catch (IOException e) {
throw new MigrationException("Failed to read onabort script.", e);
}
return null;
}
protected Resource getResource(String pattern) {
return this.resourcePatternResolver.getResource(this.path + "/" + pattern);
}
protected Resource[] getResources(String pattern) {
try {
return this.resourcePatternResolver.getResources(this.path + pattern);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
protected Change parseChangeFromFilename(String filename) {
try {
String name = filename.substring(0, filename.lastIndexOf("."));
int separator = name.indexOf("_");
BigDecimal id = new BigDecimal(name.substring(0, separator));
String description = name.substring(separator + 1).replace('_', ' ');
Change change = new Change(id);
change.setFilename(filename);
change.setDescription(description);
return change;
} catch (Exception e) {
throw new MigrationException("Error parsing change from file. Cause: " + e, e);
}
}
protected Reader getReader(String fileName, boolean undo) throws IOException {
InputStream inputStream = getResource(fileName).getURL().openStream();
return new MigrationReader(inputStream, charset, undo, properties);
}
}
}
Here is an executable demo project.
You may need to modify the datasource settings in application.properties.
Hope this helps!
For Spring:
import java.io.File;
import java.net.URL;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import javax.sql.DataSource;
import org.apache.ibatis.migration.ConnectionProvider;
import org.apache.ibatis.migration.FileMigrationLoader;
import org.apache.ibatis.migration.operations.UpOperation;
import org.apache.ibatis.migration.options.DatabaseOperationOption;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.init.DataSourceInitializer;
import org.springframework.jdbc.datasource.init.DatabasePopulator;
import org.springframework.jdbc.datasource.init.ScriptException;
@Configuration
public class MyBatisMigrationRuntimeConfiguration {
private static final String CHANGELOG_TABLE = "changelog";
private static final String MIGRATION_SCRIPTS = "migration/scripts";
@Bean
public DataSourceInitializer dataSourceInitializer(DataSource dataSource) {
DataSourceInitializer dataSourceInitializer = new DataSourceInitializer();
dataSourceInitializer.setDataSource(dataSource);
dataSourceInitializer.setDatabasePopulator(new Populator());
return dataSourceInitializer;
}
private DatabaseOperationOption getOption() {
DatabaseOperationOption options = new DatabaseOperationOption();
options.setChangelogTable(CHANGELOG_TABLE);
return options;
}
private Properties getProperties() {
Properties properties = new Properties();
properties.setProperty("changelog", CHANGELOG_TABLE);
return properties;
}
private File getScriptDir() {
URL url = getClass().getClassLoader().getResource(MIGRATION_SCRIPTS);
if (url == null) {
throw new IllegalArgumentException("file is not found!");
} else {
return new File(url.getFile());
}
}
private class Populator implements DatabasePopulator {
@Override
public void populate(Connection connection) throws SQLException, ScriptException {
new UpOperation().operate(
new SimplyConnectionProvider(connection),
new FileMigrationLoader(getScriptDir(), "utf-8", getProperties()),
getOption(),
System.out
);
}
}
private static class SimplyConnectionProvider implements ConnectionProvider {
private final Connection connection;
public SimplyConnectionProvider(Connection connection) {
this.connection = connection;
}
public Connection getConnection() {
return connection;
}
}
}
This is the StBolt.java class.
package com.storm.cassandra;
import java.util.Map;
import net.sf.json.JSONObject;
import net.sf.json.JSONSerializer;
import org.apache.log4j.Logger;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.IBasicBolt;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
public class StBolt implements IBasicBolt {
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger
.getLogger(StBolt.class);
private static Session session = null;
private Cluster cluster = null;
String cassandraURL;
JSONObject eventJson = null;
String topicname = null;
String ip = null;
String menu = null;
String product = null;
Row row = null;
com.datastax.driver.core.ResultSet viewcount = null;
com.datastax.driver.core.ResultSet segmentlistResult = null;
com.datastax.driver.core.ResultSet newCountUpdatedResult = null;
public StBolt(String topicname) {
this.topicname = topicname;
}
public void prepare(Map stormConf, TopologyContext topologyContext) {
cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
System.out.println("load cassandra ip");
session = cluster.connect();
System.out.println("CassandraCounterBolt prepare method ended");
}
public void execute(Tuple input, BasicOutputCollector collector) {
System.out.println("Execute");
Fields fields = input.getFields();
try {
eventJson = (JSONObject) JSONSerializer.toJSON((String) input
.getValueByField(fields.get(0)));
topicname = (String) eventJson.get("topicName");
ip = (String) eventJson.get("ip");
menu = (String) eventJson.get("menu");
product = (String) eventJson.get("product");
String ievent = "ievent";
String install = "install";
viewcount = session
.execute("update webapp.viewcount set count=count+1 where topicname='"+topicname+
"'and ip= '"+ip+"'and menu='"+menu+"'and product='"+product+"'" );
} catch (Exception e) {
e.printStackTrace();
}
}
public void declareOutputFields(OutputFieldsDeclarer declarer) {
}
public Map<String, Object> getComponentConfiguration() {
return null;
}
public void cleanup() {
}
}
Here is the StTopology.java class:
package com.storm.cassandra;
import org.apache.storm.kafka.BrokerHosts;
import org.apache.storm.kafka.KafkaSpout;
import org.apache.storm.kafka.SpoutConfig;
import org.apache.storm.kafka.StringScheme;
import org.apache.storm.kafka.ZkHosts;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.spout.SchemeAsMultiScheme;
import org.apache.storm.topology.TopologyBuilder;
public class StTopology {
public static void main(String[] args) throws Exception {
if (args.length == 4) {
BrokerHosts hosts = new ZkHosts("localhost:2181");
//System.out
//.println("Insufficent Arguements - topologyName kafkaTopic ZKRoot ID");
SpoutConfig kafkaConf1 = new SpoutConfig(hosts, args[1], args[2],
args[3]);
//System.out
//.println("Insufficent Arguements - topologyName kafkaTopic ZKRoot ID");
//kafkaConf1.forceFromStart = false;
kafkaConf1.zkRoot = args[2];
kafkaConf1.scheme = new SchemeAsMultiScheme(new StringScheme());
KafkaSpout kafkaSpout1 = new KafkaSpout(kafkaConf1);
StBolt countbolt = new StBolt(args[1]);
TopologyBuilder builder = new TopologyBuilder();
builder.setSpout("kafkaspout", kafkaSpout1, 1);
builder.setBolt("counterbolt", countbolt, 1).shuffleGrouping(
"kafkaspout");
Config config = new Config();
config.setDebug(true);
config.put(Config.TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS, 1);
config.setNumWorkers(1);
LocalCluster cluster = new LocalCluster();
cluster.submitTopology(args[0], config, builder.createTopology());
// StormSubmitter.submitTopology(args[0], config,
// builder.createTopology());
} else {
System.out
.println("Insufficent Arguements - topologyName kafkaTopic ZKRoot ID");
}
}
}
I am trying to get JSON data from the Kafka console producer, process it in Storm and store it in Cassandra.
For some reason, there is no response from the bolt when I run the code with the parameters viewcount usercount /kafkastorm webapp1.
I have Kafka getting data from the console producer on the topic usercount, and the correct table in Cassandra.
The code compiles and runs without any errors, but the console shows terminated.
I get no activity anywhere, despite providing the right JSON input to the Kafka console producer multiple times: {"topicname":"usercount","ip":"127.0.0.1","menu":"dress","product":"tshirt"}.
There is no topology shown as being created in the Storm UI's Topology Summary either.
I believe I have all the Kafka, Storm and Cassandra dependencies in place.
Please point me in the right direction with this issue. Thanks.
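Two things may be worth checking. The Storm UI only lists topologies submitted to a real cluster through StormSubmitter, so a LocalCluster topology will never show up there, and a local topology only lives as long as the JVM, so the process can terminate before the spout has consumed anything if main() returns right after submitting. Also, the bolt looks up eventJson.get("topicName") while the sample input uses the key "topicname", so that lookup would return null even when a tuple does arrive. A sketch of keeping the local topology alive long enough to observe output (the sleep duration is arbitrary):
LocalCluster cluster = new LocalCluster();
cluster.submitTopology(args[0], config, builder.createTopology());
// give the spout and bolt time to run in local mode before tearing the cluster down
Thread.sleep(120000);
cluster.killTopology(args[0]);
cluster.shutdown();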
I am invoking my batch service via the command line and supplying a few parameters for the job. I need access to these parameters when creating the job, as I need to look up data from the DB for a 'site name' that is supplied as a parameter and dynamically create multiple steps. The issue is in the 'createJob' method. I've hard-coded the site id for now, but there is an exception for the itemizedReader method:
Error creating bean with name 'scopedTarget.itemizedReader' defined in billing.BillingConfig: Unsatisfied dependency expressed through method 'itemizedReader' parameter 1: No qualifying bean of type [java.lang.String]
Spring configuration
package billing;
import billing.components.BillingFieldSetter;
import billing.components.BillingPrepStatementSetter;
import billing.components.SummaryProcessor;
import billing.mapper.ItemizedCostingMapper;
import billing.model.BillingItem;
import billing.model.ItemizedCosting;
import billing.tasklet.SummaryOutputTasklet;
import billing.batch.common.AppProps;
import billing.batch.common.SqlConst;
import billing.batch.common.model.ItemizedPartner;
import billing.batch.common.repo.PartnerBillingRepo;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.job.builder.SimpleJobBuilder;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.JdbcCursorItemReader;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineAggregator;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.file.transform.FieldExtractor;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.annotation.Profile;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
@ComponentScan(basePackages = {"billing", "billing.batch.common"})
@Configuration
@EnableBatchProcessing
@EnableAspectJAutoProxy
@PropertySource("classpath:/app.properties")
public class BillingConfig {
private static final Logger LOG = LogManager.getLogger();
@Autowired
private AppProps appProps;
@Autowired
private PartnerBillingRepo billingRepo;
@Bean
@Profile("prod")
public DataSource datasource() {
final HikariConfig cfg = new HikariConfig();
cfg.setJdbcUrl(appProps.getPartnerBillingUrl());
cfg.setUsername(appProps.getPartnerBillingUsername());
cfg.setPassword(appProps.getPartnerBillingPassword());
cfg.addDataSourceProperty("cachePrepStmts", appProps.getCachePrepStatements());
cfg.addDataSourceProperty("prepStmtCacheSize", appProps.getPrepStatementCacheSize());
cfg.addDataSourceProperty("prepStmtCacheSqlLimit", appProps.getPrepStatementCacheSqlLimit());
HikariDataSource ds = new HikariDataSource(cfg);
return ds;
}
@Bean
public JdbcTemplate template(DataSource ds) {
return new JdbcTemplate(ds);
}
@Bean
@StepScope
public FlatFileItemReader billingFileReader(@Value("#{jobParameters['input.file']}") String inputFile) {
DefaultLineMapper lineMapper = new DefaultLineMapper();
lineMapper.setFieldSetMapper(new BillingFieldSetter());
lineMapper.setLineTokenizer(new DelimitedLineTokenizer());
FlatFileItemReader reader = new FlatFileItemReader();
reader.setLineMapper(lineMapper);
reader.setResource(new FileSystemResource(inputFile));
return reader;
}
@Bean
@StepScope
public JdbcBatchItemWriter billingWriter(DataSource ds, BillingPrepStatementSetter setter) {
JdbcBatchItemWriter writer = new JdbcBatchItemWriter();
writer.setDataSource(ds);
writer.setItemPreparedStatementSetter(setter);
writer.setSql(SqlConst.INSERT_INTO_BILLING);
return writer;
}
@Bean
@StepScope
public BillingPrepStatementSetter prepStatementSetter() {
return new BillingPrepStatementSetter();
}
@Bean
@StepScope
public SummaryProcessor summaryProc() {
return new SummaryProcessor();
}
@Bean
@StepScope
public SummaryOutputTasklet summaryTask() {
return new SummaryOutputTasklet();
}
@Bean
@StepScope
public ItemReader<ItemizedCosting> itemizedReader(@Value("#{jobParameters['site.id']}") Integer siteId, String accountCodes,
@Value("#{jobParameter['start.date']") String startDate, @Value("#{jobParameters['end.date']") String endDate) {
JdbcCursorItemReader reader = new JdbcCursorItemReader();
reader.setDataSource(datasource());
reader.setSql(SqlConst.SELECT_ITEMIZED_BILLING_FOR_ACCOUNT_CODES);
reader.setRowMapper(new ItemizedCostingMapper());
reader.setPreparedStatementSetter((ps) -> {
try {
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
ps.setTimestamp(0, new Timestamp(formatter.parse(startDate).getTime()));
ps.setTimestamp(1, new Timestamp(formatter.parse(endDate).getTime()));
} catch (Exception err) {
LOG.error("Unable to parse dates, start: {} end: {}", startDate, endDate);
}
ps.setString(2, accountCodes);
ps.setInt(3, siteId);
});
return reader;
}
@Bean
@StepScope
public ItemWriter<ItemizedCosting> itemizedWriter(@Value("start.date") String startDate,
String partnerName) {
DelimitedLineAggregator lineAgg = new DelimitedLineAggregator();
FieldExtractor<ItemizedCosting> extractor = (f) -> {
Object[] output = new Object[9];
output[0] = f.getExtension();
output[1] = f.getPbxCallTime();
output[2] = f.getDuration();
output[3] = f.getAccountCode();
output[4] = f.getDigits();
output[5] = f.getCost();
output[6] = f.getDestination();
output[7] = f.getCarrier();
output[8] = f.getAttribute();
return output;
};
lineAgg.setFieldExtractor(extractor);
Timestamp start = null;
try {
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
start = new Timestamp(formatter.parse(startDate).getTime());
} catch (Exception e) {
LOG.error("Unable to parse date: {}", startDate);
}
FlatFileItemWriter<ItemizedCosting> writer = new FlatFileItemWriter<>();
writer.setEncoding("UTF-8");
writer.setLineAggregator(lineAgg);
writer.setResource(new FileSystemResource(String.format("%s/%2$tY-%2$tm_%s_",
appProps.getItemizedBillingOutputPath(), start, partnerName)));
return writer;
}
@Bean
public Job createJob(JobBuilderFactory jobBuilder, StepBuilderFactory stepBuilders, DataSource ds, FlatFileItemReader reader)
throws Exception {
Step findSiteIdStep = stepBuilders.get("find.site.id").tasklet((contribution, chunkContext) -> {
String siteName
= (String) chunkContext.getStepContext().getJobParameters().get(BillingConst.PARAM_SITE);
Integer siteId = billingRepo.findSiteIdByName(siteName);
chunkContext.getStepContext().getStepExecution().getJobExecution().getExecutionContext().put(
BillingConst.SITE_ID, siteId);
return RepeatStatus.FINISHED;
}).build();
Step processFileStep = stepBuilders.get("process.file").<BillingItem, BillingItem>chunk(appProps.getChunkSize())
.reader(reader)
.processor(summaryProc())
.writer(billingWriter(ds, prepStatementSetter())).build();
Step outputSummary = stepBuilders.get("output.summary").tasklet(summaryTask()).build();
SimpleJobBuilder builder = jobBuilder.get("process.aspivia").incrementer(new RunIdIncrementer())
.start(findSiteIdStep)
.next(processFileStep)
.next(outputSummary);
List<ItemizedPartner> partners = billingRepo.findPartnersForSite("CPT");
Integer siteId = billingRepo.findSiteIdByName("CPT");
Map<String, String> partnerAccCodes = new HashMap<>();
partners.stream().forEach(i -> {
if (!partnerAccCodes.containsKey(i.getPartnerName())) {
partnerAccCodes.put(i.getPartnerName(), "");
}
String accCodes = partnerAccCodes.get(i.getPartnerName());
accCodes += i.getAccountCode().toString() + ", ";
partnerAccCodes.put(i.getPartnerName(), accCodes);
});
partnerAccCodes.forEach((k, v) -> {
Step itemizedReport = stepBuilders.get("itemized." + k).<ItemizedCosting, ItemizedCosting>chunk(appProps.getChunkSize())
.reader(itemizedReader(siteId, v, null, null))
.writer(itemizedWriter(null, k)).build();
builder.next(itemizedReport);
});
return builder.build();
}
@Bean
public static PropertySourcesPlaceholderConfigurer propCfg() {
return new PropertySourcesPlaceholderConfigurer();
}
@Bean
public DataSourceTransactionManager transactionManager(DataSource datasource) {
return new DataSourceTransactionManager(datasource);
}
}
The issue is due to the lifecycle of how Spring Batch works. If a bean is decorated with @StepScope, the job parameters are only available once the job is launched.
final Job loadAspiviaDataJob = context.getBean(Job.class);
final JobLauncher launcher = context.getBean(JobLauncher.class);
JobParametersBuilder paramBuilder = new JobParametersBuilder();
paramBuilder.addString(AspiviaConst.PARAM_INPUT_FILE, inputFile);
paramBuilder.addString(AspiviaConst.PARAM_SITE, site);
paramBuilder.addString(AspiviaConst.PARAM_OUTPUT_FILE_PATH, summaryFile);
JobExecution runStatus = launcher.run(loadAspiviaDataJob, paramBuilder.toJobParameters());
In the code above we retrieve the Job, which is set up via the createJob bean method in my configuration. At that point the job parameters are not available yet.
What I have done to get access to the values I need is as follows:
Added an extra @PropertySource("classpath:cli-runtime.properties")
The Application.java that launches the Spring Batch job saves the properties we need to cli-runtime.properties. When the Job is created in the @Configuration class, the values are loaded from the property file and I can create the additional steps I require in the job.
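Below is a rough sketch of that workaround. The property keys (site.name, input.file) are illustrative, and writing into target/classes only stands in for "somewhere on the application's classpath"; a real setup would pick a location the runtime classpath actually includes so that @PropertySource("classpath:cli-runtime.properties") can resolve it:
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.Properties;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
public class Application {
    public static void main(String[] args) throws Exception {
        // capture the CLI values before the Spring context starts, so the
        // @Configuration class can read them at bean-definition time
        Properties cli = new Properties();
        cli.setProperty("site.name", args[0]); // illustrative keys; map the real CLI arguments here
        cli.setProperty("input.file", args[1]);
        try (OutputStream out = new FileOutputStream("target/classes/cli-runtime.properties")) {
            cli.store(out, "CLI parameters captured before the Spring context starts");
        }
        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(BillingConfig.class);
        // the additional steps can now be built from @Value("${site.name}") style properties;
        // fetch the Job and JobLauncher from the context and run them as shown above
        context.close();
    }
}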
I have two AutoCompleteTextView controls on the same page, ACTV1 and ACTV2, and only one (ACTV1) is showing suggestions from my database. For each data-binding action I've made a separate Java class: ACTV1.java and ACTV2.java.
But if I add an intent filter (MAIN, LAUNCHER) in my manifest file for the ACTV2.java class and set ACTV2.java as the Launch Action in the run configuration, then I no longer get suggestions for the ACTV1 control, but this time I get suggestions for the ACTV2 control.
The two Java classes are identical apart from the names of some constants/controls.
package com.fishing2;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONArray;
import org.json.JSONObject;
import android.app.Activity;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
public class CompleteBalti extends Activity {
//private CustomAutoCompleteView CompleteBalti;
private ArrayAdapter<String> adaperbalti;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_partida);
}
final TextWatcher textChecker = new TextWatcher() {
public void afterTextChanged(Editable s) {}
public void beforeTextChanged(CharSequence s, int start, int count, int after) { }
public void onTextChanged(CharSequence s, int start, int before, int count)
{
adaperbalti.clear();
callPHP1();
}
};
private void callPHP1(){
String result = "";
InputStream is=null;
AutoCompleteTextView CompleteBalti = (AutoCompleteTextView) findViewById(R.id.nume_localitate);
ArrayList<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
nameValuePairs.add(new BasicNameValuePair("st",CompleteBalti.getText().toString()));
{
try{
HttpClient httpclient = new DefaultHttpClient();
HttpPost httppost = new HttpPost("http://192.168.3.159/wtf/balti.php");
httppost.setEntity(new UrlEncodedFormEntity(nameValuePairs,"utf-8"));
HttpResponse response = httpclient.execute(httppost);
HttpEntity entity = response.getEntity();
is = entity.getContent();
BufferedReader reader = new BufferedReader(new InputStreamReader(is,"utf-8"),8);
StringBuilder sb = new StringBuilder();
String line = null;
while ((line = reader.readLine()) != null) {
sb.append(line + "\n");
}
is.close();
result=sb.toString();
result = result.substring(1);
}catch(Exception e){
Log.e("log_tag", "Error in http connection "+e.toString());
}
try{
JSONArray jArray = new JSONArray(result);
JSONObject json_data = null;
for (int i=0;i<jArray.length(); i++)
{
json_data = jArray.getJSONObject(i);
adaperbalti.add(json_data.getString("nume_balta"));
}
} catch(Exception e1){
Log.e("log_tag", "Error converting result "+e1.toString());
}
}
}
}
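For what it's worth, only one launcher Activity runs at a time, so splitting the two AutoCompleteTextView bindings across two Activity classes means whichever class is not launched never gets a chance to attach its adapter. A sketch of wiring both fields inside the single Activity that owns activity_partida is below; the second view id (nume_balta) and the adapter setup are assumptions, and each watcher would call its own PHP lookup the way callPHP1() does:
import android.app.Activity;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
public class PartidaActivity extends Activity {
    private ArrayAdapter<String> adapterLocalitati;
    private ArrayAdapter<String> adapterBalti;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_partida);
        AutoCompleteTextView localitate = (AutoCompleteTextView) findViewById(R.id.nume_localitate);
        AutoCompleteTextView balta = (AutoCompleteTextView) findViewById(R.id.nume_balta); // assumed id
        adapterLocalitati = new ArrayAdapter<String>(this, android.R.layout.simple_dropdown_item_1line);
        adapterBalti = new ArrayAdapter<String>(this, android.R.layout.simple_dropdown_item_1line);
        localitate.setAdapter(adapterLocalitati);
        balta.setAdapter(adapterBalti);
        localitate.addTextChangedListener(new TextWatcher() {
            public void beforeTextChanged(CharSequence s, int start, int count, int after) { }
            public void afterTextChanged(Editable s) { }
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                adapterLocalitati.clear();
                // query the localitati endpoint here and adapterLocalitati.add(...) each result
            }
        });
        balta.addTextChangedListener(new TextWatcher() {
            public void beforeTextChanged(CharSequence s, int start, int count, int after) { }
            public void afterTextChanged(Editable s) { }
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                adapterBalti.clear();
                // query the balti endpoint here and adapterBalti.add(...) each result
            }
        });
    }
}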