Kafka Consumer does not read data from Producer - apache-kafka

My Kafka consumer doesn't read from my producer. I noticed that after calling the poll method, the code never reaches the "hello" print, and no error message is shown.
The code compiles and runs, but it behaves as if it stops inside the poll method.
Note: my producer works fine; I created this consumer to test it.
Code:
public class ConsumerApp {
    public static void main(String[] args) {
        // Create a Properties object for the consumer config settings
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> myconsumer = new KafkaConsumer<String, String>(props);
        myconsumer.subscribe(Arrays.asList("test"));
        try {
            while (true) {
                ConsumerRecords<String, String> records = myconsumer.poll(100);
                System.out.println("hello");
                // processing logic goes here
                for (ConsumerRecord<String, String> record : records) {
                    // processing records; note that the first argument to String.format
                    // is the format string, so the original println(String.format(record.topic(), ...))
                    // would only ever print the topic name -- printf with placeholders is meant here
                    System.out.printf("topic = %s, partition = %d, offset = %d, key = %s, value = %s%n",
                            record.topic(), record.partition(), record.offset(), record.key(),
                            record.value());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Closing consumer
            myconsumer.close();
        }
    }
}

I found the solution: I hadn't set a group id (I also added the ZooKeeper connection settings). Now my consumer reads the data! (Note that the new Java KafkaConsumer does not actually talk to ZooKeeper; the group.id property is what subscribe() requires.) Here is the code:
public static void main(String[] args) {
    // Create a Properties object for the consumer config settings
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");
    // the zookeeper.* properties below are ignored by the new Java consumer;
    // group.id is the setting that subscribe() actually needs
    props.put("zookeeper.connect", "localhost:2181");
    props.put("group.id", "console");
    props.put("zookeeper.session.timeout.ms", "500");
    props.put("zookeeper.sync.timeout.ms", "500");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    KafkaConsumer<String, String> myconsumer = new KafkaConsumer<String, String>(props);
    myconsumer.subscribe(Collections.singletonList("test"));
    try {
        while (true) {
            ConsumerRecords<String, String> records = myconsumer.poll(100);
            // processing logic goes here
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d, key = %s, value = %s%n",
                        record.offset(), record.key(), record.value());
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Closing consumer
        myconsumer.close();
    }
}

A long time ago I was playing with this example and it worked well; try it:
Consumer:
package com.spnotes.kafka.simple;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;

import java.util.Arrays;
import java.util.Properties;
import java.util.Scanner;

/**
 * Created by sunilpatil on 12/28/15.
 */
public class Consumer {
    private static Scanner in;

    public static void main(String[] argv) throws Exception {
        if (argv.length != 2) {
            System.err.printf("Usage: %s <topicName> <groupId>\n",
                    Consumer.class.getSimpleName());
            System.exit(-1);
        }
        in = new Scanner(System.in);
        String topicName = argv[0];
        String groupId = argv[1];

        ConsumerThread consumerRunnable = new ConsumerThread(topicName, groupId);
        consumerRunnable.start();
        String line = "";
        while (!line.equals("exit")) {
            line = in.next();
        }
        consumerRunnable.getKafkaConsumer().wakeup();
        System.out.println("Stopping consumer .....");
        consumerRunnable.join();
    }

    private static class ConsumerThread extends Thread {
        private String topicName;
        private String groupId;
        private KafkaConsumer<String, String> kafkaConsumer;

        public ConsumerThread(String topicName, String groupId) {
            this.topicName = topicName;
            this.groupId = groupId;
        }

        public void run() {
            Properties configProperties = new Properties();
            configProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            // use StringDeserializer for the key to match KafkaConsumer<String, String>
            configProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
            configProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
            configProperties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
            configProperties.put(ConsumerConfig.CLIENT_ID_CONFIG, "simple");

            // Figure out where to start processing messages from
            kafkaConsumer = new KafkaConsumer<String, String>(configProperties);
            kafkaConsumer.subscribe(Arrays.asList(topicName));
            // Start processing messages
            try {
                while (true) {
                    ConsumerRecords<String, String> records = kafkaConsumer.poll(100);
                    for (ConsumerRecord<String, String> record : records)
                        System.out.println(record.value());
                }
            } catch (WakeupException ex) {
                System.out.println("Exception caught " + ex.getMessage());
            } finally {
                kafkaConsumer.close();
                System.out.println("After closing KafkaConsumer");
            }
        }

        public KafkaConsumer<String, String> getKafkaConsumer() {
            return this.kafkaConsumer;
        }
    }
}
Producer:
package com.spnotes.kafka.simple;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;
import java.util.Scanner;

/**
 * Created by sunilpatil on 12/28/15.
 */
public class Producer {
    private static Scanner in;

    public static void main(String[] argv) throws Exception {
        if (argv.length != 1) {
            System.err.println("Please specify 1 parameter (the topic name)");
            System.exit(-1);
        }
        String topicName = argv[0];
        in = new Scanner(System.in);
        System.out.println("Enter message (type exit to quit)");

        // Configure the producer
        Properties configProperties = new Properties();
        configProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // use StringSerializer for the key to match ProducerRecord<String, String>
        configProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        configProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");

        org.apache.kafka.clients.producer.Producer<String, String> producer =
                new KafkaProducer<String, String>(configProperties);
        String line = in.nextLine();
        while (!line.equals("exit")) {
            // TODO: Make sure to use the ProducerRecord constructor that does not take a partition id
            ProducerRecord<String, String> rec = new ProducerRecord<String, String>(topicName, line);
            producer.send(rec);
            line = in.nextLine();
        }
        in.close();
        producer.close();
    }
}
You can find another nice example here: https://www.codenotfound.com/spring-kafka-consumer-producer-example.html

Related

Write in Topic in Kafka through Java Code

I am trying to write to a Kafka topic through Java. I have created the topic, but now I want to insert some data into that topic.
Thanks in advance.
Here's an example of a synchronous producer. It should work with Kafka 0.11 (and a few prior releases too):
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

public class MyKafkaProducer {
    private final static String TOPIC = "my-example-topic";
    private final static String BOOTSTRAP_SERVERS = "localhost:9092,localhost:9093,localhost:9094";

    private static Producer<Long, String> createProducer() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        props.put(ProducerConfig.CLIENT_ID_CONFIG, "MyKafkaProducer");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return new KafkaProducer<>(props);
    }

    static void runProducer(final int sendMessageCount) throws Exception {
        final Producer<Long, String> producer = createProducer();
        try {
            for (long index = 1; index <= sendMessageCount; index++) {
                final ProducerRecord<Long, String> record = new ProducerRecord<>(TOPIC, index, "Message " + index);
                // send() returns a Future; calling get() blocks, which makes this a synchronous send
                RecordMetadata metadata = producer.send(record).get();
                System.out.printf("sent record(key=%s value='%s') metadata(partition=%d, offset=%d)\n",
                        record.key(), record.value(), metadata.partition(), metadata.offset());
            }
        } finally {
            producer.flush();
            producer.close();
        }
    }

    public static void main(String[] args) throws Exception {
        if (args.length == 0) {
            runProducer(5);
        } else {
            runProducer(Integer.parseInt(args[0]));
        }
    }
}
You may need to modify some of the hard-coded settings.
Reference: http://cloudurable.com/blog/kafka-tutorial-kafka-producer/index.html

Error to serialize message when sending to kafka topic

I need to test a message which contains headers, so I need to use MessageBuilder, but I cannot serialize it.
I tried adding the serialization settings on the producer props, but it did not work.
Can someone help me?
This is the error:
org.apache.kafka.common.errors.SerializationException: Can't convert value of class org.springframework.messaging.support.GenericMessage to class org.apache.kafka.common.serialization.StringSerializer specified in value.serializer
My test class:
public class TransactionMastercardAdapterTest extends AbstractTest {
    @Autowired
    private KafkaTemplate<String, Message<String>> template;

    @ClassRule
    public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1);

    @BeforeClass
    public static void setUp() {
        System.setProperty("spring.kafka.bootstrap-servers", embeddedKafka.getBrokersAsString());
        System.setProperty("spring.cloud.stream.kafka.binder.zkNodes", embeddedKafka.getZookeeperConnectionString());
    }

    @Test
    public void sendTransactionCommandTest() {
        String payload = "{\"o2oTransactionId\" : \"" + UUID.randomUUID().toString().toUpperCase() + "\","
                + "\"cardId\" : \"11\","
                + "\"transactionId\" : \"20110405123456\","
                + "\"amount\" : 200.59,"
                + "\"partnerId\" : \"11\"}";
        Map<String, Object> props = KafkaTestUtils.producerProps(embeddedKafka);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        Producer<String, Message<String>> producer = new KafkaProducer<>(props);
        producer.send(new ProducerRecord<String, Message<String>>("notification_topic", MessageBuilder.withPayload(payload)
                .setHeader("status", "RECEIVED")
                .setHeader("service", "MASTERCARD")
                .build()));

        Map<String, Object> configs = KafkaTestUtils.consumerProps("test1", "false", embeddedKafka);
        configs.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        ConsumerFactory<byte[], byte[]> cf = new DefaultKafkaConsumerFactory<>(configs);
        Consumer<byte[], byte[]> consumer = cf.createConsumer();
        consumer.subscribe(Collections.singleton("transaction_topic"));
        ConsumerRecords<byte[], byte[]> records = consumer.poll(10_000);
        consumer.commitSync();
        assertThat(records.count()).isEqualTo(1);
    }
}
I'd say the error is obvious:
Can't convert value of class org.springframework.messaging.support.GenericMessage to class org.apache.kafka.common.serialization.StringSerializer specified in value.serializer
Your value is a GenericMessage, but StringSerializer can only work with strings.
What you need is a JavaSerializer, which does not exist, but is not so difficult to write:
import org.apache.kafka.common.serialization.Serializer;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.Map;

public class JavaSerializer implements Serializer<Object> {

    @Override
    public byte[] serialize(String topic, Object data) {
        try {
            // standard Java serialization of the message object into a byte[]
            ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
            ObjectOutputStream objectStream = new ObjectOutputStream(byteStream);
            objectStream.writeObject(data);
            objectStream.flush();
            objectStream.close();
            return byteStream.toByteArray();
        } catch (IOException e) {
            throw new IllegalStateException("Can't serialize object: " + data, e);
        }
    }

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
    }

    @Override
    public void close() {
    }
}
And configure it for that value.serializer property.
private void configureProducer() {
    Properties props = new Properties();
    props.put("key.serializer",
            "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer",
            "org.apache.kafka.common.serialization.ByteArraySerializer");
    // the value type parameter must match the serializer: byte[] for ByteArraySerializer
    producer = new KafkaProducer<String, byte[]>(props);
}
This will do the job.
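For completeness, a hedged sketch of how a send might look with that ByteArraySerializer configuration, assuming you serialize the Message yourself with the JavaSerializer defined above (GenericMessage implements Serializable, so Java serialization should round-trip; the topic and variable names are illustrative):
// serialize the Spring Message to bytes ourselves, then send the raw byte[]
JavaSerializer javaSerializer = new JavaSerializer();
byte[] value = javaSerializer.serialize("notification_topic",
        MessageBuilder.withPayload(payload)
                .setHeader("status", "RECEIVED")
                .build());
Producer<String, byte[]> producer = new KafkaProducer<>(props);
producer.send(new ProducerRecord<String, byte[]>("notification_topic", value));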
In my case I am using Spring Cloud, and I had not added the property below to the properties file:
spring.cloud.stream.kafka.binder.configuration.value.serializer=org.apache.kafka.common.serialization.StringSerializer
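As a hedged sketch, the matching key serializer can be set the same way in application.properties (property names assuming Spring Cloud Stream's Kafka binder, which passes binder.configuration.* entries through to the underlying Kafka client):
spring.cloud.stream.kafka.binder.configuration.key.serializer=org.apache.kafka.common.serialization.StringSerializer
spring.cloud.stream.kafka.binder.configuration.value.serializer=org.apache.kafka.common.serialization.StringSerializer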
This is what I used and it worked for me
Map<String, Object> configProps = new HashMap<>();
configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringSerializer.class);
configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, org.springframework.kafka.support.serializer.JsonSerializer.class);
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.IntegerSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
annotate the JSON class with @XmlRootElement
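For example, a minimal sketch of such an annotated payload class (the class and field names here are hypothetical, and javax.xml.bind assumes a pre-Java-11 runtime or an explicit JAXB dependency):
import javax.xml.bind.annotation.XmlRootElement;

// illustrative payload class; annotate your own JSON class the same way
@XmlRootElement
public class TransactionPayload {
    public String cardId;
    public double amount;
}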

Kafka consumer API is not working properly

I am new to Kafka. I have started working with Kafka and am facing the issue below; please help me solve it. Thanks in advance.
First I wrote the producer API, and it works fine, but with the consumer API the messages are not displayed.
My code is like this :
import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ConsumerGroup {
    public static void main(String[] args) throws Exception {
        String topic = "Hello-Kafka";
        String group = "myGroup";
        Properties props = new Properties();
        props.put("bootstrap.servers", "XXX.XX.XX.XX:9092");
        props.put("group.id", group);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
        try {
            consumer.subscribe(Arrays.asList(topic));
            System.out.println("Subscribed to topic " + topic);
            ConsumerRecords<String, String> records = consumer.poll(100);
            System.out.println("records ::" + records);
            System.out.println(records.toString());
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("Record::" + record.offset());
                System.out.println(record.key());
                System.out.println(record.value());
            }
            consumer.commitSync();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            consumer.commitSync();
            consumer.close();
        }
    }
}
Response:
Subscribed to topic Hello-Kafka
records ::org.apache.kafka.clients.consumer.ConsumerRecords@76b0bfab
org.apache.kafka.clients.consumer.ConsumerRecords@76b0bfab
The offset, key, and value are not printed; control never reaches the for (ConsumerRecord record : records) loop. Please help me.
You are trying to print empty records; hence only records.toString() prints in your code, which is essentially the class name and hash code.
I made some changes to your code and got it working. Have a look and see if this helps.
public class ConsumerGroup {
    public static void main(String[] args) throws Exception {
        String topic = "Hello-Kafka";
        String group = "myGroup";
        Properties props = new Properties();
        props.put("bootstrap.servers", "xx.xx.xx.xx:9092");
        props.put("group.id", group);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
        try {
            consumer.subscribe(Arrays.asList(topic));
            System.out.println("Subscribed to topic " + topic);
            // poll in a loop: a single poll() may return before any records have arrived
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(1000);
                if (!records.isEmpty()) {
                    System.out.println("records ::" + records);
                    for (ConsumerRecord<String, String> record : records) {
                        System.out.println("Record::" + record.offset());
                        System.out.println(record.key());
                        System.out.println(record.value());
                    }
                    consumer.commitSync();
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            consumer.commitSync();
            consumer.close();
        }
    }
}

Error producing to embedded kafka

I'm trying to embed a Kafka server in my code. I used the following example code to try to learn how to do so, but for some reason my producer can't send messages to the embedded server (it times out after 60 seconds). I'm using Kafka 0.8.2.2. Can someone tell me what I'm doing wrong?
import kafka.api.FetchRequest;
import kafka.api.FetchRequestBuilder;
import kafka.javaapi.FetchResponse;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.message.MessageAndOffset;
import kafka.server.KafkaConfig;
import kafka.server.KafkaServer;
import kafka.utils.Time;
import org.apache.curator.test.TestingServer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.nio.ByteBuffer;
import java.util.Properties;

public class KafkaLocalBroker {
    public static final String TEST_TOPIC = "test-topic";

    public KafkaConfig kafkaConfig;
    public KafkaServer kafkaServer;
    public TestingServer zookeeper;

    public KafkaLocalBroker() throws Exception {
        zookeeper = new TestingServer(true);
        Properties props = new Properties();
        props.put("zookeeper.connect", zookeeper.getConnectString());
        props.put("broker.id", 0);
        kafkaConfig = new KafkaConfig(props);
        kafkaServer = new KafkaServer(kafkaConfig, new Time() {
            public long nanoseconds() {
                return System.nanoTime();
            }

            public long milliseconds() {
                return System.currentTimeMillis();
            }

            public void sleep(long ms) {
                try {
                    Thread.sleep(ms);
                } catch (InterruptedException e) {
                    // Do nothing
                }
            }
        });
        kafkaServer.startup();
        System.out.println("embedded kafka is up");
    }

    public void stop() {
        kafkaServer.shutdown();
        System.out.println("embedded kafka stop");
    }

    /**
     * a main that tests the embedded kafka
     * @param args
     */
    public static void main(String[] args) {
        KafkaLocalBroker kafkaLocalBroker = null;
        // init kafka server and start it:
        try {
            kafkaLocalBroker = new KafkaLocalBroker();
        } catch (Exception e) {
            e.printStackTrace();
        }
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("acks", "all");
        props.put("retries", 1);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(props);
        // send messages to the local kafka server:
        for (int i = 0; i < 10; i++) {
            ProducerRecord<String, String> data = new ProducerRecord<String, String>(TEST_TOPIC, "test-message" + i);
            producer.send(data, (metadata, exception) -> {
                if (exception != null) {
                    System.out.println("Failed to write log message: " + exception.getMessage());
                } else {
                    System.out.println("Successful write to offset " + metadata.offset()
                            + " in partition " + metadata.partition() + " on topic " + metadata.topic());
                }
            });
        }
        // consume messages from Kafka:
        SimpleConsumer consumer = new SimpleConsumer("localhost", 9092, 10000, 1024000, "clientId");
        long offset = 0L;
        while (offset < 160) { // exit criterion just for this test, so we are not stuck in an endless loop
            // create a fetch request for the test topic, partition 0, current offset, and a fetch size of 100 KB
            FetchRequest fetchRequest = new FetchRequestBuilder().addFetch(TEST_TOPIC, 0, offset, 100000).build();
            // get the message set from the consumer and print the messages out
            FetchResponse messages = consumer.fetch(fetchRequest);
            for (MessageAndOffset msg : messages.messageSet(TEST_TOPIC, 0)) {
                ByteBuffer payload = msg.message().payload();
                byte[] bytes = new byte[payload.limit()];
                payload.get(bytes);
                try {
                    System.out.println(new String(bytes, "UTF-8"));
                } catch (Exception e) {
                    e.printStackTrace();
                }
                // advance the offset after consuming each message
                offset = msg.offset();
            }
        }
        producer.close();
        // close the consumer
        consumer.close();
        // stop the kafka broker:
        if (kafkaLocalBroker != null) {
            kafkaLocalBroker.stop();
        }
    }
}
EDIT: I've included the exception returned from the producer below:
org.apache.kafka.common.errors.TimeoutException: Failed to update metadata after 60000 ms.
The properties used to create the Kafka producer are not valid for 0.8. Go through ProducerConfig and change the properties, or update your Kafka version.
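For reference, a minimal sketch of the old 0.8-style producer configuration and send, assuming the Scala client API that ships with 0.8.x (the topic and message are illustrative):
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import java.util.Properties;

Properties props = new Properties();
props.put("metadata.broker.list", "localhost:9092"); // 0.8 uses metadata.broker.list, not bootstrap.servers
props.put("serializer.class", "kafka.serializer.StringEncoder");
props.put("request.required.acks", "1");
Producer<String, String> producer = new Producer<>(new ProducerConfig(props));
producer.send(new KeyedMessage<String, String>(TEST_TOPIC, "test-message"));
producer.close();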

How to overcome kafka.consumer.ConsumerTimeoutException?

I'm using Kafka 2.11 (the Scala build version) to write a consumer. I'm continuously getting a timeout exception, and I'm not sure I'm using the right APIs here.
Can anybody help me?
Executor
import java.io.File;
import java.io.FileNotFoundException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class MessageListener {
    private Properties properties;
    private ConsumerConnector consumerConnector;
    private String topic;
    private ExecutorService executor;

    public MessageListener(String topic) {
        this.topic = topic;
        KafkaConfigurationLoader confLoader = new KafkaConfigurationLoader();
        try {
            properties = confLoader.loadConsumerConfig();
            ConsumerConfig consumerConfig = new ConsumerConfig(properties);
            consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }

    public void start(File file) {
        Map<String, Integer> topicCountMap = new HashMap<>();
        topicCountMap.put(topic, new Integer(CoreConstants.THREAD_SIZE));
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumerConnector
                .createMessageStreams(topicCountMap);
        List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);
        executor = Executors.newFixedThreadPool(CoreConstants.THREAD_SIZE);
        for (KafkaStream<byte[], byte[]> stream : streams) {
            executor.submit(new ListenerThread(stream));
        }
    }
}
Thread
import kafka.consumer.ConsumerIterator;
import kafka.consumer.ConsumerTimeoutException;
import kafka.consumer.KafkaStream;
import kafka.message.MessageAndMetadata;

public class ListenerThread implements Runnable {
    private KafkaStream<byte[], byte[]> stream;

    public ListenerThread(KafkaStream<byte[], byte[]> msgStream) {
        this.stream = msgStream;
    }

    @Override
    public void run() {
        try {
            ConsumerIterator<byte[], byte[]> it = stream.iterator();
            while (it.hasNext()) {
                // use next() here; makeNext() is an internal template method
                MessageAndMetadata<byte[], byte[]> messageAndMetadata = it.next();
                String topic = messageAndMetadata.topic();
                byte[] message = messageAndMetadata.message();
                System.out.println("111111111111111111111111111");
                FileProcessor processor = new FileProcessor();
                processor.processFile(topic, message);
            }
        } catch (ConsumerTimeoutException cte) {
            System.out.println("Consumer timed out");
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
You can set consumer.timeout.ms=-1 if you don't want this exception to be thrown.
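A hedged sketch of where that property goes, assuming your consumer config is loaded into the Properties that back the old high-level consumer (with -1, the default, hasNext() blocks forever instead of throwing ConsumerTimeoutException):
Properties props = new Properties();
props.put("zookeeper.connect", "localhost:2181");
props.put("group.id", "myGroup");
// -1 = never time out; a positive value makes hasNext() throw
// ConsumerTimeoutException after that many idle milliseconds
props.put("consumer.timeout.ms", "-1");
ConsumerConfig consumerConfig = new ConsumerConfig(props);
ConsumerConnector connector = Consumer.createJavaConsumerConnector(consumerConfig);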