JHipster with Kafka real app usage example - apache-kafka

In JHipster 6.6.0 the Kafka usage model changed from the standard Producer/Consumer classes to the web resource (REST controller) level. There are no real examples of what the advantage of this change is or of how it can be used in real applications.
Let's say we have Service A and Service B. The communication between these two services has to be accomplished via Kafka events.
The question is: what do I have to do so that Service B starts listening to events from the Service A topic? In the current configuration it looks like I have to trigger the /consume endpoint manually, but that makes no sense, because I expect the service to start listening to the specified list of topics once the app is up and running.
I would appreciate any comment on this topic to help me understand this.
Example:
JHipster 7.1.0 generates these resources:
Service A - gateway
package com.stukans.refirmware.gateway.web.rest;
import com.stukans.refirmware.gateway.config.KafkaProperties;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;
import reactor.kafka.sender.SenderResult;
@RestController
@RequestMapping("/api/gateway-kafka")
public class GatewayKafkaResource {
private final Logger log = LoggerFactory.getLogger(GatewayKafkaResource.class);
private final KafkaProperties kafkaProperties;
private KafkaSender<String, String> sender;
public GatewayKafkaResource(KafkaProperties kafkaProperties) {
this.kafkaProperties = kafkaProperties;
this.sender = KafkaSender.create(SenderOptions.create(kafkaProperties.getProducerProps()));
}
#PostMapping("/publish/{topic}")
public Mono<PublishResult> publish(
@PathVariable String topic,
@RequestParam String message,
@RequestParam(required = false) String key
) {
log.debug("REST request to send to Kafka topic {} with key {} the message : {}", topic, key, message);
return Mono
.just(SenderRecord.create(topic, null, null, key, message, null))
.as(sender::send)
.next()
.map(SenderResult::recordMetadata)
.map(
metadata ->
new PublishResult(metadata.topic(), metadata.partition(), metadata.offset(), Instant.ofEpochMilli(metadata.timestamp()))
);
}
#GetMapping("/consume")
public Flux<String> consume(#RequestParam("topic") List<String> topics, #RequestParam Map<String, String> consumerParams) {
log.debug("REST request to consume records from Kafka topics {}", topics);
Map<String, Object> consumerProps = kafkaProperties.getConsumerProps();
consumerProps.putAll(consumerParams);
consumerProps.remove("topic");
ReceiverOptions<String, String> receiverOptions = ReceiverOptions.<String, String>create(consumerProps).subscription(topics);
return KafkaReceiver.create(receiverOptions).receive().map(ConsumerRecord::value);
}
private static class PublishResult {
public final String topic;
public final int partition;
public final long offset;
public final Instant timestamp;
private PublishResult(String topic, int partition, long offset, Instant timestamp) {
this.topic = topic;
this.partition = partition;
this.offset = offset;
this.timestamp = timestamp;
}
}
}
Service B - agent
package com.stukans.refirmware.agent.web.rest;
import com.stukans.refirmware.agent.config.KafkaProperties;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;
import reactor.kafka.sender.SenderResult;
@RestController
@RequestMapping("/api/agent-kafka")
public class AgentKafkaResource {
private final Logger log = LoggerFactory.getLogger(AgentKafkaResource.class);
private final KafkaProperties kafkaProperties;
private KafkaSender<String, String> sender;
public AgentKafkaResource(KafkaProperties kafkaProperties) {
this.kafkaProperties = kafkaProperties;
this.sender = KafkaSender.create(SenderOptions.create(kafkaProperties.getProducerProps()));
}
@PostMapping("/publish/{topic}")
public Mono<PublishResult> publish(
@PathVariable String topic,
@RequestParam String message,
@RequestParam(required = false) String key
) {
log.debug("REST request to send to Kafka topic {} with key {} the message : {}", topic, key, message);
return Mono
.just(SenderRecord.create(topic, null, null, key, message, null))
.as(sender::send)
.next()
.map(SenderResult::recordMetadata)
.map(
metadata ->
new PublishResult(metadata.topic(), metadata.partition(), metadata.offset(), Instant.ofEpochMilli(metadata.timestamp()))
);
}
@GetMapping("/consume")
public Flux<String> consume(@RequestParam("topic") List<String> topics, @RequestParam Map<String, String> consumerParams) {
log.debug("REST request to consume records from Kafka topics {}", topics);
Map<String, Object> consumerProps = kafkaProperties.getConsumerProps();
consumerProps.putAll(consumerParams);
consumerProps.remove("topic");
ReceiverOptions<String, String> receiverOptions = ReceiverOptions.<String, String>create(consumerProps).subscription(topics);
return KafkaReceiver.create(receiverOptions).receive().map(ConsumerRecord::value);
}
private static class PublishResult {
public final String topic;
public final int partition;
public final long offset;
public final Instant timestamp;
private PublishResult(String topic, int partition, long offset, Instant timestamp) {
this.topic = topic;
this.partition = partition;
this.offset = offset;
this.timestamp = timestamp;
}
}
}
That's the only Kafka-related code that is generated.
Prior to version 6.6.0, JHipster generated standard Producer/Consumer classes which I could use to define which topics to listen to. Now it is not clear how to use the generated code to emit or listen to events.

First, since we cannot see your code, we don't know how it does (or should) work...
In any case, you should not be "triggering" consumers via REST; they should be started automatically when the service starts. If there are no messages, they poll in the background and idle until the "producer service" pushes a message to the topic.
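For illustration, here is a minimal sketch (this is not JHipster-generated code; the class name and topic are made up) of a bean that uses the same generated KafkaProperties and reactor-kafka classes, but subscribes as soon as the application is ready instead of waiting for a REST call:
import java.util.Collections;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;

@Component
public class ServiceBTopicListener {

    private final Logger log = LoggerFactory.getLogger(ServiceBTopicListener.class);

    // KafkaProperties is the JHipster-generated config class shown in the question
    private final KafkaProperties kafkaProperties;

    public ServiceBTopicListener(KafkaProperties kafkaProperties) {
        this.kafkaProperties = kafkaProperties;
    }

    // Start polling once the application context is up, instead of via /consume.
    @EventListener(ApplicationReadyEvent.class)
    public void startConsumer() {
        ReceiverOptions<String, String> options = ReceiverOptions
            .<String, String>create(kafkaProperties.getConsumerProps())
            .subscription(Collections.singletonList("service-a-topic")); // hypothetical topic name
        KafkaReceiver
            .create(options)
            .receive()
            .map(ConsumerRecord::value)
            .subscribe(value -> log.debug("Received from Kafka: {}", value));
    }
}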

I made a service like this so that the consumer is triggered automatically:
@Service
public class KafkaConsumerService {
private final Logger log = LoggerFactory.getLogger(KafkaConsumerService.class);
private static final String GROUP_USER_CREATE_TOPIC = "GROUP_STORE.GROUP_USER.SAVE"; //<application name>.<dataset name>.<event>
private final KafkaProperties kafkaProperties;
private KafkaReceiver<String, String> kafkaReceiver;
private final ObjectMapper objectMapper = new ObjectMapper();
private final GroupMemberService groupMemberService;
public KafkaConsumerService(KafkaProperties kafkaProperties, GroupMemberService groupMemberService) {
this.kafkaProperties = kafkaProperties;
this.groupMemberService = groupMemberService;
}
@PostConstruct
public void start() {
log.info("Kafka consumer starting...");
Map<String, Object> consumerProps = kafkaProperties.getConsumerProps();
ReceiverOptions<String, String> receiverOptions = ReceiverOptions.<String, String>create(consumerProps)
.subscription(Collections.singletonList(GROUP_USER_CREATE_TOPIC));
this.kafkaReceiver = KafkaReceiver.create(receiverOptions);
consumeGroupMember().subscribe();
}
public Flux<GroupMemberDTO> consumeGroupMember() {
log.debug("consumer group member....");
return this.kafkaReceiver
.receive()
.map(ConsumerRecord::value)
.flatMap(
record -> {
try {
GroupMemberDTO groupMemberDTO = objectMapper.readValue(record, GroupMemberDTO.class);
log.debug("Complete convert object: {}", groupMemberDTO);
return groupMemberService.insert(groupMemberDTO);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
);
}
public void shutdown() {
log.info("Shutdown kafka consumer");
}
}
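Note that in the class above shutdown() only logs; the Disposable returned by subscribe() is never kept, so nothing actually cancels the subscription. A small, hedged variation (the field and the @PreDestroy hook are my addition) that cleans up when the bean is destroyed:
// extra field on the service above
private Disposable subscription; // reactor.core.Disposable

// in start(), keep the handle returned by subscribe()
this.subscription = consumeGroupMember().subscribe();

// and dispose it on shutdown
@PreDestroy
public void shutdown() {
    log.info("Shutdown kafka consumer");
    if (subscription != null && !subscription.isDisposed()) {
        subscription.dispose(); // stops polling and closes the underlying Kafka consumer
    }
}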

Related

Kafka Listener and Consumer not invoking

I'm building a simple Kafka application with a producer and a consumer. I'm sending a string through Postman and pushing it through the topic. The topic is receiving the message, but the consumer isn't consuming it.
KafkaConsumerConfig.java
@EnableKafka
@Configuration
@ConditionalOnProperty(name = "kafka.enabled", havingValue = "true")
public class KafkaConsumerConfig {
@Bean
public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory(){
ConcurrentKafkaListenerContainerFactory<String,String> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
return factory;
}
@Bean
public Map<String,Object> config(){
Map<String,Object> config = new HashMap<>();
config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
config.put(ConsumerConfig.GROUP_ID_CONFIG, "group_Id");
config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
config.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
return config;
}
@Bean
public ConsumerFactory<String,String> consumerFactory(){
return new DefaultKafkaConsumerFactory<>(config());
}
}
KafkaConsumerService.java
@Service
@ConditionalOnProperty(name = "kafka.enabled", havingValue = "true")
@Component
public class KafkaConsumerService {
private static final Logger log = LoggerFactory.getLogger(KafkaConsumerService.class);
private static final String TOPIC = "Kafka_Test";
@KafkaListener(topics = TOPIC, groupId= "group_Id")
public void consumeOTP(String otp) {
log.debug("The OTP Sent to Kafka is:" + otp);
}
}
Based on your question I am assuming you're using spring-kafka with Spring Boot. For a simple example like this you can avoid all of the bean configuration and use the default beans auto-configured by Spring Kafka, so you can basically do the whole setup in the application.yml file; there is a better explanation in this post, but basically:
Producer:
@Service
public class SimpleProducer {
private KafkaTemplate<String, String> simpleProducer;
public SimpleProducer(KafkaTemplate<String, String> simpleProducer) {
this.simpleProducer = simpleProducer;
}
public void send(String message) {
simpleProducer.send("simple-message", message);
}
}
Consumer:
@Slf4j
@Service
public class SimpleConsumer {
@KafkaListener(id = "simple-consumer", topics = "simple-message")
public void consumeMessage(String message) {
log.info("Consumer got message: {}", message);
}
}
An API so you can trigger producing a message:
@RestController
@RequestMapping("/api")
public class MessageApi {
private final SimpleProducer simpleProducer;
public MessageApi(SimpleProducer simpleProducer) {
this.simpleProducer = simpleProducer;
}
#PostMapping("/message")
public ResponseEntity<String> message(#RequestBody String message) {
simpleProducer.send(message);
return ResponseEntity.ok("Message received: " + message);
}
}
Because you're using the defaults, with String as key and String as value, you don't even have to add any specific configuration to the Spring Boot properties or YAML files.
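For reference, if you do need to point at a non-default broker, a minimal application.yml for this setup might look like the following (a sketch; the broker address and group id are assumptions to adjust for your environment):
spring:
  kafka:
    bootstrap-servers: localhost:9092
    consumer:
      group-id: simple-consumer-group
      auto-offset-reset: earliest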

JsonSerializer not working properly kafka

I am using Kafka. I have a Notification class that I am serializing using spring-kafka.
package com.code2hack.notification;
public class Notification {
private Object message;
private NotificationType type;
public static Notification create(NotificationType type, Object message){
return new Notification(message,type);
}
public Notification(){
}
public Notification(Object message, NotificationType type){
this.message = message;
this.type = type;
}
@Override
public String toString() {
return "Notification{" +
"message=" + message +
", type=" + type +
'}';
}
public <T> T getMessage(Class<T> type){
return (T)this.message;
}
public NotificationType getType(){
return this.type;
}
public void setType(NotificationType type){
this.type = type;
}
public void setMessage(Object message){
this.message = message;
}
}
Here is my configuration:
spring:
  kafka:
    producer:
      bootstrap-servers: localhost:9092
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
When I try to consume the notification in the consumer, the message part is missing from the Notification; I am only able to receive the type.
I even tried the Kafka console consumer; there it also prints only the type field, and the message from my notification is missing as well.
I don't know what I am missing.
My consumer configuration is:
package com.code2hack.booking;
import com.code2hack.notification.Notification;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import java.util.HashMap;
import java.util.Map;
@Configuration
@EnableKafka
public class KafkaConfiguration {
@Value("${spring.kafka.consumer.bootstrap-servers}")
private String address;
@Bean
public ConsumerFactory<String, Notification> consumerFactory() {
Map<String, Object> props = new HashMap<>();
props.put(
ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
address);
props.put(
ConsumerConfig.GROUP_ID_CONFIG,
"booking");
JsonDeserializer<Notification> ds = new JsonDeserializer<>();
ds.addTrustedPackages("*");
return new DefaultKafkaConsumerFactory<>(props,
new StringDeserializer(),
ds);
}
@Bean
public ConcurrentKafkaListenerContainerFactory<String, Notification>
kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, Notification> factory =
new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
return factory;
}
}
Here is my consumer:
#KafkaListener(topics = "sql-insert",groupId = "booking")
public void onNotification(#Payload Notification notification){
handleNotification(notification);
}
Please help me.
Note: the problem is actually with the JsonSerializer in Kafka. I tried the code below and it does not serialize the object properly.
public static void main(String[] args) {
//SpringApplication.run(BookingServiceApplication.class, args);
Notification notification = Notification.create(NotificationType.NEW_SCHEDULED,"Hellow how are you");
byte[] serialize = new JsonSerializer<Notification>().serialize("sql-insert", notification);
System.out.println(new String(serialize));
}
It gives me this output:
{"type":"NEW_SCHEDULED"}
Is there any way to fix it?
Unless you create a custom serializer, Jackson only works with JavaBean semantics; there is no getter for message, so you need to add a simple getter for the message property.
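A minimal sketch of the missing accessor (Jackson picks up JavaBean-style getters, and the existing getMessage(Class<T>) overload takes a parameter, so it is not treated as a property):
// plain getter so Jackson serializes the "message" property
public Object getMessage() {
    return this.message;
}
With that getter in place the serialized JSON should contain both fields, e.g. {"message":"Hellow how are you","type":"NEW_SCHEDULED"}.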

Multi-Tenancy in a Reactive Spring Boot application using mongodb-reactive

How can we create a multi-tenant application in Spring WebFlux using the reactive MongoDB repository?
I cannot find any complete resources on the web for reactive applications. All the resources available are for non-reactive applications.
UPDATE:
In a non-reactive application we used to store contextual data in a ThreadLocal, but this cannot be done in reactive applications because of thread switching. There is a way to store contextual info in the Reactor Context inside a WebFilter, but I don't know how to get hold of that data in the ReactiveMongoDatabaseFactory class.
Thanks.
I was able to implement multi-tenancy in a Spring reactive application using MongoDB. The main classes responsible for realizing it are a custom MongoDbFactory class, a WebFilter class (instead of a Servlet Filter) for capturing tenant info, and a ThreadLocal class for storing tenant info. The flow is very simple:
Capture the tenant-related info from the request in the WebFilter and set it in the ThreadLocal. Here I am sending the tenant info using the header X-Tenant.
Implement a custom MongoDbFactory class and override the getMongoDatabase() method to return a database based on the current tenant available in the ThreadLocal class.
Source code is:
CurrentTenantHolder.java
package com.jazasoft.demo;
public class CurrentTenantHolder {
private static final ThreadLocal<String> currentTenant = new InheritableThreadLocal<>();
public static String get() {
return currentTenant.get();
}
public static void set(String tenant) {
currentTenant.set(tenant);
}
public static String remove() {
synchronized (currentTenant) {
String tenant = currentTenant.get();
currentTenant.remove();
return tenant;
}
}
}
TenantContextWebFilter.java
package com.example.demo;
import org.springframework.http.server.reactive.ServerHttpRequest;
import org.springframework.stereotype.Component;
import org.springframework.web.server.ServerWebExchange;
import org.springframework.web.server.WebFilter;
import org.springframework.web.server.WebFilterChain;
import reactor.core.publisher.Mono;
@Component
public class TenantContextWebFilter implements WebFilter {
public static final String TENANT_HTTP_HEADER = "X-Tenant";
@Override
public Mono<Void> filter(ServerWebExchange exchange, WebFilterChain chain) {
ServerHttpRequest request = exchange.getRequest();
if (request.getHeaders().containsKey(TENANT_HTTP_HEADER)) {
String tenant = request.getHeaders().getFirst(TENANT_HTTP_HEADER);
CurrentTenantHolder.set(tenant);
}
return chain.filter(exchange).doOnSuccessOrError((Void v, Throwable throwable) -> CurrentTenantHolder.remove());
}
}
MultiTenantMongoDbFactory.java
package com.example.demo;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoDatabase;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
public class MultiTenantMongoDbFactory extends SimpleReactiveMongoDatabaseFactory {
private final String defaultDatabase;
public MultiTenantMongoDbFactory(MongoClient mongoClient, String databaseName) {
super(mongoClient, databaseName);
this.defaultDatabase = databaseName;
}
@Override
public MongoDatabase getMongoDatabase() throws DataAccessException {
final String tlName = CurrentTenantHolder.get();
final String dbToUse = (tlName != null ? tlName : this.defaultDatabase);
return super.getMongoDatabase(dbToUse);
}
}
MongoDbConfig.java
package com.example.demo;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.ReactiveMongoClientFactoryBean;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
@Configuration
public class MongoDbConfig {
@Bean
public ReactiveMongoTemplate reactiveMongoTemplate(MultiTenantMongoDbFactory multiTenantMongoDbFactory) {
return new ReactiveMongoTemplate(multiTenantMongoDbFactory);
}
@Bean
public MultiTenantMongoDbFactory multiTenantMangoDbFactory(MongoClient mongoClient) {
return new MultiTenantMongoDbFactory(mongoClient, "test1");
}
@Bean
public ReactiveMongoClientFactoryBean mongoClient() {
ReactiveMongoClientFactoryBean clientFactory = new ReactiveMongoClientFactoryBean();
clientFactory.setHost("localhost");
return clientFactory;
}
}
UPDATE:
In reactive streams we cannot store contextual information in a ThreadLocal any more, as the request is not tied to a single thread, so this is not the correct solution.
However, contextual information can be stored in the Reactor Context in a WebFilter like this: chain.filter(exchange).subscriberContext(context -> context.put("tenant", tenant));. The problem is how to get hold of this contextual info in the ReactiveMongoDatabaseFactory implementation class.
Here is my very rough working solution for Spring WebFlux. They have since updated ReactiveMongoDatabaseFactory so that getMongoDatabase() returns a Mono.
Create web filter
public class TenantContextFilter implements WebFilter {
private static final Logger LOGGER = LoggerFactory.getLogger(TenantContextFilter.class);
@Override
public Mono<Void> filter(ServerWebExchange swe, WebFilterChain wfc) {
ServerHttpRequest request = swe.getRequest();
HttpHeaders headers = request.getHeaders();
if(headers.getFirst("X-TENANT-ID") == null){
LOGGER.info(String.format("Missing X-TENANT-ID header"));
throw new ResponseStatusException(HttpStatus.UNAUTHORIZED);
}
String tenantId = headers.getFirst("X-TENANT-ID");
LOGGER.info(String.format("Processing request with tenant identifier [%s]", tenantId));
return wfc.filter(swe)
.contextWrite(TenantContextHolder.setTenantId(tenantId));
}
}
Create class to get context (credit to somewhere I found this)
public class TenantContextHolder {
public static final String TENANT_ID = TenantContextHolder.class.getName() + ".TENANT_ID";
public static Context setTenantId(String id) {
return Context.of(TENANT_ID, Mono.just(id));
}
public static Mono<String> getTenantId() {
return Mono.deferContextual(contextView -> {
if (contextView.hasKey(TENANT_ID)) {
return contextView.get(TENANT_ID);
}
return Mono.empty();
}
);
}
public static Function<Context, Context> clearContext() {
return (context) -> context.delete(TENANT_ID);
}
}
My Spring Security setup (all requests allowed for testing)
@EnableWebFluxSecurity
@EnableReactiveMethodSecurity
public class SecurityConfig {
@Bean
public SecurityWebFilterChain WebFilterChain(ServerHttpSecurity http) {
return http
.formLogin(it -> it.disable())
.cors(it -> it.disable()) //fix this
.httpBasic(it -> it.disable())
.csrf(it -> it.disable())
.securityContextRepository(NoOpServerSecurityContextRepository.getInstance())
.authorizeExchange(it -> it.anyExchange().permitAll()) //allow anonymous
.addFilterAt(new TenantContextFilter(), SecurityWebFiltersOrder.HTTP_BASIC)
.build();
}
}
Create Tenant Mongo DB Factory
I still have some clean-up work for defaults etc...
public class MultiTenantMongoDBFactory extends SimpleReactiveMongoDatabaseFactory {
private static final Logger LOGGER = LoggerFactory.getLogger(MultiTenantMongoDBFactory.class);
private final String defaultDb;
public MultiTenantMongoDBFactory(MongoClient mongoClient, String databaseName) {
super(mongoClient, databaseName);
this.defaultDb = databaseName;
}
@Override
public Mono<MongoDatabase> getMongoDatabase() throws DataAccessException {
return TenantContextHolder.getTenantId()
.map(id -> {
LOGGER.info(String.format("Database trying to retrieved is [%s]", id));
return super.getMongoDatabase(id);
})
.flatMap(db -> {
return db;
})
.log();
}
}
Configuration Class
@Configuration
@EnableReactiveMongoAuditing
@EnableReactiveMongoRepositories(basePackages = {"com.order.repository"})
class MongoDbConfiguration {
@Bean
public ReactiveMongoDatabaseFactory reactiveMongoDatabaseFactory() {
return new MultiTenantMongoDBFactory(MongoClients.create("mongodb://user:password@localhost:27017"), "tenant_catalog");
}
@Bean
public ReactiveMongoTemplate reactiveMongoTemplate() {
ReactiveMongoTemplate template = new ReactiveMongoTemplate(reactiveMongoDatabaseFactory());
template.setWriteResultChecking(WriteResultChecking.EXCEPTION);
return template;
}
}
Entity Class
@Document(collection = "order")
//getters
//setters
Testing
Create two MongoDB databases with the same collection and put different documents in both.
In Postman I just did a GET request with the "X-TENANT-ID" header and the database name as the value (e.g. tenant-12343 or tenant-34383) and good to go!

Attempt to heart beat failed since the group is rebalancing, try to re-join group

In my project, I use @KafkaListener with a configured Kafka containerFactory and topic.
Topic Name:
public static final String CONNECT_DEVICE_MESSAGE_TOPIC = "connectDeviceMessageTopic";
Topic listener:
@KafkaListener(containerFactory = "receiveKafkaListenerContainerFactory", topics = KafkaQueueName.CONNECT_DEVICE_MESSAGE_TOPIC)
public void onMessageListener(MessageTemplate message){
}
Kafka Config:
package me.hekr.bot.parse.core.kafka;
import lombok.extern.slf4j.Slf4j;
import me.hekr.bot.utils.IpUtil;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.support.converter.StringJsonMessageConverter;
import java.util.HashMap;
import java.util.Map;
/**
* Created by Neon Wang on 2016/10/20.
*/
@EnableKafka
@Configuration
@Slf4j
public class KafkaConfig {
#Value("${bot.kafka.servers}")
private String servers;
/*********************** Producer Config ***************************/
private ProducerFactory<String, String> producerFactory() {
return new DefaultKafkaProducerFactory<>(producerConfigs());
}
private Map<String, Object> producerConfigs() {
return new CustomHashMap().put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers)
.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 1000 * 2);
}
@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
return new KafkaTemplate<>(producerFactory());
}
/*********************** Consumer Config ***************************/
private Map<String, Object> consumerProps() {
return new CustomHashMap()
.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers)
.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true)
.put(ConsumerConfig.GROUP_ID_CONFIG, "parseReceiveMessageFormConnection")
.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100")
.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000")
.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class)
.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
}
@Bean
KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>>
receiveKafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory =
new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
factory.setConcurrency(3);
factory.setMessageConverter(new StringJsonMessageConverter());
factory.getContainerProperties().setPollTimeout(3000L);
return factory;
}
private ConsumerFactory<String, String> consumerFactory() {
return new DefaultKafkaConsumerFactory<>(consumerProps());
}
class CustomHashMap extends HashMap<String, Object> {
CustomHashMap(){
super();
}
@Override
public CustomHashMap put(String key, Object value) {
super.put(key, value);
return this;
}
}
}
When the project started, the Kafka configuration information was logged successfully, but I found that every topic's information had been logged three times. Is that normal?
And when the first message was received, it printed:
2017-08-16T11:12:43.633+0800 INFO [org.springframework.kafka.KafkaListenerEndpointContainer#0-2-kafka-consumer-1] o.a.k.c.c.i.AbstractCoordinator.handle:623 - Attempt to heart beat failed since the group is rebalancing, try to re-join group.
Then the first message was received again and again, and the result was always the same!
After three times, no more messages were received. I have no idea what is wrong; who can help me?
The group id has been changed twice; the last version used the localhost IP:
IpUtil.getLocalhostAddress().replace(".", "")

How to set a global custom Jackson deserializer in Camel w/o spring using REST

I would like to set a global custom date/time deserializer on a Camel route that is configured with REST.
What I already found is Camel + Jackson : Register a module for all deserialization
But I do not have the unmarshal() method in the route; instead I use the
RestDefinition rest(String path)
method from
org.apache.camel.builder.RouteBuilder.
We do not use Spring, but plain Camel with Scala and REST, with all configuration done programmatically (no XML).
My current solution is to use
@JsonDeserialize(using = classOf[MyDeserializer])
annotation on every date/time field, but that is not a satisfying solution.
Does anybody have a clue how to configure Camel to use the custom deserializer everywhere?
By default Camel uses Jackson's WRITE_DATES_AS_TIMESTAMPS feature. To disable this feature, just add .dataFormatProperty("json.out.disableFeatures", "WRITE_DATES_AS_TIMESTAMPS").
You can also add a custom serializer:
module.addSerializer(Date.class, sample.new DateSerializer());
and bind it under the name json-jackson:
jndiContext.bind("json-jackson", jackson);
Example:
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.jackson.JacksonDataFormat;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.model.rest.RestBindingMode;
import org.apache.camel.spi.DataFormat;
import org.apache.camel.util.jndi.JndiContext;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
public class Samlpe3 {
public static void main(String[] args) throws Exception {
Samlpe3 sample = new Samlpe3();
//custom mapper
ObjectMapper objectMapper = new ObjectMapper();
SimpleModule module = new SimpleModule("DefaultModule", new Version(0, 0, 1, null, null, null));
module.addSerializer(Date.class, sample.new DateSerializer());
module.addSerializer(Person.class, sample.new PersonSerializer());
objectMapper.registerModule(module);
JacksonDataFormat jackson = new JacksonDataFormat(objectMapper, null);
JndiContext jndiContext = new JndiContext();
jndiContext.bind("json-jackson", jackson);
CamelContext context = new DefaultCamelContext(jndiContext);
context.addRoutes(new RouteBuilder() {
public void configure() throws Exception {
restConfiguration().component("jetty").bindingMode(RestBindingMode.json)
.host("0.0.0.0").contextPath("/test").port(8080)
//disableFeatures WRITE_DATES_AS_TIMESTAMPS
.dataFormatProperty("json.out.disableFeatures", "WRITE_DATES_AS_TIMESTAMPS")
;
rest("/v1/").produces("application/json")
.get("persons")
.to("direct:getPersons");
from("direct:getPersons")
.process(new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.getIn().setBody(new Person("Sergey", new GregorianCalendar().getTime()));
}
})
;
}
});
context.start();
Thread.sleep(60000);
context.stop();
}
public class DateSerializer extends JsonSerializer<Date> {
@Override
public void serialize(Date value, JsonGenerator gen, SerializerProvider serializers)
throws IOException, JsonProcessingException {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
String date = sdf.format(value);
gen.writeString(date);
}
}
public class PersonSerializer extends JsonSerializer<Person> {
@Override
public void serialize(Person value, JsonGenerator gen, SerializerProvider serializers)
throws IOException, JsonProcessingException {
gen.writeStartObject();
gen.writeFieldName("Changed_n");
gen.writeObject(value.getName() + " Changed");
gen.writeFieldName("Changed_b");
gen.writeObject(value.getBirthday());
gen.writeEndObject();
}
}
}
Person.java:
import java.util.Date;
public class Person {
private String name;
private Date birthday;
Person(String name, Date birthday){
System.out.println("Person");
this.setBirthday(birthday);
this.setName(name);
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Date getBirthday() {
return birthday;
}
public void setBirthday(Date birthday) {
this.birthday = birthday;
}
}
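Since the original question asked for a global date/time deserializer rather than a serializer, the same SimpleModule can also carry one. Here is a sketch using the plain Jackson API, written as another inner class of Samlpe3 in the style of the example above (the yyyy-MM-dd format mirrors the DateSerializer; JsonParser and DeserializationContext come from com.fasterxml.jackson.core and com.fasterxml.jackson.databind):
public class DateDeserializer extends JsonDeserializer<Date> {
    @Override
    public Date deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
        try {
            // parse the same yyyy-MM-dd format that DateSerializer writes
            return new SimpleDateFormat("yyyy-MM-dd").parse(p.getValueAsString());
        } catch (java.text.ParseException e) {
            throw new IOException(e);
        }
    }
}
It would be registered next to the serializers before the module is passed to the ObjectMapper:
module.addDeserializer(Date.class, sample.new DateDeserializer());
The intent of binding the JacksonDataFormat as json-jackson is that the REST binding then uses this ObjectMapper for unmarshalling incoming JSON as well.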