JsonSerializer not working properly kafka - apache-kafka

I am using Kafka, and I have a Notification class that I am serializing using spring-kafka.
package com.code2hack.notification;
public class Notification {
private Object message;
private NotificationType type;
public static Notification create(NotificationType type, Object message){
return new Notification(message,type);
}
public Notification(){
}
public Notification(Object message, NotificationType type){
this.message = message;
this.type = type;
}
@Override
public String toString() {
return "Notification{" +
"message=" + message +
", type=" + type +
'}';
}
public <T> T getMessage(Class<T> type){
return (T)this.message;
}
public NotificationType getType(){
return this.type;
}
public void setType(NotificationType type){
this.type = type;
}
public void setMessage(Object message){
this.message = message;
}
}
Here is my configuration:
spring:
  kafka:
    producer:
      bootstrap-servers: localhost:9092
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
When I try to consume the notification on the consumer side, the message part is missing from the Notification; I am only able to receive the type.
I even tried the kafka-console-consumer, and there too it prints only the type field; the message from my notification is missing there as well.
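For reference, the console consumer I ran was roughly like this (broker address and topic match my configuration; the exact command is only indicative):
kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic sql-insert --from-beginning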
I don't know what I am missing.
My consumer configuration is:
package com.code2hack.booking;
import com.code2hack.notification.Notification;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import java.util.HashMap;
import java.util.Map;
@Configuration
@EnableKafka
public class KafkaConfiguration {
@Value("${spring.kafka.consumer.bootstrap-servers}")
private String address;
@Bean
public ConsumerFactory<String, Notification> consumerFactory() {
Map<String, Object> props = new HashMap<>();
props.put(
ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
address);
props.put(
ConsumerConfig.GROUP_ID_CONFIG,
"booking");
JsonDeserializer<Notification> ds = new JsonDeserializer<>();
ds.addTrustedPackages("*");
return new DefaultKafkaConsumerFactory<>(props,
new StringDeserializer(),
ds);
}
@Bean
public ConcurrentKafkaListenerContainerFactory<String, Notification>
kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, Notification> factory =
new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
return factory;
}
}
Here is my consumer
@KafkaListener(topics = "sql-insert", groupId = "booking")
public void onNotification(@Payload Notification notification){
handleNotification(notification);
}
Please help me.
Note: the problem is actually with the JsonSerializer in Kafka. I tried the code below, and it is not serializing the object properly.
public static void main(String[] args) {
//SpringApplication.run(BookingServiceApplication.class, args);
Notification notification = Notification.create(NotificationType.NEW_SCHEDULED,"Hellow how are you");
byte[] serialize = new JsonSerializer<Notification>().serialize("sql-insert", notification);
System.out.println(new String(serialize));
}
It gives me this output:
{"type":"NEW_SCHEDULED"}
Is there any way to fix it?

Unless you create a custom serializer, Jackson only works with JavaBean semantics. Your getMessage(Class<T>) takes a parameter, so Jackson does not treat it as a getter; you need to add a simple no-argument getter for the message property.
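For example (a minimal sketch of the missing accessor, to be added to the Notification class), a plain no-argument getter is enough for Jackson to pick up the message field:
public Object getMessage() {
    return this.message;
}
Alternatively, annotating the message field with Jackson's @JsonProperty would also make it visible to the serializer.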

Related

Kafka Listener and Consumer not invoking

I'm building a simple Kafka application with a producer and a consumer. I'm sending a string through Postman and pushing it to the topic. The topic is receiving the message, but the consumer isn't consuming it.
KafkaConsumerConfig.java
@EnableKafka
@Configuration
@ConditionalOnProperty(name = "kafka.enabled", havingValue = "true")
public class KafkaConsumerConfig {
@Bean
public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory(){
ConcurrentKafkaListenerContainerFactory<String,String> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
return factory;
}
@Bean
public Map<String,Object> config(){
Map<String,Object> config = new HashMap<>();
config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
config.put(ConsumerConfig.GROUP_ID_CONFIG, "group_Id");
config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
config.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
return config;
}
@Bean
public ConsumerFactory<String,String> consumerFactory(){
return new DefaultKafkaConsumerFactory<>(config());
}
}
KafkaConsumerService.java
@Service
@ConditionalOnProperty(name = "kafka.enabled", havingValue = "true")
@Component
public class KafkaConsumerService {
private static final Logger log = LoggerFactory.getLogger(KafkaConsumerService.class);
private static final String TOPIC = "Kafka_Test";
@KafkaListener(topics = TOPIC, groupId = "group_Id")
public void consumeOTP(String otp) {
log.debug("The OTP Sent to Kafka is:" + otp);
}
}
Based on your question, I am assuming you're using spring-kafka with Spring Boot. For a simple example, with this setup you can avoid all the bean configuration and use the default beans from Spring Kafka, so you can basically do the setup through the application.yml file; there's a better explanation in this post, but basically:
Producer:
@Service
public class SimpleProducer {
private KafkaTemplate<String, String> simpleProducer;
public SimpleProducer(KafkaTemplate<String, String> simpleProducer) {
this.simpleProducer = simpleProducer;
}
public void send(String message) {
simpleProducer.send("simple-message", message);
}
}
Consumer:
@Slf4j
@Service
public class SimpleConsumer {
@KafkaListener(id = "simple-consumer", topics = "simple-message")
public void consumeMessage(String message) {
log.info("Consumer got message: {}", message);
}
}
And an API so you can trigger sending a message:
@RestController
@RequestMapping("/api")
public class MessageApi {
private final SimpleProducer simpleProducer;
public MessageApi(SimpleProducer simpleProducer) {
this.simpleProducer = simpleProducer;
}
#PostMapping("/message")
public ResponseEntity<String> message(#RequestBody String message) {
simpleProducer.send(message);
return ResponseEntity.ok("Message received: " + message);
}
}
Because you're using the defaults, with String as key and String as value, you don't even have to add any specific configuration to the Spring Boot properties or YAML files.
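If your broker is not at the default address, the only Kafka-specific property you would typically add is the bootstrap servers, e.g. (a minimal application.yml sketch; the address is an assumption, adjust it to your environment):
spring:
  kafka:
    bootstrap-servers: localhost:9092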

JHipster with kafka real app usage example

In JHipster version 6.6.0 the Kafka usage model was changed from the standard Producer/Consumer classes to the WebResource level. There are no real examples of what the advantage of this change is or how it can be used in real apps.
Let's say we have Service A and Service B. The communication between these two services has to be accomplished via Kafka events.
The question is: what do I have to do so that Service B starts to listen to events from the Service A topic? In the current configuration, it looks like I have to manually trigger the /consume endpoint, but that makes no sense, because I expect the service to start listening to the specified list of topics once the app is up and running.
I would appreciate any comment on this topic to help me understand this.
Example:
JHipster 7.1.0 generates these resources:
Service A - gateway
package com.stukans.refirmware.gateway.web.rest;
import com.stukans.refirmware.gateway.config.KafkaProperties;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;
import reactor.kafka.sender.SenderResult;
@RestController
@RequestMapping("/api/gateway-kafka")
public class GatewayKafkaResource {
private final Logger log = LoggerFactory.getLogger(GatewayKafkaResource.class);
private final KafkaProperties kafkaProperties;
private KafkaSender<String, String> sender;
public GatewayKafkaResource(KafkaProperties kafkaProperties) {
this.kafkaProperties = kafkaProperties;
this.sender = KafkaSender.create(SenderOptions.create(kafkaProperties.getProducerProps()));
}
#PostMapping("/publish/{topic}")
public Mono<PublishResult> publish(
#PathVariable String topic,
#RequestParam String message,
#RequestParam(required = false) String key
) {
log.debug("REST request to send to Kafka topic {} with key {} the message : {}", topic, key, message);
return Mono
.just(SenderRecord.create(topic, null, null, key, message, null))
.as(sender::send)
.next()
.map(SenderResult::recordMetadata)
.map(
metadata ->
new PublishResult(metadata.topic(), metadata.partition(), metadata.offset(), Instant.ofEpochMilli(metadata.timestamp()))
);
}
#GetMapping("/consume")
public Flux<String> consume(#RequestParam("topic") List<String> topics, #RequestParam Map<String, String> consumerParams) {
log.debug("REST request to consume records from Kafka topics {}", topics);
Map<String, Object> consumerProps = kafkaProperties.getConsumerProps();
consumerProps.putAll(consumerParams);
consumerProps.remove("topic");
ReceiverOptions<String, String> receiverOptions = ReceiverOptions.<String, String>create(consumerProps).subscription(topics);
return KafkaReceiver.create(receiverOptions).receive().map(ConsumerRecord::value);
}
private static class PublishResult {
public final String topic;
public final int partition;
public final long offset;
public final Instant timestamp;
private PublishResult(String topic, int partition, long offset, Instant timestamp) {
this.topic = topic;
this.partition = partition;
this.offset = offset;
this.timestamp = timestamp;
}
}
}
Service B - agent
package com.stukans.refirmware.agent.web.rest;
import com.stukans.refirmware.agent.config.KafkaProperties;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;
import reactor.kafka.sender.SenderResult;
@RestController
@RequestMapping("/api/agent-kafka")
public class AgentKafkaResource {
private final Logger log = LoggerFactory.getLogger(AgentKafkaResource.class);
private final KafkaProperties kafkaProperties;
private KafkaSender<String, String> sender;
public AgentKafkaResource(KafkaProperties kafkaProperties) {
this.kafkaProperties = kafkaProperties;
this.sender = KafkaSender.create(SenderOptions.create(kafkaProperties.getProducerProps()));
}
#PostMapping("/publish/{topic}")
public Mono<PublishResult> publish(
#PathVariable String topic,
#RequestParam String message,
#RequestParam(required = false) String key
) {
log.debug("REST request to send to Kafka topic {} with key {} the message : {}", topic, key, message);
return Mono
.just(SenderRecord.create(topic, null, null, key, message, null))
.as(sender::send)
.next()
.map(SenderResult::recordMetadata)
.map(
metadata ->
new PublishResult(metadata.topic(), metadata.partition(), metadata.offset(), Instant.ofEpochMilli(metadata.timestamp()))
);
}
#GetMapping("/consume")
public Flux<String> consume(#RequestParam("topic") List<String> topics, #RequestParam Map<String, String> consumerParams) {
log.debug("REST request to consume records from Kafka topics {}", topics);
Map<String, Object> consumerProps = kafkaProperties.getConsumerProps();
consumerProps.putAll(consumerParams);
consumerProps.remove("topic");
ReceiverOptions<String, String> receiverOptions = ReceiverOptions.<String, String>create(consumerProps).subscription(topics);
return KafkaReceiver.create(receiverOptions).receive().map(ConsumerRecord::value);
}
private static class PublishResult {
public final String topic;
public final int partition;
public final long offset;
public final Instant timestamp;
private PublishResult(String topic, int partition, long offset, Instant timestamp) {
this.topic = topic;
this.partition = partition;
this.offset = offset;
this.timestamp = timestamp;
}
}
}
That's the only code related to Kafka that is available.
Prior to version 6.6.0, JHipster generated standard Producer/Consumer classes which I could use to define which topics to listen to. Now it is not clear how to use the generated code to emit/listen to events.
First, since we cannot see your code, we don't know how it does (or should) work...
In any case, you should not be "triggering" consumers via REST; they should be automatically started when the service starts. If there are no messages, then they are polling in the background and idling until the "producer service" pushes a message to the topic.
I made a service like this that automatically starts the consumers:
@Service
public class KafkaConsumerService {
private final Logger log = LoggerFactory.getLogger(KafkaConsumerService.class);
private static final String GROUP_USER_CREATE_TOPIC = "GROUP_STORE.GROUP_USER.SAVE"; //<application name>.<dataset name>.<event>
private final KafkaProperties kafkaProperties;
private KafkaReceiver<String, String> kafkaReceiver;
private final ObjectMapper objectMapper = new ObjectMapper();
private final GroupMemberService groupMemberService;
public KafkaConsumerService(KafkaProperties kafkaProperties, GroupMemberService groupMemberService) {
this.kafkaProperties = kafkaProperties;
this.groupMemberService = groupMemberService;
}
@PostConstruct
public void start() {
log.info("Kafka consumer starting...");
Map<String, Object> consumerProps = kafkaProperties.getConsumerProps();
ReceiverOptions<String, String> receiverOptions = ReceiverOptions.<String, String>create(consumerProps)
.subscription(Collections.singletonList(GROUP_USER_CREATE_TOPIC));
this.kafkaReceiver = KafkaReceiver.create(receiverOptions);
consumeGroupMember().subscribe();
}
public Flux<GroupMemberDTO> consumeGroupMember() {
log.debug("consumer group member....");
return this.kafkaReceiver
.receive()
.map(ConsumerRecord::value)
.flatMap(
record -> {
try {
GroupMemberDTO groupMemberDTO = objectMapper.readValue(record, GroupMemberDTO.class);
log.debug("Complete convert object: {}", groupMemberDTO);
return groupMemberService.insert(groupMemberDTO);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
);
}
public void shutdown() {
log.info("Shutdown kafka consumer");
}
}
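One refinement worth noting (my own sketch, not part of the original answer): subscribe() returns a reactor Disposable, so you can keep the handle and dispose of it when the bean shuts down, roughly like this (assuming javax.annotation.PreDestroy, or jakarta.annotation.PreDestroy on newer Spring Boot versions):
private Disposable subscription; // reactor.core.Disposable

@PostConstruct
public void start() {
    // ... same receiver setup as above ...
    this.subscription = consumeGroupMember().subscribe(); // keep the handle instead of discarding it
}

@PreDestroy
public void shutdown() {
    log.info("Shutdown kafka consumer");
    if (subscription != null) {
        subscription.dispose(); // stops polling and releases the underlying consumer
    }
}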

Attempt to heart beat failed since the group is rebalancing, try to re-join group

In my project, I used @KafkaListener to configure the Kafka container factory and topic.
Topic Name:
public static final String CONNECT_DEVICE_MESSAGE_TOPIC = "connectDeviceMessageTopic";
Topic Listen:
@KafkaListener(containerFactory = "receiveKafkaListenerContainerFactory", topics = KafkaQueueName.CONNECT_DEVICE_MESSAGE_TOPIC)
public void onMessageListener(MessageTemplate message){
}
Kafka Config:
package me.hekr.bot.parse.core.kafka;
import lombok.extern.slf4j.Slf4j;
import me.hekr.bot.utils.IpUtil;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.support.converter.StringJsonMessageConverter;
import java.util.HashMap;
import java.util.Map;
/**
* Created by Neon Wang on 2016/10/20.
*/
@EnableKafka
@Configuration
@Slf4j
public class KafkaConfig {
@Value("${bot.kafka.servers}")
private String servers;
/*********************** Producer Config ***************************/
private ProducerFactory<String, String> producerFactory() {
return new DefaultKafkaProducerFactory<>(producerConfigs());
}
private Map<String, Object> producerConfigs() {
return new CustomHashMap().put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers)
.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 1000 * 2);
}
@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
return new KafkaTemplate<>(producerFactory());
}
/*********************** Consumer Config ***************************/
private Map<String, Object> consumerProps() {
return new CustomHashMap()
.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers)
.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true)
.put(ConsumerConfig.GROUP_ID_CONFIG, "parseReceiveMessageFormConnection")
.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100")
.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000")
.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class)
.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
}
@Bean
KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>>
receiveKafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory =
new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
factory.setConcurrency(3);
factory.setMessageConverter(new StringJsonMessageConverter());
factory.getContainerProperties().setPollTimeout(3000L);
return factory;
}
private ConsumerFactory<String, String> consumerFactory() {
return new DefaultKafkaConsumerFactory<>(consumerProps());
}
class CustomHashMap extends HashMap<String, Object> {
CustomHashMap(){
super();
}
@Override
public CustomHashMap put(String key, Object value) {
super.put(key, value);
return this;
}
}
}
When I started the project, the Kafka configuration was logged successfully, but I found that every topic's information was logged three times; is that normal?
And when the first message was received, this was printed:
2017-08-16T11:12:43.633+0800 INFO [org.springframework.kafka.KafkaListenerEndpointContainer#0-2-kafka-consumer-1] o.a.k.c.c.i.AbstractCoordinator.handle:623 - Attempt to heart beat failed since the group is rebalancing, try to re-join group.
Then the first message was received again and again, and the result was the same every time!
After three times, no more messages were received. I have no idea what is wrong; who can help me?
The group id has been changed twice; in the last version it used the localhost IP:
IpUtil.getLocalhostAddress().replace(".", "")

How to set a global custom Jackson deserializer in Camel w/o spring using REST

I would like to set a global custom date/Time deserializer on a camel route that is configured with rest.
What I already found is Camel + Jackson : Register a module for all deserialization
But I do not have the unmarshal() method in the route; instead I use the
RestDefinition rest(String path)
method from
org.apache.camel.builder.RouteBuilder.
We do not use Spring, but plain Camel with Scala and REST, with all configuration done programmatically (no XML).
My current solution is to use
@JsonDeserialize(using = classOf[MyDeserializer])
annotation on every date/time field, but that is not a satisfying solution.
Does anybody have a clue how to configure Camel to use the custom deserializer everywhere?
By default, Camel enables the WRITE_DATES_AS_TIMESTAMPS feature. To disable this feature, just add .dataFormatProperty("json.out.disableFeatures", "WRITE_DATES_AS_TIMESTAMPS")
You can also add a custom serializer:
module.addSerializer(Date.class, sample.new DateSerializer());
and bind it with the name json-jackson:
jndiContext.bind("json-jackson", jackson);
example:
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.jackson.JacksonDataFormat;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.model.rest.RestBindingMode;
import org.apache.camel.spi.DataFormat;
import org.apache.camel.util.jndi.JndiContext;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
public class Samlpe3 {
public static void main(String[] args) throws Exception {
Samlpe3 sample = new Samlpe3();
//custom mapper
ObjectMapper objectMapper = new ObjectMapper();
SimpleModule module = new SimpleModule("DefaultModule", new Version(0, 0, 1, null, null, null));
module.addSerializer(Date.class, sample.new DateSerializer());
module.addSerializer(Person.class, sample.new PersonSerializer());
objectMapper.registerModule(module);
JacksonDataFormat jackson = new JacksonDataFormat(objectMapper, null);
JndiContext jndiContext = new JndiContext();
jndiContext.bind("json-jackson", jackson);
CamelContext context = new DefaultCamelContext(jndiContext);
context.addRoutes(new RouteBuilder() {
public void configure() throws Exception {
restConfiguration().component("jetty").bindingMode(RestBindingMode.json)
.host("0.0.0.0").contextPath("/test").port(8080)
//disableFeatures WRITE_DATES_AS_TIMESTAMPS
.dataFormatProperty("json.out.disableFeatures", "WRITE_DATES_AS_TIMESTAMPS")
;
rest("/v1/").produces("application/json")
.get("persons")
.to("direct:getPersons");
from("direct:getPersons")
.process(new Processor() {
public void process(Exchange exchange) throws Exception {
exchange.getIn().setBody(new Person("Sergey", new GregorianCalendar().getTime()));
}
})
;
}
});
context.start();
Thread.sleep(60000);
context.stop();
}
public class DateSerializer extends JsonSerializer<Date> {
@Override
public void serialize(Date value, JsonGenerator gen, SerializerProvider serializers)
throws IOException, JsonProcessingException {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
String date = sdf.format(value);
gen.writeString(date);
}
}
public class PersonSerializer extends JsonSerializer<Person> {
@Override
public void serialize(Person value, JsonGenerator gen, SerializerProvider serializers)
throws IOException, JsonProcessingException {
gen.writeStartObject();
gen.writeFieldName("Changed_n");
gen.writeObject(value.getName() + " Changed");
gen.writeFieldName("Changed_b");
gen.writeObject(value.getBirthday());
gen.writeEndObject();
}
}
}
Person.java:
import java.util.Date;
public class Person {
private String name;
private Date birthday;
Person(String name, Date birhday){
System.out.println("Person");
this.setBirthday(birhday);
this.setName(name);
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Date getBirthday() {
return birthday;
}
public void setBirthday(Date birthday) {
this.birthday = birthday;
}
}

Getting error "No serializer found for class org.springframework.data.mongodb.core.convert.DefaultDbRefResolver$LazyLoadingInterceptor"

I am using Spring Data MongoDB. After setting lazy loading to true, I am getting the error "No serializer found for class org.springframework.data.mongodb.core.convert.DefaultDbRefResolver$LazyLoadingInterceptor".
My domain class is
public class Preference extends BaseEntity {
@DBRef(lazy = true)
User user;
MetadataEnum preferenceType;
public User getUser() {
return user;
}
public void setUser(User user) {
this.user = user;
}
public MetadataEnum getPreferenceType() {
return preferenceType;
}
public void setPreferenceType(MetadataEnum preferenceType) {
this.preferenceType = preferenceType;
}
public List<Subtype> getSubtypes() {
return subtypes;
}
public void setSubtypes(List<Subtype> subtypes) {
this.subtypes = subtypes;
}
List<Subtype> subtypes = new ArrayList<Subtype>();
boolean enableSearch;
}
I have wasted a lot of time on this, but I am unable to find a suitable answer. Can anyone help me resolve it?
Thanks in advance
Add this configuration code for your requirement
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.convert.LazyLoadingProxy;
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
@Configuration
public class LazyLoadingSerializationConfig {
@Bean
public ObjectMapper objectMapper() {
ObjectMapper om = new ObjectMapper();
final SimpleModule module = new SimpleModule("<your entity>", new Version(1, 0, 0,null));
module.addSerializer(LazyLoadingProxy.class, new LazyLoadingSerializer());
om.registerModule(module);
return om;
}
}
and
import java.io.IOException;
import org.springframework.data.mongodb.core.convert.LazyLoadingProxy;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
public class LazyLoadingSerializer extends JsonSerializer<LazyLoadingProxy> {
@Override
public void serialize(LazyLoadingProxy value, JsonGenerator jgen,
SerializerProvider provider) throws IOException,
JsonProcessingException {
jgen.writeStartObject();
jgen.writeStringField("id", value.toDBRef().getId().toString());
jgen.writeEndObject();
}
}
Hope this may help you!
Switch off the serializer for that field, because lazy loading with @DBRef only works smoothly for a list of entities: Jackson automatically serializes a @DBRef(lazy = true) List<User> users field, but not a @DBRef(lazy = true) User user field. So you have to serialize it manually or switch off the serializer for that field.
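For the "switch off the serializer" option, a hedged sketch (field names as in the Preference class above): Jackson's @JsonIgnore keeps the lazy-loading proxy out of the JSON entirely, so only the remaining fields are serialized:
import com.fasterxml.jackson.annotation.JsonIgnore;

public class Preference extends BaseEntity {
    @JsonIgnore            // do not try to serialize the lazy-loading proxy
    @DBRef(lazy = true)
    User user;
    // ... rest of the class unchanged ...
}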