java.lang.ClassCastException: class com.sun.proxy.$Proxy143 cannot be cast to class org.springframework.messaging.MessageChannel (... are in unnamed module of loader 'app') - spring-cloud

I am writing tests for a Spring Cloud Stream application. It has a KStream reading from topicA. In the test I use a KafkaTemplate to publish messages and wait for the KStream's log statements to show up.
The tests throw the following exception:
java.lang.ClassCastException: class com.sun.proxy.$Proxy143 cannot be cast to class org.springframework.messaging.MessageChannel (com.sun.proxy.$Proxy143 and org.springframework.messaging.MessageChannel are in unnamed module of loader 'app')
at org.springframework.cloud.stream.test.binder.TestSupportBinder.bindConsumer(TestSupportBinder.java:66) ~[spring-cloud-stream-test-support-3.0.1.RELEASE.jar:3.0.1.RELEASE]
at org.springframework.cloud.stream.binding.BindingService.doBindConsumer(BindingService.java:169) ~[spring-cloud-stream-3.0.2.BUILD-SNAPSHOT.jar:3.0.2.BUILD-SNAPSHOT]
at org.springframework.cloud.stream.binding.BindingService.bindConsumer(BindingService.java:115) ~[spring-cloud-stream-3.0.2.BUILD-SNAPSHOT.jar:3.0.2.BUILD-SNAPSHOT]
at org.springframework.cloud.stream.binding.AbstractBindableProxyFactory.createAndBindInputs(AbstractBindableProxyFactory.java:112) ~[spring-cloud-stream-3.0.2.BUILD-SNAPSHOT.jar:3.0.2.BUILD-SNAPSHOT]
at org.springframework.cloud.stream.binding.InputBindingLifecycle.doStartWithBindable(InputBindingLifecycle.java:58) ~[spring-cloud-stream-3.0.2.BUILD-SNAPSHOT.jar:3.0.2.BUILD-SNAPSHOT]
at java.base/java.util.LinkedHashMap$LinkedValues.forEach(LinkedHashMap.java:608) ~[na:na]
This exception doesn't show up in the normal execution of the application.
KSTREAM:
@Configuration
class MyKStream {
    private val logger = LoggerFactory.getLogger(javaClass)

    @Bean
    fun processSomething(): Consumer<KStream<XX, XX>> {
        return Consumer { something ->
            something.foreach { key, value ->
                logger.info("--------> Processing xxx key {} - value {}", key, value)
            }
        }
    }
}
TEST:
@TestInstance(PER_CLASS)
@EmbeddedKafka
@SpringBootTest(properties = [
    "spring.profiles.active=local",
    "schema-registry.user=",
    "schema-registry.password=",
    "spring.cloud.stream.bindings.processSomething-in-0.destination=topicA",
    "spring.cloud.stream.bindings.processSomething-in-0.producer.useNativeEncoding=true",
    "spring.cloud.stream.bindings.processSomethingElse-in-0.destination=topicB",
    "spring.cloud.stream.bindings.processSomethingElse-in-0.producer.useNativeEncoding=true",
    "spring.cloud.stream.kafka.streams.binder.configuration.application.server=localhost:8080",
    "spring.cloud.stream.function.definition=processSomething;processSomethingElse"])
class MyKStreamTests {
    private val logger = LoggerFactory.getLogger(javaClass)

    @Autowired
    private lateinit var embeddedKafka: EmbeddedKafkaBroker

    @Autowired
    private lateinit var schemaRegistryMock: SchemaRegistryMock

    @AfterAll
    fun afterAll() {
        embeddedKafka.kafkaServers.forEach { it.shutdown() }
        embeddedKafka.kafkaServers.forEach { it.awaitShutdown() }
    }

    @Test
    fun `should send and process something`() {
        val producer = createProducer()
        logger.debug("**********----> presend")
        val msg = MessageBuilder.withPayload(xxx)
            .setHeader(KafkaHeaders.MESSAGE_KEY, xxx)
            .setHeader(KafkaHeaders.TIMESTAMP, 1L)
            .build()
        producer.send(msg).get()
        logger.debug("**********----> sent")
        Thread.sleep(100000)
    }
}
@Configuration
class KafkaTestConfiguration(private val embeddedKafkaBroker: EmbeddedKafkaBroker) {
    private val schemaRegistryMock = SchemaRegistryMock()

    @PostConstruct
    fun init() {
        System.setProperty("spring.kafka.bootstrap-servers", embeddedKafkaBroker.brokersAsString)
        System.setProperty("spring.cloud.stream.kafka.streams.binder.brokers", embeddedKafkaBroker.brokersAsString)
        schemaRegistryMock.start()
        System.setProperty("spring.cloud.stream.kafka.streams.binder.configuration.schema.registry.url", schemaRegistryMock.url)
    }

    @Bean
    fun schemaRegistryMock(): SchemaRegistryMock {
        return schemaRegistryMock
    }

    @PreDestroy
    fun preDestroy() {
        schemaRegistryMock.stop()
    }
}

You are probably using spring-cloud-stream-test-support as a dependency. That module bypasses some of the core functionality of the binder API during tests, which results in this error. Remove it so the tests run against the real binder; see the testing section of the reference documentation:
https://cloud.spring.io/spring-cloud-static/spring-cloud-stream/3.0.3.RELEASE/reference/html/spring-cloud-stream.html#_testing
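If you build with Gradle, a minimal sketch of the fix (the build tool, Kotlin DSL, and exact versions are assumptions; the coordinates are taken from the stack trace above) is simply removing the test-support module from the test classpath so the test runs against the real Kafka Streams binder and the embedded broker:
// build.gradle.kts -- sketch only
dependencies {
    // Remove this artifact: its TestSupportBinder replaces the binder API
    // and causes the MessageChannel proxy cast failure above.
    // testImplementation("org.springframework.cloud:spring-cloud-stream-test-support:3.0.1.RELEASE")

    // Keep the embedded broker that backs @EmbeddedKafka in the test.
    testImplementation("org.springframework.kafka:spring-kafka-test")
}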

Related

RabbitTransactionManager cannot commit transaction at ChainedTransactionManager

I'm trying to use one transaction manager for Rabbit and Kafka. First I get a message from a Rabbit callback, then send it to a Kafka topic. But I always get an exception indicating that Rabbit cannot finish the transaction correctly:
2019-11-14 16:02:46.572 ERROR 15640 --- [cTaskExecutor-1] o.s.a.r.c.CachingConnectionFactory : Could not configure the channel to receive publisher confirms java.io.IOException: null
at com.rabbitmq.client.impl.AMQChannel.wrap(AMQChannel.java:126)
at com.rabbitmq.client.impl.AMQChannel.wrap(AMQChannel.java:122)
at com.rabbitmq.client.impl.AMQChannel.exnWrappingRpc(AMQChannel.java:144)
at com.rabbitmq.client.impl.ChannelN.confirmSelect(ChannelN.java:1552)
at com.rabbitmq.client.impl.ChannelN.confirmSelect(ChannelN.java:52)
at org.springframework.amqp.rabbit.connection.CachingConnectionFactory.doCreateBareChannel(CachingConnectionFactory.java:602)
at org.springframework.amqp.rabbit.connection.CachingConnectionFactory.createBareChannel(CachingConnectionFactory.java:582)
at org.springframework.amqp.rabbit.connection.CachingConnectionFactory.access$600(CachingConnectionFactory.java:99)
at org.springframework.amqp.rabbit.connection.CachingConnectionFactory$CachedChannelInvocationHandler.invoke(CachingConnectionFactory.java:1053)
at com.sun.proxy.$Proxy124.txCommit(Unknown Source)
at org.springframework.amqp.rabbit.connection.RabbitResourceHolder.commitAll(RabbitResourceHolder.java:164)
at org.springframework.amqp.rabbit.transaction.RabbitTransactionManager.doCommit(RabbitTransactionManager.java:187)
at org.springframework.transaction.support.AbstractPlatformTransactionManager.processCommit(AbstractPlatformTransactionManager.java:746)
at org.springframework.transaction.support.AbstractPlatformTransactionManager.commit(AbstractPlatformTransactionManager.java:714)
at org.springframework.data.transaction.MultiTransactionStatus.commit(MultiTransactionStatus.java:74)
at org.springframework.data.transaction.ChainedTransactionManager.commit(ChainedTransactionManager.java:150)
at org.springframework.transaction.interceptor.TransactionAspectSupport.commitTransactionAfterReturning(TransactionAspectSupport.java:532)
at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:304)
at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:98)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:185)
at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:688)
at com.listener.ExecutionCallbackListener$$EnhancerBySpringCGLIB$$9b575a95.receiveCallback(<generated>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:181)
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:114)
at org.springframework.amqp.rabbit.listener.adapter.HandlerAdapter.invoke(HandlerAdapter.java:51)
at org.springframework.amqp.rabbit.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:188)
at org.springframework.amqp.rabbit.listener.adapter.MessagingMessageListenerAdapter.onMessage(MessagingMessageListenerAdapter.java:126)
at org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer.doInvokeListener(AbstractMessageListenerContainer.java:1445)
at org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer.actualInvokeListener(AbstractMessageListenerContainer.java:1368)
at org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer.invokeListener(AbstractMessageListenerContainer.java:1355)
at org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer.executeListener(AbstractMessageListenerContainer.java:1334)
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.doReceiveAndExecute(SimpleMessageListenerContainer.java:817)
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.receiveAndExecute(SimpleMessageListenerContainer.java:801)
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.access$700(SimpleMessageListenerContainer.java:77)
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer$AsyncMessageProcessingConsumer.run(SimpleMessageListenerContainer.java:1042)
at java.lang.Thread.run(Thread.java:748)
Caused by: com.rabbitmq.client.ShutdownSignalException: channel error; protocol method: #method<channel.close>(reply-code=406, reply-text=PRECONDITION_FAILED - cannot switch from tx to confirm mode, class-id=85, method-id=10)
at com.rabbitmq.utility.ValueOrException.getValue(ValueOrException.java:66)
at com.rabbitmq.utility.BlockingValueOrException.uninterruptibleGetValue(BlockingValueOrException.java:36)
at com.rabbitmq.client.impl.AMQChannel$BlockingRpcContinuation.getReply(AMQChannel.java:494)
at com.rabbitmq.client.impl.AMQChannel.privateRpc(AMQChannel.java:288)
at com.rabbitmq.client.impl.AMQChannel.exnWrappingRpc(AMQChannel.java:138)
... 37 common frames omitted
Here is the method where the problem occurs in the Rabbit listener:
@RabbitListener(queues = ["\${queue}"])
@Transactional("chainedTransactionManager")
fun receiveCallback(message: Message<List<CallbackMessage>>) {
    traceMessage(message)
    val callbacks = message.payload
    callbacks.forEach { callback ->
        kafkaService.sendAfterCallBack(Object())
    }
}
And the method in KafkaService:
@Transactional("chainedTransactionManager")
fun sendAfterCallBack(obj: Any) {
    convertAndSend(kafkaServiceProperties.topics.name, obj)
}
Here is the TransactionManager configuration:
@Configuration
class TransactionManagerConfiguration {

    @Bean
    fun chainedTransactionManager(
        rabbitTransactionManager: RabbitTransactionManager,
        kafkaTransactionManager: KafkaTransactionManager<*, *>
    ): ChainedTransactionManager {
        return ChainedTransactionManager(kafkaTransactionManager, rabbitTransactionManager)
    }
}
Rabbit configuration:
@Configuration
@EnableRabbit
@Import(RabbitAutoCreationConfiguration::class)
class RabbitConfiguration(
    private val integrationProperties: IntegrationProperties,
    private var clientProperties: RabbitClientProperties,
    private val jacksonObjectMapper: ObjectMapper
) : RabbitListenerConfigurer {

    @Bean
    fun rabbitListenerContainerFactory(connectionFactory: ConnectionFactory): SimpleRabbitListenerContainerFactory {
        val factory = SimpleRabbitListenerContainerFactory()
        factory.setConnectionFactory(connectionFactory)
        factory.setErrorHandler { t -> throw AmqpRejectAndDontRequeueException(t) }
        return factory
    }

    @Bean
    fun messageConverter(): MessageConverter {
        val messageConverter = MappingJackson2MessageConverter()
        messageConverter.objectMapper = jacksonObjectMapper
        return messageConverter
    }

    @Bean
    fun messageHandlerFactory(): MessageHandlerMethodFactory {
        val factory = DefaultMessageHandlerMethodFactory()
        factory.setMessageConverter(messageConverter())
        return factory
    }

    @Bean
    @ConditionalOnBean(CachingConnectionFactory::class)
    fun rabbitConnectionFactoryCustomizer(factory: CachingConnectionFactory): SmartInitializingSingleton {
        return SmartInitializingSingleton {
            factory.rabbitConnectionFactory.clientProperties.apply {
                clientProperties.copyright?.let { put("copyright", it) }
                put("os", System.getProperty("os.name"))
                put("host", InetAddress.getLocalHost().hostName)
                clientProperties.platform?.let { put("platform", it) }
                clientProperties.product?.let { put("product", it) }
                clientProperties.service?.let { put("service", it) }
            }
        }
    }

    override fun configureRabbitListeners(registrar: RabbitListenerEndpointRegistrar?) {
        registrar!!.messageHandlerMethodFactory = messageHandlerFactory()
    }

    @Bean
    fun rabbitTemplate(
        connectionFactory: ConnectionFactory,
        jsonObjectMapper: ObjectMapper
    ): RabbitTemplate {
        val rabbitTemplate = RabbitTemplate(connectionFactory)
        val retryTemplate = RetryTemplate()
        retryTemplate.setRetryPolicy(SimpleRetryPolicy(integrationProperties.callbackRetry))
        rabbitTemplate.setRetryTemplate(retryTemplate)
        rabbitTemplate.isChannelTransacted = true
        return rabbitTemplate
    }

    @Bean
    fun rabbitTransactionManager(connectionFactory: ConnectionFactory): RabbitTransactionManager {
        val rtm = RabbitTransactionManager(connectionFactory)
        rtm.transactionSynchronization = AbstractPlatformTransactionManager.SYNCHRONIZATION_ON_ACTUAL_TRANSACTION
        return rtm
    }
}
Kafka configuration:
@Configuration
@EnableKafka
class KafkaConfiguration(
    @Qualifier("kafkaExchangeMessageConverter")
    private val messageConverter: MessagingMessageConverter
) {

    @Bean
    fun kafkaListenerContainerFactory(
        configurer: ConcurrentKafkaListenerContainerFactoryConfigurer,
        consumerFactory: ConsumerFactory<Any, Any>
    ): ConcurrentKafkaListenerContainerFactory<Any, Any> {
        val factory = ConcurrentKafkaListenerContainerFactory<Any, Any>()
        factory.setMessageConverter(messageConverter)
        configurer.configure(factory, consumerFactory)
        return factory
    }

    @Bean
    fun adminClient(kafkaAdmin: KafkaAdmin): AdminClient = AdminClient.create(kafkaAdmin.config)

    @Bean
    fun kafkaTransactionManager(
        producerFactory: ProducerFactory<*, *>
    ): KafkaTransactionManager<*, *> {
        val ktm = KafkaTransactionManager(producerFactory)
        ktm.transactionSynchronization = AbstractPlatformTransactionManager.SYNCHRONIZATION_ON_ACTUAL_TRANSACTION
        return ktm
    }
}
Did I miss something in RabbitConfiguration, or is the problem somewhere else?
reply-text=PRECONDITION_FAILED - cannot switch from tx to confirm mode,
You cannot use publisher confirms and transactions on the same channel. Turn off publisher confirms.
Also, it's better to inject the chained transaction manager into the listener container rather than using @Transactional.
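A minimal Kotlin sketch of both suggestions applied to the question's factory bean, assuming the CachingConnectionFactory from the question (publisher confirms can equally be switched off by removing spring.rabbitmq.publisher-confirms=true from the application properties):
@Bean
fun rabbitListenerContainerFactory(
    connectionFactory: CachingConnectionFactory,
    chainedTransactionManager: ChainedTransactionManager
): SimpleRabbitListenerContainerFactory {
    // A transacted channel cannot also be put into confirm mode
    // (the PRECONDITION_FAILED error above), so confirms go off.
    connectionFactory.isPublisherConfirms = false
    val factory = SimpleRabbitListenerContainerFactory()
    factory.setConnectionFactory(connectionFactory)
    factory.setChannelTransacted(true)
    // The container starts and commits the chained transaction itself,
    // replacing @Transactional on the listener method.
    factory.setTransactionManager(chainedTransactionManager)
    return factory
}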

KafkaStreamsStateStore not working when the store value is an Avro SpecificRecord

I have a Spring Cloud Kafka Streams application that uses a StateStore through the Processor API, in a transformer that performs deduplication.
The state store key-value are of the following types: <String, TransferEmitted>.
When running the application, at the moment of putting a value in the state store (dedupStore.put(key, value)), I get this exception:
Caused by: java.lang.ClassCastException: com.codependent.outboxpattern.account.TransferEmitted cannot be cast to java.lang.String
This is due to the fact that the default value serde for the KafkaStreamsStateStore is a StringSerde.
Thus, I have added the valueSerde parameter in the KafkaStreamsStateStore annotation, indicating the one for a SpecificAvroSerde:
@KafkaStreamsStateStore(name = DEDUP_STORE, type = KafkaStreamsStateStoreProperties.StoreType.KEYVALUE,
    valueSerde = "io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde")
Now I get a NullPointerException in AbstractKafkaAvroSerializer.serializeImpl because at id = this.schemaRegistry.getId(subject, schema); schemaRegistry is null:
Caused by: org.apache.kafka.common.errors.SerializationException: Error serializing Avro message
Caused by: java.lang.NullPointerException
at io.confluent.kafka.serializers.AbstractKafkaAvroSerializer.serializeImpl(AbstractKafkaAvroSerializer.java:82)
at io.confluent.kafka.serializers.KafkaAvroSerializer.serialize(KafkaAvroSerializer.java:53)
at io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer.serialize(SpecificAvroSerializer.java:65)
at io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer.serialize(SpecificAvroSerializer.java:38)
Despite having configured the schema registry as a Spring bean...
@Configuration
class SchemaRegistryConfiguration {

    @Bean
    fun schemaRegistryClient(@Value("\${spring.cloud.stream.schema-registry-client.endpoint}") endpoint: String): SchemaRegistryClient {
        val client = ConfluentSchemaRegistryClient()
        client.setEndpoint(endpoint)
        return client
    }
}
...when Kafka sets up the SpecificAvroSerde it uses the no-params constructor so it doesn't initialize the schema registry client:
public class SpecificAvroSerde<T extends SpecificRecord> implements Serde<T> {
    private final Serde<T> inner;

    public SpecificAvroSerde() {
        this.inner = Serdes.serdeFrom(new SpecificAvroSerializer(), new SpecificAvroDeserializer());
    }

    public SpecificAvroSerde(SchemaRegistryClient client) {
        if (client == null) {
            throw new IllegalArgumentException("schema registry client must not be null");
        } else {
            this.inner = Serdes.serdeFrom(new SpecificAvroSerializer(client), new SpecificAvroDeserializer(client));
        }
    }
    // ...
}
How can I configure this application so that it can serialize a StateStore<String, TransferEmitted>?
EXCERPTS FROM THE PROJECT (source available at https://github.com/codependent/kafka-outbox-pattern)
KStream
const val DEDUP_STORE = "dedup-store"
@EnableBinding(KafkaStreamsProcessor::class)
class FraudKafkaStreamsConfiguration(private val fraudDetectionService: FraudDetectionService) {

    @KafkaStreamsStateStore(name = DEDUP_STORE, type = KafkaStreamsStateStoreProperties.StoreType.KEYVALUE)
    @StreamListener
    @SendTo("output")
    fun process(@Input("input") input: KStream<String, TransferEmitted>): KStream<String, TransferEmitted> {
        return input
            .transform(TransformerSupplier { DeduplicationTransformer() }, DEDUP_STORE)
            .filter { _, value -> fraudDetectionService.isFraudulent(value) }
    }
}
Transformer
@Suppress("UNCHECKED_CAST")
class DeduplicationTransformer : Transformer<String, TransferEmitted, KeyValue<String, TransferEmitted>> {

    private lateinit var dedupStore: KeyValueStore<String, TransferEmitted>
    private lateinit var context: ProcessorContext

    override fun init(context: ProcessorContext) {
        this.context = context
        dedupStore = context.getStateStore(DEDUP_STORE) as KeyValueStore<String, TransferEmitted>
    }

    override fun transform(key: String, value: TransferEmitted): KeyValue<String, TransferEmitted>? {
        return if (isDuplicate(key)) {
            null
        } else {
            dedupStore.put(key, value)
            KeyValue(key, value)
        }
    }

    private fun isDuplicate(key: String) = dedupStore[key] != null

    override fun close() {
    }
}
application.yml
spring:
  application:
    name: fraud-service
  cloud:
    stream:
      schema-registry-client:
        endpoint: http://localhost:8081
      kafka:
        streams:
          binder:
            configuration:
              application:
                id: fraud-service
              default:
                key:
                  serde: org.apache.kafka.common.serialization.Serdes$StringSerde
              schema:
                registry:
                  url: http://localhost:8081
      bindings:
        input:
          destination: transfer
          contentType: application/*+avro
        output:
          destination: fraudulent-transfer
          contentType: application/*+avro

server:
  port: 8086

logging:
  level:
    org.springframework.cloud.stream: debug
I ran into the same issue and forgot that schema.registry.url needs to be passed to the serializer/deserializer so that you can store Avro records in your state store. For example:
@Bean
public StoreBuilder eventStore(Map<String, String> schemaConfig) {
    final Duration windowSize = Duration.ofMinutes(DUPLICATION_WINDOW_DURATION);

    // The retention period must be at least the window size -- for this use case we
    // don't need a longer retention period and thus just use the window size.
    final Duration retentionPeriod = windowSize;

    // We have to specify schema.registry.url here, otherwise the schemaRegistry
    // value will end up null.
    KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    serializer.configure(schemaConfig, true);
    deserializer.configure(schemaConfig, true);

    final StoreBuilder<WindowStore<Object, Long>> dedupStoreBuilder = Stores.windowStoreBuilder(
            Stores.persistentWindowStore(STORE_NAME,
                    retentionPeriod,
                    windowSize,
                    false
            ),
            Serdes.serdeFrom(serializer, deserializer),
            // the timestamp value is a long
            Serdes.Long());
    return dedupStoreBuilder;
}
@Bean
public Map<String, String> schemaConfig(@Value("${spring.cloud.stream.schemaRegistryClient.endpoint}") String url) {
    return Collections.singletonMap("schema.registry.url", url);
}
Here's the application.yml file:
spring:
  cloud:
    stream:
      schemaRegistryClient:
        endpoint: http://localhost:8081
After I did this, I was able to get this Store properly configured and didn't see a NullPointerException anymore.
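Applied to the question's <String, TransferEmitted> store, the same idea in Kotlin might look like the sketch below; the endpoint is assumed from the question's application.yml, and configure() is the standard Confluent serde API for supplying the registry URL to a serde created with the no-args constructor:
// Sketch: configure the serde by hand so its inner (de)serializers get a
// schema registry client instead of the null one from the no-args constructor.
val valueSerde = SpecificAvroSerde<TransferEmitted>()
valueSerde.configure(mapOf("schema.registry.url" to "http://localhost:8081"), false) // false = value serde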

Usage of timer and side input on ParDo in Apache Beam

I'm trying to write a ParDo that uses both a Timer and a Side Input, but it crashes when I run it with beam-runners-direct-java: an IllegalArgumentException is thrown at https://github.com/apache/beam/blob/master/runners/direct-java/src/main/java/org/apache/beam/runners/direct/QuiescenceDriver.java#L167, because the ParDo actually has two inputs (the main PCollection and the side input) while only one is expected.
Is there some way to work around this? Is this a bug in Beam?
Here's the code snippet that reproduces that behaviour:
public class TestCrashesForTimerAndSideInput {

    @Rule
    public final transient TestPipeline p = TestPipeline.create();

    @RequiredArgsConstructor
    private static class DoFnWithTimer extends DoFn<KV<String, String>, String> {

        private final PCollectionView<Map<String, String>> sideInput;

        @TimerId("t")
        private final TimerSpec tSpec = TimerSpecs.timer(TimeDomain.PROCESSING_TIME);

        @ProcessElement
        public void processElement(ProcessContext c, @TimerId("t") Timer t) {
            KV<String, String> element = c.element();
            c.output(element.getKey() + c.sideInput(sideInput).get(element));
            t.offset(Duration.standardSeconds(1)).setRelative();
        }

        @OnTimer("t")
        public void onTimerFire(OnTimerContext x) {
            x.output("Timer fired");
        }
    }

    @Test
    public void testCrashesForTimerAndSideInput() {
        ImmutableMap<String, String> sideData = ImmutableMap.<String, String>builder().
                put("x", "X").
                put("y", "Y").
                build();
        PCollectionView<Map<String, String>> sideInput =
                p.apply(Create.of(sideData)).apply(View.asMap());
        TestStream<String> testStream = TestStream.create(StringUtf8Coder.of()).
                addElements("x").
                advanceProcessingTime(Duration.standardSeconds(1)).
                addElements("y").
                advanceProcessingTime(Duration.standardSeconds(1)).
                advanceWatermarkToInfinity();
        PCollection<String> result = p.
                apply(testStream).
                apply(MapElements.into(kvs(strings(), strings())).via(v -> KV.of(v, v))).
                apply(ParDo.of(new DoFnWithTimer(sideInput)).withSideInputs(sideInput));
        PAssert.that(result).containsInAnyOrder("X", "Y", "Timer fired");
        p.run();
    }
}
and the exception:
java.lang.IllegalArgumentException: expected one element but was: <ParDo(DoFnWithTimer)/ParMultiDo(DoFnWithTimer)/To KeyedWorkItem/ParMultiDo(ToKeyedWorkItem).output [PCollection], View.AsMap/View.VoidKeyToMultimapMaterialization/ParDo(VoidKeyToMultimapMaterialization)/ParMultiDo(VoidKeyToMultimapMaterialization).output [PCollection]>
at org.apache.beam.repackaged.beam_runners_direct_java.com.google.common.collect.Iterators.getOnlyElement(Iterators.java:322)
at org.apache.beam.repackaged.beam_runners_direct_java.com.google.common.collect.Iterables.getOnlyElement(Iterables.java:294)
at org.apache.beam.runners.direct.QuiescenceDriver.fireTimers(QuiescenceDriver.java:167)
at org.apache.beam.runners.direct.QuiescenceDriver.drive(QuiescenceDriver.java:110)
at org.apache.beam.runners.direct.ExecutorServiceParallelExecutor$2.run(ExecutorServiceParallelExecutor.java:170)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)

Can I use repository populator bean with fongo?

I'm using Fongo not only for unit tests but also for integration tests, so I would like to initialize Fongo with some collections. Is that possible?
This is my Java config (based on Oliver G.'s answer):
@EnableAutoConfiguration(exclude = {
        EmbeddedMongoAutoConfiguration.class,
        MongoAutoConfiguration.class,
        MongoDataAutoConfiguration.class
})
@Configuration
@ComponentScan(basePackages = { "com.foo" },
        excludeFilters = { @ComponentScan.Filter(classes = { SpringBootApplication.class })
})
public class ConfigServerWithFongoConfiguration extends AbstractFongoBaseConfiguration {

    private static final Logger log = LoggerFactory.getLogger(ConfigServerWithFongoConfiguration.class);

    @Autowired
    ResourcePatternResolver resourceResolver;

    @Bean
    public Jackson2RepositoryPopulatorFactoryBean repositoryPopulator() {
        Jackson2RepositoryPopulatorFactoryBean factory = new Jackson2RepositoryPopulatorFactoryBean();
        try {
            factory.setResources(resourceResolver.getResources("classpath:static/collections/*.json"));
        } catch (IOException e) {
            log.error("Could not load data", e);
        }
        return factory;
    }
}
When I run my IT tests, the log shows Reading resource: file *.json, but the tests fail because they retrieve nothing (null) from the Fongo database.
Tests are annotated with:
@RunWith(SpringRunner.class)
@SpringBootTest(classes={ConfigServerWithFongoConfiguration.class})
@AutoConfigureMockMvc
@TestPropertySource(properties = {"spring.data.mongodb.database=fake"})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
Lol, I feel so stupid right now. It was a format issue. JSON collections must be formatted like this:
[
    {/*doc1*/},
    {/*doc2*/},
    {/*doc3*/}
]
I was missing the [] and the comma-separated documents.
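For illustration, a populator file for a hypothetical com.foo.Customer entity could look like this; Jackson2RepositoryPopulatorFactoryBean inspects the _class attribute of each document to determine the entity type:
[
    { "_class": "com.foo.Customer", "firstname": "Alice", "lastname": "Smith" },
    { "_class": "com.foo.Customer", "firstname": "Bob", "lastname": "Jones" }
]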

xtext parameterized xtext runner

Purpose: run parameterized tests within an Xtext/Xtend context.
Progress: so far I have gotten it to run, but it appears wrong in the JUnit window.
Issues:
The failure trace and results of both tests appear in the last test, as shown in the figure below.
The first test, marked by the red pen, is sort of unresolved and does not contain any failure trace.
Here is the test class:
@RunWith(typeof(Parameterized))
@InjectWith(SemanticAdaptationInjectorProvider)
@Parameterized.UseParametersRunnerFactory(XtextParametersRunnerFactory)
class CgCppAutoTest extends AbstractSemanticAdaptationTest {

    new(List<File> files) {
        f = files;
    }

    @Inject extension ParseHelper<SemanticAdaptation>
    @Inject extension ValidationTestHelper

    @Parameters(name = "{index}")
    def static Collection<Object[]> data() {
        val files = new ArrayList<List<File>>();
        listf("test_input", files);
        val test = new ArrayList();
        test.add(files.get(0));
        return Arrays.asList(test.toArray(), test.toArray());
    }

    def static void listf(String directoryName, List<List<File>> files) {
        ...
    }

    var List<File> f;

    @Test def allSemanticAdaptations() {
        System.out.println("fail");
        assertTrue(false);
    }
}
ParameterizedXtextRunner (Inspiration from here: https://www.eclipse.org/forums/index.php?t=msg&th=1075706&goto=1726802&):
class ParameterizedXtextRunner extends XtextRunner {

    val Object[] parameters;
    val String annotatedName;

    new(TestWithParameters test) throws InitializationError {
        super(test.testClass.javaClass)
        parameters = test.parameters;
        annotatedName = test.name;
    }

    override protected getName() {
        return super.name + annotatedName;
    }

    override protected createTest() throws Exception {
        val object = testClass.onlyConstructor.newInstance(parameters)
        val injectorProvider = getOrCreateInjectorProvider
        if (injectorProvider != null) {
            val injector = injectorProvider.injector
            if (injector != null)
                injector.injectMembers(object)
        }
        return object;
    }

    override protected void validateConstructor(List<Throwable> errors) {
        validateOnlyOneConstructor(errors)
    }
}
And finally XtextParametersRunnerFactory:
class XtextParametersRunnerFactory implements ParametersRunnerFactory {

    override createRunnerForTestWithParameters(TestWithParameters test) throws InitializationError {
        new ParameterizedXtextRunner(test)
    }
}
Looking at the XtextRunner class, it inherits from BlockJUnit4ClassRunner. Parameterized does not extend that runner but ParentRunner; however, BlockJUnit4ClassRunner extends ParentRunner as well. Therefore we implemented it as below:
public class XtextParametersRunnerFactory implements ParametersRunnerFactory {

    @Override
    public Runner createRunnerForTestWithParameters(TestWithParameters test) throws InitializationError {
        return new XtextRunnerWithParameters(test);
    }
}
We took the code from XtextRunner and put it into the new runner; it is also necessary to pull in Xtext's InjectorProviders:
public class XtextRunnerWithParameters extends BlockJUnit4ClassRunnerWithParameters {

    public XtextRunnerWithParameters(TestWithParameters test) throws InitializationError {
        super(test);
    }

    @Override
    public Object createTest() throws Exception {
        Object object = super.createTest();
        IInjectorProvider injectorProvider = getOrCreateInjectorProvider();
        if (injectorProvider != null) {
            Injector injector = injectorProvider.getInjector();
            if (injector != null)
                injector.injectMembers(object);
        }
        return object;
    }

    @Override
    protected Statement methodBlock(FrameworkMethod method) {
        IInjectorProvider injectorProvider = getOrCreateInjectorProvider();
        if (injectorProvider instanceof IRegistryConfigurator) {
            final IRegistryConfigurator registryConfigurator = (IRegistryConfigurator) injectorProvider;
            registryConfigurator.setupRegistry();
            final Statement methodBlock = superMethodBlock(method);
            return new Statement() {
                @Override
                public void evaluate() throws Throwable {
                    try {
                        methodBlock.evaluate();
                    } finally {
                        registryConfigurator.restoreRegistry();
                    }
                }
            };
        } else {
            return superMethodBlock(method);
        }
    }

    protected Statement superMethodBlock(FrameworkMethod method) {
        return super.methodBlock(method);
    }

    protected IInjectorProvider getOrCreateInjectorProvider() {
        return InjectorProviders.getOrCreateInjectorProvider(getTestClass());
    }

    protected IInjectorProvider getInjectorProvider() {
        return InjectorProviders.getInjectorProvider(getTestClass());
    }

    protected IInjectorProvider createInjectorProvider() {
        return InjectorProviders.createInjectorProvider(getTestClass());
    }
}
Creating a test:
@RunWith(typeof(Parameterized))
@InjectWith(SemanticAdaptationInjectorProvider)
@Parameterized.UseParametersRunnerFactory(XtextParametersRunnerFactory)
class xxx {
    @Inject extension ParseHelper<SemanticAdaptation>
    @Inject extension ValidationTestHelper
    // Here goes standard parameterized stuff
}
Due to OSGi import-package constraints and deprecation, I use this adaptation of the original code:
package de.uni_leipzig.pkr.handparser.tests.runners;

import org.eclipse.xtext.testing.IInjectorProvider;
import org.eclipse.xtext.testing.IRegistryConfigurator;
import org.eclipse.xtext.testing.XtextRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
import org.junit.runners.parameterized.BlockJUnit4ClassRunnerWithParameters;
import org.junit.runners.parameterized.TestWithParameters;

import com.google.inject.Injector;

public class XtextRunnerWithParameters extends BlockJUnit4ClassRunnerWithParameters {

    public static class MyXtextRunner extends XtextRunner {

        public MyXtextRunner(Class<?> testClass) throws InitializationError {
            super(testClass);
        }

        public IInjectorProvider getOrCreateInjectorProvider() {
            return super.getOrCreateInjectorProvider();
        }
    }

    private MyXtextRunner xtextRunner;

    public XtextRunnerWithParameters(TestWithParameters test) throws InitializationError {
        super(test);
        xtextRunner = new MyXtextRunner(test.getTestClass().getJavaClass());
    }

    @Override
    public Object createTest() throws Exception {
        Object object = super.createTest();
        IInjectorProvider injectorProvider = xtextRunner.getOrCreateInjectorProvider();
        if (injectorProvider != null) {
            Injector injector = injectorProvider.getInjector();
            if (injector != null)
                injector.injectMembers(object);
        }
        return object;
    }

    @Override
    protected Statement methodBlock(FrameworkMethod method) {
        IInjectorProvider injectorProvider = xtextRunner.getOrCreateInjectorProvider();
        if (injectorProvider instanceof IRegistryConfigurator) {
            final IRegistryConfigurator registryConfigurator = (IRegistryConfigurator) injectorProvider;
            registryConfigurator.setupRegistry();
            final Statement methodBlock = superMethodBlock(method);
            return new Statement() {
                @Override
                public void evaluate() throws Throwable {
                    try {
                        methodBlock.evaluate();
                    } finally {
                        registryConfigurator.restoreRegistry();
                    }
                }
            };
        } else {
            return superMethodBlock(method);
        }
    }

    protected Statement superMethodBlock(FrameworkMethod method) {
        return super.methodBlock(method);
    }
}