I'm new to Apache Flink.
I tried to consume Avro data in Flink using the ConfluentRegistryAvroDeserializationSchema, but when I run the program and produce some data to Kafka using kafka-avro-console-producer, the program fails with errors.
Here's the consumer I wrote in Scala:
import java.util.Properties

import org.apache.avro.Schema
import org.apache.avro.generic.GenericRecord
import org.apache.flink.formats.avro.registry.confluent.ConfluentRegistryAvroDeserializationSchema
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer

object Test {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.createLocalEnvironment()
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "localhost:9092")
    properties.setProperty("group.id", "test")
    // the schema used to deserialize the data
    val STRING_SCHEMA = "{\"type\":\"record\",\"name\":\"c1\",\"namespace\":\"com.test\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}"
    val s = new Schema.Parser().parse(STRING_SCHEMA)
    val schemaRegistryUrl = "http://localhost:8081" // the Schema Registry URL
    val stream = env.addSource(
      new FlinkKafkaConsumer[GenericRecord](
        "second_topic",
        ConfluentRegistryAvroDeserializationSchema.forGeneric(s, schemaRegistryUrl),
        properties))
    stream.print()
    env.execute()
  }
}
Here are the errors I got:
Exception in thread "main" org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
at org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:144)
at org.apache.flink.runtime.minicluster.MiniClusterJobClient.lambda$getJobExecutionResult$3(MiniClusterJobClient.java:137)
at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:616)
at java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:591)
at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975)
at org.apache.flink.runtime.rpc.akka.AkkaInvocationHandler.lambda$invokeRpc$0(AkkaInvocationHandler.java:237)
at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774)
at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750)
at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975)
at org.apache.flink.runtime.concurrent.FutureUtils$1.onComplete(FutureUtils.java:1081)
at akka.dispatch.OnComplete.internal(Future.scala:264)
at akka.dispatch.OnComplete.internal(Future.scala:261)
at akka.dispatch.japi$CallbackBridge.apply(Future.scala:191)
at akka.dispatch.japi$CallbackBridge.apply(Future.scala:188)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:36)
at org.apache.flink.runtime.concurrent.Executors$DirectExecutionContext.execute(Executors.java:73)
at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:44)
at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:252)
at akka.pattern.PromiseActorRef.$bang(AskSupport.scala:572)
at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:22)
at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:21)
at scala.concurrent.Future$$anonfun$andThen$1.apply(Future.scala:436)
at scala.concurrent.Future$$anonfun$andThen$1.apply(Future.scala:435)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:36)
at akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:55)
at akka.dispatch.BatchingExecutor$BlockableBatch$$anonfun$run$1.apply$mcV$sp(BatchingExecutor.scala:91)
at akka.dispatch.BatchingExecutor$BlockableBatch$$anonfun$run$1.apply(BatchingExecutor.scala:91)
at akka.dispatch.BatchingExecutor$BlockableBatch$$anonfun$run$1.apply(BatchingExecutor.scala:91)
at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:72)
at akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:90)
at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:40)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:44)
at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategy
at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:138)
at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:82)
at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:207)
at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:197)
at org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:188)
at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:677)
at org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:79)
at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:435)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:305)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:212)
at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:77)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:158)
at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26)
at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21)
at scala.PartialFunction$class.applyOrElse(PartialFunction.scala:123)
at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21)
at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:170)
at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
at akka.actor.Actor$class.aroundReceive(Actor.scala:517)
at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:592)
at akka.actor.ActorCell.invoke(ActorCell.scala:561)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258)
at akka.dispatch.Mailbox.run(Mailbox.scala:225)
at akka.dispatch.Mailbox.exec(Mailbox.scala:235)
... 4 more
Caused by: com.esotericsoftware.kryo.KryoException: java.lang.UnsupportedOperationException
Serialization trace:
reserved (org.apache.avro.Schema$Field)
fieldMap (org.apache.avro.Schema$RecordSchema)
schema (org.apache.avro.generic.GenericData$Record)
at com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:125)
at com.esotericsoftware.kryo.serializers.FieldSerializer.read(FieldSerializer.java:528)
at com.esotericsoftware.kryo.Kryo.readClassAndObject(Kryo.java:761)
at com.esotericsoftware.kryo.serializers.MapSerializer.read(MapSerializer.java:143)
at com.esotericsoftware.kryo.serializers.MapSerializer.read(MapSerializer.java:21)
at com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:679)
at com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:106)
at com.esotericsoftware.kryo.serializers.FieldSerializer.read(FieldSerializer.java:528)
at com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:679)
at com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:106)
at com.esotericsoftware.kryo.serializers.FieldSerializer.read(FieldSerializer.java:528)
at com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:657)
at org.apache.flink.api.java.typeutils.runtime.kryo.KryoSerializer.copy(KryoSerializer.java:273)
at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.pushToOperator(CopyingChainingOutput.java:69)
at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:46)
at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:26)
at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:50)
at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:28)
at org.apache.flink.streaming.api.operators.StreamSourceContexts$ManualWatermarkContext.processAndCollectWithTimestamp(StreamSourceContexts.java:322)
at org.apache.flink.streaming.api.operators.StreamSourceContexts$WatermarkContext.collectWithTimestamp(StreamSourceContexts.java:426)
at org.apache.flink.streaming.connectors.kafka.internals.AbstractFetcher.emitRecordsWithTimestamps(AbstractFetcher.java:365)
at org.apache.flink.streaming.connectors.kafka.internals.KafkaFetcher.partitionConsumerRecordsHandler(KafkaFetcher.java:183)
at org.apache.flink.streaming.connectors.kafka.internals.KafkaFetcher.runFetchLoop(KafkaFetcher.java:142)
at org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase.run(FlinkKafkaConsumerBase.java:826)
at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:110)
at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:66)
at org.apache.flink.streaming.runtime.tasks.SourceStreamTask$LegacySourceFunctionThread.run(SourceStreamTask.java:269)
Caused by: java.lang.UnsupportedOperationException
at java.util.Collections$UnmodifiableCollection.add(Collections.java:1057)
at com.esotericsoftware.kryo.serializers.CollectionSerializer.read(CollectionSerializer.java:109)
at com.esotericsoftware.kryo.serializers.CollectionSerializer.read(CollectionSerializer.java:22)
at com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:679)
at com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:106)
Can you tell me what the problem is, please?
Thank you in advance
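For reference, the bottom of the trace shows Kryo failing while Flink copies the Avro GenericRecord between the chained source and print operators: Avro's Schema$Field holds unmodifiable collections that Kryo cannot rebuild. A minimal sketch of one common workaround, assuming flink-avro is on the classpath and reusing the names from the code above, is to give the stream explicit Avro type information so Flink does not fall back to Kryo:
import org.apache.flink.formats.avro.typeutils.GenericRecordAvroTypeInfo

// Sketch, not a confirmed fix: .returns(...) swaps the stream's type information
// so the Avro serializer handles GenericRecord instead of the Kryo fallback.
val stream = env
  .addSource(
    new FlinkKafkaConsumer[GenericRecord](
      "second_topic",
      ConfluentRegistryAvroDeserializationSchema.forGeneric(s, schemaRegistryUrl),
      properties))
  .returns(new GenericRecordAvroTypeInfo(s))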
Related
I am trying to implement ReplyingKafkaTemplate with Spring Batch integration.
It is actually running eagerly and throwing the "Dispatcher has no subscribers for channel" error before the subscriber bean is created, as you can see in the log:
org.springframework.messaging.MessageDeliveryException: Dispatcher has no subscribers for channel 'kafka Cloud Task.gatewayinputchannel'.;
Adding {kafka:outbound-gateway:kafkaConfig.outGateway.serviceActivator} as a subscriber to the 'gatewayinputchannel' channel
2022-08-18 02:02:08.068 INFO 24232 --- [ main] o.s.integration.channel.DirectChannel : Channel 'kafka Cloud Task.gatewayinputchannel' has 1 subscriber(s).
2022-08-18 02:02:08.068 INFO 24232 --- [ main] o.s.i.endpoint.EventDrivenConsumer : started bean 'kafkaConfig.outGateway.serviceActivator'
Is there a way to prevent it from running eagerly and wait until all the beans are created?
My ReplyingKafkaTemplate setup is described below.
@Bean
@ServiceActivator(inputChannel = "gatewayinputchannel")
public KafkaProducerMessageHandler<String, Message<?>> outGateway(
        ReplyingKafkaTemplate<String, Message<?>, Message<?>> kafkaTemplate) {
    KafkaProducerMessageHandler<String, Message<?>> kpmh =
            new KafkaProducerMessageHandler<String, Message<?>>(kafkaTemplate);
    kpmh.setMessageKeyExpression(new LiteralExpression(kafka_processing_topic));
    kpmh.setTopicExpression(new LiteralExpression(kafka_processing_topic));
    kpmh.setOutputChannel(gatewayoutputchannel());
    return kpmh;
}
Edited
@Configuration
@MessagingGateway
@EnableIntegration
@IntegrationComponentScan
public interface IntegrationGateway {
    @Gateway(requestChannel = "gatewayinputchannel", replyChannel = "gatewayoutputchannel")
    public Message<?> sendToKafka(Message<?> input);
}
We are calling it from the item processor of a batch job:
Message<?> request = MessageBuilder.withPayload(inpMessage).copyHeaders(header).build();
Message<?> response = integrationGateway.sendToKafka(request);
-------------------------- Edit 3 --------------------------
How do you start the batch job?
We are trying to start the batch job programmatically from a Spring Boot project, with an @Scheduled method using JobLauncher.
I have the flag below to prevent the job from starting automatically.
spring.batch.job.enabled=false
2022-08-19 16:00:24.781 ERROR 13732 --- [ task-1] o.s.batch.core.step.AbstractStep : Encountered an error executing step Synchronous Processing : Read -> Process -> Write in job fixedLengthFileJob
org.springframework.messaging.MessageDeliveryException: Dispatcher has no subscribers for channel 'IMPF Cloud Task.gatewayinputchannel'.; nested exception is org.springframework.integration.MessageDispatchingException: Dispatcher has no subscribers, failedMessage=GenericMessage [payload=10001,Bruce,S,Willis,M,16/07/1974,#1 1st main,1st cross,Moody,Alabama,#2 2nd main,2nd cross,Acmar,Alabama,Mobile,(205)710-2385,,Landline,(20,01, headers={RunGUID=00a04208-c0d8-4354-97f0-c5d8bfb5368d, replyChannel=org.springframework.messaging.core.GenericMessagingTemplate$TemporaryReplyChannel#27192c74, errorChannel=org.springframework.messaging.core.GenericMessagingTemplate$TemporaryReplyChannel#27192c74, IngestionPath=/opt/usr/conf/xyz/, id=d52d89f7-4ec0-5c3e-b84b-be4dc58223b5, kafka_replyTopic=impf_int_batch_success, timestamp=1660905024776}]
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:76) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:317) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:272) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:187) ~[spring-messaging-5.3.16.jar:5.3.16]
at org.springframework.messaging.core.GenericMessagingTemplate.doSendAndReceive(GenericMessagingTemplate.java:233) ~[spring-messaging-5.3.16.jar:5.3.16]
at org.springframework.messaging.core.GenericMessagingTemplate.doSendAndReceive(GenericMessagingTemplate.java:47) ~[spring-messaging-5.3.16.jar:5.3.16]
at org.springframework.messaging.core.AbstractMessagingTemplate.sendAndReceive(AbstractMessagingTemplate.java:46) ~[spring-messaging-5.3.16.jar:5.3.16]
at org.springframework.integration.core.MessagingTemplate.sendAndReceive(MessagingTemplate.java:97) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.integration.gateway.MessagingGatewaySupport.doSendAndReceive(MessagingGatewaySupport.java:522) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.integration.gateway.MessagingGatewaySupport.sendAndReceiveMessage(MessagingGatewaySupport.java:492) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.integration.gateway.GatewayProxyFactoryBean.sendOrSendAndReceive(GatewayProxyFactoryBean.java:652) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.integration.gateway.GatewayProxyFactoryBean.invokeGatewayMethod(GatewayProxyFactoryBean.java:588) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.integration.gateway.GatewayProxyFactoryBean.doInvoke(GatewayProxyFactoryBean.java:555) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.integration.gateway.GatewayProxyFactoryBean.invoke(GatewayProxyFactoryBean.java:544) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) [spring-aop-5.3.16.jar:5.3.16]
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:215) [spring-aop-5.3.16.jar:5.3.16]
at com.sun.proxy.$Proxy91.sendToKafka(Unknown Source) ~[na:na]
at com.cloudtask.batchconfig.CloudTaskBatchSyncConfig.lambda$syncProcessor$2(CloudTaskBatchSyncConfig.java:204) ~[classes/:na]
at org.springframework.batch.core.step.item.SimpleChunkProcessor.doProcess(SimpleChunkProcessor.java:134) ~[spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.step.item.SimpleChunkProcessor.transform(SimpleChunkProcessor.java:319) ~[spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.step.item.SimpleChunkProcessor.process(SimpleChunkProcessor.java:210) ~[spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.step.item.ChunkOrientedTasklet.execute(ChunkOrientedTasklet.java:77) ~[spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.step.tasklet.TaskletStep$ChunkTransactionCallback.doInTransaction(TaskletStep.java:407) ~[spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.step.tasklet.TaskletStep$ChunkTransactionCallback.doInTransaction(TaskletStep.java:331) ~[spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.transaction.support.TransactionTemplate.execute(TransactionTemplate.java:140) ~[spring-tx-5.3.16.jar:5.3.16]
at org.springframework.batch.core.step.tasklet.TaskletStep$2.doInChunkContext(TaskletStep.java:273) ~[spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.scope.context.StepContextRepeatCallback.doInIteration(StepContextRepeatCallback.java:82) ~[spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.repeat.support.RepeatTemplate.getNextResult(RepeatTemplate.java:375) ~[spring-batch-infrastructure-4.3.5.jar:4.3.5]
at org.springframework.batch.repeat.support.RepeatTemplate.executeInternal(RepeatTemplate.java:215) ~[spring-batch-infrastructure-4.3.5.jar:4.3.5]
at org.springframework.batch.repeat.support.RepeatTemplate.iterate(RepeatTemplate.java:145) ~[spring-batch-infrastructure-4.3.5.jar:4.3.5]
at org.springframework.batch.core.step.tasklet.TaskletStep.doExecute(TaskletStep.java:258) ~[spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.step.AbstractStep.execute(AbstractStep.java:208) ~[spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.job.SimpleStepHandler.handleStep(SimpleStepHandler.java:152) [spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.job.flow.JobFlowExecutor.executeStep(JobFlowExecutor.java:68) [spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.job.flow.support.state.StepState.handle(StepState.java:68) [spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.job.flow.support.SimpleFlow.resume(SimpleFlow.java:169) [spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.job.flow.support.SimpleFlow.start(SimpleFlow.java:144) [spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.job.flow.FlowJob.doExecute(FlowJob.java:137) [spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.job.AbstractJob.execute(AbstractJob.java:320) [spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.batch.core.launch.support.SimpleJobLauncher$1.run(SimpleJobLauncher.java:149) [spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:50) [spring-core-5.3.16.jar:5.3.16]
at org.springframework.batch.core.launch.support.SimpleJobLauncher.run(SimpleJobLauncher.java:140) [spring-batch-core-4.3.5.jar:4.3.5]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_331]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_331]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_331]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_331]
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:344) [spring-aop-5.3.16.jar:5.3.16]
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:198) [spring-aop-5.3.16.jar:5.3.16]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) [spring-aop-5.3.16.jar:5.3.16]
at org.springframework.batch.core.configuration.annotation.SimpleBatchConfiguration$PassthruAdvice.invoke(SimpleBatchConfiguration.java:128) [spring-batch-core-4.3.5.jar:4.3.5]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) [spring-aop-5.3.16.jar:5.3.16]
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:215) [spring-aop-5.3.16.jar:5.3.16]
at com.sun.proxy.$Proxy93.run(Unknown Source) [na:na]
at com.cloudtask.batchconfig.runner.JobRunner.runJob(JobRunner.java:71) [classes/:na]
at com.cloudtask.batchconfig.runner.JobRunner.runFlatFileBatchJob(JobRunner.java:43) [classes/:na]
at com.cloudtask.batchconfig.runner.JobRunner$$FastClassBySpringCGLIB$$e5597de2.invoke(<generated>) [classes/:na]
at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) [spring-core-5.3.16.jar:5.3.16]
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:783) [spring-aop-5.3.16.jar:5.3.16]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) [spring-aop-5.3.16.jar:5.3.16]
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:753) [spring-aop-5.3.16.jar:5.3.16]
at org.springframework.aop.interceptor.AsyncExecutionInterceptor.lambda$invoke$0(AsyncExecutionInterceptor.java:115) [spring-aop-5.3.16.jar:5.3.16]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[na:1.8.0_331]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) ~[na:1.8.0_331]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) ~[na:1.8.0_331]
at java.lang.Thread.run(Thread.java:750) ~[na:1.8.0_331]
Caused by: org.springframework.integration.MessageDispatchingException: Dispatcher has no subscribers
at org.springframework.integration.dispatcher.UnicastingDispatcher.doDispatch(UnicastingDispatcher.java:139) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.integration.dispatcher.UnicastingDispatcher.dispatch(UnicastingDispatcher.java:106) ~[spring-integration-core-5.5.9.jar:5.5.9]
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:72) ~[spring-integration-core-5.5.9.jar:5.5.9]
... 64 common frames omitted
--------------------------- Edit 4: Job launching code ---------------
@Bean
@Scheduled(fixedRate = 150000L)
public void jobScheduled() {
    logger.info("Job triggered");
    getAllFileNames().stream().forEach(fileName -> {
        jobRunner.runFlatFileBatchJob(fileName);
    });
}
Thanks
Santrupta
It is not a problem with this outGateway bean and its gatewayinputchannel subscription. It is a problem with the producer that sends to this gatewayinputchannel too early. So please share with us what does that, and how.
I am using a feeder to plug values into a request body.
val feeder = csv("feeders/project_tenants.csv").circular

def updateDbSettings = feed(feeder)
  .exec(http("Update Project Settings")
    .post("/url/here")
    .body(StringBody("""{"sqlServer":"${server}.test.int","sqlDatabase":"COMP_DB", "dataCenter":"HAI"}""")).asJson)
My CSV file looks like this:
server,test
check,a
check1,b
check2,c
check3,d
Each time I run the test I get an IllegalStateException: Feeder is now empty, stopping engine, which I don't understand because my feeder is circular. I understand that should mean the values iterate over again once the end of the file is reached. But even if I load only one session I get that error.
Stack trace with a single user per second:
14:18:39.076 [ERROR] i.g.a.Gatling$ - Run crashed
java.lang.IllegalStateException: Feeder is now empty, stopping engine
at io.gatling.core.action.SingletonFeed$$anonfun$receive$1.applyOrElse(SingletonFeed.scala:67)
at akka.actor.Actor.aroundReceive(Actor.scala:539)
at akka.actor.Actor.aroundReceive$(Actor.scala:537)
at io.gatling.core.akka.BaseActor.aroundReceive(BaseActor.scala:25)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:612)
at akka.actor.ActorCell.invoke(ActorCell.scala:581)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:268)
at akka.dispatch.Mailbox.run(Mailbox.scala:229)
at akka.dispatch.Mailbox.exec(Mailbox.scala:241)
at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
java.lang.reflect.InvocationTargetException
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at io.gatling.mojo.MainWithArgsInFile.runMain(MainWithArgsInFile.java:50)
at io.gatling.mojo.MainWithArgsInFile.main(MainWithArgsInFile.java:33)
Caused by: java.lang.IllegalStateException: Feeder is now empty, stopping engine
at io.gatling.core.action.SingletonFeed$$anonfun$receive$1.applyOrElse(SingletonFeed.scala:67)
at akka.actor.Actor.aroundReceive(Actor.scala:539)
at akka.actor.Actor.aroundReceive$(Actor.scala:537)
at io.gatling.core.akka.BaseActor.aroundReceive(BaseActor.scala:25)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:612)
at akka.actor.ActorCell.invoke(ActorCell.scala:581)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:268)
at akka.dispatch.Mailbox.run(Mailbox.scala:229)
at akka.dispatch.Mailbox.exec(Mailbox.scala:241)
at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
[ERROR] [07/25/2022 14:18:39.118] [GatlingSystem-akka.actor.default-dispatcher-7] [akka.actor.ActorSystemImpl(GatlingSystem)] exception while executing timer task
java.lang.NullPointerException
at io.gatling.core.akka.BaseActor.system(BaseActor.scala:27)
at io.gatling.graphite.sender.TcpSender.askForConnection(TcpSender.scala:103)
at io.gatling.graphite.sender.TcpSender$$anonfun$1.$anonfun$applyOrElse$1(TcpSender.scala:58)
at akka.actor.Scheduler$$anon$4.run(Scheduler.scala:202)
at akka.actor.LightArrayRevolverScheduler$TaskHolder.run(LightArrayRevolverScheduler.scala:343)
at akka.actor.LightArrayRevolverScheduler.$anonfun$close$1(LightArrayRevolverScheduler.scala:145)
at akka.actor.LightArrayRevolverScheduler.$anonfun$close$1$adapted(LightArrayRevolverScheduler.scala:144)
at scala.collection.Iterator.foreach(Iterator.scala:941)
at scala.collection.Iterator.foreach$(Iterator.scala:941)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
at scala.collection.IterableLike.foreach(IterableLike.scala:74)
at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
at akka.actor.LightArrayRevolverScheduler.close(LightArrayRevolverScheduler.scala:144)
at akka.actor.ActorSystemImpl.stopScheduler(ActorSystem.scala:980)
at akka.actor.ActorSystemImpl.$anonfun$_start$1(ActorSystem.scala:910)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at akka.actor.ActorSystemImpl$$anon$2.run(ActorSystem.scala:931)
at akka.actor.ActorSystemImpl$TerminationCallbacks$$anonfun$addRec$1$1.applyOrElse(ActorSystem.scala:1118)
at akka.actor.ActorSystemImpl$TerminationCallbacks$$anonfun$addRec$1$1.applyOrElse(ActorSystem.scala:1118)
at scala.concurrent.Future.$anonfun$andThen$1(Future.scala:536)
at scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
at scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
at akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:55)
at akka.dispatch.BatchingExecutor$BlockableBatch.$anonfun$run$1(BatchingExecutor.scala:92)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)
at akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:92)
at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:41)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:49)
at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
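As a side note, a quick way to confirm that the circular feeder actually has records is to materialise it outside the scenario. A hypothetical diagnostic sketch, assuming a Gatling 3.x version that exposes readRecords on the feeder builder and the same relative path, placed inside the Simulation class:
// Diagnostic only: readRecords loads the whole CSV eagerly, so an empty or
// unparsable file is reported here instead of mid-run.
val records = csv("feeders/project_tenants.csv").readRecords
println(s"Parsed ${records.size} record(s) from project_tenants.csv")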
I'm trying to run the following test in Scala:
test("RawData read") {
val typp = ""
val spark = SparkSession.builder().master("local[1]").appName("SparkByExamples.com").getOrCreate()
val actual = new DataReader(spark, typp).readRawData("./src/test/resources/input/parquet/part-00000-512bb01c-ef7a-4760-bfe9-905f504e4840.c000.snappy.parquet")
actual.coalesce(1).limit(100).write.mode(SaveMode.Overwrite).save("./src/test/resources/output/")
import sqlContext.implicits._
val expected: Dataset[RawData] = spark
.read
.parquet("./src/test/resources/output/")
.map(row => RawData(
row.getString(0),
row.getDouble(1),
row.getInt(2),
row.getString(3),
row.getDouble(4),
row.getLong(5),
row.getString(6)
))
assert(actual.limit(100).collectAsList() === expected.collectAsList())
}
The error that I receive is this:
Error while instantiating 'org.apache.spark.sql.hive.HiveSessionStateBuilder':
java.lang.IllegalArgumentException: Error while instantiating 'org.apache.spark.sql.hive.HiveSessionStateBuilder':
at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1062)
at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:137)
at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:136)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:136)
at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:133)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$getOrCreate$3.apply(SparkSession.scala:902)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$getOrCreate$3.apply(SparkSession.scala:902)
at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:130)
at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:130)
at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:236)
at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
at scala.collection.mutable.HashMap.foreach(HashMap.scala:130)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:902)
at eu.man.bigdata.commons.importer.DataReaderTest$$anonfun$2.apply(DataReaderTest.scala:23)
at eu.man.bigdata.commons.importer.DataReaderTest$$anonfun$2.apply(DataReaderTest.scala:21)
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196)
at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560)
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
at eu.man.bigdata.commons.importer.DataReaderTest.org$scalatest$BeforeAndAfterEach$$super$runTest(DataReaderTest.scala:13)
at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:221)
at eu.man.bigdata.commons.importer.DataReaderTest.runTest(DataReaderTest.scala:13)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
at org.scalatest.Suite$class.run(Suite.scala:1147)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
at eu.man.bigdata.commons.importer.DataReaderTest.org$scalatest$BeforeAndAfterAll$$super$run(DataReaderTest.scala:13)
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
at eu.man.bigdata.commons.importer.DataReaderTest.run(DataReaderTest.scala:13)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1346)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
at org.scalatest.tools.Runner$.run(Runner.scala:850)
at org.scalatest.tools.Runner.run(Runner.scala)
at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2or3(ScalaTestRunner.java:38)
at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:25)
Caused by: org.apache.spark.sql.AnalysisException: java.lang.RuntimeException: java.lang.RuntimeException: The root scratch dir: /tmp/hive on HDFS should be writable. Current permissions are: rw-rw-rw-;
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:106)
at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1059)
... 63 more
Caused by: java.lang.RuntimeException: java.lang.RuntimeException: The root scratch dir: /tmp/hive on HDFS should be writable. Current permissions are: rw-rw-rw-
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
... 72 more
Caused by: java.lang.RuntimeException: The root scratch dir: /tmp/hive on HDFS should be writable. Current permissions are: rw-rw-rw-
at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:612)
at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
... 86 more
I'm using sbt version 1.0.3. I have tried many of the solutions posed in the various questions related to this problem, but nothing has worked.
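For what it's worth, the root cause at the bottom of the trace is Hive session state being built and /tmp/hive not being writable (on Windows the usual remedy is winutils.exe chmod 777 \tmp\hive). Since the test never calls enableHiveSupport, another hedged sketch is to pin the session to the in-memory catalog so HiveSessionStateBuilder is never instantiated; spark.sql.catalogImplementation is an internal but settable Spark config:
import org.apache.spark.sql.SparkSession

// Sketch under the assumption that the test does not need Hive at all.
val spark = SparkSession.builder()
  .master("local[1]")
  .appName("SparkByExamples.com")
  .config("spark.sql.catalogImplementation", "in-memory") // avoid HiveSessionStateBuilder
  .getOrCreate()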
After looking at all the existing solutions, I am asking this question. I have been trying to create a view from a DataFrame. I am able to build the DataFrame, but I am not able to create a view on top of it: I cannot connect to the local metastore using the latest version of IntelliJ.
I am using Hadoop 3.0.0. Below is my POM:
<dependency>
    <groupId>org.scala-lang</groupId>
    <artifactId>scala-library</artifactId>
    <version>2.12.12</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.12</artifactId>
    <version>3.0.1</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-sql_2.12</artifactId>
    <version>3.0.1</version>
</dependency>
My Spark configuration looks like this.
def getSparkSession(): SparkSession = {
  var session: SparkSession = null
  var flag: Boolean = false
  try {
    System.setProperty("HADOOP_HOME", constants.HADOOP_HOME)
    System.setProperty("hadoop.home.dir", constants.HADOOP_HOME_DIR)
    session = SparkSession
      .builder()
      .appName("Validation").master("local[1]")
      .config("hive.metastore.uris", "thrift://localhost:9083")
      .config("fs.s3.impl", constants.S3_IMPL)
      .config("fs.s3.awsAccessKeyId", prop.keys.get(constants.S3_FS_ACCESS_KEY))
      .config("fs.s3.awsSecretAccessKey", prop.keys.get(constants.S3_FS_SECRET_KEY))
      .enableHiveSupport()
      .getOrCreate()
    println("-:Spark session created successfully:-" + session)
    flag = true
  } catch {
    case t: Exception =>
      log.info(t.printStackTrace())
  }
  session
}
Below is the code for reading the dataframe and creating the view.
val jdbcDF = session.read
  .format("jdbc")
  .option("url", "jdbc:postgresql://hostname/dbname?user=xxxxxxx&password=xxxxxxx")
  .option("dbtable", "offer")
  .option("driver", "org.postgresql.Driver")
  .load().cache()

println(jdbcDF.show(20, false))

jdbcDF.createOrReplaceGlobalTempView("abc")
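For context, a global temp view is always registered under Spark's global_temp database, so once created it would be read back as in the sketch below (the view name abc matches the code above):
// Global temp views are addressed via the global_temp database.
session.sql("SELECT * FROM global_temp.abc").show(20, false)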
Please find the logs below.
21/03/26 15:05:27 WARN metastore: Failed to connect to the MetaStore Server...
21/03/26 15:05:27 INFO metastore: Waiting 1 seconds before next connection attempt.
21/03/26 15:05:27 WARN ProcfsMetricsGetter: Exception when trying to compute pagesize, as a result reporting of ProcessTree metrics is stopped
21/03/26 15:05:28 WARN Hive: Failed to register all functions.
java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:225)
at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:103)
at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:225)
at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:60)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.getGlobalTempView(SessionCatalog.scala:587)
at org.apache.spark.sql.execution.command.CreateViewCommand.run(views.scala:120)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3618)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3616)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:229)
at org.apache.spark.sql.Dataset$.$anonfun$ofRows$1(Dataset.scala:92)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:89)
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$withPlan(Dataset.scala:3646)
at org.apache.spark.sql.Dataset.createOrReplaceGlobalTempView(Dataset.scala:3294)
at com.amp.eolas.aim.validation.checks.dataqualitychecksoperations$.createTempTables(dataqualitycheckoperations.scala:262)
at com.amp.eolas.aim.validation.checks.dataqualitychecksoperations$.$anonfun$allDqandTruncateReloadFunctions$1(dataqualitycheckoperations.scala:303)
at com.amp.eolas.aim.validation.checks.dataqualitychecksoperations$.$anonfun$allDqandTruncateReloadFunctions$1$adapted(dataqualitycheckoperations.scala:300)
at scala.collection.Iterator.foreach(Iterator.scala:943)
at scala.collection.Iterator.foreach$(Iterator.scala:943)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
at scala.collection.IterableLike.foreach(IterableLike.scala:74)
at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
at com.amp.eolas.aim.validation.checks.dataqualitychecksoperations$.allDqandTruncateReloadFunctions(dataqualitycheckoperations.scala:300)
at com.amp.eolas.aim.validation.jobs.jobs$.main(jobs.scala:10)
at com.amp.eolas.aim.validation.jobs.jobs.main(jobs.scala)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
... 61 more
Caused by: MetaException(message:Could not connect to meta store using any of the URIs provided. Most recent failure: org.apache.thrift.transport.TTransportException: java.net.ConnectException: Connection refused: connect
at org.apache.thrift.transport.TSocket.open(TSocket.java:226)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:480)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:247)
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:70)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:225)
at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:103)
at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:225)
at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:60)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.getGlobalTempView(SessionCatalog.scala:587)
at org.apache.spark.sql.execution.command.CreateViewCommand.run(views.scala:120)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3618)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3616)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:229)
at org.apache.spark.sql.Dataset$.$anonfun$ofRows$1(Dataset.scala:92)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:89)
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$withPlan(Dataset.scala:3646)
at org.apache.spark.sql.Dataset.createOrReplaceGlobalTempView(Dataset.scala:3294)
at com.amp.eolas.aim.validation.checks.dataqualitychecksoperations$.createTempTables(dataqualitycheckoperations.scala:262)
at com.amp.eolas.aim.validation.checks.dataqualitychecksoperations$.$anonfun$allDqandTruncateReloadFunctions$1(dataqualitycheckoperations.scala:303)
at com.amp.eolas.aim.validation.checks.dataqualitychecksoperations$.$anonfun$allDqandTruncateReloadFunctions$1$adapted(dataqualitycheckoperations.scala:300)
at scala.collection.Iterator.foreach(Iterator.scala:943)
at scala.collection.Iterator.foreach$(Iterator.scala:943)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
at scala.collection.IterableLike.foreach(IterableLike.scala:74)
at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
at com.amp.eolas.aim.validation.checks.dataqualitychecksoperations$.allDqandTruncateReloadFunctions(dataqualitycheckoperations.scala:300)
at com.amp.eolas.aim.validation.jobs.jobs$.main(jobs.scala:10)
at com.amp.eolas.aim.validation.jobs.jobs.main(jobs.scala)
Caused by: java.net.ConnectException: Connection refused: connect
at java.net.DualStackPlainSocketImpl.waitForConnect(Native Method)
at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:81)
at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
at java.net.Socket.connect(Socket.java:606)
at org.apache.thrift.transport.TSocket.open(TSocket.java:221)
... 69 more
)
Any help is appreciated.
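In case it helps narrow things down: the trace bottoms out in Connection refused for thrift://localhost:9083, i.e. nothing is listening where hive.metastore.uris points. A hedged sketch of a purely local setup, assuming a standalone metastore is not actually required (the warehouse path is illustrative), is to drop hive.metastore.uris and let enableHiveSupport() fall back to an embedded metastore:
// Sketch: without hive.metastore.uris, Spark creates an embedded Derby
// metastore (a metastore_db directory) instead of connecting over Thrift.
val session = SparkSession
  .builder()
  .appName("Validation").master("local[1]")
  .config("spark.sql.warehouse.dir", "file:///tmp/spark-warehouse") // hypothetical path
  .enableHiveSupport()
  .getOrCreate()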
Apache CXF and JAX-WS always seem to cause issues. I'm not sure if I'm using them wrongly!
Here is the exception that I get:
20151203-15:14:06.836+0100 [play-akka.actor.default-dispatcher-9] ERROR my.proj - Asset E911S (SOAP) -- unknown error occurred at 2015-12-03T14:14:05.778Z while reading tag getSlave
javax.xml.ws.WebServiceException: Could not send Message.
at org.apache.cxf.jaxws.JaxWsClientProxy.invoke(JaxWsClientProxy.java:150) ~[cxf-rt-frontend-jaxws-3.1.4.jar:3.1.4]
at com.sun.proxy.$Proxy57.getSlave(Unknown Source) ~[na:na]
at my.proj.connectors.soap.MyServiceClient$$anonfun$getSlave$1.apply(MyServiceClient.scala:174) ~[classes/:na]
at my.proj.connectors.soap.MyServiceClient$$anonfun$getSlave$1.apply(MyServiceClient.scala:97) ~[classes/:na]
at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24) ~[scala-library-2.11.7.jar:na]
at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24) ~[scala-library-2.11.7.jar:na]
at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:41) [akka-actor_2.11-2.3.4.jar:na]
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:393) [akka-actor_2.11-2.3.4.jar:na]
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) [scala-library-2.11.7.jar:na]
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) [scala-library-2.11.7.jar:na]
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) [scala-library-2.11.7.jar:na]
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) [scala-library-2.11.7.jar:na]
Caused by: java.net.SocketTimeoutException: SocketTimeoutException invoking http://1/c.2.3.4/gi-bin/cgi.cgi?WebService: Read timed out
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_60]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_60]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_60]
at java.lang.reflect.Constructor.newInstance(Constructor.java:422) ~[na:1.8.0_60]
at org.apache.cxf.transport.http.HTTPConduit$WrappedOutputStream.mapException(HTTPConduit.java:1376) ~[cxf-rt-transports-http-3.1.4.jar:3.1.4]
at org.apache.cxf.transport.http.HTTPConduit$WrappedOutputStream.close(HTTPConduit.java:1360) ~[cxf-rt-transports-http-3.1.4.jar:3.1.4]
at org.apache.cxf.transport.AbstractConduit.close(AbstractConduit.java:56) ~[cxf-core-3.1.4.jar:3.1.4]
at org.apache.cxf.transport.http.HTTPConduit.close(HTTPConduit.java:651) ~[cxf-rt-transports-http-3.1.4.jar:3.1.4]
at org.apache.cxf.interceptor.MessageSenderInterceptor$MessageSenderEndingInterceptor.handleMessage(MessageSenderInterceptor.java:62) ~[cxf-core-3.1.4.jar:3.1.4]
at org.apache.cxf.phase.PhaseInterceptorChain.doIntercept(PhaseInterceptorChain.java:308) ~[cxf-core-3.1.4.jar:3.1.4]
at org.apache.cxf.endpoint.ClientImpl.doInvoke(ClientImpl.java:514) ~[cxf-core-3.1.4.jar:3.1.4]
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:423) ~[cxf-core-3.1.4.jar:3.1.4]
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:324) ~[cxf-core-3.1.4.jar:3.1.4]
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:277) ~[cxf-core-3.1.4.jar:3.1.4]
at org.apache.cxf.frontend.ClientProxy.invokeSync(ClientProxy.java:96) ~[cxf-rt-frontend-simple-3.1.4.jar:3.1.4]
at org.apache.cxf.jaxws.JaxWsClientProxy.invoke(JaxWsClientProxy.java:139) ~[cxf-rt-frontend-jaxws-3.1.4.jar:3.1.4]
... 11 common frames omitted
Caused by: java.net.SocketTimeoutException: Read timed out
at java.net.SocketInputStream.socketRead0(Native Method) ~[na:1.8.0_60]
at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) ~[na:1.8.0_60]
at java.net.SocketInputStream.read(SocketInputStream.java:170) ~[na:1.8.0_60]
at java.net.SocketInputStream.read(SocketInputStream.java:141) ~[na:1.8.0_60]
at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) ~[na:1.8.0_60]
at java.io.BufferedInputStream.read1(BufferedInputStream.java:286) ~[na:1.8.0_60]
Here is what I do to set the context!
private def setRequestContext(proxy: Client, withCredentials: Boolean = false): Unit = {
  // set the timeout!
  val httpConduit = proxy.getConduit.asInstanceOf[HTTPConduit]
  val httpClientPolicy = new HTTPClientPolicy()
  httpClientPolicy.setConnectionTimeout(mySoapClientConfig.requestTimeout.toSeconds)
  httpClientPolicy.setReceiveTimeout(mySoapClientConfig.requestTimeout.toSeconds)
  httpConduit.setClient(httpClientPolicy)

  if (withCredentials) {
    val userCredentials = new UserCredentials
    userCredentials.setPassword(mySoapClientConfig.password.orNull)
    userCredentials.setUserName(mySoapClientConfig.userName.orNull)

    val headerList = Seq(
      new Header(
        new QName(mySoapClientConfig.targetNamespace, "UserCredentials"),
        userCredentials,
        new JAXBDataBinding(classOf[UserCredentials])
      )
    )

    import scala.collection.JavaConverters._
    proxy.getRequestContext.put(Header.HEADER_LIST, headerList.asJava)
  }
}
Could anyone please help me out? I'm able to connect to the WebService using SOAP UI!
I figured out what the problem was. Pay close attention to the following lines:
httpClientPolicy.setConnectionTimeout(mySoapClientConfig.requestTimeout.toSeconds)
httpClientPolicy.setReceiveTimeout(mySoapClientConfig.requestTimeout.toSeconds)
The setXXXTimeout methods take a long in milliseconds, and what I had been passing in was 4 seconds, which was interpreted as 4 milliseconds and caused the read timeouts!
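So the fix, sketched here under the assumption that requestTimeout is a scala.concurrent.duration.FiniteDuration, is simply to pass milliseconds:
// toMillis returns the duration as a Long in milliseconds, which is what
// HTTPClientPolicy.setConnectionTimeout / setReceiveTimeout expect.
httpClientPolicy.setConnectionTimeout(mySoapClientConfig.requestTimeout.toMillis)
httpClientPolicy.setReceiveTimeout(mySoapClientConfig.requestTimeout.toMillis)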