java.lang.IndexOutOfBoundsException: Index: 0, Size: 0 with Javers @JaversSpringDataAuditable - spring-data-jpa

@JaversSpringDataAuditable works fine with a Spring Data repository. But when I add
@Lock(LockModeType.PESSIMISTIC_WRITE) @QueryHints({@QueryHint(name = "javax.persistence.lock.timeout", value = "3000")})
to a query method, findByXXAndXX(), for pessimistic locking, and call the API that runs this query from multiple threads, we get an IndexOutOfBoundsException.
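For reference, here is a minimal sketch of the setup described above (the entity and repository names are hypothetical, not taken from the original code):

import javax.persistence.LockModeType;
import javax.persistence.QueryHint;
import org.javers.spring.annotation.JaversSpringDataAuditable;
import org.springframework.data.jpa.repository.Lock;
import org.springframework.data.jpa.repository.QueryHints;
import org.springframework.data.repository.CrudRepository;

// Hypothetical repository combining Javers auditing with a pessimistically
// locked finder method, as described in the question.
@JaversSpringDataAuditable
public interface MyEntityRepository extends CrudRepository<MyEntity, Long> {

    @Lock(LockModeType.PESSIMISTIC_WRITE)
    @QueryHints({@QueryHint(name = "javax.persistence.lock.timeout", value = "3000")})
    MyEntity findByFooAndBar(String foo, String bar); // stands in for findByXXAndXX()
}

The full stack trace: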
java.lang.IndexOutOfBoundsException: Index: 0, Size: 0
at java.util.ArrayList.rangeCheck(ArrayList.java:659)
at java.util.ArrayList.get(ArrayList.java:435)
at org.javers.repository.sql.finders.CdoSnapshotFinder.lambda$getLatest$1(CdoSnapshotFinder.java:51)
at java.util.Optional.map(Optional.java:215)
at org.javers.repository.sql.finders.CdoSnapshotFinder.getLatest(CdoSnapshotFinder.java:46)
at org.javers.repository.sql.JaversSqlRepository.lambda$getLatest$0(JaversSqlRepository.java:69)
at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
at java.util.HashMap$KeySpliterator.forEachRemaining(HashMap.java:1556)
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)
at org.javers.repository.sql.JaversSqlRepository.getLatest(JaversSqlRepository.java:72)
at org.javers.repository.api.JaversExtendedRepository.getLatest(JaversExtendedRepository.java:98)
at org.javers.core.snapshot.SnapshotGraphFactory.createLatest(SnapshotGraphFactory.java:25)
at org.javers.core.commit.CommitFactory.createCommit(CommitFactory.java:77)
at org.javers.core.commit.CommitFactory.create(CommitFactory.java:72)
at org.javers.core.JaversCore.commit(JaversCore.java:83)
at org.javers.spring.jpa.JaversTransactionalDecorator.commit(JaversTransactionalDecorator.java:82)
at org.javers.spring.jpa.JaversTransactionalDecorator$$FastClassBySpringCGLIB$$acb40bd0.invoke(<generated>)
at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218)
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:750)
How can we solve this problem?
Thanks in advance.

Related

Infinispan with hanging threads in level 1 clean up

We are trying to upgrade from WildFly 18 to 22 and are experiencing trouble with threads left dangling in the jgroups thread group. When the number of threads reaches the default maximum (200), everything grinds to a halt (naturally). This takes two to three days in our test environment.
In the thread dump we are left with 200 dangling threads that look like this:
at jdk.internal.misc.Unsafe.park(java.base#11.0.8/Native Method)
at java.util.concurrent.locks.LockSupport.parkNanos(java.base#11.0.8/LockSupport.java:234)
at java.util.concurrent.CompletableFuture$Signaller.block(java.base#11.0.8/CompletableFuture.java:1798)
at java.util.concurrent.ForkJoinPool.managedBlock(java.base#11.0.8/ForkJoinPool.java:3128)
at java.util.concurrent.CompletableFuture.timedGet(java.base#11.0.8/CompletableFuture.java:1868)
at java.util.concurrent.CompletableFuture.get(java.base#11.0.8/CompletableFuture.java:2021)
at org.infinispan.util.concurrent.CompletableFutures.await(CompletableFutures.java:125)
at org.infinispan.util.concurrent.CompletionStages.join(CompletionStages.java:80)
at org.infinispan.interceptors.distribution.L1NonTxInterceptor.lambda$handleDataWriteCommand$10(L1NonTxInterceptor.java:399)
at org.infinispan.interceptors.distribution.L1NonTxInterceptor$$Lambda$1806/0x0000000841ace040.apply(Unknown Source)
at org.infinispan.interceptors.impl.QueueAsyncInvocationStage.invokeQueuedHandlers(QueueAsyncInvocationStage.java:125)
at org.infinispan.interceptors.impl.QueueAsyncInvocationStage.accept(QueueAsyncInvocationStage.java:88)
at org.infinispan.interceptors.impl.QueueAsyncInvocationStage.accept(QueueAsyncInvocationStage.java:33)
at java.util.concurrent.CompletableFuture.uniWhenComplete(java.base#11.0.8/CompletableFuture.java:859)
at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(java.base#11.0.8/CompletableFuture.java:837)
at java.util.concurrent.CompletableFuture.postComplete(java.base#11.0.8/CompletableFuture.java:506)
at java.util.concurrent.CompletableFuture.complete(java.base#11.0.8/CompletableFuture.java:2073)
at org.infinispan.interceptors.distribution.PrimaryOwnerOnlyCollector.primaryResult(PrimaryOwnerOnlyCollector.java:30)
at org.infinispan.interceptors.distribution.TriangleDistributionInterceptor.lambda$forwardToPrimary$6(TriangleDistributionInterceptor.java:526)
at org.infinispan.interceptors.distribution.TriangleDistributionInterceptor$$Lambda$1953/0x0000000841d51040.apply(Unknown Source)
at java.util.concurrent.CompletableFuture.uniHandle(java.base#11.0.8/CompletableFuture.java:930)
at java.util.concurrent.CompletableFuture$UniHandle.tryFire(java.base#11.0.8/CompletableFuture.java:907)
at java.util.concurrent.CompletableFuture.postComplete(java.base#11.0.8/CompletableFuture.java:506)
at java.util.concurrent.CompletableFuture.complete(java.base#11.0.8/CompletableFuture.java:2073)
at org.infinispan.remoting.transport.AbstractRequest.complete(AbstractRequest.java:67)
at org.infinispan.remoting.transport.impl.SingleTargetRequest.onResponse(SingleTargetRequest.java:45)
at org.infinispan.remoting.transport.impl.RequestRepository.addResponse(RequestRepository.java:52)
at org.infinispan.remoting.transport.jgroups.JGroupsTransport.processResponse(JGroupsTransport.java:1402)
at org.infinispan.remoting.transport.jgroups.JGroupsTransport.processMessage(JGroupsTransport.java:1305)
at org.infinispan.remoting.transport.jgroups.JGroupsTransport.access$300(JGroupsTransport.java:131)
at org.infinispan.remoting.transport.jgroups.JGroupsTransport$ChannelCallbacks.up(JGroupsTransport.java:1445)
at org.jgroups.JChannel.up(JChannel.java:784)
at org.jgroups.stack.ProtocolStack.up(ProtocolStack.java:913)
at org.jgroups.protocols.FRAG3.up(FRAG3.java:165)
at org.jgroups.protocols.FlowControl.up(FlowControl.java:343)
at org.jgroups.protocols.FlowControl.up(FlowControl.java:343)
at org.jgroups.protocols.pbcast.GMS.up(GMS.java:876)
at org.jgroups.protocols.pbcast.STABLE.up(STABLE.java:243)
at org.jgroups.protocols.UNICAST3.deliverMessage(UNICAST3.java:1049)
at org.jgroups.protocols.UNICAST3.addMessage(UNICAST3.java:772)
at org.jgroups.protocols.UNICAST3.handleDataReceived(UNICAST3.java:753)
at org.jgroups.protocols.UNICAST3.up(UNICAST3.java:405)
at org.jgroups.protocols.pbcast.NAKACK2.up(NAKACK2.java:592)
at org.jgroups.protocols.VERIFY_SUSPECT.up(VERIFY_SUSPECT.java:132)
at org.jgroups.protocols.FailureDetection.up(FailureDetection.java:186)
at org.jgroups.protocols.FD_SOCK.up(FD_SOCK.java:254)
at org.jgroups.protocols.MERGE3.up(MERGE3.java:281)
at org.jgroups.protocols.Discovery.up(Discovery.java:300)
at org.jgroups.protocols.TP.passMessageUp(TP.java:1396)
at org.jgroups.util.SubmitToThreadPool$SingleMessageHandler.run(SubmitToThreadPool.java:87)
at java.util.concurrent.ThreadPoolExecutor.runWorker(java.base#11.0.8/ThreadPoolExecutor.java:1128)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(java.base#11.0.8/ThreadPoolExecutor.java:628)
at java.lang.Thread.run(java.base#11.0.8/Thread.java:834)
These threads are hanging on a CompletableFuture.get(...) with an Infinispan-hardcoded timeout of 24 hours. This is probably not something that should happen, which is confirmed by the exception we get when the timeout is finally reached:
2021-03-03 09:00:46,638 ERROR [org.infinispan.interceptors.impl.InvocationContextInterceptor] (jgroups-263,xxxxxxxxxxxx) ISPN000136: Error executing command ReadWriteKeyCommand on Cache 'sessionmanager.session', writing keys [xxxx#xxxxxx]: java.lang.IllegalStateException: This should never happen!
at org.infinispan#11.0.8.Final//org.infinispan.util.concurrent.CompletableFutures.await(CompletableFutures.java:127)
at org.infinispan#11.0.8.Final//org.infinispan.util.concurrent.CompletionStages.join(CompletionStages.java:80)
at org.infinispan#11.0.8.Final//org.infinispan.interceptors.distribution.L1NonTxInterceptor.lambda$handleDataWriteCommand$10(L1NonTxInterceptor.java:399)
at org.infinispan#11.0.8.Final//org.infinispan.interceptors.impl.QueueAsyncInvocationStage.invokeQueuedHandlers(QueueAsyncInvocationStage.java:125)
at org.infinispan#11.0.8.Final//org.infinispan.interceptors.impl.QueueAsyncInvocationStage.accept(QueueAsyncInvocationStage.java:88)
at org.infinispan#11.0.8.Final//org.infinispan.interceptors.impl.QueueAsyncInvocationStage.accept(QueueAsyncInvocationStage.java:33)
at java.base/java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:859)
at java.base/java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:837)
at java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:506)
at java.base/java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:2073)
at org.infinispan#11.0.8.Final//org.infinispan.interceptors.distribution.PrimaryOwnerOnlyCollector.primaryResult(PrimaryOwnerOnlyCollector.java:30)
at org.infinispan#11.0.8.Final//org.infinispan.interceptors.distribution.TriangleDistributionInterceptor.lambda$forwardToPrimary$6(TriangleDistributionInterceptor.java:526)
at java.base/java.util.concurrent.CompletableFuture.uniHandle(CompletableFuture.java:930)
at java.base/java.util.concurrent.CompletableFuture$UniHandle.tryFire(CompletableFuture.java:907)
at java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:506)
at java.base/java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:2073)
at org.infinispan#11.0.8.Final//org.infinispan.remoting.transport.AbstractRequest.complete(AbstractRequest.java:67)
at org.infinispan#11.0.8.Final//org.infinispan.remoting.transport.impl.SingleTargetRequest.onResponse(SingleTargetRequest.java:45)
at org.infinispan#11.0.8.Final//org.infinispan.remoting.transport.impl.RequestRepository.addResponse(RequestRepository.java:52)
at org.infinispan#11.0.8.Final//org.infinispan.remoting.transport.jgroups.JGroupsTransport.processResponse(JGroupsTransport.java:1402)
at org.infinispan#11.0.8.Final//org.infinispan.remoting.transport.jgroups.JGroupsTransport.processMessage(JGroupsTransport.java:1305)
at org.infinispan#11.0.8.Final//org.infinispan.remoting.transport.jgroups.JGroupsTransport.access$300(JGroupsTransport.java:131)
at org.infinispan#11.0.8.Final//org.infinispan.remoting.transport.jgroups.JGroupsTransport$ChannelCallbacks.up(JGroupsTransport.java:1445)
at org.jgroups#4.2.10.Final//org.jgroups.JChannel.up(JChannel.java:784)
at org.jgroups#4.2.10.Final//org.jgroups.stack.ProtocolStack.up(ProtocolStack.java:913)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.FRAG3.up(FRAG3.java:165)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.FlowControl.up(FlowControl.java:343)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.FlowControl.up(FlowControl.java:343)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.pbcast.GMS.up(GMS.java:876)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.pbcast.STABLE.up(STABLE.java:243)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.UNICAST3.deliverMessage(UNICAST3.java:1049)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.UNICAST3.addMessage(UNICAST3.java:772)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.UNICAST3.handleDataReceived(UNICAST3.java:753)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.UNICAST3.up(UNICAST3.java:405)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.pbcast.NAKACK2.up(NAKACK2.java:592)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.VERIFY_SUSPECT.up(VERIFY_SUSPECT.java:132)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.FailureDetection.up(FailureDetection.java:186)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.FD_SOCK.up(FD_SOCK.java:254)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.MERGE3.up(MERGE3.java:281)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.Discovery.up(Discovery.java:300)
at org.jgroups#4.2.10.Final//org.jgroups.protocols.TP.passMessageUp(TP.java:1396)
at org.jgroups#4.2.10.Final//org.jgroups.util.SubmitToThreadPool$SingleMessageHandler.run(SubmitToThreadPool.java:87)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: java.util.concurrent.TimeoutException
at java.base/java.util.concurrent.CompletableFuture.timedGet(CompletableFuture.java:1886)
at java.base/java.util.concurrent.CompletableFuture.get(CompletableFuture.java:2021)
at org.infinispan#11.0.8.Final//org.infinispan.util.concurrent.CompletableFutures.await(CompletableFutures.java:125)
... 44 more
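For context, the await helper at the top of this trace follows roughly the pattern sketched below (an illustration of the blocking behavior, not the actual Infinispan source):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

// Sketch: the caller thread parks on CompletableFuture.get with a very long
// timeout; when the timeout finally expires, the TimeoutException is rethrown
// as the IllegalStateException seen in the log above.
final class AwaitSketch {
    static <T> T await(CompletableFuture<T> future) throws InterruptedException, ExecutionException {
        try {
            return future.get(1, TimeUnit.DAYS); // stand-in for the hardcoded 24-hour timeout
        } catch (TimeoutException e) {
            throw new IllegalStateException("This should never happen!", e);
        }
    }
}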
Has anyone encountered a similar problem? What could possibly lead to this situation?

How to read MongoDB DBRef on PrestoDB/PrestoSQL?

MongoDB has references from one document to another (something similar to a foreign key) in the form of DBRefs. These are like any other property, with two parts: the name of the collection they reference ($ref) and the id of the document they refer to ($id).
Because both parts start with $, these DBRefs are not picked up automatically when querying the collections.
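To make the shape of these fields concrete, here is a small sketch using the MongoDB Java driver (the collection name and id are hypothetical):

import org.bson.types.ObjectId;
import com.mongodb.DBRef;

// A DBRef carries the referenced collection name ($ref) and the id of the
// referenced document ($id).
public class DbRefExample {
    public static void main(String[] args) {
        DBRef ref = new DBRef("otherCollection", new ObjectId());
        System.out.println(ref.getCollectionName()); // the $ref part
        System.out.println(ref.getId());             // the $id part
    }
}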
I defined them manually in the _schema collection like this:
{
"name" : "otherCollection",
"type" : "row($id ObjectId, $ref varchar)",
"hidden" : false
}
But it throws an exception at query time. I'm using PrestoDB v0.232 but planning to move to PrestoSQL.
2020-03-12T20:56:32.164-0600 ERROR remote-task-callback-7 com.facebook.presto.execution.StageExecutionStateMachine Stage execution 20200313_025632_00008_hnu22.2.0 failed
com.google.common.util.concurrent.UncheckedExecutionException: java.lang.IllegalArgumentException: Bad type signature: 'row($id ObjectId)'
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2050)
at com.google.common.cache.LocalCache.get(LocalCache.java:3952)
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3974)
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4958)
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4964)
at com.facebook.presto.mongodb.MongoSession.getTable(MongoSession.java:153)
at com.facebook.presto.mongodb.MongoMetadata.getTableMetadata(MongoMetadata.java:278)
at com.facebook.presto.mongodb.MongoMetadata.listTableColumns(MongoMetadata.java:130)
at com.facebook.presto.metadata.MetadataManager.listTableColumns(MetadataManager.java:590)
at com.facebook.presto.metadata.MetadataListing.listTableColumns(MetadataListing.java:93)
at com.facebook.presto.connector.system.jdbc.ColumnJdbcTable.cursor(ColumnJdbcTable.java:126)
at com.facebook.presto.connector.system.SystemPageSourceProvider$1.cursor(SystemPageSourceProvider.java:124)
at com.facebook.presto.split.MappedRecordSet.cursor(MappedRecordSet.java:53)
at com.facebook.presto.spi.RecordPageSource.<init>(RecordPageSource.java:38)
at com.facebook.presto.connector.system.SystemPageSourceProvider.createPageSource(SystemPageSourceProvider.java:103)
at com.facebook.presto.spi.connector.ConnectorPageSourceProvider.createPageSource(ConnectorPageSourceProvider.java:40)
at com.facebook.presto.split.PageSourceManager.createPageSource(PageSourceManager.java:58)
at com.facebook.presto.operator.ScanFilterAndProjectOperator.getOutput(ScanFilterAndProjectOperator.java:227)
at com.facebook.presto.operator.Driver.processInternal(Driver.java:379)
at com.facebook.presto.operator.Driver.lambda$processFor$8(Driver.java:283)
at com.facebook.presto.operator.Driver.tryWithLock(Driver.java:675)
at com.facebook.presto.operator.Driver.processFor(Driver.java:276)
at com.facebook.presto.execution.SqlTaskExecution$DriverSplitRunner.processFor(SqlTaskExecution.java:1077)
at com.facebook.presto.execution.executor.PrioritizedSplitRunner.process(PrioritizedSplitRunner.java:162)
at com.facebook.presto.execution.executor.TaskExecutor$TaskRunner.run(TaskExecutor.java:545)
at com.facebook.presto.$gen.Presto_0_232_cc1019c____20200313_025536_1.run(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.IllegalArgumentException: Bad type signature: 'row($id ObjectId)'
at com.facebook.presto.spi.type.TypeSignature.checkArgument(TypeSignature.java:360)
at com.facebook.presto.spi.type.TypeSignature.parseRowTypeSignature(TypeSignature.java:200)
at com.facebook.presto.spi.type.TypeSignature.parseTypeSignature(TypeSignature.java:119)
at com.facebook.presto.spi.type.TypeSignature.parseTypeSignature(TypeSignature.java:106)
at com.facebook.presto.mongodb.MongoSession.buildColumnHandle(MongoSession.java:197)
at com.facebook.presto.mongodb.MongoSession.loadTableSchema(MongoSession.java:183)
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:165)
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3528)
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2277)
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2154)
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2044)
... 28 more
Is there a way to read these kinds of fields?
UPDATE:
I tried moving to the latest PrestoSQL version (331) and surrounding the field names with ", like this:
"{
"name" : "otherCollection",
"type" : "row(\"$id\" ObjectId, \"$ref\" varchar)",
"hidden" : false
}"
The column then started showing up in queries, but its contents are always null. I also tried surrounding the names with ', but in that case I got this exception:
java.util.concurrent.ExecutionException: io.prestosql.sql.parser.ParsingException: line 1:5: mismatched input ''$id''. Expecting: <type>
at com.google.common.util.concurrent.AbstractFuture.getDoneValue(AbstractFuture.java:531)
at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:492)
at com.google.common.util.concurrent.AbstractFuture$TrustedFuture.get(AbstractFuture.java:83)
at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:196)
at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2312)
at com.google.common.cache.LocalCache$Segment$1.run(LocalCache.java:2292)
at com.google.common.util.concurrent.MoreExecutors$DirectExecutor.execute(MoreExecutors.java:398)
at com.google.common.util.concurrent.AbstractFuture.executeListener(AbstractFuture.java:1029)
at com.google.common.util.concurrent.AbstractFuture.addListener(AbstractFuture.java:675)
at com.google.common.util.concurrent.AbstractFuture$TrustedFuture.addListener(AbstractFuture.java:105)
at com.google.common.cache.LocalCache$Segment.loadAsync(LocalCache.java:2287)
at com.google.common.cache.LocalCache$Segment.refresh(LocalCache.java:2359)
at com.google.common.cache.LocalCache$Segment.scheduleRefresh(LocalCache.java:2337)
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2034)
at com.google.common.cache.LocalCache.get(LocalCache.java:3952)
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3974)
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4958)
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4964)
at io.prestosql.plugin.mongodb.MongoSession.getTable(MongoSession.java:164)
at io.prestosql.plugin.mongodb.MongoMetadata.getColumnHandles(MongoMetadata.java:114)
at io.prestosql.plugin.mongodb.MongoMetadata.getTableProperties(MongoMetadata.java:226)
at io.prestosql.metadata.MetadataManager.getTableProperties(MetadataManager.java:415)
at io.prestosql.sql.planner.DistributedExecutionPlanner.getTableInfo(DistributedExecutionPlanner.java:145)
at io.prestosql.sql.planner.DistributedExecutionPlanner.lambda$doPlan$0(DistributedExecutionPlanner.java:133)
at com.google.common.collect.CollectCollectors.lambda$toImmutableMap$1(CollectCollectors.java:61)
at java.util.stream.ReduceOps$3ReducingSink.accept(ReduceOps.java:169)
at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
at java.util.Collections$2.tryAdvance(Collections.java:4719)
at java.util.Collections$2.forEachRemaining(Collections.java:4727)
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)
at io.prestosql.sql.planner.DistributedExecutionPlanner.doPlan(DistributedExecutionPlanner.java:133)
at io.prestosql.sql.planner.DistributedExecutionPlanner.doPlan(DistributedExecutionPlanner.java:124)
at io.prestosql.sql.planner.DistributedExecutionPlanner.plan(DistributedExecutionPlanner.java:96)
at io.prestosql.execution.SqlQueryExecution.planDistribution(SqlQueryExecution.java:433)
at io.prestosql.execution.SqlQueryExecution.start(SqlQueryExecution.java:339)
at io.prestosql.$gen.Presto_331____20200318_020345_2.run(Unknown Source)
at io.prestosql.execution.SqlQueryManager.createQuery(SqlQueryManager.java:240)
at io.prestosql.dispatcher.LocalDispatchQuery.lambda$startExecution$7(LocalDispatchQuery.java:132)
at io.prestosql.$gen.Presto_331____20200318_020345_2.run(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: io.prestosql.sql.parser.ParsingException: line 1:5: mismatched input ''$id''. Expecting: <type>
at io.prestosql.sql.parser.ErrorHandler.syntaxError(ErrorHandler.java:108)
at org.antlr.v4.runtime.ProxyErrorListener.syntaxError(ProxyErrorListener.java:41)
at org.antlr.v4.runtime.Parser.notifyErrorListeners(Parser.java:544)
at org.antlr.v4.runtime.DefaultErrorStrategy.reportNoViableAlternative(DefaultErrorStrategy.java:310)
at org.antlr.v4.runtime.DefaultErrorStrategy.reportError(DefaultErrorStrategy.java:136)
at io.prestosql.sql.parser.SqlBaseParser.type(SqlBaseParser.java:10326)
at io.prestosql.sql.parser.SqlBaseParser.standaloneType(SqlBaseParser.java:386)
at io.prestosql.sql.parser.SqlParser.invokeParser(SqlParser.java:146)
at io.prestosql.sql.parser.SqlParser.createType(SqlParser.java:96)
at io.prestosql.metadata.TypeRegistry.fromSqlType(TypeRegistry.java:164)
at io.prestosql.metadata.MetadataManager.fromSqlType(MetadataManager.java:1262)
at io.prestosql.type.InternalTypeManager.fromSqlType(InternalTypeManager.java:51)
at io.prestosql.plugin.mongodb.MongoSession.buildColumnHandle(MongoSession.java:208)
at io.prestosql.plugin.mongodb.MongoSession.loadTableSchema(MongoSession.java:194)
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:165)
at com.google.common.cache.CacheLoader.reload(CacheLoader.java:100)
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3531)
at com.google.common.cache.LocalCache$Segment.loadAsync(LocalCache.java:2286)
... 35 more
Caused by: org.antlr.v4.runtime.NoViableAltException
at org.antlr.v4.runtime.atn.ParserATNSimulator.noViableAlt(ParserATNSimulator.java:2028)
at org.antlr.v4.runtime.atn.ParserATNSimulator.execATN(ParserATNSimulator.java:467)
at org.antlr.v4.runtime.atn.ParserATNSimulator.adaptivePredict(ParserATNSimulator.java:393)
at io.prestosql.sql.parser.SqlBaseParser.type(SqlBaseParser.java:10012)
... 47 more

Failed to execute user defined function($anonfun$9: (string) => double) when using StringIndexer on multiple columns

I am trying to apply a StringIndexer to multiple columns. Here is my code:
val stringIndexers = Categorical_Model.map { colName =>
  new StringIndexer().setInputCol(colName).setOutputCol(colName + "_indexed")
}
var dfStringIndexed = stringIndexers(0).fit(df3).transform(df3) // 'fit's a model, then 'transform's the data
for (x <- 1 to stringIndexers.length - 1) {
  dfStringIndexed = stringIndexers(x).fit(dfStringIndexed).transform(dfStringIndexed)
}
dfStringIndexed = dfStringIndexed.drop(Categorical_Model: _*)
The schema shows up with nullable as false for all columns.
The stringIndexers array looks like this:
stringIndexers: Array[org.apache.spark.ml.feature.StringIndexer] = Array(strIdx_c53c3bdf464c, strIdx_61e685c520f7, strIdx_d6e59b2fc69d, ......)
dfStringIndexed.show(10)
This throws the following error:
org.apache.spark.SparkException: Failed to execute user defined function($anonfun$9: (string) => double)
Why is it that printSchema works but no data is available?
Update: If I run the string indexers manually like so, instead of in the loop, the code works. Which is weird.
var dfStringIndexed = stringIndexers(0).fit(df3).transform(df3) // 'fit's a model then 'transform's data
dfStringIndexed = stringIndexers(1).fit(dfStringIndexed).transform(dfStringIndexed)
dfStringIndexed = stringIndexers(2).fit(dfStringIndexed).transform(dfStringIndexed)
dfStringIndexed = stringIndexers(3).fit(dfStringIndexed).transform(dfStringIndexed)
dfStringIndexed = stringIndexers(4).fit(dfStringIndexed).transform(dfStringIndexed)
Adding the stack trace on request:
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1602)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1590)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1589)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1589)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at scala.Option.foreach(Option.scala:257)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1823)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1772)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1761)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2034)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2055)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2074)
at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:363)
at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:38)
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectFromPlan(Dataset.scala:3273)
at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:2484)
at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:2484)
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3254)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3253)
at org.apache.spark.sql.Dataset.head(Dataset.scala:2484)
at org.apache.spark.sql.Dataset.take(Dataset.scala:2698)
at org.apache.spark.sql.Dataset.showString(Dataset.scala:254)
at org.apache.spark.sql.Dataset.show(Dataset.scala:723)
at org.apache.spark.sql.Dataset.show(Dataset.scala:682)
... 63 elided
Caused by: org.apache.spark.SparkException: Failed to execute user defined function($anonfun$9: (string) => double)
at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage1.processNext(Unknown Source)
at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.hasNext(WholeStageCodegenExec.scala:614)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:253)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$25.apply(RDD.scala:830)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$25.apply(RDD.scala:830)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
... 3 more
Caused by: org.apache.spark.SparkException: StringIndexer encountered NULL value. To handle or skip NULLS, try setting StringIndexer.handleInvalid.
at org.apache.spark.ml.feature.StringIndexerModel$$anonfun$9.apply(StringIndexer.scala:251)
at org.apache.spark.ml.feature.StringIndexerModel$$anonfun$9.apply(StringIndexer.scala:246)
... 19 more
I have also been getting a similar issue, even on a tiny subset of 50 rows, none of which have nulls in the column I am string indexing. But it didn't work even when I ran it manually.
I can avoid the error by including .setHandleInvalid("keep"), and I've checked the outputs and it's not doing anything strange like setting everything to 0 or to the same value. I'm still unhappy about that being the resolution, as it seems quite unsafe. I would be very interested to know if you find a more reasonable answer and resolution!
dfStringIndexed = stringIndexers(1).setHandleInvalid("keep").fit(dfStringIndexed).transform(dfStringIndexed)
I think it might also be fixed by changing the nullability of your column, even if it doesn't contain nulls, which I did as per
Can I change the nullability of a column in my Spark dataframe?
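If it helps, here is a sketch of the same idea using Spark's Java API, chaining one indexer per column through a Pipeline instead of a manual loop (the column names and DataFrame are placeholders):

import java.util.Arrays;
import org.apache.spark.ml.Pipeline;
import org.apache.spark.ml.PipelineStage;
import org.apache.spark.ml.feature.StringIndexer;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

// One StringIndexer per categorical column, each configured to keep (rather
// than fail on) invalid/NULL values, fitted and applied in a single Pipeline.
public class IndexAllCategoricals {
    public static Dataset<Row> index(Dataset<Row> df, String[] categoricalCols) {
        PipelineStage[] stages = Arrays.stream(categoricalCols)
                .map(c -> new StringIndexer()
                        .setInputCol(c)
                        .setOutputCol(c + "_indexed")
                        .setHandleInvalid("keep"))
                .toArray(PipelineStage[]::new);
        return new Pipeline().setStages(stages).fit(df).transform(df);
    }
}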

:INIT_FAILURE, Fail to create InputInitializerManager

I am using EMR from Amazon Web Services and am attempting to run a count query on an external table I've built. The data for the table is stored in MongoDB, and the table is an external table in Hive. The query I'm trying to run is:
select user_id, count(*) from myTable group by user_id;
I can run select * from myTable, but I cannot run any other queries. When I try to, I get this error:
----------------------------------------------------------------------------------------------
VERTICES MODE STATUS TOTAL COMPLETED RUNNING PENDING FAILED KILLED
----------------------------------------------------------------------------------------------
Map 1 container FAILED -1 0 0 -1 0 0
Reducer 2 container KILLED 1 0 0 1 0 0
----------------------------------------------------------------------------------------------
VERTICES: 00/02 [>>--------------------------] 0% ELAPSED TIME: 0.03 s
----------------------------------------------------------------------------------------------
Status: Failed
Vertex failed, vertexName=Map 1, vertexId=vertex_1476467351971_0008_2_00, diagnostics=[Vertex vertex_1476467351971_0008_2_00 [Map 1] killed/failed due to:INIT_FAILURE, Fail to create InputInitializerManager, org.apache.tez.dag.api.TezReflectionException: Unable to instantiate class with 1 arguments: org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator
at org.apache.tez.common.ReflectionUtils.getNewInstance(ReflectionUtils.java:70)
at org.apache.tez.common.ReflectionUtils.createClazzInstance(ReflectionUtils.java:89)
at org.apache.tez.dag.app.dag.RootInputInitializerManager$1.run(RootInputInitializerManager.java:151)
at org.apache.tez.dag.app.dag.RootInputInitializerManager$1.run(RootInputInitializerManager.java:148)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.tez.dag.app.dag.RootInputInitializerManager.createInitializer(RootInputInitializerManager.java:148)
at org.apache.tez.dag.app.dag.RootInputInitializerManager.runInputInitializers(RootInputInitializerManager.java:121)
at org.apache.tez.dag.app.dag.impl.VertexImpl.setupInputInitializerManager(VertexImpl.java:3986)
at org.apache.tez.dag.app.dag.impl.VertexImpl.access$3100(VertexImpl.java:204)
at org.apache.tez.dag.app.dag.impl.VertexImpl$InitTransition.handleInitEvent(VertexImpl.java:2818)
at org.apache.tez.dag.app.dag.impl.VertexImpl$InitTransition.transition(VertexImpl.java:2765)
at org.apache.tez.dag.app.dag.impl.VertexImpl$InitTransition.transition(VertexImpl.java:2747)
at org.apache.hadoop.yarn.state.StateMachineFactory$MultipleInternalArc.doTransition(StateMachineFactory.java:385)
at org.apache.hadoop.yarn.state.StateMachineFactory.doTransition(StateMachineFactory.java:302)
at org.apache.hadoop.yarn.state.StateMachineFactory.access$300(StateMachineFactory.java:46)
at org.apache.hadoop.yarn.state.StateMachineFactory$InternalStateMachine.doTransition(StateMachineFactory.java:448)
at org.apache.tez.state.StateMachineTez.doTransition(StateMachineTez.java:59)
at org.apache.tez.dag.app.dag.impl.VertexImpl.handle(VertexImpl.java:1888)
at org.apache.tez.dag.app.dag.impl.VertexImpl.handle(VertexImpl.java:203)
at org.apache.tez.dag.app.DAGAppMaster$VertexEventDispatcher.handle(DAGAppMaster.java:2242)
at org.apache.tez.dag.app.DAGAppMaster$VertexEventDispatcher.handle(DAGAppMaster.java:2228)
at org.apache.tez.common.AsyncDispatcher.dispatch(AsyncDispatcher.java:183)
at org.apache.tez.common.AsyncDispatcher$1.run(AsyncDispatcher.java:114)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.tez.common.ReflectionUtils.getNewInstance(ReflectionUtils.java:68)
... 25 more
Caused by: java.lang.RuntimeException: Failed to load plan: hdfs://ip-172-31-33-88.ec2.internal:8020/tmp/hive/hadoop/8c1eca9a-84ba-4d79-b39c-e633f1c6a646/hive_2016-10-14_18-57-38_728_4862537458701624850-1/hadoop/_tez_scratch_dir/157c0e27-0bfa-4acf-b0e9-b2fed595a8de/map.xml: org.apache.hive.com.esotericsoftware.kryo.KryoException: Unable to find class: com.mongodb.hadoop.hive.input.HiveMongoInputFormat
Serialization trace:
inputFileFormatClass (org.apache.hadoop.hive.ql.plan.PartitionDesc)
aliasToPartnInfo (org.apache.hadoop.hive.ql.plan.MapWork)
at org.apache.hadoop.hive.ql.exec.Utilities.getBaseWork(Utilities.java:451)
at org.apache.hadoop.hive.ql.exec.Utilities.getMapWork(Utilities.java:298)
at org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator.<init>(HiveSplitGenerator.java:131)
... 30 more
Caused by: org.apache.hive.com.esotericsoftware.kryo.KryoException: Unable to find class: com.mongodb.hadoop.hive.input.HiveMongoInputFormat
Serialization trace:
inputFileFormatClass (org.apache.hadoop.hive.ql.plan.PartitionDesc)
aliasToPartnInfo (org.apache.hadoop.hive.ql.plan.MapWork)
at org.apache.hive.com.esotericsoftware.kryo.util.DefaultClassResolver.readName(DefaultClassResolver.java:156)
at org.apache.hive.com.esotericsoftware.kryo.util.DefaultClassResolver.readClass(DefaultClassResolver.java:133)
at org.apache.hive.com.esotericsoftware.kryo.Kryo.readClass(Kryo.java:670)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readClass(SerializationUtilities.java:180)
at org.apache.hive.com.esotericsoftware.kryo.serializers.DefaultSerializers$ClassSerializer.read(DefaultSerializers.java:326)
at org.apache.hive.com.esotericsoftware.kryo.serializers.DefaultSerializers$ClassSerializer.read(DefaultSerializers.java:314)
at org.apache.hive.com.esotericsoftware.kryo.Kryo.readObjectOrNull(Kryo.java:759)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObjectOrNull(SerializationUtilities.java:198)
at org.apache.hive.com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:132)
at org.apache.hive.com.esotericsoftware.kryo.serializers.FieldSerializer.read(FieldSerializer.java:551)
at org.apache.hive.com.esotericsoftware.kryo.Kryo.readClassAndObject(Kryo.java:790)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readClassAndObject(SerializationUtilities.java:175)
at org.apache.hive.com.esotericsoftware.kryo.serializers.MapSerializer.read(MapSerializer.java:161)
at org.apache.hive.com.esotericsoftware.kryo.serializers.MapSerializer.read(MapSerializer.java:39)
at org.apache.hive.com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:708)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:213)
at org.apache.hive.com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:125)
at org.apache.hive.com.esotericsoftware.kryo.serializers.FieldSerializer.read(FieldSerializer.java:551)
at org.apache.hive.com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:686)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:205)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities.deserializeObjectByKryo(SerializationUtilities.java:583)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities.deserializePlan(SerializationUtilities.java:492)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities.deserializePlan(SerializationUtilities.java:469)
at org.apache.hadoop.hive.ql.exec.Utilities.getBaseWork(Utilities.java:411)
... 32 more
Caused by: java.lang.ClassNotFoundException: com.mongodb.hadoop.hive.input.HiveMongoInputFormat
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.hive.com.esotericsoftware.kryo.util.DefaultClassResolver.readName(DefaultClassResolver.java:154)
... 55 more
]
Vertex killed, vertexName=Reducer 2, vertexId=vertex_1476467351971_0008_2_01, diagnostics=[Vertex received Kill in NEW state., Vertex vertex_1476467351971_0008_2_01 [Reducer 2] killed/failed due to:OTHER_VERTEX_FAILURE]
DAG did not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.tez.TezTask. Vertex failed, vertexName=Map 1, vertexId=vertex_1476467351971_0008_2_00, diagnostics=[Vertex vertex_1476467351971_0008_2_00 [Map 1] killed/failed due to:INIT_FAILURE, Fail to create InputInitializerManager, org.apache.tez.dag.api.TezReflectionException: Unable to instantiate class with 1 arguments: org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator
at org.apache.tez.common.ReflectionUtils.getNewInstance(ReflectionUtils.java:70)
at org.apache.tez.common.ReflectionUtils.createClazzInstance(ReflectionUtils.java:89)
at org.apache.tez.dag.app.dag.RootInputInitializerManager$1.run(RootInputInitializerManager.java:151)
at org.apache.tez.dag.app.dag.RootInputInitializerManager$1.run(RootInputInitializerManager.java:148)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.tez.dag.app.dag.RootInputInitializerManager.createInitializer(RootInputInitializerManager.java:148)
at org.apache.tez.dag.app.dag.RootInputInitializerManager.runInputInitializers(RootInputInitializerManager.java:121)
at org.apache.tez.dag.app.dag.impl.VertexImpl.setupInputInitializerManager(VertexImpl.java:3986)
at org.apache.tez.dag.app.dag.impl.VertexImpl.access$3100(VertexImpl.java:204)
at org.apache.tez.dag.app.dag.impl.VertexImpl$InitTransition.handleInitEvent(VertexImpl.java:2818)
at org.apache.tez.dag.app.dag.impl.VertexImpl$InitTransition.transition(VertexImpl.java:2765)
at org.apache.tez.dag.app.dag.impl.VertexImpl$InitTransition.transition(VertexImpl.java:2747)
at org.apache.hadoop.yarn.state.StateMachineFactory$MultipleInternalArc.doTransition(StateMachineFactory.java:385)
at org.apache.hadoop.yarn.state.StateMachineFactory.doTransition(StateMachineFactory.java:302)
at org.apache.hadoop.yarn.state.StateMachineFactory.access$300(StateMachineFactory.java:46)
at org.apache.hadoop.yarn.state.StateMachineFactory$InternalStateMachine.doTransition(StateMachineFactory.java:448)
at org.apache.tez.state.StateMachineTez.doTransition(StateMachineTez.java:59)
at org.apache.tez.dag.app.dag.impl.VertexImpl.handle(VertexImpl.java:1888)
at org.apache.tez.dag.app.dag.impl.VertexImpl.handle(VertexImpl.java:203)
at org.apache.tez.dag.app.DAGAppMaster$VertexEventDispatcher.handle(DAGAppMaster.java:2242)
at org.apache.tez.dag.app.DAGAppMaster$VertexEventDispatcher.handle(DAGAppMaster.java:2228)
at org.apache.tez.common.AsyncDispatcher.dispatch(AsyncDispatcher.java:183)
at org.apache.tez.common.AsyncDispatcher$1.run(AsyncDispatcher.java:114)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.tez.common.ReflectionUtils.getNewInstance(ReflectionUtils.java:68)
... 25 more
Caused by: java.lang.RuntimeException: Failed to load plan: hdfs://ip-172-31-33-88.ec2.internal:8020/tmp/hive/hadoop/8c1eca9a-84ba-4d79-b39c-e633f1c6a646/hive_2016-10-14_18-57-38_728_4862537458701624850-1/hadoop/_tez_scratch_dir/157c0e27-0bfa-4acf-b0e9-b2fed595a8de/map.xml: org.apache.hive.com.esotericsoftware.kryo.KryoException: Unable to find class: com.mongodb.hadoop.hive.input.HiveMongoInputFormat
Serialization trace:
inputFileFormatClass (org.apache.hadoop.hive.ql.plan.PartitionDesc)
aliasToPartnInfo (org.apache.hadoop.hive.ql.plan.MapWork)
at org.apache.hadoop.hive.ql.exec.Utilities.getBaseWork(Utilities.java:451)
at org.apache.hadoop.hive.ql.exec.Utilities.getMapWork(Utilities.java:298)
at org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator.<init>(HiveSplitGenerator.java:131)
... 30 more
Caused by: org.apache.hive.com.esotericsoftware.kryo.KryoException: Unable to find class: com.mongodb.hadoop.hive.input.HiveMongoInputFormat
Serialization trace:
inputFileFormatClass (org.apache.hadoop.hive.ql.plan.PartitionDesc)
aliasToPartnInfo (org.apache.hadoop.hive.ql.plan.MapWork)
at org.apache.hive.com.esotericsoftware.kryo.util.DefaultClassResolver.readName(DefaultClassResolver.java:156)
at org.apache.hive.com.esotericsoftware.kryo.util.DefaultClassResolver.readClass(DefaultClassResolver.java:133)
at org.apache.hive.com.esotericsoftware.kryo.Kryo.readClass(Kryo.java:670)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readClass(SerializationUtilities.java:180)
at org.apache.hive.com.esotericsoftware.kryo.serializers.DefaultSerializers$ClassSerializer.read(DefaultSerializers.java:326)
at org.apache.hive.com.esotericsoftware.kryo.serializers.DefaultSerializers$ClassSerializer.read(DefaultSerializers.java:314)
at org.apache.hive.com.esotericsoftware.kryo.Kryo.readObjectOrNull(Kryo.java:759)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObjectOrNull(SerializationUtilities.java:198)
at org.apache.hive.com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:132)
at org.apache.hive.com.esotericsoftware.kryo.serializers.FieldSerializer.read(FieldSerializer.java:551)
at org.apache.hive.com.esotericsoftware.kryo.Kryo.readClassAndObject(Kryo.java:790)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readClassAndObject(SerializationUtilities.java:175)
at org.apache.hive.com.esotericsoftware.kryo.serializers.MapSerializer.read(MapSerializer.java:161)
at org.apache.hive.com.esotericsoftware.kryo.serializers.MapSerializer.read(MapSerializer.java:39)
at org.apache.hive.com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:708)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:213)
at org.apache.hive.com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:125)
at org.apache.hive.com.esotericsoftware.kryo.serializers.FieldSerializer.read(FieldSerializer.java:551)
at org.apache.hive.com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:686)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:205)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities.deserializeObjectByKryo(SerializationUtilities.java:583)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities.deserializePlan(SerializationUtilities.java:492)
at org.apache.hadoop.hive.ql.exec.SerializationUtilities.deserializePlan(SerializationUtilities.java:469)
at org.apache.hadoop.hive.ql.exec.Utilities.getBaseWork(Utilities.java:411)
... 32 more
Caused by: java.lang.ClassNotFoundException: com.mongodb.hadoop.hive.input.HiveMongoInputFormat
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.hive.com.esotericsoftware.kryo.util.DefaultClassResolver.readName(DefaultClassResolver.java:154)
... 55 more
]Vertex killed, vertexName=Reducer 2, vertexId=vertex_1476467351971_0008_2_01, diagnostics=[Vertex received Kill in NEW state., Vertex vertex_1476467351971_0008_2_01 [Reducer 2] killed/failed due to:OTHER_VERTEX_FAILURE]DAG did not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
hive>
I am using a single-node EMR cluster from AWS with the "Core Hadoop" configuration, but I had the same errors using a three-node cluster. I have already added the Mongo Hadoop Core, Mongo Hadoop Hive, and Mongo Java driver jars to the master node. These are the jars I'm using:
mongo-hadoop-hive-2.0.1.jar
mongo-java-driver-3.3.0.jar
mongo-hadoop-core-2.0.1.jar
These jars were downloaded from Maven.
A similar question was previously asked on Stack Overflow and was resolved by adding the source for Apache Tez 0.8.4 and building it. Apache Tez 0.8.4 is already on the cluster, though, at least according to the EMR configuration we chose.
We are using Hadoop 2.7.2 and Hive 2.1.0.

Weblogic 12c (12.1.3) and eclipselink 2.3.1 Getting a ClassCastException for Named Native Queries

Recently we migrated from WebLogic 10 to WebLogic 12c. The application was working fine until we moved to the new WebLogic 12c server. My web application uses JPA and EclipseLink to fetch some information from the database. All the other named queries seem to work fine, except the named native query, where I'm getting a ClassCastException. Below is the code. Any help is greatly appreciated.
This is my named native query:
@NamedNativeQueries({
@NamedNativeQuery(name="Channel.findValue",
query="SELECT ID, APP_ID, APPLICATION_TYPE_ID, CHANNEL_ID, VALUE, EFFECTIVE_DATE FROM TABLENAME " +
" WHERE APP_ID = ? AND STATUS = ? AND APPLICATION_TYPE_ID = ? AND CHANNEL_ID = ? AND EXPIRY_DATE >= ? AND EFFECTIVE_DATE <= ? ",
resultSetMapping="findValue")
})
@SqlResultSetMappings({
@SqlResultSetMapping(name="findValue",
entities={@EntityResult(entityClass=Channel.class)}
)
})
Code that's calling the query:
List<Channel> channels = em.createNamedQuery("Channel.findValue")
    .setParameter(1, app.getId())
    .setParameter(2, ConstStatus.ACTIVE)
    .setParameter(3, appType.getId())
    .setParameter(4, channelId)
    .setParameter(5, effectiveDate)
    .setParameter(6, effectiveDate)
    .setHint("eclipselink.refresh", "true")
    .getResultList();
Error that I am getting:
java.lang.ClassCastException: org.eclipse.persistence.queries.ResultSetMappingQuery cannot be cast to org.eclipse.persistence.queries.ObjectLevelReadQuery
Full stack trace:
SEVERE: Value Service Response:EJB Exception: : java.lang.ClassCastException: org.eclipse.persistence.queries.ResultSetMappingQuery cannot be cast to org.eclipse.persistence.queries.ObjectLevelReadQuery
at org.eclipse.persistence.mappings.ForeignReferenceMapping.valueFromRowInternal(ForeignReferenceMapping.java:2073)
at org.eclipse.persistence.mappings.ForeignReferenceMapping.valueFromRow(ForeignReferenceMapping.java:1987)
at org.eclipse.persistence.mappings.ForeignReferenceMapping.readFromRowIntoObject(ForeignReferenceMapping.java:1353)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.buildAttributesIntoObject(ObjectBuilder.java:445)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.refreshObjectIfRequired(ObjectBuilder.java:3591)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.buildObject(ObjectBuilder.java:826)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.buildWorkingCopyCloneNormally(ObjectBuilder.java:715)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.buildObjectInUnitOfWork(ObjectBuilder.java:668)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.buildObject(ObjectBuilder.java:601)
at org.eclipse.persistence.queries.EntityResult.getValueFromRecord(EntityResult.java:192)
at org.eclipse.persistence.queries.ResultSetMappingQuery.buildObjectsFromRecords(ResultSetMappingQuery.java:144)
at org.eclipse.persistence.queries.ResultSetMappingQuery.executeDatabaseQuery(ResultSetMappingQuery.java:195)
at org.eclipse.persistence.queries.DatabaseQuery.execute(DatabaseQuery.java:844)
at org.eclipse.persistence.queries.DatabaseQuery.executeInUnitOfWork(DatabaseQuery.java:743)
at org.eclipse.persistence.internal.sessions.UnitOfWorkImpl.internalExecuteQuery(UnitOfWorkImpl.java:2871)
at org.eclipse.persistence.internal.sessions.AbstractSession.executeQuery(AbstractSession.java:1516)
at org.eclipse.persistence.internal.sessions.AbstractSession.executeQuery(AbstractSession.java:1498)
at org.eclipse.persistence.internal.sessions.AbstractSession.executeQuery(AbstractSession.java:1463)
at org.eclipse.persistence.internal.jpa.EJBQueryImpl.executeReadQuery(EJBQueryImpl.java:485)
at org.eclipse.persistence.internal.jpa.EJBQueryImpl.getResultList(EJBQueryImpl.java:742)
at weblogic.persistence.QueryProxyImpl.getResultList(QueryProxyImpl.java:140)
at com.database.service.app.app.service.appServiceBean.getFeeByAppTypeAndChannel(appServiceBean.java:413)
at com.database.service.app.app.service.appServiceBean_7omfhq_appServiceLocalImpl.__WL_invoke(Unknown Source)
at weblogic.ejb.container.internal.SessionLocalMethodInvoker.invoke(SessionLocalMethodInvoker.java:33)
at com.database.service.app.app.service.appServiceBean_7omfhq_appServiceLocalImpl.getFeeByAppTypeAndChannel(Unknown Source)
at com.database.service.app.ws.service.FeeService.getFeeInfo(FeeService.java:89)
at com.database.service.app.ws.service.FeeService.getFee(FeeService.java:191)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at weblogic.wsee.jaxws.WLSInstanceResolver$WLSInvoker.invoke(WLSInstanceResolver.java:117)
at weblogic.wsee.jaxws.WLSInstanceResolver$WLSInvoker.invoke(WLSInstanceResolver.java:91)
at com.sun.xml.ws.server.InvokerTube$2.invoke(InvokerTube.java:149)
at com.sun.xml.ws.server.sei.SEIInvokerTube.processRequest(SEIInvokerTube.java:88)
at com.sun.xml.ws.api.pipe.Fiber.__doRun(Fiber.java:1136)
at com.sun.xml.ws.api.pipe.Fiber._doRun(Fiber.java:1050)
at com.sun.xml.ws.api.pipe.Fiber.doRun(Fiber.java:1019)
at com.sun.xml.ws.api.pipe.Fiber.runSync(Fiber.java:877)
at com.sun.xml.ws.server.WSEndpointImpl$2.process(WSEndpointImpl.java:419)
at com.sun.xml.ws.transport.http.HttpAdapter$HttpToolkit.handle(HttpAdapter.java:868)
at com.sun.xml.ws.transport.http.HttpAdapter.handle(HttpAdapter.java:422)
at com.sun.xml.ws.transport.http.servlet.ServletAdapter.handle(ServletAdapter.java:169)
at weblogic.wsee.jaxws.WLSServletAdapter.handle(WLSServletAdapter.java:199)
at weblogic.wsee.jaxws.HttpServletAdapter$AuthorizedInvoke.run(HttpServletAdapter.java:640)
at weblogic.security.acl.internal.AuthenticatedSubject.doAs(AuthenticatedSubject.java:363)
at weblogic.security.service.SecurityManager.runAs(SecurityManager.java:146)
at weblogic.wsee.util.ServerSecurityHelper.authenticatedInvoke(ServerSecurityHelper.java:108)
at weblogic.wsee.jaxws.HttpServletAdapter$3.run(HttpServletAdapter.java:284)
at weblogic.wsee.jaxws.HttpServletAdapter.post(HttpServletAdapter.java:293)
at weblogic.wsee.jaxws.JAXWSServlet.doRequest(JAXWSServlet.java:128)
at weblogic.servlet.http.AbstractAsyncServlet.service(AbstractAsyncServlet.java:99)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:844)
at weblogic.servlet.internal.StubSecurityHelper$ServletServiceAction.run(StubSecurityHelper.java:280)
at weblogic.servlet.internal.StubSecurityHelper$ServletServiceAction.run(StubSecurityHelper.java:254)
at weblogic.servlet.internal.StubSecurityHelper.invokeServlet(StubSecurityHelper.java:136)
at weblogic.servlet.internal.ServletStubImpl.execute(ServletStubImpl.java:346)
at weblogic.servlet.internal.ServletStubImpl.execute(ServletStubImpl.java:243)
at weblogic.servlet.internal.WebAppServletContext$ServletInvocationAction.wrapRun(WebAppServletContext.java:3432)
at weblogic.servlet.internal.WebAppServletContext$ServletInvocationAction.run(WebAppServletContext.java:3402)
at weblogic.security.acl.internal.AuthenticatedSubject.doAs(AuthenticatedSubject.java:321)
at weblogic.security.service.SecurityManager.runAs(SecurityManager.java:120)
at weblogic.servlet.provider.WlsSubjectHandle.run(WlsSubjectHandle.java:57)
at weblogic.servlet.internal.WebAppServletContext.doSecuredExecute(WebAppServletContext.java:2285)
at weblogic.servlet.internal.WebAppServletContext.securedExecute(WebAppServletContext.java:2201)
at weblogic.servlet.internal.WebAppServletContext.execute(WebAppServletContext.java:2179)
at weblogic.servlet.internal.ServletRequestImpl.run(ServletRequestImpl.java:1572)
at weblogic.servlet.provider.ContainerSupportProviderImpl$WlsRequestExecutor.run(ContainerSupportProviderImpl.java:255)
at weblogic.work.ExecuteThread.execute(ExecuteThread.java:311)
at weblogic.work.ExecuteThread.run(ExecuteThread.java:263)>
<31-Mar-2016 11:26:58 o'clock AM ADT> <Error> <com.database.service.app.ws.service.FeeService> <BEA-000000> <Value Service Response:EJB Exception: : java.lang.ClassCastException: org.eclipse.persistence.queries.ResultSetMappingQuery cannot be cast to org.eclipse.persistence.queries.ObjectLevelReadQuery
at org.eclipse.persistence.mappings.ForeignReferenceMapping.valueFromRowInternal(ForeignReferenceMapping.java:2073)
at org.eclipse.persistence.mappings.ForeignReferenceMapping.valueFromRow(ForeignReferenceMapping.java:1987)
at org.eclipse.persistence.mappings.ForeignReferenceMapping.readFromRowIntoObject(ForeignReferenceMapping.java:1353)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.buildAttributesIntoObject(ObjectBuilder.java:445)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.refreshObjectIfRequired(ObjectBuilder.java:3591)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.buildObject(ObjectBuilder.java:826)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.buildWorkingCopyCloneNormally(ObjectBuilder.java:715)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.buildObjectInUnitOfWork(ObjectBuilder.java:668)
at org.eclipse.persistence.internal.descriptors.ObjectBuilder.buildObject(ObjectBuilder.java:601)
at org.eclipse.persistence.queries.EntityResult.getValueFromRecord(EntityResult.java:192)
at org.eclipse.persistence.queries.ResultSetMappingQuery.buildObjectsFromRecords(ResultSetMappingQuery.java:144)
at org.eclipse.persistence.queries.ResultSetMappingQuery.executeDatabaseQuery(ResultSetMappingQuery.java:195)
at org.eclipse.persistence.queries.DatabaseQuery.execute(DatabaseQuery.java:844)
at org.eclipse.persistence.queries.DatabaseQuery.executeInUnitOfWork(DatabaseQuery.java:743)
at org.eclipse.persistence.internal.sessions.UnitOfWorkImpl.internalExecuteQuery(UnitOfWorkImpl.java:2871)
at org.eclipse.persistence.internal.sessions.AbstractSession.executeQuery(AbstractSession.java:1516)
at org.eclipse.persistence.internal.sessions.AbstractSession.executeQuery(AbstractSession.java:1498)
at org.eclipse.persistence.internal.sessions.AbstractSession.executeQuery(AbstractSession.java:1463)
at org.eclipse.persistence.internal.jpa.EJBQueryImpl.executeReadQuery(EJBQueryImpl.java:485)
at org.eclipse.persistence.internal.jpa.EJBQueryImpl.getResultList(EJBQueryImpl.java:742)
at weblogic.persistence.QueryProxyImpl.getResultList(QueryProxyImpl.java:140)
at com.database.service.app.app.service.appServiceBean.getFeeByAppTypeAndChannel(appServiceBean.java:413)
at com.database.service.app.app.service.appServiceBean_7omfhq_appServiceLocalImpl.__WL_invoke(Unknown Source)
at weblogic.ejb.container.internal.SessionLocalMethodInvoker.invoke(SessionLocalMethodInvoker.java:33)
at com.database.service.app.app.service.appServiceBean_7omfhq_appServiceLocalImpl.getFeeByAppTypeAndChannel(Unknown Source)
at com.database.service.app.ws.service.FeeService.getFeeInfo(FeeService.java:89)
at com.database.service.app.ws.service.FeeService.getFee(FeeService.java:191)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at weblogic.wsee.jaxws.WLSInstanceResolver$WLSInvoker.invoke(WLSInstanceResolver.java:117)
at weblogic.wsee.jaxws.WLSInstanceResolver$WLSInvoker.invoke(WLSInstanceResolver.java:91)
at com.sun.xml.ws.server.InvokerTube$2.invoke(InvokerTube.java:149)
at com.sun.xml.ws.server.sei.SEIInvokerTube.processRequest(SEIInvokerTube.java:88)
at com.sun.xml.ws.api.pipe.Fiber.__doRun(Fiber.java:1136)
at com.sun.xml.ws.api.pipe.Fiber._doRun(Fiber.java:1050)
at com.sun.xml.ws.api.pipe.Fiber.doRun(Fiber.java:1019)
at com.sun.xml.ws.api.pipe.Fiber.runSync(Fiber.java:877)
at com.sun.xml.ws.server.WSEndpointImpl$2.process(WSEndpointImpl.java:419)
at com.sun.xml.ws.transport.http.HttpAdapter$HttpToolkit.handle(HttpAdapter.java:868)
at com.sun.xml.ws.transport.http.HttpAdapter.handle(HttpAdapter.java:422)
at com.sun.xml.ws.transport.http.servlet.ServletAdapter.handle(ServletAdapter.java:169)
at weblogic.wsee.jaxws.WLSServletAdapter.handle(WLSServletAdapter.java:199)
at weblogic.wsee.jaxws.HttpServletAdapter$AuthorizedInvoke.run(HttpServletAdapter.java:640)
at weblogic.security.acl.internal.AuthenticatedSubject.doAs(AuthenticatedSubject.java:363)
at weblogic.security.service.SecurityManager.runAs(SecurityManager.java:146)
at weblogic.wsee.util.ServerSecurityHelper.authenticatedInvoke(ServerSecurityHelper.java:108)
at weblogic.wsee.jaxws.HttpServletAdapter$3.run(HttpServletAdapter.java:284)
at weblogic.wsee.jaxws.HttpServletAdapter.post(HttpServletAdapter.java:293)
at weblogic.wsee.jaxws.JAXWSServlet.doRequest(JAXWSServlet.java:128)
at weblogic.servlet.http.AbstractAsyncServlet.service(AbstractAsyncServlet.java:99)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:844)
at weblogic.servlet.internal.StubSecurityHelper$ServletServiceAction.run(StubSecurityHelper.java:280)
at weblogic.servlet.internal.StubSecurityHelper$ServletServiceAction.run(StubSecurityHelper.java:254)
at weblogic.servlet.internal.StubSecurityHelper.invokeServlet(StubSecurityHelper.java:136)
at weblogic.servlet.internal.ServletStubImpl.execute(ServletStubImpl.java:346)
at weblogic.servlet.internal.ServletStubImpl.execute(ServletStubImpl.java:243)
at weblogic.servlet.internal.WebAppServletContext$ServletInvocationAction.wrapRun(WebAppServletContext.java:3432)
at weblogic.servlet.internal.WebAppServletContext$ServletInvocationAction.run(WebAppServletContext.java:3402)
at weblogic.security.acl.internal.AuthenticatedSubject.doAs(AuthenticatedSubject.java:321)
at weblogic.security.service.SecurityManager.runAs(SecurityManager.java:120)
at weblogic.servlet.provider.WlsSubjectHandle.run(WlsSubjectHandle.java:57)
at weblogic.servlet.internal.WebAppServletContext.doSecuredExecute(WebAppServletContext.java:2285)
at weblogic.servlet.internal.WebAppServletContext.securedExecute(WebAppServletContext.java:2201)
at weblogic.servlet.internal.WebAppServletContext.execute(WebAppServletContext.java:2179)
at weblogic.servlet.internal.ServletRequestImpl.run(ServletRequestImpl.java:1572)
at weblogic.servlet.provider.ContainerSupportProviderImpl$WlsRequestExecutor.run(ContainerSupportProviderImpl.java:255)
at weblogic.work.ExecuteThread.execute(ExecuteThread.java:311)
at weblogic.work.ExecuteThread.run(ExecuteThread.java:263)
The error got resolved when I removed
setHint("eclipselink.refresh", "true")
from
List<Channel> channels = em.createNamedQuery("Channel.findValue")
    .setParameter(1, app.getId())
    .setParameter(2, ConstStatus.ACTIVE)
    .setParameter(3, appType.getId())
    .setParameter(4, channelId)
    .setParameter(5, effectiveDate)
    .setParameter(6, effectiveDate)
    .setHint("eclipselink.refresh", "true")
    .getResultList();