When I run JDeveloper 12c and use the ADF framework to create a web application, I create business components and use the data control to drag and drop a table from the database. But when I run the application, WebLogic gives me this error (Error 500--Internal Server Error). Below is the full error log. Please help me solve the problem.
Error 500--Internal Server Error
oracle.jbo.JboException: JBO-29114 ADFContext is not setup to process messages for this exception. Use the exception stack trace and error code to investigate the root cause of this exception. Root cause error code is JBO-29000. Error message parameters are {0=oracle.adf.model.adapter.AdapterException, 1=DCA-40002: The WSDL document is invalid due to the following reason : null.}
at oracle.adf.model.adapter.DataControlFactoryImpl.createSession(DataControlFactoryImpl.java:228)
at oracle.adf.model.binding.DCDataControlReference.getDataControl(DCDataControlReference.java:274)
at oracle.adf.model.BindingContext.instantiateDataControl(BindingContext.java:1416)
at oracle.adf.model.dcframe.DataControlFrameImpl.doFindDataControl(DataControlFrameImpl.java:1906)
at oracle.adf.model.dcframe.DataControlFrameImpl.internalFindDataControl(DataControlFrameImpl.java:1768)
at oracle.adf.model.dcframe.DataControlFrameImpl.findDataControl(DataControlFrameImpl.java:1728)
at oracle.adf.model.BindingContext.internalFindDataControl(BindingContext.java:1549)
at oracle.adf.model.BindingContext.get(BindingContext.java:1499)
at oracle.adf.model.binding.DCUtil.findSpelObject(DCUtil.java:340)
at oracle.adf.model.binding.DCBindingContainer.evaluateParameterWithElCheck(DCBindingContainer.java:1521)
at oracle.adf.model.binding.DCBindingContainer.findDataControl(DCBindingContainer.java:1627)
at oracle.jbo.uicli.binding.JUCtrlActionBinding.internalGetDataControl(JUCtrlActionBinding.java:588)
at oracle.jbo.uicli.binding.JUCtrlActionBinding.getDataControl(JUCtrlActionBinding.java:648)
at oracle.jbo.uicli.binding.JUCtrlActionBinding.isOperationEnabled(JUCtrlActionBinding.java:336)
at oracle.jbo.uicli.binding.JUCtrlActionBinding.isActionEnabled(JUCtrlActionBinding.java:300)
at oracle.jbo.uicli.binding.JUCtrlActionBinding.getEnabled(JUCtrlActionBinding.java:1922)
at oracle.jbo.uicli.binding.JUCtrlActionBinding.internalGet(JUCtrlActionBinding.java:2016)
at oracle.adfinternal.view.faces.model.binding.FacesCtrlActionBinding.internalGet(FacesCtrlActionBinding.java:368)
at oracle.adf.model.binding.DCControlBinding.get(DCControlBinding.java:763)
at javax.el.MapELResolver.getValue(MapELResolver.java:199)
at com.sun.faces.el.DemuxCompositeELResolver._getValue(DemuxCompositeELResolver.java:176)
at com.sun.faces.el.DemuxCompositeELResolver.getValue(DemuxCompositeELResolver.java:203)
at com.sun.el.parser.AstValue.getValue(AstValue.java:139)
at com.sun.el.parser.AstValue.getValue(AstValue.java:203)
at com.sun.el.parser.AstNot.getValue(AstNot.java:63)
at com.sun.el.ValueExpressionImpl.getValue(ValueExpressionImpl.java:226)
at com.sun.faces.facelets.el.TagValueExpression.getValue(TagValueExpression.java:109)
at org.apache.myfaces.trinidad.bean.FacesBeanImpl.getProperty(FacesBeanImpl.java:73)
at oracle.adfinternal.view.faces.renderkit.rich.ButtonRenderer.getDisabled(ButtonRenderer.java:449)
at oracle.adfinternal.view.faces.renderkit.rich.ButtonRenderer.encodeAll(ButtonRenderer.java:284)
at oracle.adf.view.rich.render.RichRenderer.encodeAll(RichRenderer.java:1650)
at org.apache.myfaces.trinidad.render.CoreRenderer.encodeEnd(CoreRenderer.java:538)
at org.apache.myfaces.trinidad.component.UIXComponentBase.encodeEnd(UIXComponentBase.java:1230)
at javax.faces.component.UIComponent.encodeAll(UIComponent.java:1863)
at org.apache.myfaces.trinidad.render.CoreRenderer.encodeChild(CoreRenderer.java:660)
at org.apache.myfaces.trinidad.render.CoreRenderer.encodeAllChildren(CoreRenderer.java:677)
at oracle.adf.view.rich.render.RichRenderer.encodeAllChildrenInContext(RichRenderer.java:3284)
at oracle.adfinternal.view.faces.renderkit.rich.FormRenderer.encodeAll(FormRenderer.java:275)
at oracle.adf.view.rich.render.RichRenderer.encodeAll(RichRenderer.java:1650)
at org.apache.myfaces.trinidad.render.CoreRenderer.encodeEnd(CoreRenderer.java:538)
at org.apache.myfaces.trinidad.component.UIXComponentBase.encodeEnd(UIXComponentBase.java:1230)
at javax.faces.component.UIComponent.encodeAll(UIComponent.java:1863)
at org.apache.myfaces.trinidad.render.CoreRenderer.encodeChild(CoreRenderer.java:660)
at org.apache.myfaces.trinidad.render.CoreRenderer.encodeAllChildren(CoreRenderer.java:677)
at oracle.adf.view.rich.render.RichRenderer.encodeAllChildrenInContext(RichRenderer.java:3284)
at oracle.adfinternal.view.faces.renderkit.rich.DocumentRenderer.encodeAll(DocumentRenderer.java:1428)
at oracle.adf.view.rich.render.RichRenderer.encodeAll(RichRenderer.java:1650)
at org.apache.myfaces.trinidad.render.CoreRenderer.encodeEnd(CoreRenderer.java:538)
at org.apache.myfaces.trinidad.component.UIXComponentBase.encodeEnd(UIXComponentBase.java:1230)
at javax.faces.component.UIComponent.encodeAll(UIComponent.java:1863)
at javax.faces.component.UIComponent.encodeAll(UIComponent.java:1859)
at oracle.adfinternal.view.faces.component.AdfViewRoot.encodeAll(AdfViewRoot.java:102)
at com.sun.faces.application.view.FaceletViewHandlingStrategy.renderView(FaceletViewHandlingStrategy.java:458)
at org.apache.myfaces.trinidad.view.ViewDeclarationLanguageWrapper.renderView(ViewDeclarationLanguageWrapper.java:101)
at org.apache.myfaces.trinidad.view.ViewDeclarationLanguageWrapper.renderView(ViewDeclarationLanguageWrapper.java:101)
at org.apache.myfaces.trinidadinternal.application.ViewDeclarationLanguageFactoryImpl$ChangeApplyingVDLWrapper.renderView(ViewDeclarationLanguageFactoryImpl.java:338)
at com.sun.faces.application.view.MultiViewHandler.renderView(MultiViewHandler.java:134)
at javax.faces.application.ViewHandlerWrapper.renderView(ViewHandlerWrapper.java:337)
at org.apache.myfaces.trinidadinternal.application.ViewHandlerImpl.renderView(ViewHandlerImpl.java:170)
at oracle.adfinternal.view.faces.lifecycle.ResponseRenderManager.runRenderView(ResponseRenderManager.java:52)
at oracle.adfinternal.view.faces.lifecycle.LifecycleImpl._renderResponse(LifecycleImpl.java:1228)
at oracle.adfinternal.view.faces.lifecycle.LifecycleImpl._executeRenderResponse(LifecycleImpl.java:1040)
at oracle.adfinternal.view.faces.lifecycle.LifecycleImpl._executePhase(LifecycleImpl.java:332)
at oracle.adfinternal.view.faces.lifecycle.LifecycleImpl.render(LifecycleImpl.java:254)
at javax.faces.webapp.FacesServlet.service(FacesServlet.java:651)
at weblogic.servlet.internal.StubSecurityHelper$ServletServiceAction.run(StubSecurityHelper.java:286)
at weblogic.servlet.internal.StubSecurityHelper$ServletServiceAction.run(StubSecurityHelper.java:260)
at weblogic.servlet.internal.StubSecurityHelper.invokeServlet(StubSecurityHelper.java:137)
at weblogic.servlet.internal.ServletStubImpl.execute(ServletStubImpl.java:350)
at weblogic.servlet.internal.TailFilter.doFilter(TailFilter.java:25)
at weblogic.servlet.internal.FilterChainImpl.doFilter(FilterChainImpl.java:78)
at oracle.adf.model.servlet.ADFBindingFilter.doFilter(ADFBindingFilter.java:194)
at weblogic.servlet.internal.FilterChainImpl.doFilter(FilterChainImpl.java:78)
at oracle.adfinternal.view.faces.webapp.rich.RegistrationFilter.doFilter(RegistrationFilter.java:105)
at org.apache.myfaces.trinidadinternal.webapp.TrinidadFilterImpl$FilterListChain.doFilter(TrinidadFilterImpl.java:529)
at oracle.adfinternal.view.faces.activedata.AdsFilter.doFilter(AdsFilter.java:60)
at org.apache.myfaces.trinidadinternal.webapp.TrinidadFilterImpl$FilterListChain.doFilter(TrinidadFilterImpl.java:529)
at org.apache.myfaces.trinidadinternal.webapp.TrinidadFilterImpl._doFilterImpl(TrinidadFilterImpl.java:354)
at org.apache.myfaces.trinidadinternal.webapp.TrinidadFilterImpl.doFilter(TrinidadFilterImpl.java:232)
at org.apache.myfaces.trinidad.webapp.TrinidadFilter.doFilter(TrinidadFilter.java:92)
at weblogic.servlet.internal.FilterChainImpl.doFilter(FilterChainImpl.java:78)
at oracle.security.jps.ee.http.JpsAbsFilter$1.run(JpsAbsFilter.java:141)
at java.security.AccessController.doPrivileged(Native Method)
at oracle.security.jps.util.JpsSubject.doAsPrivileged(JpsSubject.java:315)
at oracle.security.jps.ee.util.JpsPlatformUtil.runJaasMode(JpsPlatformUtil.java:649)
at oracle.security.jps.ee.http.JpsAbsFilter.runJaasMode(JpsAbsFilter.java:124)
at oracle.security.jps.ee.http.JpsAbsFilter.doFilter(JpsAbsFilter.java:232)
at oracle.security.jps.ee.http.JpsFilter.doFilter(JpsFilter.java:94)
at weblogic.servlet.internal.FilterChainImpl.doFilter(FilterChainImpl.java:78)
at oracle.dms.servlet.DMSServletFilter.doFilter(DMSServletFilter.java:224)
at weblogic.servlet.internal.FilterChainImpl.doFilter(FilterChainImpl.java:78)
at weblogic.servlet.internal.RequestEventsFilter.doFilter(RequestEventsFilter.java:32)
at weblogic.servlet.internal.FilterChainImpl.doFilter(FilterChainImpl.java:78)
at weblogic.servlet.internal.WebAppServletContext$ServletInvocationAction.wrapRun(WebAppServletContext.java:3654)
at weblogic.servlet.internal.WebAppServletContext$ServletInvocationAction.run(WebAppServletContext.java:3620)
at weblogic.security.acl.internal.AuthenticatedSubject.doAs(AuthenticatedSubject.java:326)
at weblogic.security.service.SecurityManager.runAsForUserCode(SecurityManager.java:196)
at weblogic.servlet.provider.WlsSecurityProvider.runAsForUserCode(WlsSecurityProvider.java:203)
at weblogic.servlet.provider.WlsSubjectHandle.run(WlsSubjectHandle.java:71)
at weblogic.servlet.internal.WebAppServletContext.doSecuredExecute(WebAppServletContext.java:2423)
at weblogic.servlet.internal.WebAppServletContext.securedExecute(WebAppServletContext.java:2280)
at weblogic.servlet.internal.WebAppServletContext.execute(WebAppServletContext.java:2258)
at weblogic.servlet.internal.ServletRequestImpl.runInternal(ServletRequestImpl.java:1626)
at weblogic.servlet.internal.ServletRequestImpl.run(ServletRequestImpl.java:1586)
at weblogic.servlet.provider.ContainerSupportProviderImpl$WlsRequestExecutor.run(ContainerSupportProviderImpl.java:270)
at weblogic.invocation.ComponentInvocationContextManager._runAs(ComponentInvocationContextManager.java:348)
at weblogic.invocation.ComponentInvocationContextManager.runAs(ComponentInvocationContextManager.java:333)
at weblogic.work.LivePartitionUtility.doRunWorkUnderContext(LivePartitionUtility.java:54)
at weblogic.work.PartitionUtility.runWorkUnderContext(PartitionUtility.java:41)
at weblogic.work.SelfTuningWorkManagerImpl.runWorkUnderContext(SelfTuningWorkManagerImpl.java:617)
at weblogic.work.ExecuteThread.execute(ExecuteThread.java:397)
at weblogic.work.ExecuteThread.run(ExecuteThread.java:346)
Caused by: oracle.adf.model.adapter.AdapterException: JBO-29114 ADFContext is not setup to process messages for this exception. Use the exception stack trace and error code to investigate the root cause of this exception. Root cause error code is DCA-40002. Error message parameters are {0=null}
at oracle.adfinternal.model.adapter.webservice.WSModel.createModelFromWSDL(WSModel.java:725)
at oracle.adfinternal.model.adapter.webservice.WSModel.(WSModel.java:215)
at oracle.adfinternal.model.adapter.webservice.WSDefinition.createWSModel(WSDefinition.java:1922)
at oracle.adfinternal.model.adapter.webservice.WSDefinition.loadDefaultModel(WSDefinition.java:1917)
at oracle.adfinternal.model.adapter.webservice.WSDefinition.loadDCOperations(WSDefinition.java:1905)
at oracle.adfinternal.model.adapter.webservice.WSDefinition.createDataControl(WSDefinition.java:800)
at oracle.adf.model.adapter.DataControlFactoryImpl.createSession(DataControlFactoryImpl.java:202)
... 111 more
Try using https://www.wsdl-analyzer.com/ to analyze whether the WSDL is correct, whether the service is up, etc.
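If you want a quick local check before reaching for an online tool, a minimal sketch along these lines tells you whether the service is reachable and the WSDL is at least well-formed XML (Python is used here only for convenience, and the URL is a placeholder, not taken from the question):
import urllib.request
import xml.etree.ElementTree as ET

# Placeholder URL -- replace with the WSDL your web service data control points at
# (see the connection definition in the model project).
WSDL_URL = "http://example.com/MyService?wsdl"

with urllib.request.urlopen(WSDL_URL, timeout=10) as response:
    body = response.read()

# Either step failing points at the root cause: the service being down,
# or the WSDL document not being valid XML.
ET.fromstring(body)
print("WSDL fetched and parsed OK (%d bytes)" % len(body))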
・Python 3.8
・JDK 11
I've started learning PyFlink and wrote some code by following the official documentation at https://nightlies.apache.org/flink/flink-docs-master/docs/dev/python/datastream/intro_to_datastream_api/
And here is my code:
from pyflink.common.serialization import JsonRowDeserializationSchema, JsonRowSerializationSchema
from pyflink.common import WatermarkStrategy, Row
from pyflink.common.serialization import Encoder
from pyflink.common.typeinfo import Types
from pyflink.datastream import StreamExecutionEnvironment
from pyflink.datastream.connectors import FlinkKafkaConsumer, FlinkKafkaProducer


def streaming():
    env = StreamExecutionEnvironment.get_execution_environment()

    deserialization_schema = JsonRowDeserializationSchema.builder().type_info(
        type_info=Types.ROW([Types.INT(), Types.STRING()])).build()

    kafka_consumer = FlinkKafkaConsumer(
        topics='test',
        deserialization_schema=deserialization_schema,
        properties={'bootstrap.servers': 'localhost:9092', 'group.id': 'test_group'})

    ds = env.add_source(kafka_consumer)
    ds = ds.map(lambda a: Row(a % 4, 1),
                output_type=Types.ROW([Types.LONG(), Types.LONG()])) \
           .key_by(lambda a: a[0]) \
           .reduce(lambda a, b: Row(a[0], a[1] + b[1]))

    serialization_schema = JsonRowSerializationSchema.builder().with_type_info(
        type_info=Types.ROW([Types.LONG(), Types.LONG()])).build()

    kafka_sink = FlinkKafkaProducer(
        topic='test_sink_topic',
        serialization_schema=serialization_schema,
        producer_config={'bootstrap.servers': 'localhost:9092',
                         'group.id': 'test_group'})

    ds.add_sink(kafka_sink)

    env.execute('datastream_api_demo')


if __name__ == '__main__':
    streaming()
At first it told me to specify the jar files, so I downloaded the flink-connector-kafka and kafka-clients jars from https://mvnrepository.com/artifact/org.apache.flink and put them into the pyflink/lib directory.
Now, at the next step, I'm getting this error:
(pyflink_demo) C:\work\pyflink_demo>python Kafka_stream_Kafka.py
WARNING: An illegal reflective access operation has occurred
WARNING: Illegal reflective access by org.apache.flink.api.java.ClosureCleaner (file:/C:/work/pyflink_demo/Lib/site-packages/pyflink/lib/flink-dist_2.11-1.14.4.jar) to field java.util.Properties.serialVersionUID
WARNING: Please consider reporting this to the maintainers of org.apache.flink.api.java.ClosureCleaner
WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations
WARNING: All illegal access operations will be denied in a future release
Traceback (most recent call last):
File "Kafka_stream_Kafka.py", line 38, in <module>
streaming()
File "Kafka_stream_Kafka.py", line 33, in streaming
env.execute('datastream_api_demo')
File "C:\work\pyflink_demo\lib\site-packages\pyflink\datastream\stream_execution_environment.py", line 691, in execute
return JobExecutionResult(self._j_stream_execution_environment.execute(j_stream_graph))
File "C:\work\pyflink_demo\lib\site-packages\py4j\java_gateway.py", line 1285, in __call__
return_value = get_return_value(
File "C:\work\pyflink_demo\lib\site-packages\pyflink\util\exceptions.py", line 146, in deco
return f(*a, **kw)
File "C:\work\pyflink_demo\lib\site-packages\py4j\protocol.py", line 326, in get_return_value
raise Py4JJavaError(
py4j.protocol.Py4JJavaError: An error occurred while calling o0.execute.
: org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
at org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:144)
at org.apache.flink.runtime.minicluster.MiniClusterJobClient.lambda$getJobExecutionResult$3(MiniClusterJobClient.java:137)
at java.base/java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:642)
at java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:506)
at java.base/java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:2073)
at org.apache.flink.runtime.rpc.akka.AkkaInvocationHandler.lambda$invokeRpc$1(AkkaInvocationHandler.java:258)
at java.base/java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:859)
at java.base/java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:837)
at java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:506)
at java.base/java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:2073)
at org.apache.flink.util.concurrent.FutureUtils.doForward(FutureUtils.java:1389)
at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$null$1(ClassLoadingUtils.java:93)
at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68)
at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$guardCompletionWithContextClassLoader$2(ClassLoadingUtils.java:92)
at java.base/java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:859)
at java.base/java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:837)
at java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:506)
at java.base/java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:2073)
at org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$1.onComplete(AkkaFutureUtils.java:47)
at akka.dispatch.OnComplete.internal(Future.scala:300)
at akka.dispatch.OnComplete.internal(Future.scala:297)
at akka.dispatch.japi$CallbackBridge.apply(Future.scala:224)
at akka.dispatch.japi$CallbackBridge.apply(Future.scala:221)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60)
at org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$DirectExecutionContext.execute(AkkaFutureUtils.java:65)
at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:68)
at scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:284)
at scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:284)
at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)
at akka.pattern.PromiseActorRef.$bang(AskSupport.scala:621)
at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:24)
at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:23)
at scala.concurrent.Future.$anonfun$andThen$1(Future.scala:532)
at scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:29)
at scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:29)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60)
at akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:63)
at akka.dispatch.BatchingExecutor$BlockableBatch.$anonfun$run$1(BatchingExecutor.scala:100)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:81)
at akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:100)
at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:49)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:48)
at java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
at java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
at java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
at java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
at java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:183)
Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategy
at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:138)
at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:82)
at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:252)
at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:242)
at org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:233)
at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:684)
at org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:79)
at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:444)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.lambda$handleRpcInvocation$1(AkkaRpcActor.java:316)
at org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:83)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:314)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:217)
at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:78)
at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:163)
at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:24)
at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:20)
at scala.PartialFunction.applyOrElse(PartialFunction.scala:123)
at scala.PartialFunction.applyOrElse$(PartialFunction.scala:122)
at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:20)
at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
at akka.actor.Actor.aroundReceive(Actor.scala:537)
at akka.actor.Actor.aroundReceive$(Actor.scala:535)
at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:220)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:580)
at akka.actor.ActorCell.invoke(ActorCell.scala:548)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:270)
at akka.dispatch.Mailbox.run(Mailbox.scala:231)
at akka.dispatch.Mailbox.exec(Mailbox.scala:243)
... 5 more
Caused by: java.lang.RuntimeException: Failed to create stage bundle factory! INFO:root:Initializing Python harness: C:\work\pyflink_demo\lib\site-packages\pyflink\fn_execution\beam\beam_boot.py --id=4-1 --provision_endpoint=localhost:51794
INFO:root:Starting up Python harness in loopback mode.
at org.apache.flink.streaming.api.runners.python.beam.BeamPythonFunctionRunner.createStageBundleFactory(BeamPythonFunctionRunner.java:566)
at org.apache.flink.streaming.api.runners.python.beam.BeamPythonFunctionRunner.open(BeamPythonFunctionRunner.java:255)
at org.apache.flink.streaming.api.operators.python.AbstractPythonFunctionOperator.open(AbstractPythonFunctionOperator.java:131)
at org.apache.flink.streaming.api.operators.python.AbstractOneInputPythonFunctionOperator.open(AbstractOneInputPythonFunctionOperator.java:116)
at org.apache.flink.streaming.api.operators.python.PythonProcessOperator.open(PythonProcessOperator.java:59)
at org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.initializeStateAndOpenOperators(RegularOperatorChain.java:110)
at org.apache.flink.streaming.runtime.tasks.StreamTask.restoreGates(StreamTask.java:711)
at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.call(StreamTaskActionExecutor.java:100)
at org.apache.flink.streaming.runtime.tasks.StreamTask.restoreInternal(StreamTask.java:687)
at org.apache.flink.streaming.runtime.tasks.StreamTask.restore(StreamTask.java:654)
at org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:927)
at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:575)
at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.UncheckedExecutionException: java.lang.IllegalStateException: Process died with exit code 0
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2050)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.get(LocalCache.java:3952)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3974)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4958)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4964)
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$SimpleStageBundleFactory.<init>(DefaultJobBundleFactory.java:451)
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$SimpleStageBundleFactory.<init>(DefaultJobBundleFactory.java:436)
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory.forStage(DefaultJobBundleFactory.java:303)
at org.apache.flink.streaming.api.runners.python.beam.BeamPythonFunctionRunner.createStageBundleFactory(BeamPythonFunctionRunner.java:564)
... 14 more
Caused by: java.lang.IllegalStateException: Process died with exit code 0
at org.apache.beam.runners.fnexecution.environment.ProcessManager$RunningProcess.isAliveOrThrow(ProcessManager.java:75)
at org.apache.beam.runners.fnexecution.environment.ProcessEnvironmentFactory.createEnvironment(ProcessEnvironmentFactory.java:112)
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$1.load(DefaultJobBundleFactory.java:252)
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$1.load(DefaultJobBundleFactory.java:231)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3528)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2277)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2154)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2044)
... 22 more
I tried to figure out what's going on and found a very similar question: What's wrong with my Pyflink setup that Python UDFs throw py4j exceptions?
It says the problem there was caused by a network proxy: the JVM and Python communicate over a local socket, so local communication must be configured to bypass the proxy.
I set the environment variable "no_proxy", but it doesn't work.
Could anyone provide a solution for this?
There is no useful information in the exception stack to help identify the problem. This should be caused by a known issue (FLINK-26543, already fixed but not yet released). The issue only occurs in loopback mode, which is enabled by default when executing the job locally.
For now, you could force the job to run in process mode instead of loopback mode by setting the environment variable _python_worker_execution_mode to process. After doing this, you should see the root cause of the failure.
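One way to do that is a sketch like the following, assuming the variable is picked up from the environment of the Python process that launches the job; exporting it in the shell before running the script works as well:
import os

# Ask PyFlink to run the Python workers in a separate process instead of
# loopback mode, so the real failure from the worker surfaces in the logs.
os.environ['_python_worker_execution_mode'] = 'process'

from pyflink.datastream import StreamExecutionEnvironment

env = StreamExecutionEnvironment.get_execution_environment()
# ... build and execute the job as in the question ...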
Besides, there is also a small issue in your code. I guess you meant ds.map(lambda a: Row(a[0] % 4, 1), output_type=Types.ROW([Types.LONG(), Types.LONG()])) instead of ds.map(lambda a: Row(a % 4, 1), output_type=Types.ROW([Types.LONG(), Types.LONG()])), since the % operation is not supported on a Row object.
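In the context of the pipeline above, the corrected transformation would look like this (only the lambda passed to map changes):
ds = ds.map(lambda a: Row(a[0] % 4, 1),
            output_type=Types.ROW([Types.LONG(), Types.LONG()])) \
       .key_by(lambda a: a[0]) \
       .reduce(lambda a, b: Row(a[0], a[1] + b[1]))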
I have tried the script. I am not quite sure what caused the error. Try starting Kafka first and creating the topics before running the script, or start Kafka and run the script a second time after the first failure.
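If it helps, here is a minimal sketch for creating both topics up front; it assumes the kafka-python package, which is not part of the question's setup:
from kafka.admin import KafkaAdminClient, NewTopic

# Create the source and sink topics used by the job before starting it.
admin = KafkaAdminClient(bootstrap_servers='localhost:9092')
admin.create_topics([
    NewTopic(name='test', num_partitions=1, replication_factor=1),
    NewTopic(name='test_sink_topic', num_partitions=1, replication_factor=1),
])
admin.close()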
1. Why does "Unable to serialize DslActivationStatus state" happen?
2. How can I solve this error?
3. I have provided a screenshot of this error; please take a look. The error text is:
Unable to serialize DslActivationStatus state
com.intellij.util.xmlb.XmlSerializationException: Can't serialize instance of class org.jetbrains.plugins.groovy.dsl.DslActivationStatus$State
at com.intellij.configurationStore.XmlSerializer.serialize(xmlSerializer.kt:63)
at com.intellij.configurationStore.XmlSerializer.serialize$default(xmlSerializer.kt:47)
at com.intellij.configurationStore.SaveSessionBaseKt.serializeState(SaveSessionBase.kt:46)
at com.intellij.configurationStore.SaveSessionBase.setState(SaveSessionBase.kt:20)
at com.intellij.configurationStore.ComponentStoreImpl.commitComponent(ComponentStoreImpl.kt:283)
at com.intellij.configurationStore.ComponentStoreImpl.commitComponents$intellij_platform_configurationStore_impl(ComponentStoreImpl.kt:199)
at com.intellij.configurationStore.ComponentStoreWithExtraComponents.commitComponents$intellij_platform_configurationStore_impl(ComponentStoreWithExtraComponents.kt:89)
at com.intellij.configurationStore.ComponentStoreImpl.doCreateSaveSessionManagerAndCommitComponents$intellij_platform_configurationStore_impl(ComponentStoreImpl.kt:155)
at com.intellij.configurationStore.ComponentStoreImpl$createSaveSessionManagerAndSaveComponents$3.invokeSuspend(ComponentStoreImpl.kt:146)
at kotlin.coroutines.jvm.internal.BaseContinuationImpl.resumeWith(ContinuationImpl.kt:33)
at kotlinx.coroutines.DispatchedTask.run(Dispatched.kt:236)
at com.intellij.openapi.application.TransactionGuardImpl$2.run(TransactionGuardImpl.java:315)
at com.intellij.openapi.application.impl.LaterInvocator$FlushQueue.doRun(LaterInvocator.java:435)
at com.intellij.openapi.application.impl.LaterInvocator$FlushQueue.runNextEvent(LaterInvocator.java:419)
at com.intellij.openapi.application.impl.LaterInvocator$FlushQueue.run(LaterInvocator.java:403)
at java.awt.event.InvocationEvent.dispatch(InvocationEvent.java:311)
at java.awt.EventQueue.dispatchEventImpl(EventQueue.java:764)
at java.awt.EventQueue.access$500(EventQueue.java:98)
at java.awt.EventQueue$3.run(EventQueue.java:715)
at java.awt.EventQueue$3.run(EventQueue.java:709)
at java.security.AccessController.doPrivileged(Native Method)
at java.security.ProtectionDomain$JavaSecurityAccessImpl.doIntersectionPrivilege(ProtectionDomain.java:74)
at java.awt.EventQueue.dispatchEvent(EventQueue.java:734)
at com.intellij.ide.IdeEventQueue.defaultDispatchEvent(IdeEventQueue.java:757)
at com.intellij.ide.IdeEventQueue._dispatchEvent(IdeEventQueue.java:706)
at com.intellij.ide.IdeEventQueue.dispatchEvent(IdeEventQueue.java:375)
at java.awt.EventDispatchThread.pumpOneEventForFilters(EventDispatchThread.java:205)
at java.awt.EventDispatchThread.pumpEventsForFilter(EventDispatchThread.java:116)
at java.awt.EventDispatchThread.pumpEventsForHierarchy(EventDispatchThread.java:105)
at java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:101)
at java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:93)
at java.awt.EventDispatchThread.run(EventDispatchThread.java:82)
Caused by: org.jdom.IllegalDataException: The data "org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
categoryTransformgdsl: 1: unexpected char: 0x0 # line 1, column 5.
����
In your function list you are calling the functions at list-creation time. This makes Flutter throw this exception (which is correct, because Navigator tries to route during the build phase of the other widget).
Apart from that, the code looks quite odd to me, but to get it to work you need to use this list, which stores references to the functions instead of the results of calling them:
final List<Function> ontaps = [
functionOne,
functionTwo,
functionThree,
functionFour,
functionFive,
functionSix,
functionSeven,
functionEight,
functionNine,
functionTen,
functionEleven,
functionTwelve,
functionThirteen,
functionFourteen,
functionFifteen
];
I want to load data into Titan, using HBase as the backend, so I changed the example from https://github.com/thinkaurelius/titan/blob/titan10/titan-core/src/main/java/com/thinkaurelius/titan/example/GraphOfTheGodsFactory.java a little. But many exceptions occur when I connect.
Exception in thread "main" com.thinkaurelius.titan.core.TitanException: Could not open global configuration
at com.thinkaurelius.titan.diskstorage.Backend.getStandaloneGlobalConfiguration(Backend.java:451)
at com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration.(GraphDatabaseConfiguration.java:1322)
at com.thinkaurelius.titan.core.TitanFactory.open(TitanFactory.java:94)
at com.thinkaurelius.titan.core.TitanFactory.open(TitanFactory.java:84)
at com.thinkaurelius.titan.core.TitanFactory$Builder.open(TitanFactory.java:139)
at titantest.TitanLoad.main(TitanLoad.java:106)
Caused by: com.thinkaurelius.titan.diskstorage.TemporaryBackendException: Temporary failure in storage backend
at com.thinkaurelius.titan.diskstorage.hbase.HBaseStoreManager.ensureTableExists(HBaseStoreManager.java:759)
at com.thinkaurelius.titan.diskstorage.hbase.HBaseStoreManager.ensureColumnFamilyExists(HBaseStoreManager.java:831)
at com.thinkaurelius.titan.diskstorage.hbase.HBaseStoreManager.openDatabase(HBaseStoreManager.java:456)
at com.thinkaurelius.titan.diskstorage.keycolumnvalue.KeyColumnValueStoreManager.openDatabase(KeyColumnValueStoreManager.java:29)
at com.thinkaurelius.titan.diskstorage.Backend.getStandaloneGlobalConfiguration(Backend.java:449)
... 5 more
Caused by: org.apache.hadoop.hbase.DoNotRetryIOException: java.lang.IllegalAccessError: tried to access method com.google.common.base.Stopwatch.()V from class org.apache.hadoop.hbase.zookeeper.MetaTableLocator
at org.apache.hadoop.hbase.client.RpcRetryingCaller.translateException(RpcRetryingCaller.java:229)
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:202)
at org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:299)
at org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:278)
at org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:140)
at org.apache.hadoop.hbase.client.ClientScanner.(ClientScanner.java:135)
at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:845)
at org.apache.hadoop.hbase.MetaTableAccessor.fullScan(MetaTableAccessor.java:600)
at org.apache.hadoop.hbase.MetaTableAccessor.tableExists(MetaTableAccessor.java:364)
at org.apache.hadoop.hbase.client.HBaseAdmin.tableExists(HBaseAdmin.java:281)
at com.thinkaurelius.titan.diskstorage.hbase.HBaseAdmin1_0.tableExists(HBaseAdmin1_0.java:70)
at com.thinkaurelius.titan.diskstorage.hbase.HBaseStoreManager.ensureTableExists(HBaseStoreManager.java:753)
... 9 more
Caused by: java.lang.IllegalAccessError: tried to access method com.google.common.base.Stopwatch.()V from class org.apache.hadoop.hbase.zookeeper.MetaTableLocator
at org.apache.hadoop.hbase.zookeeper.MetaTableLocator.blockUntilAvailable(MetaTableLocator.java:434)
at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getMetaRegionLocation(ZooKeeperRegistry.java:60)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateMeta(ConnectionManager.java:1139)
at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.locateRegion(ConnectionManager.java:1106)
at org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.getRegionLocations(RpcRetryingCallerWithReadReplicas.java:293)
at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:147)
at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:56)
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
... 19 more
Here is the code:
TitanGraph g = TitanFactory.build()
        .set("storage.backend", "hbase")
        .set("storage.hostname", "192.168.200.121,192.168.200.115")
        .set("storage.hbase.table", "mytitan")
        .set("storage.hbase.ext.zookeeper.znode.parent", "/hbase-unsecure")
        .set("storage.port", 2181)
        .open();
By the way, I can connect to the HBase cluster directly; it looks fine.
Stack trace:
javax.xml.ws.soap.SOAPFaultException: Could not parse message.
at org.apache.cxf.jaxws.JaxWsClientProxy.invoke(JaxWsClientProxy.java:143)
Caused by: org.codehaus.stax2.typed.TypedXMLStreamException: ParseError at [row,col]:[2,603]
Message: Element content can not contain child START_ELEMENT when using Typed Access methods
at com.ctc.wstx.sr.BasicStreamReader._constructTypeException(BasicStreamReader.java:5482)
at com.ctc.wstx.sr.BasicStreamReader._constructUnexpectedInTyped(BasicStreamReader.java:5475)
at com.ctc.wstx.sr.BasicStreamReader.getElementText(BasicStreamReader.java:658)
at org.apache.cxf.binding.soap.interceptor.Soap11FaultInInterceptor.unmarshalFault(Soap11FaultInInterceptor.java:61)
at org.apache.cxf.binding.soap.interceptor.Soap11FaultInInterceptor.handleMessage(Soap11FaultInInterceptor.java:46)
at org.apache.cxf.binding.soap.interceptor.Soap11FaultInInterceptor.handleMessage(Soap11FaultInInterceptor.java:35)
at org.apache.cxf.phase.PhaseInterceptorChain.doIntercept(PhaseInterceptorChain.java:236)
at org.apache.cxf.interceptor.AbstractFaultChainInitiatorObserver.onMessage(AbstractFaultChainInitiatorObserver.java:96)
at org.apache.cxf.binding.soap.interceptor.CheckFaultInterceptor.handleMessage(CheckFaultInterceptor.java:69)
at org.apache.cxf.binding.soap.interceptor.CheckFaultInterceptor.handleMessage(CheckFaultInterceptor.java:34)
at org.apache.cxf.phase.PhaseInterceptorChain.doIntercept(PhaseInterceptorChain.java:236)
at org.apache.cxf.endpoint.ClientImpl.onMessage(ClientImpl.java:658)
at org.apache.cxf.transport.http.HTTPConduit$WrappedOutputStream.handleResponseInternal(HTTPConduit.java:2139)
at org.apache.cxf.transport.http.HTTPConduit$WrappedOutputStream.handleResponse(HTTPConduit.java:2022)
at org.apache.cxf.transport.http.HTTPConduit$WrappedOutputStream.close(HTTPConduit.java:1947)
at org.apache.cxf.transport.AbstractConduit.close(AbstractConduit.java:66)
at org.apache.cxf.transport.http.HTTPConduit.close(HTTPConduit.java:632)
at org.apache.cxf.interceptor.MessageSenderInterceptor$MessageSenderEndingInterceptor.handleMessage(MessageSenderInterceptor.java:62)
at org.apache.cxf.phase.PhaseInterceptorChain.doIntercept(PhaseInterceptorChain.java:236)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:472)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:302)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:254)
at org.apache.cxf.frontend.ClientProxy.invokeSync(ClientProxy.java:73)
at org.apache.cxf.jaxws.JaxWsClientProxy.invoke(JaxWsClientProxy.java:123)
... 57 more
Sometimes when a ScalaTest test fails, the Play test framework does not report the cause of the failure; instead I see the following. Any ideas why?
Reporter completed abruptly with an exception after receiving event:
TestFailed(org.scalatest.events.Ordinal#6db,java.util.NoSuchElementException,AsgUpdateActorTest,Some(com.netflix.tachyon.nac.AsgUpdateActorTest),parse xml,Some(java.util.NoSuchElementException),Some(42),None,Some(<function7>),None,play-thread-1,1324496590792).
java.lang.NullPointerException
at play.test.TestEngine$TestResults.add(TestEngine.java:191)
at org.scalatest.tools.PlayReporter.apply(ScalaTestRunner.scala:122)
at org.scalatest.DispatchReporter$$anonfun$1$$anonfun$apply$1$$anonfun$apply$2.apply(DispatchReporter.scala:152)
at org.scalatest.DispatchReporter$$anonfun$1$$anonfun$apply$1$$anonfun$apply$2.apply(DispatchReporter.scala:151)
at scala.collection.LinearSeqOptimized$class.foreach(LinearSeqOptimized.scala:61)
at scala.collection.immutable.List.foreach(List.scala:45)
at org.scalatest.DispatchReporter$$anonfun$1$$anonfun$apply$1.apply(DispatchReporter.scala:151)
at org.scalatest.DispatchReporter$$anonfun$1$$anonfun$apply$1.apply(DispatchReporter.scala:101)
at scala.actors.Actor$class.receive(Actor.scala:524)
at scala.actors.Actor$$anon$1.receive(Actor.scala:134)
at scala.actors.Actor$.receive(Actor.scala:189)
at org.scalatest.DispatchReporter$$anonfun$1.apply(DispatchReporter.scala:101)
at org.scalatest.DispatchReporter$$anonfun$1.apply(DispatchReporter.scala:50)
at scala.Function0$class.apply$mcV$sp(Function0.scala:39)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:17)
at scala.actors.Actor$$anon$1.act(Actor.scala:135)
at scala.actors.Reactor$$anonfun$dostart$1.apply(Reactor.scala:222)
at scala.actors.Reactor$$anonfun$dostart$1.apply(Reactor.scala:222)
at scala.actors.ReactorTask.run(ReactorTask.scala:36)
at scala.concurrent.forkjoin.ForkJoinPool$AdaptedRunnable.exec(ForkJoinPool.java:611)
at scala.concurrent.forkjoin.ForkJoinTask.quietlyExec(ForkJoinTask.java:422)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.mainLoop(ForkJoinWorkerThread.java:340)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:325)
11:43:11,038 ERROR ~
#68nef9k71
Cannot ouput XML unit output
Template execution error (In {module:testrunner}/app/views/TestRunner/results-xunit.xml around line 23)
Execution error occured in template {module:testrunner}/app/views/TestRunner/results-xunit.xml. Exception raised was NullPointerException : Cannot invoke method escapeXml() on null object.
play.exceptions.TemplateExecutionException: Cannot invoke method escapeXml() on null object
at play.templates.BaseTemplate.throwException(BaseTemplate.java:86)
at play.templates.GroovyTemplate.internalRender(GroovyTemplate.java:257)
at play.templates.Template.render(Template.java:26)
at play.templates.GroovyTemplate.render(GroovyTemplate.java:187)
at controllers.TestRunner.run(TestRunner.java:79)
at play.mvc.ActionInvoker.invokeWithContinuation(ActionInvoker.java:548)
at play.mvc.ActionInvoker.invoke(ActionInvoker.java:502)
at play.mvc.ActionInvoker.invokeControllerMethod(ActionInvoker.java:478)
at play.mvc.ActionInvoker.invokeControllerMethod(ActionInvoker.java:473)
at play.mvc.ActionInvoker.invoke(ActionInvoker.java:161)
at Invocation.HTTP Request(Play!)
Caused by: java.lang.NullPointerException: Cannot invoke method escapeXml() on null object
at {module:testrunner}/app/views/TestRunner/results-xunit.xml.(line:23)
at play.templates.GroovyTemplate.internalRender(GroovyTemplate.java:232)
... 9 more
Update:
It happened because of the following exception, which the test had triggered. Is the test framework supposed to blow up when a test throws an exception?
java.util.NoSuchElementException
at scala.collection.IterableLike$class.head(IterableLike.scala:101)
at scala.xml.NodeSeq.head(NodeSeq.scala:43)