I am getting a Py4JJavaError when I try to write a PySpark DataFrame to a CSV file. It creates a folder in the path with the name of my CSV file, but there is nothing inside the folder. Here is my code:
students.write.mode("overwrite").csv('write_test.csv')
When I run that code I get the following error:
---------------------------------------------------------------------------
Py4JJavaError Traceback (most recent call last)
C:\Users\MICHAE~1\AppData\Local\Temp/ipykernel_3904/767700963.py in <module>
----> 1 students.write.mode("overwrite").csv('write_test.csv')
~\anaconda3\lib\site-packages\pyspark\sql\readwriter.py in csv(self, path, mode, compression, sep, quote, escape, header, nullValue, escapeQuotes, quoteAll, dateFormat, timestampFormat, ignoreLeadingWhiteSpace, ignoreTrailingWhiteSpace, charToEscapeQuoteEscaping, encoding, emptyValue, lineSep)
953 charToEscapeQuoteEscaping=charToEscapeQuoteEscaping,
954 encoding=encoding, emptyValue=emptyValue, lineSep=lineSep)
--> 955 self._jwrite.csv(path)
956
957 def orc(self, path, mode=None, partitionBy=None, compression=None):
~\anaconda3\lib\site-packages\py4j\java_gateway.py in __call__(self, *args)
1319
1320 answer = self.gateway_client.send_command(command)
-> 1321 return_value = get_return_value(
1322 answer, self.gateway_client, self.target_id, self.name)
1323
~\anaconda3\lib\site-packages\pyspark\sql\utils.py in deco(*a, **kw)
109 def deco(*a, **kw):
110 try:
--> 111 return f(*a, **kw)
112 except py4j.protocol.Py4JJavaError as e:
113 converted = convert_exception(e.java_exception)
~\anaconda3\lib\site-packages\py4j\protocol.py in get_return_value(answer, gateway_client, target_id, name)
324 value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
325 if answer[1] == REFERENCE_TYPE:
--> 326 raise Py4JJavaError(
327 "An error occurred while calling {0}{1}{2}.\n".
328 format(target_id, ".", name), value)
Py4JJavaError: An error occurred while calling o264.csv.
: org.apache.spark.SparkException: Job aborted.
at org.apache.spark.sql.errors.QueryExecutionErrors$.jobAbortedError(QueryExecutionErrors.scala:496)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:251)
at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:186)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:113)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:111)
at org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:125)
at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:110)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:110)
at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:106)
at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:82)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:457)
at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:106)
at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:93)
at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:91)
at org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:128)
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:848)
at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:382)
at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:355)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:239)
at org.apache.spark.sql.DataFrameWriter.csv(DataFrameWriter.scala:839)
at sun.reflect.GeneratedMethodAccessor59.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 43.0 failed 1 times, most recent failure: Lost task 0.0 in stage 43.0 (TID 39) (AXIOM-NINJA executor driver): java.io.IOException: (null) entry in command string: null chmod 0644 C:\Users\MichaelDallas\Desktop\PySpark_Course\Jupyter\Code_Along\write_test.csv\_temporary\0\_temporary\attempt_202201281318408036701241482801437_0043_m_000000_39\part-00000-e4b8b459-f358-464a-ac99-6d80a2741fb7-c000.csv
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:869)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:852)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
at org.apache.spark.sql.execution.datasources.CodecStreams$.createOutputStream(CodecStreams.scala:81)
at org.apache.spark.sql.execution.datasources.CodecStreams$.createOutputStreamWriter(CodecStreams.scala:92)
at org.apache.spark.sql.execution.datasources.csv.CsvOutputWriter.<init>(CsvOutputWriter.scala:38)
at org.apache.spark.sql.execution.datasources.csv.CSVFileFormat$$anon$1.newInstance(CSVFileFormat.scala:85)
at org.apache.spark.sql.execution.datasources.SingleDirectoryDataWriter.newOutputWriter(FileFormatDataWriter.scala:161)
at org.apache.spark.sql.execution.datasources.SingleDirectoryDataWriter.<init>(FileFormatDataWriter.scala:146)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.executeTask(FileFormatWriter.scala:290)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.$anonfun$write$16(FileFormatWriter.scala:229)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:131)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:506)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1462)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:509)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2403)
at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2352)
at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2351)
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2351)
at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1109)
at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1109)
at scala.Option.foreach(Option.scala:407)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1109)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2591)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2533)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2522)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:898)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2214)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:218)
... 41 more
Caused by: java.io.IOException: (null) entry in command string: null chmod 0644 C:\Users\MichaelDallas\Desktop\PySpark_Course\Jupyter\Code_Along\write_test.csv\_temporary\0\_temporary\attempt_202201281318408036701241482801437_0043_m_000000_39\part-00000-e4b8b459-f358-464a-ac99-6d80a2741fb7-c000.csv
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:869)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:852)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
at org.apache.spark.sql.execution.datasources.CodecStreams$.createOutputStream(CodecStreams.scala:81)
at org.apache.spark.sql.execution.datasources.CodecStreams$.createOutputStreamWriter(CodecStreams.scala:92)
at org.apache.spark.sql.execution.datasources.csv.CsvOutputWriter.<init>(CsvOutputWriter.scala:38)
at org.apache.spark.sql.execution.datasources.csv.CSVFileFormat$$anon$1.newInstance(CSVFileFormat.scala:85)
at org.apache.spark.sql.execution.datasources.SingleDirectoryDataWriter.newOutputWriter(FileFormatDataWriter.scala:161)
at org.apache.spark.sql.execution.datasources.SingleDirectoryDataWriter.<init>(FileFormatDataWriter.scala:146)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.executeTask(FileFormatWriter.scala:290)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.$anonfun$write$16(FileFormatWriter.scala:229)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:131)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:506)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1462)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:509)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
... 1 more
I was able to get it to work by deleting the hadoop.dll file from the folder that contained winutils.exe and making sure hadoop.dll was located in my System32 folder. I verified that the Hadoop, Spark, and Java JDK installations were completed correctly and that the environment variables were pointing to the correct directories. Also, the Java 8 JDK seems to work best with the Hadoop 2.7 configuration.
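For anyone hitting the same "(null) entry in command string" error: the usual root cause is that Hadoop's local-filesystem writer on Windows needs winutils.exe (and a matching hadoop.dll), plus a HADOOP_HOME that points at them. A minimal sketch of the setup, assuming winutils.exe lives under C:\hadoop\bin (the path is an example, not a requirement); HADOOP_HOME must be set before the JVM starts, i.e. before the SparkSession is created:
import os

# Point Hadoop's native layer at the folder that contains winutils.exe
# (C:\hadoop is an example location; use wherever you installed it).
os.environ["HADOOP_HOME"] = r"C:\hadoop"
os.environ["PATH"] += os.pathsep + r"C:\hadoop\bin"

from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("csv_write_test").getOrCreate()
students = spark.createDataFrame([(1, "Alice"), (2, "Bob")], ["id", "name"])

# Note: Spark writes a directory named write_test.csv containing part files,
# not a single CSV file of that name.
students.write.mode("overwrite").option("header", True).csv("write_test.csv")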
I am facing an issue when starting the Kafka server on my local machine (Windows 10) using the command
.\bin\windows\kafka-server-start.bat .\config\server.properties
I am getting the error shown below. I already have the ZooKeeper server running.
[2020-07-27 16:08:05,144] INFO Registered kafka:type=kafka.Log4jController MBean (kafka.utils.Log4jControllerRegistration$)
[2020-07-27 16:08:05,394] ERROR Exiting Kafka due to fatal exception (kafka.Kafka$)
java.lang.VerifyError: Uninitialized object exists on backward branch 79
Exception Details:
Location:
scala/util/matching/Regex.unapplySeq(Lscala/util/matching/Regex$Match;)Lscala/Option; #126: goto
Reason:
Error exists in the bytecode
Bytecode:
0000000: 2bb6 00b5 c700 07b2 0080 b02b b600 b8b6
0000010: 00b9 2ab6 004b 4d59 c700 0b57 2cc6 000d
0000020: a700 752c b600 bd99 006e b200 3757 bb00
0000030: 5559 b200 5a57 2bb6 00be 3ebb 0060 59b7
0000040: 0064 3a04 1904 1db9 006a 0200 0336 0515
0000050: 051d a200 2f2b 1505 0460 b600 bf3a 0819
0000060: 0801 3a08 3a07 1907 013a 073a 0619 0419
0000070: 06b6 0072 5701 3a06 1505 0460 3605 a7ff
0000080: d119 04b9 0076 0100 c000 7801 3a04 013a
0000090: 06b7 007b b02a 2bb6 00b5 b600 c1b0
Stackmap Table:
same_frame(#11)
full_frame(#35,{Object[#2],Object[#24],Object[#62]},{Object[#62]})
same_frame(#42)
full_frame(#79,{Object[#2],Object[#24],Object[#62],Integer,Object[#96],Integer},{Uninitialized[#46],Uninitialized[#46]})
full_frame(#129,{Object[#2],Object[#24],Object[#62],Integer,Object[#96],Integer},{Uninitialized[#46],Uninitialized[#46]})
chop_frame(#149,3)
at scala.collection.StringOps$.r$extension(StringOps.scala:843)
at kafka.cluster.EndPoint$.<clinit>(EndPoint.scala:29)
at kafka.server.Defaults$.<clinit>(KafkaConfig.scala:79)
at kafka.server.KafkaConfig$.<clinit>(KafkaConfig.scala:961)
at kafka.metrics.KafkaMetricsConfig.<init>(KafkaMetricsConfig.scala:33)
at kafka.metrics.KafkaMetricsReporter$.startReporters(KafkaMetricsReporter.scala:62)
at kafka.server.KafkaServerStartable$.fromProps(KafkaServerStartable.scala:33)
at kafka.Kafka$.main(Kafka.scala:68)
at kafka.Kafka.main(Kafka.scala)
Any ideas on how to fix this?
There seems to be an issue with your JAVA_HOME environment variable. Make sure JAVA_HOME points to your JRE (Java Runtime Environment) and not your JDK (Java Development Kit).
Just to be on the safe side (I'm not sure whether it's actually causing an incompatibility), consider using 1.8.0_241 for the Scala supporting libraries.
For example, my JAVA_HOME environment variable is: C:\Program Files\Java\jre1.8.0_241
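For example, to check and override it for the current shell before starting the broker (the path below is my example installation, not a required one):
rem Check what the broker will currently pick up
echo %JAVA_HOME%

rem Point it at the JRE for this session only, then start the broker
set JAVA_HOME=C:\Program Files\Java\jre1.8.0_241
.\bin\windows\kafka-server-start.bat .\config\server.properties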
This error prevents me from running the project.
Here is the version info:
~ ❯❯❯ java -version
java version "1.8.0_45"
Java(TM) SE Runtime Environment (build 1.8.0_45-b14)
Java HotSpot(TM) 64-Bit Server VM (build 25.45-b02, mixed mode)
~ ❯❯❯ scala -version
cat: /release: No such file or directory
Scala code runner version 2.12.1 -- Copyright 2002-2016, LAMP/EPFL and Lightbend, Inc.
I'm using IntelliJ 2016.3.
In an external terminal with sbt, it works!
Error:scalac: Error: org.jetbrains.jps.incremental.scala.remote.ServerException
java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:483)
at sbt.compiler.RawCompiler.apply(RawCompiler.scala:26)
at sbt.compiler.AnalyzingCompiler$$anonfun$compileSources$1$$anonfun$apply$2.apply(AnalyzingCompiler.scala:146)
at sbt.compiler.AnalyzingCompiler$$anonfun$compileSources$1$$anonfun$apply$2.apply(AnalyzingCompiler.scala:142)
at sbt.IO$.withTemporaryDirectory(IO.scala:291)
at sbt.compiler.AnalyzingCompiler$$anonfun$compileSources$1.apply(AnalyzingCompiler.scala:142)
at sbt.compiler.AnalyzingCompiler$$anonfun$compileSources$1.apply(AnalyzingCompiler.scala:139)
at sbt.IO$.withTemporaryDirectory(IO.scala:291)
at sbt.compiler.AnalyzingCompiler$.compileSources(AnalyzingCompiler.scala:139)
at sbt.compiler.IC$.compileInterfaceJar(IncrementalCompiler.scala:52)
at org.jetbrains.jps.incremental.scala.local.CompilerFactoryImpl$.org$jetbrains$jps$incremental$scala$local$CompilerFactoryImpl$$getOrCompileInterfaceJar(CompilerFactoryImpl.scala:93)
at org.jetbrains.jps.incremental.scala.local.CompilerFactoryImpl$$anonfun$getScalac$1.apply(CompilerFactoryImpl.scala:50)
at org.jetbrains.jps.incremental.scala.local.CompilerFactoryImpl$$anonfun$getScalac$1.apply(CompilerFactoryImpl.scala:49)
at scala.Option.map(Option.scala:146)
at org.jetbrains.jps.incremental.scala.local.CompilerFactoryImpl.getScalac(CompilerFactoryImpl.scala:49)
at org.jetbrains.jps.incremental.scala.local.CompilerFactoryImpl.createCompiler(CompilerFactoryImpl.scala:22)
at org.jetbrains.jps.incremental.scala.local.CachingFactory$$anonfun$createCompiler$1.apply(CachingFactory.scala:24)
at org.jetbrains.jps.incremental.scala.local.CachingFactory$$anonfun$createCompiler$1.apply(CachingFactory.scala:24)
at org.jetbrains.jps.incremental.scala.local.Cache$$anonfun$getOrUpdate$2.apply(Cache.scala:20)
at scala.Option.getOrElse(Option.scala:121)
at org.jetbrains.jps.incremental.scala.local.Cache.getOrUpdate(Cache.scala:19)
at org.jetbrains.jps.incremental.scala.local.CachingFactory.createCompiler(CachingFactory.scala:23)
at org.jetbrains.jps.incremental.scala.local.LocalServer.compile(LocalServer.scala:22)
at org.jetbrains.jps.incremental.scala.remote.Main$.make(Main.scala:67)
at org.jetbrains.jps.incremental.scala.remote.Main$.nailMain(Main.scala:24)
at org.jetbrains.jps.incremental.scala.remote.Main.nailMain(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:483)
at com.martiansoftware.nailgun.NGSession.run(NGSession.java:319)
Caused by: java.lang.VerifyError: Uninitialized object exists on backward branch 209
Exception Details:
Location:
scala/collection/immutable/HashMap$HashTrieMap.split()Lscala/collection/immutable/Seq; #249: goto
Reason:
Error exists in the bytecode
Bytecode:
0000000: 2ab6 0057 04a0 001e b200 afb2 00b4 04bd
0000010: 0002 5903 2a53 c000 b6b6 00ba b600 bec0
0000020: 00c0 b02a b600 31b8 003b 3c1b 04a4 015e
0000030: 1b05 6c3d 2a1b 056c 2ab6 0031 b700 c23e
0000040: 2ab6 0031 021d 787e 3604 2ab6 0031 0210
0000050: 201d 647c 7e36 05bb 0014 59b2 00b4 2ab6
0000060: 0033 c000 b6b6 00c6 b700 c91c b600 cd3a
0000070: 0619 06c6 001a 1906 b600 d1c0 007d 3a07
0000080: 1906 b600 d4c0 007d 3a08 a700 0dbb 00d6
0000090: 5919 06b7 00d9 bf19 073a 0919 083a 0abb
00000a0: 0002 5915 0419 09bb 0014 59b2 00b4 1909
00000b0: c000 b6b6 00c6 b700 c903 b800 df3a 0e3a
00000c0: 0d03 190d b900 e301 0019 0e3a 1136 1036
00000d0: 0f15 0f15 109f 0027 150f 0460 1510 190d
00000e0: 150f b900 e602 00c0 0005 3a17 1911 1917
00000f0: b800 ea3a 1136 1036 0fa7 ffd8 1911 b800
0000100: eeb7 005c 3a0b bb00 0259 1505 190a bb00
0000110: 1459 b200 b419 0ac0 00b6 b600 c6b7 00c9
0000120: 03b8 00df 3a13 3a12 0319 12b9 00e3 0100
0000130: 1913 3a16 3615 3614 1514 1515 9f00 2715
0000140: 1404 6015 1519 1215 14b9 00e6 0200 c000
0000150: 053a 1819 1619 18b8 00f1 3a16 3615 3614
0000160: a7ff d819 16b8 00ee b700 5c3a 0cb2 00f6
0000170: b200 b405 bd00 0259 0319 0b53 5904 190c
0000180: 53c0 00b6 b600 bab6 00f9 b02a b600 3303
0000190: 32b6 00fb b0
Stackmap Table:
same_frame(#35)
full_frame(#141,{Object[#2],Integer,Integer,Integer,Integer,Integer,Object[#105]},{})
append_frame(#151,Object[#125],Object[#125])
full_frame(#209,{Object[#2],Integer,Integer,Integer,Integer,Integer,Object[#105],Object[#125],Object[#125],Object[#125],Object[#125],Top,Top,Object[#20],Object[#55],Integer,Integer,Object[#103]},{Uninitialized[#159],Uninitialized[#159],Integer,Object[#125]})
full_frame(#252,{Object[#2],Integer,Integer,Integer,Integer,Integer,Object[#105],Object[#125],Object[#125],Object[#125],Object[#125],Top,Top,Object[#20],Object[#55],Integer,Integer,Object[#103]},{Uninitialized[#159],Uninitialized[#159],Integer,Object[#125]})
full_frame(#312,{Object[#2],Integer,Integer,Integer,Integer,Integer,Object[#105],Object[#125],Object[#125],Object[#125],Object[#125],Object[#2],Top,Object[#20],Object[#55],Integer,Integer,Object[#103],Object[#20],Object[#55],Integer,Integer,Object[#103]},{Uninitialized[#262],Uninitialized[#262],Integer,Object[#125]})
full_frame(#355,{Object[#2],Integer,Integer,Integer,Integer,Integer,Object[#105],Object[#125],Object[#125],Object[#125],Object[#125],Object[#2],Top,Object[#20],Object[#55],Integer,Integer,Object[#103],Object[#20],Object[#55],Integer,Integer,Object[#103]},{Uninitialized[#262],Uninitialized[#262],Integer,Object[#125]})
full_frame(#395,{Object[#2],Integer},{})
at scala.collection.immutable.HashMap$.scala$collection$immutable$HashMap$$makeHashTrieMap(HashMap.scala:179)
at scala.collection.immutable.HashMap$.scala$collection$immutable$HashMap$$makeHashTrieMap(HashMap.scala:183)
at scala.collection.immutable.HashMap$HashMap1.updated0(HashMap.scala:211)
at scala.collection.immutable.HashMap.$plus(HashMap.scala:59)
at scala.collection.immutable.HashMap.$plus(HashMap.scala:62)
at scala.collection.immutable.Map$Map4.updated(Map.scala:201)
at scala.collection.immutable.Map$Map4.$plus(Map.scala:202)
at scala.collection.immutable.Map$Map4.$plus(Map.scala:180)
at scala.collection.mutable.MapBuilder.$plus$eq(MapBuilder.scala:29)
at scala.collection.mutable.MapBuilder.$plus$eq(MapBuilder.scala:25)
at scala.collection.generic.Growable.$anonfun$$plus$plus$eq$1(Growable.scala:59)
at scala.collection.generic.Growable$$Lambda$19/1738535896.apply(Unknown Source)
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:52)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:59)
at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:50)
at scala.collection.mutable.MapBuilder.$plus$plus$eq(MapBuilder.scala:25)
at scala.collection.generic.GenMapFactory.apply(GenMapFactory.scala:48)
at scala.sys.package$.env(package.scala:61)
at scala.tools.nsc.settings.ScalaSettings.defaultClasspath(ScalaSettings.scala:30)
at scala.tools.nsc.settings.ScalaSettings.defaultClasspath$(ScalaSettings.scala:30)
at scala.tools.nsc.settings.MutableSettings.defaultClasspath(MutableSettings.scala:19)
at scala.tools.nsc.settings.ScalaSettings.$init$(ScalaSettings.scala:57)
at scala.tools.nsc.settings.MutableSettings.<init>(MutableSettings.scala:20)
at scala.tools.nsc.Settings.<init>(Settings.scala:12)
at scala.tools.nsc.Driver.process(Driver.scala:41)
at scala.tools.nsc.Main.process(Main.scala)
... 34 more
This question put me on the right track,
so I checked the JDK versions and, surprisingly, IntelliJ and Nailgun were not running with the same JDK.
~ ❯❯❯ ps -ef | grep java
501 2290 2017 0 4:12PM ?? 0:01.81 /Library/Java/JavaVirtualMachines/jdk1.8.0_11.jdk/Contents/Home/bin/java … org.jetbrains.plugins.scala.nailgun.NailgunRunner 3200 6a73f540-1a80-476d-96d2-83ab8faaad95
501 2291 2017 0 4:12PM ?? 0:02.15 /Library/Java/JavaVirtualMachines/jdk1.8.0_112.jdk/Contents/Home/bin/java …
So I searched in the IntelliJ preferences and found that the Scala Compile Server was configured with an old JDK: 1.8.0_11. Bam!
I just unchecked the Use external compile server checkbox.
Go to Edit Configurations in IntelliJ and, in the JRE drop-down, choose the JDK version that is installed on your system. Then do a rebuild. It worked for me.
Note that in Scala 2.12, the scala binary is a wrapper script.
It looks like your JAVA_HOME directory is not set correctly.
Take a look at the line that reads (inside $SCALA_HOME/bin/scala):
java_release="$(cat $JAVA_HOME/release | grep JAVA_VERSION)"
Based on your output, it looks like JAVA_HOME is not set. What you get is:
cat: /release: No such file or directory
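To fix it, export JAVA_HOME before invoking scala. For example (the java_home locations below are typical, not universal):
# macOS: resolve the active JDK's home directory
export JAVA_HOME="$(/usr/libexec/java_home)"

# Linux alternative: derive it from the java binary on the PATH
# export JAVA_HOME="$(dirname "$(dirname "$(readlink -f "$(which java)")")")"

scala -version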
Late to the party, but here is another possible solution for anyone for whom the above answers are not working:
Right-click on your project and select Open Module Settings (on macOS the shortcut is usually Cmd + Down Arrow).
In the sidebar, select Global Libraries.
Make sure the correct Scala SDK version is present. If not, delete the existing one with the - button and add the SDK corresponding to the Scala version in your pom.xml/build.sbt using the + button.
A mismatch between the Scala version that IntelliJ provides and the one defined in your build file can also cause this error.
In a nutshell: I have a C# application doing lots of mciSendString calls (via DllImport) to control WAV file playback (essentially open, play, pause, stop, status, and close). After running for a while, the application crashes without notice with an access violation.
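For context, the interop in question looks roughly like this; this is a minimal sketch of the standard winmm.dll P/Invoke, not my actual application code (the file path is an example):
using System;
using System.Text;
using System.Runtime.InteropServices;

class MciPlayer
{
    // Standard P/Invoke declaration for the MCI command-string interface.
    [DllImport("winmm.dll", CharSet = CharSet.Auto)]
    private static extern int mciSendString(
        string command, StringBuilder returnValue, int returnLength, IntPtr callbackHandle);

    static void Main()
    {
        // Open, play, and close a WAV file under the alias "snd".
        mciSendString(@"open ""C:\sounds\test.wav"" type waveaudio alias snd", null, 0, IntPtr.Zero);
        mciSendString("play snd wait", null, 0, IntPtr.Zero);
        mciSendString("close snd", null, 0, IntPtr.Zero);
    }
}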
Even though I'm running the app from VS2012, the exception is not caught by Visual Studio. Even with the 'break on exception' option enabled, I've had no luck debugging this from VS2012. So instead I've set up WER to generate crash dumps, and I'm using WinDbg with the psscor2.dll extension to debug them.
Running the following commands in sequence, this is what I get (shortened to the essentials for readability):
$>.ecxr
eax=00000001 ebx=00000000 ecx=00000401 edx=00000000 esi=049725b8 edi=00000002
eip=4e88159e esp=0a4efa38 ebp=0a4efa54 iopl=0 nv up ei pl nz ac pe nc
cs=0023 ss=002b ds=002b es=002b fs=0053 gs=002b efl=00010216
<Unloaded_mciwave.dll>+0x159e:
4e88159e ?? ???
$>~*kb
# 19 Id: 105c.28cc Suspend: 1 Teb: 7ef06000
Unfrozen
user32!NtUserGetMessage+0x15
user32!GetMessageA+0xa1
winmm!mciwindow+0x102
kernel32!BaseThreadInitThunk+0xe
ntdll!__RtlUserThreadStart+0x70
ntdll!_RtlUserThreadStart+0x1b
# 30 Id: 105c.15f8 Suspend: 0 Teb: 7ef1b000 Unfrozen
ntdll!ZwWaitForMultipleObjects+0x15
KERNELBASE!WaitForMultipleObjectsEx+0x100
kernel32!WaitForMultipleObjectsExImplementation+0xe0
kernel32!WaitForMultipleObjects+0x18
kernel32!WerpReportFaultInternal+0x186
kernel32!WerpReportFault+0x70
kernel32!BasepReportFault+0x20
kernel32!UnhandledExceptionFilter+0x1af
ntdll!__RtlUserThreadStart+0x62
ntdll!_EH4_CallFilterFunc+0x12
ntdll!_except_handler4+0x8e
ntdll!ExecuteHandler2+0x26
ntdll!ExecuteHandler+0x24
ntdll!RtlDispatchException+0x127
ntdll!KiUserExceptionDispatcher+0xf
WARNING: Frame IP not in any known module. Following frames may be wrong.
<Unloaded_mciwave.dll>+0x159e
# 31 Id: 105c.2310 Suspend: 1 Teb: 7ef00000 Unfrozen
user32!NtUserGetMessage+0x15
user32!GetMessageW+0x33
mciwave!TaskBlock+0x1d
mciwave!PlayFile+0xcb
mciwave!mwTask+0x98
winmm!mmStartTask+0x22
kernel32!BaseThreadInitThunk+0xe
ntdll!__RtlUserThreadStart+0x70
ntdll!_RtlUserThreadStart+0x1b:
$>!analyze -v
FAULTING_IP:
mciwave_4e880000!TaskBlock+1d
4e88159e ?? ???
EXCEPTION_RECORD: ffffffff -- (.exr 0xffffffffffffffff)
ExceptionAddress: 4e88159e (mciwave_4e880000!TaskBlock+0x0000001d)
ExceptionCode: c0000005 (Access violation)
ExceptionFlags: 00000000
NumberParameters: 2
Parameter[0]: 00000008
Parameter[1]: 4e88159e
Attempt to execute non-executable address 4e88159e
PROCESS_NAME: Titan.vshost.exe
ERROR_CODE: (NTSTATUS) 0xc0000005 - The instruction at 0x%08lx referenced memory at 0x%08lx. The memory could not be %s.
EXCEPTION_CODE: (NTSTATUS) 0xc0000005 - The instruction at 0x%08lx referenced memory at 0x%08lx. The memory could not be %s.
EXCEPTION_PARAMETER1: 00000008
EXCEPTION_PARAMETER2: 4e88159e
WRITE_ADDRESS: 4e88159e
FOLLOWUP_IP:
mciwave_4e880000!TaskBlock+1d
4e88159e ?? ???
MOD_LIST: <ANALYSIS/>
NTGLOBALFLAG: 0
APPLICATION_VERIFIER_FLAGS: 0
MANAGED_STACK: !dumpstack -EE
OS Thread Id: 0x15f8 (30)
====> Exception cxr#a4ef750
FAULTING_THREAD: 000015f8
BUGCHECK_STR: APPLICATION_FAULT_SOFTWARE_NX_FAULT_CODE_WRONG_SYMBOLS
PRIMARY_PROBLEM_CLASS: SOFTWARE_NX_FAULT_CODE
DEFAULT_BUCKET_ID: SOFTWARE_NX_FAULT_CODE
LAST_CONTROL_TRANSFER: from 4e881999 to 4e88159e
STACK_TEXT:
0a4efa54 4e881999 0a4efa88 078db198 078db1a4 mciwave_4e880000!TaskBlock+0x1d
0a4efa68 74370ae5 00038edc 00000000 00000000 mciwave_4e880000!mwTask+0x45
0a4efa88 7670338a 078db198 0a4efad4 76f99f72 winmm!mmStartTask+0x22
0a4efa94 76f99f72 078db198 79f84a28 00000000 kernel32!BaseThreadInitThunk+0xe
0a4efad4 76f99f45 74370ac3 078db198 00000000 ntdll!__RtlUserThreadStart+0x70
0a4efaec 00000000 74370ac3 078db198 00000000 ntdll!_RtlUserThreadStart+0x1b
SYMBOL_STACK_INDEX: 0
SYMBOL_NAME: mciwave!TaskBlock+1d
FOLLOWUP_NAME: MachineOwner
MODULE_NAME: mciwave_4e880000
IMAGE_NAME: mciwave.dll
DEBUG_FLR_IMAGE_TIMESTAMP: 4a5bcb4a
STACK_COMMAND: ~30s; .ecxr ; kb
FAILURE_BUCKET_ID: SOFTWARE_NX_FAULT_CODE_c0000005_mciwave.dll!TaskBlock
BUCKET_ID: APPLICATION_FAULT_SOFTWARE_NX_FAULT_CODE_WRONG_SYMBOLS_mciwave!TaskBlock+1d
Followup: MachineOwner
---------
The exception seems to happen in thread #30 in Unloaded_mciwave.dll, but I have no idea how to push the debugging further. How can I get a better idea of what's going on?
How can I find out what's happening between these two lines?
ntdll!KiUserExceptionDispatcher+0xf
--> WARNING: Frame IP not in any known module. Following frames may be wrong.
<Unloaded_mciwave.dll>+0x159e
Thanks in advance for your help.
You should get more details by reloading the DLL in the debugger.
For that, you need to run:
lmvm mciwave.dll
start end module name
Unloaded modules:
e6510000 e6548000 mciwave.dll
Timestamp: Fri Oct 14 12:00:00 2011 (4E98E6E2)
Checksum: 0003E937
ImageSize: 00038000
You need to set up the symbol and executable paths so the debugger can find the DLL and PDB (which shouldn't be a problem if you have them on your machine). Then you can do:
.reload mciwave.dll=e6510000,00038000
DBGHELP: <path>\mciwave.dll - OK
Now if you do !analyze -v again, it should give you the correct call stack.
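A sketch of what that path setup might look like; the symbol store and system directory below are examples for a typical machine, not required values:
$$ Add the Microsoft symbol server plus a local cache
.sympath+ srv*C:\Symbols*https://msdl.microsoft.com/download/symbols
$$ Add the directory that holds mciwave.dll on this machine
.exepath+ C:\Windows\System32
$$ Reload the image for the unloaded module
.reload /unl mciwave.dll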
I have a problem with Wicket, slf4j, and CentOS 5.8. This is the stack trace I receive:
Can't instantiate page using constructor public MyCompany.backenduser.NewBackendUser()
org.apache.wicket.WicketRuntimeException: Can't instantiate page using constructor public MyCompany.backenduser.NewBackendUser()
at org.apache.wicket.session.DefaultPageFactory.createPage(DefaultPageFactory.java:212)
at org.apache.wicket.session.DefaultPageFactory.newPage(DefaultPageFactory.java:57)
at org.apache.wicket.request.target.component.BookmarkablePageRequestTarget.newPage(BookmarkablePageRequestTarget.java:298)
at org.apache.wicket.request.target.component.BookmarkablePageRequestTarget.getPage(BookmarkablePageRequestTarget.java:320)
at org.apache.wicket.request.target.component.BookmarkablePageRequestTarget.processEvents(BookmarkablePageRequestTarget.java:234)
at org.apache.wicket.request.AbstractRequestCycleProcessor.processEvents(AbstractRequestCycleProcessor.java:92)
at org.apache.wicket.RequestCycle.processEventsAndRespond(RequestCycle.java:1250)
at org.apache.wicket.RequestCycle.step(RequestCycle.java:1329)
at org.apache.wicket.RequestCycle.steps(RequestCycle.java:1436)
at org.apache.wicket.RequestCycle.request(RequestCycle.java:545)
at org.apache.wicket.protocol.http.WicketFilter.doGet(WicketFilter.java:486)
at org.apache.wicket.protocol.http.WicketFilter.doFilter(WicketFilter.java:319)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1307)
at org.springframework.orm.hibernate3.support.OpenSessionInViewFilter.doFilterInternal(OpenSessionInViewFilter.java:198)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:76)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1307)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:368)
at org.springframework.security.web.access.intercept.FilterSecurityInterceptor.invoke(FilterSecurityInterceptor.java:109)
at org.springframework.security.web.access.intercept.FilterSecurityInterceptor.doFilter(FilterSecurityInterceptor.java:83)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:380)
at org.springframework.security.web.access.ExceptionTranslationFilter.doFilter(ExceptionTranslationFilter.java:97)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:380)
at org.springframework.security.web.session.SessionManagementFilter.doFilter(SessionManagementFilter.java:100)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:380)
at org.springframework.security.web.authentication.AnonymousAuthenticationFilter.doFilter(AnonymousAuthenticationFilter.java:78)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:380)
at org.springframework.security.web.servletapi.SecurityContextHolderAwareRequestFilter.doFilter(SecurityContextHolderAwareRequestFilter.java:54)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:380)
at org.springframework.security.web.savedrequest.RequestCacheAwareFilter.doFilter(RequestCacheAwareFilter.java:35)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:380)
at org.springframework.security.web.authentication.www.BasicAuthenticationFilter.doFilter(BasicAuthenticationFilter.java:177)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:380)
at org.springframework.security.web.authentication.AbstractAuthenticationProcessingFilter.doFilter(AbstractAuthenticationProcessingFilter.java:187)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:380)
at org.springframework.security.web.authentication.logout.LogoutFilter.doFilter(LogoutFilter.java:105)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:380)
at org.springframework.security.web.context.SecurityContextPersistenceFilter.doFilter(SecurityContextPersistenceFilter.java:79)
at org.springframework.security.web.FilterChainProxy$VirtualFilterChain.doFilter(FilterChainProxy.java:380)
at org.springframework.security.web.FilterChainProxy.doFilter(FilterChainProxy.java:169)
at org.springframework.web.filter.DelegatingFilterProxy.invokeDelegate(DelegatingFilterProxy.java:237)
at org.springframework.web.filter.DelegatingFilterProxy.doFilter(DelegatingFilterProxy.java:167)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1307)
at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:453)
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137)
at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:559)
at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231)
at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1072)
at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:382)
at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193)
at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1006)
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135)
at org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:255)
at org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:154)
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116)
at org.eclipse.jetty.server.Server.handle(Server.java:365)
at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:485)
at org.eclipse.jetty.server.AbstractHttpConnection.headerComplete(AbstractHttpConnection.java:926)
at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.headerComplete(AbstractHttpConnection.java:988)
at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:635)
at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:235)
at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82)
at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:627)
at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:51)
at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608)
at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543)
at java.lang.Thread.run(Thread.java:662)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
at org.apache.wicket.session.DefaultPageFactory.createPage(DefaultPageFactory.java:192)
... 65 more
Caused by: java.lang.NoSuchMethodError: org.slf4j.helpers.MessageFormatter.format(Ljava/lang/String;Ljava/lang/Object;)Lorg/slf4j/helpers/FormattingTuple;
at org.slf4j.impl.Log4jLoggerAdapter.warn(Log4jLoggerAdapter.java:420)
at org.apache.wicket.Localizer.getStringIgnoreSettings(Localizer.java:198)
at org.apache.wicket.Localizer.getString(Localizer.java:313)
at org.apache.wicket.model.StringResourceModel.getString(StringResourceModel.java:503)
at MyCompany.controls.PermissionsControl.<init>(PermissionsControl.java:28)
at MyCompany.backenduser.NewBackendUser.init(NewBackendUser.java:281)
at MyCompany.backenduser.NewBackendUser.<init>(NewBackendUser.java:107)
... 70 more
I only receive this on CentOS 5.8; on Windows it works great. We use Maven to build the project. In the pom.xml we excluded every version of slf4j pulled in by the other JARs.
Most probably you have an older slf4j-log4j JAR somewhere in your deployment environment. Look for slf4j* files in your web container's lib directories and compare their versions against the version of slf4j-api in your application.
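A quick way to hunt for the mismatch, assuming a standard Maven build and a Jetty-style container layout (the container path below is an example):
# List every slf4j artifact that Maven pulls into the build
mvn dependency:tree -Dincludes=org.slf4j

# Look for stray slf4j JARs on the CentOS box
find /opt/jetty -name 'slf4j*.jar' 2>/dev/null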