Why does spark-shell fail with NullPointerException? - scala

I'm trying to run spark-shell on Windows 10, but I keep getting this error every time I launch it.
I have tried both the latest version and spark-1.5.0-bin-hadoop2.4.
15/09/22 18:46:24 WARN Connection: BoneCP specified but not present in CLASSPATH (or one of dependencies)
15/09/22 18:46:24 WARN Connection: BoneCP specified but not present in CLASSPATH (or one of dependencies)
15/09/22 18:46:27 WARN ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 1.2.0
15/09/22 18:46:27 WARN ObjectStore: Failed to get database default, returning NoSuchObjectException
15/09/22 18:46:27 WARN : Your hostname, DESKTOP-8JS2RD5 resolves to a loopback/non-reachable address: fe80:0:0:0:0:5efe:c0a8:103%net1, but we couldn't find any external IP address!
java.lang.RuntimeException: java.lang.NullPointerException
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171)
at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:163)
at org.apache.spark.sql.hive.HiveContext.executionHive(HiveContext.scala:161)
at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:168)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown Source)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown Source)
at java.lang.reflect.Constructor.newInstance(Unknown Source)
at org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1028)
at $iwC$$iwC.<init>(<console>:9)
at $iwC.<init>(<console>:18)
at <init>(<console>:20)
at .<init>(<console>:24)
at .<clinit>(<console>)
at .<init>(<console>:7)
at .<clinit>(<console>)
at $print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:132)
at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
at org.apache.spark.repl.Main$.main(Main.scala:31)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:672)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.NullPointerException
at java.lang.ProcessBuilder.start(Unknown Source)
at org.apache.hadoop.util.Shell.runCommand(Shell.java:445)
at org.apache.hadoop.util.Shell.run(Shell.java:418)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:650)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:739)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:722)
at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1097)
at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:559)
at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:534)
at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:599)
at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
... 56 more
<console>:10: error: not found: value sqlContext
import sqlContext.implicits._
^
<console>:10: error: not found: value sqlContext
import sqlContext.sql
^

I used Spark 1.5.2 with Hadoop 2.6 and had similar problems. I solved them with the following steps:
1. Download winutils.exe from the repository to some local folder, e.g. C:\hadoop\bin.
2. Set HADOOP_HOME to C:\hadoop.
3. Create the c:\tmp\hive directory (using Windows Explorer or any other tool).
4. Open a command prompt with admin rights.
5. Run C:\hadoop\bin\winutils.exe chmod 777 /tmp/hive
With that, I still get some warnings, but no ERRORs, and can run Spark applications just fine.
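As a quick sanity check after those steps (a minimal sketch; any small query will do), sqlContext should now be bound in spark-shell and usable without the NullPointerException:
// Run inside spark-shell; lists tables and confirms the Hive session started cleanly.
sqlContext.sql("SHOW TABLES").show()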

I was facing a similar issue and got it resolved by putting winutils.exe inside the bin folder. HADOOP_HOME should be set to C:\Winutils and winutils.exe placed in C:\Winutils\bin.
Windows 10 64-bit winutils is available at https://github.com/steveloughran/winutils/tree/master/hadoop-2.6.0/bin
Also ensure that the command line has administrative access.
Refer to https://wiki.apache.org/hadoop/WindowsProblems

My guess is that you're running into https://issues.apache.org/jira/browse/SPARK-10528. I was seeing the same issue running on Windows 7. Initially I was getting the NullPointerException as you did. When I put winutils into the bin directory and set HADOOP_HOME to point to the Spark directory, I got the error described in the JIRA issue.

Or perhaps this link below is easier to follow:
https://wiki.apache.org/hadoop/WindowsProblems
Basically, download and copy winutils.exe to your spark\bin folder, then re-run spark-shell.
If you have not set your /tmp/hive to a writable state, please do so.

You need to grant permissions on the /tmp/hive directory to resolve this exception.
Hopefully you already have winutils.exe and have set the HADOOP_HOME environment variable. Then open the command prompt and run the following command as administrator.
If winutils.exe is present at D:\winutils\bin and \tmp\hive is also on the D drive:
D:\winutils\bin\winutils.exe chmod 777 D:\tmp\hive
For more details, you can refer to the following links:
Frequent Issues occurred during Spark Development
How to run Apache Spark on Windows7 in standalone mode

You can resolve this issue by placing the MySQL connector jar in the spark-1.6.0/libs folder and restarting. It works.
The important thing here is that instead of running plain spark-shell, you should run:
spark-shell --driver-class-path /home/username/spark-1.6.0-libs-mysqlconnector.jar
Hopefully that works.
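With the connector on the driver class path, a minimal sketch of reading a table over JDBC from the shell (the URL, table name, and credentials below are placeholders) would look something like this:
// Placeholders: adjust the URL, table, and credentials for your database.
val jdbcDF = sqlContext.read.format("jdbc").
  option("url", "jdbc:mysql://localhost:3306/mydb").
  option("dbtable", "my_table").
  option("user", "myuser").
  option("password", "mypassword").
  load()
jdbcDF.show()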

For Python: create a SparkSession in your Python code (this config section is only for Windows):
from pyspark.sql import SparkSession
spark = SparkSession.builder.config("spark.sql.warehouse.dir", "C:/temp").appName("SparkSQL").getOrCreate()
Copy winutils.exe into C:\winutils\bin and execute the command below:
C:\Windows\system32>C:\winutils\bin\winutils.exe chmod 777 C:/temp
Run the command prompt in ADMIN mode (Run as Administrator).
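For Scala/spark-shell users, the equivalent configuration (a minimal sketch, assuming Spark 2.x and the same C:/temp warehouse directory made writable with winutils) would be:
import org.apache.spark.sql.SparkSession

// Point the SQL warehouse at a writable directory (assumption: C:/temp, as above).
val spark = SparkSession.builder().
  config("spark.sql.warehouse.dir", "C:/temp").
  appName("SparkSQL").
  getOrCreate()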

My issue was having other .exe files/jars inside the winutils/bin folder. I cleared out all the others and was left with winutils.exe alone. I was using Spark 2.1.1.

The issue was resolved after installing the correct Java version (in my case Java 8) and setting the environment variables. Make sure you run winutils.exe to set permissions on the temporary directory, as below:
c:\winutils\bin\winutils.exe chmod 777 \tmp\hive
The above should not return any error. Use java -version to verify the version of Java you are using before invoking spark-shell.

In Windows, you need to clone "winutils":
git clone https://github.com/steveloughran/winutils.git
And set the HADOOP_HOME variable to DIR_CLONED\hadoop-{version}
Remember to choose the version matching your Hadoop.
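As a quick sanity check (a minimal sketch; the variable must be set before launching the shell), you can confirm from spark-shell that the JVM actually sees it:
// Prints the configured value, or a warning if the variable is not visible to the process.
println(sys.env.getOrElse("HADOOP_HOME", "HADOOP_HOME is not set"))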

Setting SPARK_LOCAL_HOSTNAME to localhost (on Windows 10) resolved the problem for me.

Related

Problems with Kafka Source initialization in Siddhi

I can't create a stream from a Kafka topic using Siddhi, even when I create the stream with the Design View.
I copied all required jars to the lib and bundle folders, and even started Kafka with ZooKeeper locally (I don't know why I need it locally, but never mind).
On tooling.sh startup I get the following error:
[2020-02-26 22:15:43,041] WARNING {org.wso2.carbon.launcher.extensions.OSGiLibBundleDeployerUtils lambda$getBundlesInfo$1} - Error when loading the OSGi bundle information from /home/Hed/StreamProcessor/siddhi-tooling-5.1.2/lib/kafka-clients-2.3.0.jar
java.io.IOException: Required bundle manifest headers do not exist
at org.wso2.carbon.launcher.extensions.OSGiLibBundleDeployerUtils.getBundleInfo(OSGiLibBundleDeployerUtils.java:183)
at org.wso2.carbon.launcher.extensions.OSGiLibBundleDeployerUtils.lambda$getBundlesInfo$1(OSGiLibBundleDeployerUtils.java:135)
at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
at java.util.Spliterators$ArraySpliterator.forEachRemaining(Spliterators.java:948)
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
at java.util.stream.StreamSpliterators$WrappingSpliterator.forEachRemaining(StreamSpliterators.java:313)
at java.util.stream.StreamSpliterators$DistinctSpliterator.forEachRemaining(StreamSpliterators.java:1291)
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
at java.util.stream.ReduceOps$ReduceTask.doLeaf(ReduceOps.java:747)
at java.util.stream.ReduceOps$ReduceTask.doLeaf(ReduceOps.java:721)
at java.util.stream.AbstractTask.compute(AbstractTask.java:327)
at java.util.concurrent.CountedCompleter.exec(CountedCompleter.java:731)
at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
For this script:
#App:name("HelloKafka")
#App:description('Consume events from a Kafka Topic and publish to a different Kafka Topic')
#source(type='kafka',
topic.list='kafka_topic',
partition.no.list='0',
threading.option='single.thread',
group.id="group",
bootstrap.servers='localhost:9092',
#map(type='json'))
define stream SweetProductionStream (name string, amount double);
I get this error on the Run command:
io.siddhi.core.exception.SiddhiAppCreationException: Error on 'HelloKafka' # Line: 10. Position: 26, near '#source(type='kafka',
topic.list='kafka_topic',
partition.no.list='0',
threading.option='single.thread',
group.id="group",
bootstrap.servers='localhost:9092',
#map(type='json'))'. org/apache/kafka/clients/producer/Producer
at io.siddhi.core.util.ExceptionUtil.populateQueryContext(ExceptionUtil.java:43)
at io.siddhi.core.util.parser.helper.DefinitionParserHelper.addEventSource(DefinitionParserHelper.java:388)
at io.siddhi.core.util.SiddhiAppRuntimeBuilder.defineStream(SiddhiAppRuntimeBuilder.java:117)
at io.siddhi.core.util.parser.SiddhiAppParser.defineStreamDefinitions(SiddhiAppParser.java:374)
at io.siddhi.core.util.parser.SiddhiAppParser.parse(SiddhiAppParser.java:230)
at io.siddhi.core.SiddhiManager.createSiddhiAppRuntime(SiddhiManager.java:85)
at io.siddhi.core.SiddhiManager.createSiddhiAppRuntime(SiddhiManager.java:95)
at io.siddhi.distribution.editor.core.internal.DebugRuntime.createRuntime(DebugRuntime.java:201)
at io.siddhi.distribution.editor.core.internal.DebugRuntime.<init>(DebugRuntime.java:56)
at io.siddhi.distribution.editor.core.internal.DebugProcessorService.start(DebugProcessorService.java:38)
at io.siddhi.distribution.editor.core.internal.EditorMicroservice.start(EditorMicroservice.java:761)
at io.siddhi.distribution.editor.core.internal.EditorMicroservice.startWithVariables(EditorMicroservice.java:781)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.wso2.msf4j.internal.router.HttpMethodInfo.invokeResource(HttpMethodInfo.java:187)
at org.wso2.msf4j.internal.router.HttpMethodInfo.invoke(HttpMethodInfo.java:143)
at org.wso2.msf4j.internal.MSF4JHttpConnectorListener.dispatchMethod(MSF4JHttpConnectorListener.java:218)
at org.wso2.msf4j.internal.MSF4JHttpConnectorListener.lambda$onMessage$58(MSF4JHttpConnectorListener.java:129)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.NoClassDefFoundError: org/apache/kafka/clients/producer/Producer
at java.lang.Class.getDeclaredConstructors0(Native Method)
at java.lang.Class.privateGetDeclaredConstructors(Class.java:2671)
at java.lang.Class.getConstructor0(Class.java:3075)
at java.lang.Class.newInstance(Class.java:412)
at io.siddhi.core.util.SiddhiClassLoader.loadClass(SiddhiClassLoader.java:32)
at io.siddhi.core.util.SiddhiClassLoader.loadExtensionImplementation(SiddhiClassLoader.java:48)
at io.siddhi.core.util.parser.helper.DefinitionParserHelper.addEventSource(DefinitionParserHelper.java:346)
... 21 more
Caused by: java.lang.ClassNotFoundException: org.apache.kafka.clients.producer.Producer cannot be found by siddhi-io-kafka_5.0.7
at org.eclipse.osgi.internal.loader.BundleLoader.findClassInternal(BundleLoader.java:448)
at org.eclipse.osgi.internal.loader.BundleLoader.findClass(BundleLoader.java:361)
at org.eclipse.osgi.internal.loader.BundleLoader.findClass(BundleLoader.java:353)
at org.eclipse.osgi.internal.loader.ModuleClassLoader.loadClass(ModuleClassLoader.java:161)
at java.lang.ClassLoader.loadClass(ClassLoader.java:352)
... 28 more
Can somebody tell me what I am doing wrong? :(
Please make sure you have added the OSGi-converted jars to "C:\Program Files\WSO2\Enterprise Integrator\7.0.2\streaming-integrator\lib".
The OSGi-converted jar list:
kafka_2.12_2.3.0_1.0.0
kafka_clients_2.3.0_1.0.0
metrics_core_2.2.0_1.0.0
scala_library_2.12.8_1.0.0
zkclient_0.11_1.0.0
zookeeper_3.4.14_1.0.0
Then, copy the original jars to "C:\Program Files\WSO2\Enterprise Integrator\7.0.2\streaming-integrator\samples\sample-clients\lib".
The list of original jars:
kafka_2.12-2.3.0
kafka-clients-2.3.0
metrics-core-2.2.0
scala-library-2.12.8
zkclient-0.11
zookeeper-3.4.14
In order to generate the OSGi-converted jars, copy all original jars to a folder called "source" and create an empty folder called "destination". Then run the following command in the terminal:
MINGW32 /c/Program Files/WSO2/Enterprise Integrator/7.0.2/streaming-integrator/bin
$ ./jartobundle.sh C:/DevTools/source C:/DevTools/destination
Finally, distribute the OSGi-converted and original jars according to the directories above.
PS1: in my case I am using kafka_2.12-2.4.1, but the base names of the jars do not change.
PS2: adapt the directories to your installation path.
For more details, check the WSO2 documentation: Kafka transport

SchemaSpy DB2 Connection Failure

I tried to analyze a DB2 database with SchemaSpy, but got a 'Connection Failure' warning. I tried it this way:
java -jar schemaspy-6.0.0.jar -configFile schemaspy.properties --logging.pattern.console="%d{HH:mm:ss.SSS} %clr(%-5level) - %msg%n" --logging.level.org.schemaspy=TRACE
(found the logging part on https://github.com/schemaspy/schemaspy/issues/250)
The .properties file looks like this:
schemaspy.t=db2
schemaspy.dp=C:\tmp\db2jcc.jar
schemaspy.host=**host**
schemaspy.port=50000
schemaspy.db=**db**
schemaspy.u=**user**
schemaspy.p=**password**
schemaspy.o=D:\**\schemaspy-output\
schemaspy.s=**schema**
The error I got was:
14:24:20.297 DEBUG - Unable to find driverClass 'COM.ibm.db2.jdbc.app.DB2Driver'
14:24:20.308 WARN - Connection Failure
org.schemaspy.model.ConnectionFailure: Failed to connect to database URL [jdbc:db2:zumtest] Failed to create any of 'COM.ibm.db2.jdbc.app.DB2Driver' driver from driverPath 'C:\tmp\db2jcc.jar' with sibling jars no.
Resulting in classpath:
file:/C:/tmp/db2jcc.jar
at org.schemaspy.DbDriverLoader.getConnection(DbDriverLoader.java:101)
at org.schemaspy.DbDriverLoader.getConnection(DbDriverLoader.java:75)
at org.schemaspy.service.SqlService.connect(SqlService.java:68)
at org.schemaspy.SchemaAnalyzer.analyze(SchemaAnalyzer.java:186)
at org.schemaspy.SchemaAnalyzer.analyze(SchemaAnalyzer.java:107)
at org.schemaspy.cli.SchemaSpyRunner.runAnalyzer(SchemaSpyRunner.java:97)
at org.schemaspy.cli.SchemaSpyRunner.run(SchemaSpyRunner.java:86)
at org.schemaspy.Main.main(Main.java:48)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at org.springframework.boot.loader.MainMethodRunner.run(MainMethodRunner.java:48)
at org.springframework.boot.loader.Launcher.launch(Launcher.java:87)
at org.springframework.boot.loader.Launcher.launch(Launcher.java:50)
at org.springframework.boot.loader.JarLauncher.main(JarLauncher.java:51)
Caused by: org.schemaspy.model.ConnectionFailure: Failed to create any of 'COM.ibm.db2.jdbc.app.DB2Driver' driver from driverPath 'C:\tmp\db2jcc.jar' with sibling jars no.
Resulting in classpath:
file:/C:/tmp/db2jcc.jar
at org.schemaspy.DbDriverLoader.getDriver(DbDriverLoader.java:147)
at org.schemaspy.DbDriverLoader.getConnection(DbDriverLoader.java:93)
... 15 common frames omitted
I guess the error comes from the wrong classpath? But how can I fix this? I tried to change the line in db2.properties from
driver=COM.ibm.db2.jdbc.app.DB2Driver
to
driver=COM.ibm.db2.jcc.DB2Driver
because I extracted this class name from the driver's .jar file, but it did not help.
This worked for me using db2jcc.jar:
# type of database. Run with -dbhelp for details
schemaspy.t=db2
# optional path to alternative jdbc drivers.
schemaspy.dp=/lib/db2jcc.jar
# database properties: host, port number, name user, password
schemaspy.host=xxxxxx
schemaspy.port=xxxxx
schemaspy.db=xxx
schemaspy.u=xx
schemaspy.p=xx
# output dir to save generated files
schemaspy.o=/output
# db scheme for which generate diagrams
schemaspy.s=xxxx
driver=com.ibm.db2.jcc.DB2Driver
connectionSpec=jdbc:db2://xxx.xx.x.xxx:[PORT]/[DBNAME]
schemaspy.cat=%
The command used:
java -jar schemaspy-6.1.0.jar -configFile db2.properties --logging.pattern.console="%d{HH:mm:ss.SSS} %clr(%-5level) - %msg%n" --logging.level.org.schemaspy=TRACE
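If you want to verify the driver class name and connectivity independently of SchemaSpy, here is a minimal standalone Scala sketch (host, port, database, and credentials are placeholders; run it with db2jcc.jar on the classpath):
import java.sql.DriverManager

// db2jcc.jar ships the type-4 driver under the lowercase package name com.ibm.db2.jcc.
Class.forName("com.ibm.db2.jcc.DB2Driver")
val conn = DriverManager.getConnection("jdbc:db2://HOST:50000/DBNAME", "USER", "PASSWORD")
println(conn.getMetaData.getDatabaseProductName)
conn.close()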

Error org.eclipse.swt.SWTError: No more handles [gtk_init_check() failed] on CentOS 7

I am new to CentOS 7. I installed Pentaho PDI 7 and ran ./spoon.sh on CentOS 7, and this error popped up:
org.eclipse.swt.SWTError: No more handles [gtk_init_check() failed]
at org.eclipse.swt.SWT.error(Unknown Source)
at org.eclipse.swt.widgets.Display.createDisplay(Unknown Source)
at org.eclipse.swt.widgets.Display.create(Unknown Source)
at org.eclipse.swt.graphics.Device.<init>(Unknown Source)
at org.eclipse.swt.widgets.Display.<init>(Unknown Source)
at org.eclipse.swt.widgets.Display.<init>(Unknown Source)
at org.pentaho.di.ui.spoon.Spoon.main(Spoon.java:642)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.pentaho.commons.launcher.Launcher.main(Launcher.java:92)
I've installed Java 1.8. Is there anything I missed installing?
Please help.
Same problem here; I managed to make it work by installing xvfb (CentOS 6).
Fixed mine by issuing the following:
$ export DISPLAY=:0.0
I had a similar error message, and the reason was the inability to reach an X11 server.
Check that you have a valid X11 server reachable by your application. Look at the DISPLAY environment variable.
If you are running on Linux with GNOME, start a GNOME session with Xorg:
In GDM (the greeter/login manager), click your username and, before entering the password, click the little gear and select 'Gnome with Xorg', then enter your password.

Deleted google storage directory appears "already exists" when calling Spark DataFrame.saveAsParquetFile()

After I deleted a Google Cloud Storage directory through the Google Cloud Console (the directory had been generated by an earlier Spark (ver 1.3.1) job), re-running the job always fails, and the directory still seems to exist as far as the job is concerned; I cannot find the directory with gsutil.
Is this a bug, or anything I missed? Thanks!
The error I got:
java.lang.RuntimeException: path gs://<my_bucket>/job_dir1/output_1.parquet already exists.
at scala.sys.package$.error(package.scala:27)
at org.apache.spark.sql.parquet.DefaultSource.createRelation(newParquet.scala:112)
at org.apache.spark.sql.sources.ResolvedDataSource$.apply(ddl.scala:240)
at org.apache.spark.sql.DataFrame.save(DataFrame.scala:1196)
at org.apache.spark.sql.DataFrame.saveAsParquetFile(DataFrame.scala:995)
at com.xxx.Job1$.execute(Job1.scala:64)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:569)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:166)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:189)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:110)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
It appears you might be running into a known bug with the NFS list-consistency cache: https://github.com/GoogleCloudPlatform/bigdata-interop/issues/5
It was fixed in the latest release; if you upgrade by deploying a new cluster with bdutil-1.3.1 (announced here: https://groups.google.com/forum/#!topic/gcp-hadoop-announce/vstNuV0LpDc), the problem should go away. If you need to upgrade in place, you can try downloading the latest gcs-connector-1.4.1 jarfile onto your master and worker nodes under /home/hadoop/hadoop-install/lib/gcs-connector-*.jar and then restarting the Spark daemons:
sudo sudo -u hadoop /home/hadoop/spark-install/sbin/stop-all.sh
sudo sudo -u hadoop /home/hadoop/spark-install/sbin/start-all.sh

Error while starting JBoss server

I am very new to JBoss. I currently have a requirement to deploy an application (which is already running on Tomcat) on JBoss. I downloaded JBoss, but the directory structure is different in version 7.
I am running the bin\standalone.conf.bat file to start the server; however, I am getting the error below:
Calling "C:\Program Files\jboss-as-7.1.1.Final\bin\standalone.conf.bat"
===============================================================================
JBoss Bootstrap Environment
JBOSS_HOME: C:\Program Files\jboss-as-7.1.1.Final
JAVA: C:\Program Files\Java\jdk1.6.0_30\bin\java
JAVA_OPTS: -XX:+TieredCompilation -Dprogram.name=standalone.bat -Xms64M -Xmx512M -XX:MaxPermSize=256M -Dsun.rmi.dgc.client.gcInterval=3600000 -Dsun.rmi.dgc.server.gcInterval=3600000 -Djava.net.preferIPv4Stack=true -Dorg.jboss.resolver.warning=true -Djboss.modules.system.pkgs=org.jboss.byteman -Djboss.server.default.config=standalone.xml
===============================================================================
Unable to set property fileName on class org.jboss.logmanager.handlers.FileHandler: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.jboss.logmanager.PropertyConfigurator.configureProperties(PropertyConfigurator.java:187)
at org.jboss.logmanager.PropertyConfigurator.configureHandler(PropertyConfigurator.java:312)
at org.jboss.logmanager.PropertyConfigurator.configure(PropertyConfigurator.java:128)
at org.jboss.logmanager.PropertyConfigurator.configure(PropertyConfigurator.java:86)
at org.jboss.logmanager.LogManager.readConfiguration(LogManager.java:246)
at org.jboss.logmanager.LogManager.readConfiguration(LogManager.java:231)
at java.util.logging.LogManager$2.run(LogManager.java:267)
at java.security.AccessController.doPrivileged(Native Method)
at java.util.logging.LogManager.readPrimordialConfiguration(LogManager.java:265)
at java.util.logging.LogManager.getLogManager(LogManager.java:248)
at java.util.logging.Logger.<init>(Logger.java:225)
at java.util.logging.LogManager$RootLogger.<init>(LogManager.java:1092)
at java.util.logging.LogManager$RootLogger.<init>(LogManager.java:1089)
at java.util.logging.LogManager$1.run(LogManager.java:180)
at java.security.AccessController.doPrivileged(Native Method)
at java.util.logging.LogManager.<clinit>(LogManager.java:157)
at org.jboss.modules.Main.main(Main.java:275)
Caused by: java.io.FileNotFoundException: C:\Program Files\jboss-as-7.1.1.Final\standalone\log\boot.log (The system cannot find the path specified)
at java.io.FileOutputStream.open(Native Method)
at java.io.FileOutputStream.<init>(FileOutputStream.java:194)
at org.jboss.logmanager.handlers.FileHandler.setFile(FileHandler.java:152)
at org.jboss.logmanager.handlers.FileHandler.setFileName(FileHandler.java:183)
... 21 more
17:11:18,420 INFO [org.jboss.modules] JBoss Modules version 1.1.1.GA
java.lang.IllegalStateException: JBAS018704: Could not create server data directory: C:\Program Files\jboss-as-7.1.1.Final\standalone\data
at org.jboss.as.server.ServerEnvironment.<init>(ServerEnvironment.java:388)
at org.jboss.as.server.Main.determineEnvironment(Main.java:242)
at org.jboss.as.server.Main.main(Main.java:83)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.jboss.modules.Module.run(Module.java:260)
at org.jboss.modules.Main.main(Main.java:291)
Press any key to continue . . .
How can I resolve the issue and get the server running properly?
Try running JBoss in Administrator mode or give your JBoss folder the proper permissions to access the log files mentioned in the error.
If you are using Eclipse, starting it in admin mode does the trick; if not, you can start your command prompt in admin mode and then run the startup script from there.
To set admin mode:
https://technet.microsoft.com/en-us/magazine/ff431742.aspx