lift-json cannot deserialize optional fields - lift

I get an exception when trying to serialize and then deserialize a case class with an optional field using lift-json.
scala> import net.liftweb.json._
import net.liftweb.json._
scala> import net.liftweb.json.Serialization.{read, write}
import net.liftweb.json.Serialization.{read, write}
scala> implicit val formats = DefaultFormats
formats: net.liftweb.json.DefaultFormats.type = net.liftweb.json.DefaultFormats$@707a7686
scala> case class Person(Name:String,Age:Option[Int])
defined class Person
scala> val friends=List(Person("Dan",Some(21)),Person("Ben",None))
friends: List[Person] = List(Person(Dan,Some(21)), Person(Ben,None))
scala> read[List[Person]](write(friends))
java.lang.InternalError: Malformed class name
at java.lang.Class.getSimpleName(Class.java:1169)
at net.liftweb.json.ScalaSigReader$$anonfun$findClass$3.apply(ScalaSig.scala:45)
at net.liftweb.json.ScalaSigReader$$anonfun$findClass$3.apply(ScalaSig.scala:45)
at scala.collection.LinearSeqOptimized$class.find(LinearSeqOptimized.scala:100)
at scala.collection.immutable.List.find(List.scala:76)
at net.liftweb.json.ScalaSigReader$.findClass(ScalaSig.scala:45)
at net.liftweb.json.ScalaSigReader$.findClass(ScalaSig.scala:41)
at net.liftweb.json.ScalaSigReader$.readConstructor(ScalaSig.scala:24)
at net.liftweb.json.Meta$Reflection$.term$1(Meta.scala:275)
at net.liftweb.json.Meta$Reflection$.typeParameters(Meta.scala:292)
at net.liftweb.json.Meta$.mkContainer$1(Meta.scala:107)
at net.liftweb.json.Meta$.fieldMapping$1(Meta.scala:134)
at net.liftweb.json.Meta$.toArg$1(Meta.scala:154)
at net.liftweb.json.Meta$$anonfun$constructors$1$1$$anonfun$apply$1.apply(Meta.scala:98)
at net.liftweb.json.Meta$$anonfun$constructors$1$1$$anonfun$apply$1.apply(Meta.scala:97)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:233)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:233)
at scala.collection.LinearSeqOptimized$class.foreach(LinearSeqOptimized.scala:59)
at scala.collection.immutable.List.foreach(List.scala:76)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:233)
at scala.collection.immutable.List.map(List.scala:76)
at net.liftweb.json.Meta$$anonfun$constructors$1$1.apply(Meta.scala:97)
at net.liftweb.json.Meta$$anonfun$constructors$1$1.apply(Meta.scala:96)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:233)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:233)
at scala.collection.LinearSeqOptimized$class.foreach(LinearSeqOptimized.scala:59)
at scala.collection.immutable.List.foreach(List.scala:76)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:233)
at scala.collection.immutable.List.map(List.scala:76)
at net.liftweb.json.Meta$.constructors$1(Meta.scala:96)
at net.liftweb.json.Meta$$anonfun$mappingOf$1.apply(Meta.scala:168)
at net.liftweb.json.Meta$$anonfun$mappingOf$1.apply(Meta.scala:160)
at net.liftweb.json.Meta$Memo.memoize(Meta.scala:197)
at net.liftweb.json.Meta$.mappingOf(Meta.scala:160)
at net.liftweb.json.Extraction$.mkMapping$1(Extraction.scala:193)
at net.liftweb.json.Extraction$.mkMapping$1(Extraction.scala:190)
at net.liftweb.json.Extraction$.net$liftweb$json$Extraction$$extract0(Extraction.scala:198)
at net.liftweb.json.Extraction$.extract(Extraction.scala:42)
at net.liftweb.json.JsonAST$JValue.extract(JsonAST.scala:300)
at net.liftweb.json.Serialization$.read(Serialization.scala:58)
at .<init>(<console>:16)
at .<clinit>(<console>)
at .<init>(<console>:11)
at .<clinit>(<console>)
at $print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:704)
at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:914)
at scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:546)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:577)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:543)
at scala.tools.nsc.interpreter.ILoop.reallyInterpret$1(ILoop.scala:694)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:745)
at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:651)
at scala.tools.nsc.interpreter.ILoop.processLine$1(ILoop.scala:542)
at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:550)
at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:822)
at scala.tools.nsc.interpreter.ILoop.main(ILoop.scala:851)
at xsbt.ConsoleInterface.run(ConsoleInterface.scala:57)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at sbt.compiler.AnalyzingCompiler.call(AnalyzingCompiler.scala:73)
at sbt.compiler.AnalyzingCompiler.console(AnalyzingCompiler.scala:64)
at sbt.Console.console0$1(Console.scala:23)
at sbt.Console$$anonfun$apply$2$$anonfun$apply$1.apply$mcV$sp(Console.scala:24)
at sbt.TrapExit$.executeMain$1(TrapExit.scala:33)
at sbt.TrapExit$$anon$1.run(TrapExit.scala:42)
Notes:
If I use String, as in case class Person(Name:String,Age:Option[String]), instead of Int, it returns the correct result.
If I use java.lang.Integer, as in case class Person(Name:String,Age:Option[java.lang.Integer]), it returns the correct result as well.
My question is: why do I have to use a Java type here? Is there a better/cleaner way to express this?

Please see the FAQ at the end of the lift-json README: extraction does not work properly for classes that are defined in the REPL.
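For what it's worth, a minimal sketch of the same round trip with the case class living in a compiled source file instead of the REPL (same names as above; lift-json on the classpath is assumed). With a compiled class the ScalaSig is readable, so Option[Int] should deserialize without switching to java.lang.Integer:
import net.liftweb.json._
import net.liftweb.json.Serialization.{read, write}

// Defined in a compiled file, not in the REPL.
case class Person(Name: String, Age: Option[Int])

object PersonRoundTrip extends App {
  implicit val formats: Formats = DefaultFormats

  val friends = List(Person("Dan", Some(21)), Person("Ben", None))
  val json = write(friends)            // None fields are omitted from the JSON by default
  val back = read[List[Person]](json)

  println(json)  // expected: [{"Name":"Dan","Age":21},{"Name":"Ben"}]
  println(back)  // expected: List(Person(Dan,Some(21)), Person(Ben,None))
}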

Related

Small number causes java.lang.ClassCastException when snakeyaml deserialized object is passed to Gatling feeder

I'm running a Gatling simulation that uses numeric input from a YAML file to feed its scenario. Everything works when my numeric inputs are large enough that they cannot be parsed as instances of java.lang.Integer, but small numeric values are apparently parsed as Integers and result in a ClassCastException.
import java.io.FileInputStream
import io.gatling.core.Predef.{Feeder, Simulation}
import org.yaml.snakeyaml.Yaml
import org.yaml.snakeyaml.constructor.Constructor
import io.gatling.core.Predef.{scenario, _}
import scala.collection.JavaConversions
class TestClass extends Simulation {
  val yaml = new Yaml(new Constructor(classOf[Holder]))
  val holder = yaml.load(new FileInputStream("src/test/resources/data.yml")).asInstanceOf[Holder]

  scenario("sim").feed(getUserEmulationFeeder(holder))

  def getUserEmulationFeeder(holder: Holder): Feeder[Long] = {
    val iterable = JavaConversions.iterableAsScalaIterable(holder.numbers)
    iterable.map(l => Map("userToEmulate" -> l)).iterator
  }
}
data.yml has the following data:
numbers:
- 30687965369
- 31415388869
- 2
and is being deserialized into:
import scala.beans.BeanProperty

class Holder {
  @BeanProperty var numbers = new java.util.ArrayList[Long]()
}
Removing the 2 fixes the ClassCastException.
The full stacktrace is:
java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at io.gatling.mojo.MainWithArgsInFile.runMain(MainWithArgsInFile.java:50)
at io.gatling.mojo.MainWithArgsInFile.main(MainWithArgsInFile.java:33)
Caused by: java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.Long
at scala.runtime.BoxesRunTime.unboxToLong(BoxesRunTime.java:105)
at com.mercurygate.TestClass$$anonfun$getUserEmulationFeeder$1.apply(TestClass.scala:25)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
at com.mercurygate.TestClass.getUserEmulationFeeder(TestClass.scala:25)
at com.mercurygate.TestClass.<init>(TestClass.scala:21)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at java.lang.Class.newInstance(Class.java:442)
at io.gatling.app.Gatling$.io$gatling$app$Gatling$$$anonfun$1(Gatling.scala:41)
at io.gatling.app.Gatling$lambda$1.apply(Gatling.scala:41)
at io.gatling.app.Gatling$lambda$1.apply(Gatling.scala:41)
at io.gatling.app.Gatling.run(Gatling.scala:92)
at io.gatling.app.Gatling.runIfNecessary(Gatling.scala:75)
at io.gatling.app.Gatling.start(Gatling.scala:65)
at io.gatling.app.Gatling$.start(Gatling.scala:57)
at io.gatling.app.Gatling$.fromArgs(Gatling.scala:49)
at io.gatling.app.Gatling$.main(Gatling.scala:43)
at io.gatling.app.Gatling.main(Gatling.scala)
... 6 more
P.S. Sorry for the complexity of the example. It's only when I combine snakeyaml, gatling, and the small input that I get the error.
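The type parameter of java.util.ArrayList[Long] is erased at runtime, so SnakeYAML is free to store small values as java.lang.Integer, and the closure in getUserEmulationFeeder then fails when Scala tries to unbox them to a primitive Long. A hedged sketch of one way around that, converting through java.lang.Number instead of relying on the declared element type (this would replace getUserEmulationFeeder inside the TestClass above and is untested against a real Gatling run):
// View the deserialized list as java.util.List[Number] so that both Integer and
// Long elements are accepted, then convert each value to Long explicitly.
def getUserEmulationFeeder(holder: Holder): Feeder[Long] = {
  val numbers = holder.numbers.asInstanceOf[java.util.List[java.lang.Number]]
  val iterable = JavaConversions.iterableAsScalaIterable(numbers)
  iterable.map(n => Map("userToEmulate" -> n.longValue())).iterator
}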

<console>:22: error: not found: value sc

I am completely new to Spark and still learning it. While practicing, I ran into a few issues, described below. There are multiple steps, so this is quite long.
I am using spark-shell in a UNIX environment and getting the errors below.
Step 1
$ spark-shell
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /___/ .__/\_,_/_/ /_/\_\   version 1.3.1
      /_/
Using Scala version 2.10.4 (Java HotSpot(TM) 64-Bit Server VM, Java 1.7.0_25)
Type in expressions to have them evaluated.
Type :help for more information.
2016-04-22 07:44:31,5095 ERROR JniCommon fs/client/fileclient/cc/jni_MapRClient.cc:1473 Thread: 20535 mkdirs failed for /user/cni/.sparkStaging/application_1459074732364_1192326, error 13
org.apache.hadoop.security.AccessControlException: User cni(user id 5689) has been denied access to create application_1459074732364_1192326
at com.mapr.fs.MapRFileSystem.makeDir(MapRFileSystem.java:1100)
at com.mapr.fs.MapRFileSystem.mkdirs(MapRFileSystem.java:1120)
at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:1851)
at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:631)
at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:224)
at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:384)
at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:102)
at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:58)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:141)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:381)
at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1016)
at $iwC$$iwC.<init>(<console>:9)
at $iwC.<init>(<console>:18)
at <init>(<console>:20)
at .<init>(<console>:24)
at .<clinit>(<console>)
at .<init>(<console>:7)
at .<clinit>(<console>)
at $print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1338)
at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:856)
at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:901)
at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:813)
at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:123)
at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:122)
at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:122)
at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:973)
at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:157)
at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:106)
at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:990)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:944)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:944)
at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:944)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1058)
at org.apache.spark.repl.Main$.main(Main.scala:31)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:569)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:166)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:189)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:110)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
java.lang.NullPointerException
at org.apache.spark.sql.SQLContext.<init>(SQLContext.scala:145)
at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:49)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1027)
at $iwC$$iwC.<init>(<console>:9)
at $iwC.<init>(<console>:18)
at <init>(<console>:20)
at .<init>(<console>:24)
at .<clinit>(<console>)
at .<init>(<console>:7)
at .<clinit>(<console>)
at $print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1338)
at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:856)
at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:901)
at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:813)
at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:130)
at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:122)
at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:122)
at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:973)
at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:157)
at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:106)
at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:990)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:944)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:944)
at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:944)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1058)
at org.apache.spark.repl.Main$.main(Main.scala:31)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:569)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:166)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:189)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:110)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
<console>:10: error: not found: value sqlContext
import sqlContext.implicits._
^
<console>:10: error: not found: value sqlContext
import sqlContext.sql
^
Step 2:
I just ignored the warnings/errors above and moved on with my code. I read that sc is created automatically when using spark-shell, so I coded as below.
scala> val textFile = sc.textFile("README.md")
<console>:13: error: not found: value sc
val textFile = sc.textFile("README.md")
Step 3:
Since it says sc is not found, I tried creating it.
scala> import org.apache.spark._
import org.apache.spark._
scala> import org.apache.spark.streaming._
import org.apache.spark.streaming._
scala> import org.apache.spark.streaming.StreamingContext._
import org.apache.spark.streaming.StreamingContext._
scala> val conf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount").set("spark.ui.port", "44040" ).set("spark.driver.allowMultipleContexts", "true")
conf: org.apache.spark.SparkConf = org.apache.spark.SparkConf@1a58697d
scala> val ssc = new StreamingContext(conf, Seconds(2) )
16/04/22 08:19:18 WARN SparkContext: Another SparkContext is being constructed (or threw an exception in its constructor). This may indicate an error, since only one SparkContext may be running in this JVM (see SPARK-2243). The other SparkContext was created at:
org.apache.spark.SparkContext.<init>(SparkContext.scala:80)
org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1016)
$line3.$read$$iwC$$iwC.<init>(<console>:9)
$line3.$read$$iwC.<init>(<console>:18)
$line3.$read.<init>(<console>:20)
$line3.$read$.<init>(<console>:24)
$line3.$read$.<clinit>(<console>)
$line3.$eval$.<init>(<console>:7)
$line3.$eval$.<clinit>(<console>)
$line3.$eval.$print(<console>)
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
java.lang.reflect.Method.invoke(Method.java:606)
org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1338)
org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:856)
ssc: org.apache.spark.streaming.StreamingContext = org.apache.spark.streaming.StreamingContext@15492914
Since Spark told me this is a warning (though it also said it may indicate an error), I ignored it and moved on to create an RDD. Again, I am not sure here: is this an error or a warning?
Step 4:
I tried to create an RDD as follows.
scala> var fil = ssc.textFile("/mapr/datalake/01.Call_ID.txt")
<console>:21: error: value textFile is not a member of org.apache.spark.streaming.StreamingContext
var fil = ssc.textFile("/mapr/datalake/01.Call_ID.txt")
^
Here it says that textFile is not a member of StreamingContext. I am going mad with all of this. Also, just FYI, I work for a company and am executing these scripts on the company's laptop.
I think all of this is due to a lack of permissions. Presuming you have the correct access privileges to use the cluster, you can type
HADOOP_USER_NAME=hdfs spark-shell
That should override the permissions of your account.
It seems that you are having trouble creating a folder inside your user directory in HDFS.
Check the permissions on the folder /user/cni/.
You can try giving your user folder full access with the command:
hdfs dfs -chmod -R 777 /user/cni
This is not recommended on shared or production clusters, but it can help you identify whether it is an access issue.
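Two additional notes, offered as a sketch since I cannot test against this cluster. The later errors are a consequence of the first one: spark-shell only defines sc and sqlContext when the SparkContext it builds at startup succeeds, so once the staging-directory permission problem is resolved, sc should be available again. Independently of permissions, the Step 4 error is expected, because textFile is a method of SparkContext, not StreamingContext, and a StreamingContext exposes its underlying SparkContext:
// With permissions fixed, the shell's own sc works as in Step 2:
val textFile = sc.textFile("README.md")

// Reading a static file from a streaming job goes through the wrapped SparkContext
// (ssc is the StreamingContext created in Step 3):
val fil = ssc.sparkContext.textFile("/mapr/datalake/01.Call_ID.txt")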

Instantiating scala collections via their apply method with scala reflection

I have a tool that tries to build instances of subclasses of various Scala collections, for example scala.collection.Seq. I don't know in advance which specific class should be built, so I am trying to use reflection to get the apply method in the companion object, as follows (similar to writing List[Int](1, 2, 3)).
import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.universe._
def makeNewInstance[T <: scala.collection.Seq[_]](clazz: Class[T], args: List[_]): T = {
  val clazzMirror: ru.Mirror = ru.runtimeMirror(clazz.getClassLoader)
  val clazzSymbol = clazzMirror.classSymbol(clazz)
  val companionObject = clazzSymbol.companion.asModule
  val instanceMirror = clazzMirror reflect (clazzMirror reflectModule companionObject).instance
  val typeSignature = instanceMirror.symbol.typeSignature
  val name = "apply"
  val ctor = typeSignature.member(TermName(name)).asMethod
  instanceMirror.reflectMethod(ctor)(args:_*).asInstanceOf[T]
}
makeNewInstance(clazz = classOf[scala.collection.mutable.ListBuffer[Int]], args = List[Int](1,2,3))
However, I am getting the following exception, and I am unable to figure out what I should be passing to the apply method.
java.lang.IllegalArgumentException: argument type mismatch
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at scala.reflect.runtime.JavaMirrors$JavaMirror$JavaVanillaMethodMirror1.jinvokeraw(JavaMirrors.scala:373)
at scala.reflect.runtime.JavaMirrors$JavaMirror$JavaMethodMirror.jinvoke(JavaMirrors.scala:339)
at scala.reflect.runtime.JavaMirrors$JavaMirror$JavaVanillaMethodMirror.apply(JavaMirrors.scala:355)
at Main$$anon$1.makeNewInstance(test.scala:12)
at Main$$anon$1.<init>(test.scala:15)
at Main$.main(test.scala:1)
at Main.main(test.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at scala.reflect.internal.util.ScalaClassLoader$$anonfun$run$1.apply(ScalaClassLoader.scala:70)
at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
at scala.reflect.internal.util.ScalaClassLoader$URLClassLoader.asContext(ScalaClassLoader.scala:101)
at scala.reflect.internal.util.ScalaClassLoader$class.run(ScalaClassLoader.scala:70)
at scala.reflect.internal.util.ScalaClassLoader$URLClassLoader.run(ScalaClassLoader.scala:101)
at scala.tools.nsc.CommonRunner$class.run(ObjectRunner.scala:22)
at scala.tools.nsc.ObjectRunner$.run(ObjectRunner.scala:39)
at scala.tools.nsc.CommonRunner$class.runAndCatch(ObjectRunner.scala:29)
at scala.tools.nsc.ObjectRunner$.runAndCatch(ObjectRunner.scala:39)
at scala.tools.nsc.ScriptRunner.scala$tools$nsc$ScriptRunner$$runCompiled(ScriptRunner.scala:175)
at scala.tools.nsc.ScriptRunner$$anonfun$runScript$1.apply(ScriptRunner.scala:192)
at scala.tools.nsc.ScriptRunner$$anonfun$runScript$1.apply(ScriptRunner.scala:192)
at scala.tools.nsc.ScriptRunner$$anonfun$withCompiledScript$1$$anonfun$apply$mcZ$sp$1.apply(ScriptRunner.scala:161)
at scala.tools.nsc.ScriptRunner$$anonfun$withCompiledScript$1.apply$mcZ$sp(ScriptRunner.scala:161)
at scala.tools.nsc.ScriptRunner$$anonfun$withCompiledScript$1.apply(ScriptRunner.scala:129)
at scala.tools.nsc.ScriptRunner$$anonfun$withCompiledScript$1.apply(ScriptRunner.scala:129)
at scala.tools.nsc.util.package$.trackingThreads(package.scala:43)
at scala.tools.nsc.util.package$.waitingForThreads(package.scala:27)
at scala.tools.nsc.ScriptRunner.withCompiledScript(ScriptRunner.scala:128)
at scala.tools.nsc.ScriptRunner.runScript(ScriptRunner.scala:192)
at scala.tools.nsc.ScriptRunner.runScriptAndCatch(ScriptRunner.scala:205)
at scala.tools.nsc.MainGenericRunner.runTarget$1(MainGenericRunner.scala:67)
at scala.tools.nsc.MainGenericRunner.run$1(MainGenericRunner.scala:87)
at scala.tools.nsc.MainGenericRunner.process(MainGenericRunner.scala:98)
at scala.tools.nsc.MainGenericRunner$.main(MainGenericRunner.scala:103)
at scala.tools.nsc.MainGenericRunner.main(MainGenericRunner.scala)
Thank you in advance for any help you can offer.
I will answer my own question: check whether the apply method accepts varargs or not:
if (ctor.isVarargs) instanceMirror.reflectMethod(ctor)(args.toList).asInstanceOf[T]
else instanceMirror.reflectMethod(ctor)(args:_*).asInstanceOf[T]
Thanks to a friend who pointed me in the right direction.
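Folding that check back into the original method, a version that handles both cases might look like the following (same assumptions as the question: Scala 2.11 runtime reflection and a companion apply that takes the elements, possibly as varargs):
import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.universe._

def makeNewInstance[T <: scala.collection.Seq[_]](clazz: Class[T], args: List[_]): T = {
  val clazzMirror: ru.Mirror = ru.runtimeMirror(clazz.getClassLoader)
  val clazzSymbol = clazzMirror.classSymbol(clazz)
  val companionObject = clazzSymbol.companion.asModule
  val instanceMirror = clazzMirror reflect (clazzMirror reflectModule companionObject).instance
  val typeSignature = instanceMirror.symbol.typeSignature
  val ctor = typeSignature.member(TermName("apply")).asMethod
  // A varargs apply expects the whole sequence as a single argument; a fixed-arity
  // apply expects the arguments spliced in one by one.
  if (ctor.isVarargs) instanceMirror.reflectMethod(ctor)(args).asInstanceOf[T]
  else instanceMirror.reflectMethod(ctor)(args: _*).asInstanceOf[T]
}

makeNewInstance(clazz = classOf[scala.collection.mutable.ListBuffer[Int]], args = List[Int](1, 2, 3))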

Java List to Scala Conversion Error

I have a Java code base that returns a java.util.List, which I consume in my Scala layer as below:
import scala.collection.JavaConverters._
val myList = myServiceClient.getMyList.asScala.toList //fails here!
println(myList)
I then hit the following error:
Exception in thread "main" javax.xml.ws.soap.SOAPFaultException: scala.collection.immutable.$colon$colon cannot be cast to java.util.List
at org.apache.cxf.jaxws.JaxWsClientProxy.invoke(JaxWsClientProxy.java:161)
at com.sun.proxy.$Proxy49.getSlaveList(Unknown Source)
at Test$.main(Test.scala:35)
at Test.main(Test.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
Caused by: java.lang.ClassCastException: scala.collection.immutable.$colon$colon cannot be cast to java.util.List
at org.apache.cxf.binding.soap.SoapMessage.getHeaders(SoapMessage.java:56)
at org.apache.cxf.binding.soap.interceptor.SoapHeaderOutFilterInterceptor.handleMessage(SoapHeaderOutFilterInterceptor.java:37)
at org.apache.cxf.binding.soap.interceptor.SoapHeaderOutFilterInterceptor.handleMessage(SoapHeaderOutFilterInterceptor.java:29)
at org.apache.cxf.phase.PhaseInterceptorChain.doIntercept(PhaseInterceptorChain.java:308)
at org.apache.cxf.endpoint.ClientImpl.doInvoke(ClientImpl.java:514)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:423)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:324)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:277)
at org.apache.cxf.frontend.ClientProxy.invokeSync(ClientProxy.java:96)
at org.apache.cxf.jaxws.JaxWsClientProxy.invoke(JaxWsClientProxy.java:139)
... 8 more
It turned out the original problem was a couple of lines above what I posted in my original question: I had to do the following when passing the list of headers to the Apache CXF library:
val headerList = Seq(
  new Header(new QName("http://www.myService.com/MyServices/", "UserName"), "", new JAXBDataBinding(classOf[String])),
  new Header(new QName("http://www.myService.com/MyServices//", "Password"), "", new JAXBDataBinding(classOf[String]))
)
import scala.collection.JavaConverters._
proxy.getRequestContext.put(Header.HEADER_LIST, headerList.asJava)
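In other words, the ClassCastException was not about the asScala call that appears to throw: CXF later reads Header.HEADER_LIST back out of the request context as a java.util.List, and it had presumably been stored as a plain Scala Seq. A before/after sketch of that single line (proxy being the CXF client proxy):
// Before (presumed): stores a scala.collection.immutable.List, which CXF cannot cast
// proxy.getRequestContext.put(Header.HEADER_LIST, headerList)
// After: hand CXF a real java.util.List
proxy.getRequestContext.put(Header.HEADER_LIST, headerList.asJava)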

Dynamically loading a Scala object

I have a number of objects (not classes) that manipulate databases, and I want to make a small helper class so I can do something like java my.helper.class my.database.class and execute the run method.
For example, this compiles
trait A extends Runnable
class B extends A { def run() = println("run") }

object Test extends App {
  Class.forName(args(0)).newInstance().asInstanceOf[A].run()
}
And it does what I expect:
$scala Test B
run
This also compiles
trait A extends Runnable
object B extends A { def run() = println("run") }

object Test extends App {
  Class.forName(args(0)).newInstance().asInstanceOf[A].run()
}
But this happens:
$scala Test B
java.lang.InstantiationException: B
at java.lang.Class.newInstance(Class.java:418)
at Test$.delayedEndpoint$Test$1(Test.scala:9)
at Test$delayedInit$body.apply(Test.scala:8)
at scala.Function0$class.apply$mcV$sp(Function0.scala:40)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.collection.immutable.List.foreach(List.scala:383)
at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
at scala.App$class.main(App.scala:76)
at Test$.main(Test.scala:8)
at Test.main(Test.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:483)
at scala.reflect.internal.util.ScalaClassLoader$$anonfun$run$1.apply(ScalaClassLoader.scala:68)
at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
at scala.reflect.internal.util.ScalaClassLoader$URLClassLoader.asContext(ScalaClassLoader.scala:99)
at scala.reflect.internal.util.ScalaClassLoader$class.run(ScalaClassLoader.scala:68)
at scala.reflect.internal.util.ScalaClassLoader$URLClassLoader.run(ScalaClassLoader.scala:99)
at scala.tools.nsc.CommonRunner$class.run(ObjectRunner.scala:22)
at scala.tools.nsc.ObjectRunner$.run(ObjectRunner.scala:39)
at scala.tools.nsc.CommonRunner$class.runAndCatch(ObjectRunner.scala:29)
at scala.tools.nsc.ObjectRunner$.runAndCatch(ObjectRunner.scala:39)
at scala.tools.nsc.MainGenericRunner.runTarget$1(MainGenericRunner.scala:72)
at scala.tools.nsc.MainGenericRunner.process(MainGenericRunner.scala:94)
at scala.tools.nsc.MainGenericRunner$.main(MainGenericRunner.scala:103)
at scala.tools.nsc.MainGenericRunner.main(MainGenericRunner.scala)
Caused by: java.lang.NoSuchMethodException: B.<init>()
at java.lang.Class.getConstructor0(Class.java:2971)
at java.lang.Class.newInstance(Class.java:403)
... 28 more
Which makes sense, and I figured this would work:
$scala Test B$
java.lang.IllegalAccessException: Class Test$ can not access a member of class B$ with modifiers "private"
at sun.reflect.Reflection.ensureMemberAccess(Reflection.java:101)
at java.lang.Class.newInstance(Class.java:427)
at Test$.delayedEndpoint$Test$1(Test.scala:9)
at Test$delayedInit$body.apply(Test.scala:8)
at scala.Function0$class.apply$mcV$sp(Function0.scala:40)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.collection.immutable.List.foreach(List.scala:383)
at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
at scala.App$class.main(App.scala:76)
at Test$.main(Test.scala:8)
at Test.main(Test.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:483)
at scala.reflect.internal.util.ScalaClassLoader$$anonfun$run$1.apply(ScalaClassLoader.scala:68)
at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
at scala.reflect.internal.util.ScalaClassLoader$URLClassLoader.asContext(ScalaClassLoader.scala:99)
at scala.reflect.internal.util.ScalaClassLoader$class.run(ScalaClassLoader.scala:68)
at scala.reflect.internal.util.ScalaClassLoader$URLClassLoader.run(ScalaClassLoader.scala:99)
at scala.tools.nsc.CommonRunner$class.run(ObjectRunner.scala:22)
at scala.tools.nsc.ObjectRunner$.run(ObjectRunner.scala:39)
at scala.tools.nsc.CommonRunner$class.runAndCatch(ObjectRunner.scala:29)
at scala.tools.nsc.ObjectRunner$.runAndCatch(ObjectRunner.scala:39)
at scala.tools.nsc.MainGenericRunner.runTarget$1(MainGenericRunner.scala:72)
at scala.tools.nsc.MainGenericRunner.process(MainGenericRunner.scala:94)
at scala.tools.nsc.MainGenericRunner$.main(MainGenericRunner.scala:103)
at scala.tools.nsc.MainGenericRunner.main(MainGenericRunner.scala)
But it also fails. I know I could just turn all these static objects into classes, but that doesn't make sense in this application, so I'm specifically looking for an elegant way to do this.
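What the InstantiationException and the private-constructor IllegalAccessException are pointing at is that a Scala object has no usable constructor at all; the compiler stores its singleton instance in a static MODULE$ field of the generated B$ class. A minimal sketch of reading that field reflectively, assuming B is a top-level object on the classpath:
object Test extends App {
  // "B" on the command line -> load the generated class B$ and read its singleton.
  val moduleClass = Class.forName(args(0) + "$")
  val instance = moduleClass.getField("MODULE$").get(null)
  instance.asInstanceOf[A].run()
}
With that, scala Test B should print run for the object version as well.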
I personally think the most elegant way is not to dynamically load things like this. Is it really that difficult to specify the valid inputs explicitly? This allows much greater flexibility with respect to where your instances of A come from.
object Test extends App {
  val runnable: A = args(0) match {
    case "B" => B
    case "C" =>
      val someOtherConfig = args(1)
      new C(someOtherConfig)
    case other => throw new Exception("invalid input: " + other)
  }
  runnable.run()
}
I would use scopt to parse the parameters.
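A hedged sketch of what that could look like with scopt 3.x (the option names and the Config case class are made up for illustration; A, B and C are the trait and implementations from above):
case class Config(target: String = "", extra: Option[String] = None)

object Main extends App {
  val parser = new scopt.OptionParser[Config]("helper") {
    opt[String]('t', "target")
      .required()
      .action((value, c) => c.copy(target = value))
      .text("which runnable to execute, e.g. B or C")
    opt[String]('x', "extra")
      .action((value, c) => c.copy(extra = Some(value)))
      .text("extra parameter passed to C")
  }

  parser.parse(args, Config()).foreach { config =>
    val runnable: A = config.target match {
      case "B"   => B
      case "C"   => new C(config.extra.getOrElse(""))
      case other => throw new IllegalArgumentException(s"invalid input: $other")
    }
    runnable.run()
  }
}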