scapegoat: [error] java.lang.NoSuchMethodError: scala.tools.nsc.Global.reporter()Lscala/tools/nsc/reporters/Reporter; - scala

I am trying to use Scapegoat with Scala 2.13.1.
I have added the sbt-scapegoat plugin in project/plugin.sbt:
addSbtPlugin("com.sksamuel.scapegoat" %% "sbt-scapegoat" % "1.1.0")
Here is my build.sbt:
lazy val scala212 = "2.12.8"
lazy val scala213 = "2.13.1"
scalaVersion in ThisBuild := scala213
lazy val supportedScalaVersions = List(scala213, scala212)
crossScalaVersions := supportedScalaVersions
scapegoatVersion in ThisBuild := "1.3.11"
libraryDependencies := {
  val VersionSchema = """(\d+)\.(\d+)\.(\d+)""".r
  scalaVersion.value match {
    case VersionSchema("2", "12", "8") =>
      libraryDependencies.value ++ Seq(
        "ch.qos.logback" % "logback-classic" % "1.2.3",
        "org.scalatest" %% "scalatest" % "3.1.0" % Test,
        "io.gatling.highcharts" % "gatling-charts-highcharts" % "3.3.1" % "test",
        "io.gatling" % "gatling-test-framework" % "3.3.1" % "test")
    case VersionSchema("2", "13", "1") =>
      libraryDependencies.value ++ Seq(
        "co.pragmati" %% "swagger-ui-akka-http" % "1.3.0",
        "com.sksamuel.scapegoat" %% "scalac-scapegoat-plugin" % "1.3.11"
      )
  }
}
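As an aside, sbt already ships a helper for branching on the Scala binary version, which avoids matching the full version string with a hand-written regex. A minimal sketch of the same dependency split (lists abbreviated, not a drop-in replacement):

libraryDependencies ++= {
  // CrossVersion.partialVersion("2.13.1") == Some((2, 13))
  CrossVersion.partialVersion(scalaVersion.value) match {
    case Some((2, 12)) => Seq("org.scalatest" %% "scalatest" % "3.1.0" % Test)
    case Some((2, 13)) => Seq("co.pragmati" %% "swagger-ui-akka-http" % "1.3.0")
    case _             => Seq.empty
  }
}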
When I run the scapegoat command in sbt, I get the following exception:
[error] ## Exception when compiling 46 sources to /home/sara/git/gitinterpret/interpret/target/scala-2.13/scapegoat-classes
[error] scala.tools.nsc.Global.reporter()Lscala/tools/nsc/reporters/Reporter;
[error] com.sksamuel.scapegoat.ScapegoatComponent$$anon$1.run(plugin.scala:172)
[error] scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1506)
[error] scala.tools.nsc.Global$Run.compileUnits(Global.scala:1490)
[error] scala.tools.nsc.Global$Run.compileSources(Global.scala:1482)
[error] scala.tools.nsc.Global$Run.compile(Global.scala:1614)
[error] xsbt.CachedCompiler0.run(CompilerInterface.scala:130)
[error] xsbt.CachedCompiler0.run(CompilerInterface.scala:105)
[error] xsbt.CompilerInterface.run(CompilerInterface.scala:31)
[error] java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[error] java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[error] java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[error] java.base/java.lang.reflect.Method.invoke(Method.java:566)
[error] sbt.internal.inc.AnalyzingCompiler.call(AnalyzingCompiler.scala:237)
[error] sbt.internal.inc.AnalyzingCompiler.compile(AnalyzingCompiler.scala:111)
[error] sbt.internal.inc.AnalyzingCompiler.compile(AnalyzingCompiler.scala:90)
[error] sbt.internal.inc.MixedAnalyzingCompiler.$anonfun$compile$3(MixedAnalyzingCompiler.scala:82)
[error] scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
[error] sbt.internal.inc.MixedAnalyzingCompiler.timed(MixedAnalyzingCompiler.scala:133)
[error] sbt.internal.inc.MixedAnalyzingCompiler.compileScala$1(MixedAnalyzingCompiler.scala:73)
[error] sbt.internal.inc.MixedAnalyzingCompiler.compile(MixedAnalyzingCompiler.scala:116)
[error] sbt.internal.inc.IncrementalCompilerImpl.$anonfun$compileInternal$1(IncrementalCompilerImpl.scala:307)
[error] sbt.internal.inc.IncrementalCompilerImpl.$anonfun$compileInternal$1$adapted(IncrementalCompilerImpl.scala:307)
[error] sbt.internal.inc.Incremental$.doCompile(Incremental.scala:106)
[error] sbt.internal.inc.Incremental$.$anonfun$compile$4(Incremental.scala:87)
[error] sbt.internal.inc.IncrementalCommon.recompileClasses(IncrementalCommon.scala:116)
[error] sbt.internal.inc.IncrementalCommon.cycle(IncrementalCommon.scala:63)
[error] sbt.internal.inc.Incremental$.$anonfun$compile$3(Incremental.scala:89)
[error] sbt.internal.inc.Incremental$.manageClassfiles(Incremental.scala:134)
[error] sbt.internal.inc.Incremental$.compile(Incremental.scala:80)
[error] sbt.internal.inc.IncrementalCompile$.apply(Compile.scala:67)
[error] sbt.internal.inc.IncrementalCompilerImpl.compileInternal(IncrementalCompilerImpl.scala:311)
[error] sbt.internal.inc.IncrementalCompilerImpl.$anonfun$compileIncrementally$1(IncrementalCompilerImpl.scala:269)
[error] sbt.internal.inc.IncrementalCompilerImpl.handleCompilationError(IncrementalCompilerImpl.scala:159)
[error] sbt.internal.inc.IncrementalCompilerImpl.compileIncrementally(IncrementalCompilerImpl.scala:238)
[error] sbt.internal.inc.IncrementalCompilerImpl.compile(IncrementalCompilerImpl.scala:69)
[error] sbt.Defaults$.compileIncrementalTaskImpl(Defaults.scala:1549)
[error] sbt.Defaults$.$anonfun$compileIncrementalTask$1(Defaults.scala:1523)
[error] scala.Function1.$anonfun$compose$1(Function1.scala:44)
[error] sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:40)
[error] sbt.std.Transform$$anon$4.work(System.scala:67)
[error] sbt.Execute.$anonfun$submit$2(Execute.scala:269)
[error] sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:16)
[error] sbt.Execute.work(Execute.scala:278)
[error] sbt.Execute.$anonfun$submit$1(Execute.scala:269)
[error] sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:178)
[error] sbt.CompletionService$$anon$2.call(CompletionService.scala:37)
[error] java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[error] java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
[error] java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[error] java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
[error] java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
[error] java.base/java.lang.Thread.run(Thread.java:834)

Related

scala spark rdd error : java.lang.ClassCastException: cannot assign instance of java.lang.invoke.SerializedLambda

I am a beginner with Scala and Spark.
Scala version: 2.12.10
Spark version: 3.0.1
I'm trying a very simple Spark RDD function in Scala, but I get an error.
(1) build.sbt
scalaVersion := "2.12.10"
name := "hello-world"
organization := "ch.epfl.scala"
version := "1.0"
libraryDependencies += "org.scala-lang.modules" %% "scala-parser-combinators" % "1.1.2"
libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.0.1"
libraryDependencies += "org.apache.spark" %% "spark-core" % "3.0.1"
(2) Main.scala
import org.apache.spark.sql.SparkSession

object Main extends App {
  println("Hello, World!")
  implicit val spark = SparkSession.builder()
    .master("spark://centos-master:7077")
    // .master("local[*]")
    .appName("spark-api")
    .getOrCreate()
  val inputrdd = spark.sparkContext.parallelize(Seq(("arth", 10), ("arth", 20), ("samuel", 60), ("jack", 65)))
  println("inputrdd : ", inputrdd)
  val mapped = inputrdd.mapValues(x => (x, 1))
  println("mapped : ", mapped)
  mapped.collect.foreach(println)
}
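For reference, on a working setup the final mapped.collect.foreach(println) would print the four pairs produced by mapValues: (arth,(10,1)), (arth,(20,1)), (samuel,(60,1)), (jack,(65,1)).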
(3) When the error occurred
It seems that an error occurs in the mapped.collect.foreach(println) part.
(4) Error content
21/04/17 20:54:19 INFO DAGScheduler: Job 0 failed: collect at Main.scala:16, took 6.083947 s
21/04/17 20:54:19 INFO TaskSetManager: Lost task 1.3 in stage 0.0 (TID 6) on 192.168.0.220, executor 0: java.lang.ClassCastException (cannot assign instance of java.lang.invoke.SerializedLambda to field org.apache.spark.rdd.MapPartitionsRDD.f of type scala.Function3 in instance of org.apache.spark.rdd.MapPartitionsRDD) [duplicate 7]
21/04/17 20:54:19 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
[error] org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 4 times, most recent failure: Lost task 0.3 in stage 0.0 (TID 7, 192.168.0.220, executor 0): java.lang.ClassCastException: cannot assign instance of java.lang.invoke.SerializedLambda to field org.apache.spark.rdd.MapPartitionsRDD.f of type scala.Function3 in instance of org.apache.spark.rdd.MapPartitionsRDD
[error] at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2301)
[error] at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1431)
[error] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2410)
[error] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2328)
[error] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2186)
[error] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1666)
[error] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2404)
[error] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2328)
[error] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2186)
[error] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1666)
[error] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:502)
[error] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:460)
[error] at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:76)
[error] at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:115)
[error] at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:83)
[error] at org.apache.spark.scheduler.Task.run(Task.scala:127)
[error] at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:446)
[error] at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
[error] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:449)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error]
[error] Driver stacktrace:
[error] at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2059)
[error] at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2008)
[error] at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2007)
[error] at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
[error] at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
[error] at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
[error] at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2007)
[error] at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:973)
[error] at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:973)
[error] at scala.Option.foreach(Option.scala:407)
[error] at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:973)
[error] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2239)
[error] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2188)
[error] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2177)
[error] at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
[error] at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:775)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:2099)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:2120)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:2139)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:2164)
[error] at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1004)
[error] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[error] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[error] at org.apache.spark.rdd.RDD.withScope(RDD.scala:388)
[error] at org.apache.spark.rdd.RDD.collect(RDD.scala:1003)
[error] at Main$.delayedEndpoint$Main$1(Main.scala:16)
[error] at Main$delayedInit$body.apply(Main.scala:2)
[error] at scala.Function0.apply$mcV$sp(Function0.scala:39)
[error] at scala.Function0.apply$mcV$sp$(Function0.scala:39)
[error] at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:17)
[error] at scala.App.$anonfun$main$1$adapted(App.scala:80)
[error] at scala.collection.immutable.List.foreach(List.scala:392)
[error] at scala.App.main(App.scala:80)
[error] at scala.App.main$(App.scala:78)
[error] at Main$.main(Main.scala:2)
[error] at Main.main(Main.scala)
[error] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[error] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[error] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[error] at java.lang.reflect.Method.invoke(Method.java:498)
[error] Caused by: java.lang.ClassCastException: cannot assign instance of java.lang.invoke.SerializedLambda to field org.apache.spark.rdd.MapPartitionsRDD.f of type scala.Function3 in instance of org.apache.spark.rdd.MapPartitionsRDD
[error] at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2301)
[error] at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1431)
[error] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2410)
[error] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2328)
[error] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2186)
[error] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1666)
[error] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2404)
[error] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2328)
[error] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2186)
[error] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1666)
[error] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:502)
[error] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:460)
[error] at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:76)
[error] at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:115)
[error] at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:83)
[error] at org.apache.spark.scheduler.Task.run(Task.scala:127)
[error] at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:446)
[error] at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
[error] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:449)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
Do I need more libraries, or is the code wrong? (It works fine in spark-shell.)
How can I solve it?
You need to submit your jars to Spark so that your code can run there; spark-shell hides all of this from you behind the scenes.
This answer provides better detail, with background: https://stackoverflow.com/a/28367602/1810962
As a workaround you can use bin/spark-submit and provide your local classpath using --class, --jars, and --driver-class-path, or ship the jar from the driver as sketched below.
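A minimal sketch of the second option, assuming the jar that sbt package produces for the build.sbt above (the path is derived from name/version/scalaVersion and is illustrative, not taken from the question):

import org.apache.spark.sql.SparkSession

implicit val spark = SparkSession.builder()
  .master("spark://centos-master:7077")
  .appName("spark-api")
  // assumed artifact location after running `sbt package`
  .config("spark.jars", "target/scala-2.12/hello-world_2.12-1.0.jar")
  .getOrCreate()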

org.scalajs.testing.adapter.JSEnvRPC$RunTerminatedException when trying to use scalatest with scalajs

I am trying to use ScalaTest for testing Scala.js, but it is throwing exceptions and I am not sure what the issue could be. Any suggestions, please? TIA.
Another thing I would like to mention: the following exception was thrown even without any test file present.
Build.sbt
ThisBuild / scalaVersion := "2.12.10"
lazy val root = project.in(file(".")).aggregate(parser.js, parser.jvm).
  settings(
    publish := {},
    publishLocal := {},
  )
lazy val parser = crossProject(JSPlatform, JVMPlatform).in(file(".")).
  settings(
    name := "rules-parser",
    version := "0.1",
    libraryDependencies ++= Seq(
      "com.lihaoyi" %%% "fastparse" % "2.2.4",
      "com.chuusai" %%% "shapeless" % "2.4.0-M1",
      "org.scalactic" %%% "scalactic" % "3.2.2",
      "org.scalatest" %%% "scalatest" % "3.3.0-SNAP2" % Test
    )
  ).
  jsSettings(
    // scalaJSUseMainModuleInitializer := true,
    libraryDependencies ++= Seq(
      "org.scala-js" %%% "scalajs-dom" % "1.1.0",
      "com.lihaoyi" %%% "scalatags" % "0.9.2"
    )
  ).
  jvmSettings(
    libraryDependencies ++= Seq(
      "org.scala-js" %% "scalajs-stubs" % "1.0.0" % "provided"
    )
  )
js/test/scala/example/ExampleSpec.scala
package example

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.must.Matchers

// Note: ScalaTest discovers suites declared as classes; an object is not picked up.
class ExampleSpec extends AnyFlatSpec with Matchers {
  "Simple test" should "pass" in {
    assert(1 == 1)
  }
}
Exception
sbt:root> last parserJS / Test / loadedTestFrameworks
[debug] Starting process: node
[error] org.scalajs.testing.common.RPCCore$ClosedException: org.scalajs.testing.adapter.JSEnvRPC$RunTerminatedException
[error] at org.scalajs.testing.common.RPCCore.helpClose(RPCCore.scala:223)
[error] at org.scalajs.testing.common.RPCCore.call(RPCCore.scala:164)
[error] at org.scalajs.testing.adapter.TestAdapter.loadFrameworks(TestAdapter.scala:57)
[error] at org.scalajs.sbtplugin.ScalaJSPluginInternal$.$anonfun$scalaJSTestSettings$4(ScalaJSPluginInternal.scala:597)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:68)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:282)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:23)
[error] at sbt.Execute.work(Execute.scala:291)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:282)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:265)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:64)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] Caused by: org.scalajs.testing.adapter.JSEnvRPC$RunTerminatedException
[error] at org.scalajs.testing.adapter.JSEnvRPC.$anonfun$new$1(JSEnvRPC.scala:38)
[error] at org.scalajs.testing.adapter.JSEnvRPC.$anonfun$new$1$adapted(JSEnvRPC.scala:38)
[error] at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
[error] at java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1402)
[error] at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
[error] at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
[error] at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
[error] at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
[error] Caused by: java.io.IOException: Cannot run program "node": error=2, No such file or directory
[error] at java.lang.ProcessBuilder.start(ProcessBuilder.java:1048)
[error] at org.scalajs.jsenv.ExternalJSRun$.startProcess(ExternalJSRun.scala:143)
[error] at org.scalajs.jsenv.ExternalJSRun$.start(ExternalJSRun.scala:40)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.internalStart(NodeJSEnv.scala:63)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.$anonfun$startWithCom$1(NodeJSEnv.scala:47)
[error] at org.scalajs.jsenv.nodejs.ComRun$.start(ComSupport.scala:214)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.startWithCom(NodeJSEnv.scala:46)
[error] at org.scalajs.testing.adapter.JSEnvRPC.<init>(JSEnvRPC.scala:25)
[error] at org.scalajs.testing.adapter.TestAdapter.startManagedRunner(TestAdapter.scala:129)
[error] at org.scalajs.testing.adapter.TestAdapter.$anonfun$getRunnerForThread$1(TestAdapter.scala:120)
[error] at scala.collection.concurrent.TrieMap.getOrElseUpdate(TrieMap.scala:897)
[error] at org.scalajs.testing.adapter.TestAdapter.getRunnerForThread(TestAdapter.scala:120)
[error] at org.scalajs.testing.adapter.TestAdapter.loadFrameworks(TestAdapter.scala:56)
[error] at org.scalajs.sbtplugin.ScalaJSPluginInternal$.$anonfun$scalaJSTestSettings$4(ScalaJSPluginInternal.scala:597)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:68)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:282)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:23)
[error] at sbt.Execute.work(Execute.scala:291)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:282)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:265)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:64)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] Caused by: java.io.IOException: error=2, No such file or directory
[error] at java.lang.UNIXProcess.forkAndExec(Native Method)
[error] at java.lang.UNIXProcess.<init>(UNIXProcess.java:247)
[error] at java.lang.ProcessImpl.start(ProcessImpl.java:134)
[error] at java.lang.ProcessBuilder.start(ProcessBuilder.java:1029)
[error] at org.scalajs.jsenv.ExternalJSRun$.startProcess(ExternalJSRun.scala:143)
[error] at org.scalajs.jsenv.ExternalJSRun$.start(ExternalJSRun.scala:40)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.internalStart(NodeJSEnv.scala:63)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.$anonfun$startWithCom$1(NodeJSEnv.scala:47)
[error] at org.scalajs.jsenv.nodejs.ComRun$.start(ComSupport.scala:214)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.startWithCom(NodeJSEnv.scala:46)
[error] at org.scalajs.testing.adapter.JSEnvRPC.<init>(JSEnvRPC.scala:25)
[error] at org.scalajs.testing.adapter.TestAdapter.startManagedRunner(TestAdapter.scala:129)
[error] at org.scalajs.testing.adapter.TestAdapter.$anonfun$getRunnerForThread$1(TestAdapter.scala:120)
[error] at scala.collection.concurrent.TrieMap.getOrElseUpdate(TrieMap.scala:897)
[error] at org.scalajs.testing.adapter.TestAdapter.getRunnerForThread(TestAdapter.scala:120)
[error] at org.scalajs.testing.adapter.TestAdapter.loadFrameworks(TestAdapter.scala:56)
[error] at org.scalajs.sbtplugin.ScalaJSPluginInternal$.$anonfun$scalaJSTestSettings$4(ScalaJSPluginInternal.scala:597)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:68)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:282)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:23)
[error] at sbt.Execute.work(Execute.scala:291)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:282)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:265)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:64)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] (parserJS / Test / loadedTestFrameworks) org.scalajs.testing.common.RPCCore$ClosedException: org.scalajs.testing.adapter.JSEnvRPC$RunTerminatedException
From this: Cannot run program "node": error=2, No such file or directory
It looks like you need to install Node.js. If it's already installed, make sure it is on your PATH.
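If node is installed but not visible on the PATH that sbt inherits, Scala.js can also be pointed at the executable explicitly. A sketch assuming Scala.js 1.x and a hypothetical install location:

// in the JS project's settings; the path below is a placeholder
jsEnv := new org.scalajs.jsenv.nodejs.NodeJSEnv(
  org.scalajs.jsenv.nodejs.NodeJSEnv.Config()
    .withExecutable("/usr/local/bin/node")
)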

SBT publish is not uploading jar file to artifactory (publish) java.io.IOException: PUT operation to URL failed with status code 400: Bad Request

I have the below build.sbt file:
name := "tads-akka-cluster-events"
organization := "technorati"
version := "0.0.3"
scalaVersion := "2.11.12"
crossScalaVersions := Seq("2.12.9", "2.13.0")
publishMavenStyle := true
PB.targets in Compile := Seq(
  scalapb.gen() -> (sourceManaged in Compile).value
)
libraryDependencies += "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf"
credentials += Credentials("Artifactory Realm", "artifactory.svcs.opal.synacor.com", "admin", "password")
publishTo := {
  val nexus = "http://artifactory.svcs.opal.synacor.com/"
  if (isSnapshot.value)
    Some("repository.synacor.com-snapshots" at nexus + "artifactory/synacor-local")
  else
    Some("repository.synacor.com-releases" at nexus + "artifactory/synacor-local")
}
When I ran sbt publish, I got the below error:
sbt:tads-akka-cluster-events> publish
[info] Packaging /Users/rajkumar.natarajan/Documents/Coding/misc/tads-akka-cluster-events/target/scala-2.11/tads-akka-cluster-events_2.11-0.0.3-sources.jar ...
[info] Done packaging.
[info] Wrote /Users/rajkumar.natarajan/Documents/Coding/misc/tads-akka-cluster-events/target/scala-2.11/tads-akka-cluster-events_2.11-0.0.3.pom
[info] Updating ...
[info] Done updating.
[info] Packaging /Users/rajkumar.natarajan/Documents/Coding/misc/tads-akka-cluster-events/target/scala-2.11/tads-akka-cluster-events_2.11-0.0.3.jar ...
[info] Packaging /Users/rajkumar.natarajan/Documents/Coding/misc/tads-akka-cluster-events/target/scala-2.11/tads-akka-cluster-events_2.11-0.0.3-javadoc.jar ...
[info] Done packaging.
[info] Done packaging.
[info] published tads-akka-cluster-events_2.11 to http://artifactory.svcs.opal.synacor.com/artifactory/synacor-local/technorati/tads-akka-cluster-events_2.11/0.0.3/tads-akka-cluster-events_2.11-0.0.3.pom
[error] java.io.IOException: PUT operation to URL http://artifactory.svcs.opal.synacor.com/artifactory/synacor-local/technorati/tads-akka-cluster-events_2.11/0.0.3/tads-akka-cluster-events_2.11-0.0.3.jar failed with status code 400: Bad Request
[error] at org.apache.ivy.util.url.AbstractURLHandler.validatePutStatusCode(AbstractURLHandler.java:82)
[error] at sbt.internal.librarymanagement.ivyint.GigahorseUrlHandler.upload(GigahorseUrlHandler.scala:191)
[error] at org.apache.ivy.util.url.URLHandlerDispatcher.upload(URLHandlerDispatcher.java:82)
[error] at org.apache.ivy.util.FileUtil.copy(FileUtil.java:150)
[error] at org.apache.ivy.plugins.repository.url.URLRepository.put(URLRepository.java:84)
[error] at sbt.internal.librarymanagement.ConvertResolver$LocalIfFileRepo.put(ConvertResolver.scala:366)
[error] at org.apache.ivy.plugins.repository.AbstractRepository.put(AbstractRepository.java:130)
[error] at sbt.internal.librarymanagement.ConvertResolver$ChecksumFriendlyURLResolver.put(ConvertResolver.scala:118)
[error] at sbt.internal.librarymanagement.ConvertResolver$ChecksumFriendlyURLResolver.put$(ConvertResolver.scala:105)
[error] at sbt.internal.librarymanagement.ConvertResolver$$anonfun$defaultConvert$lzycompute$1$PluginCapableResolver$1.put(ConvertResolver.scala:165)
[error] at org.apache.ivy.plugins.resolver.RepositoryResolver.publish(RepositoryResolver.java:216)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$publish$5(IvyActions.scala:497)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$publish$5$adapted(IvyActions.scala:496)
[error] at scala.collection.TraversableLike$WithFilter.$anonfun$foreach$1(TraversableLike.scala:788)
[error] at scala.collection.Iterator.foreach(Iterator.scala:937)
[error] at scala.collection.Iterator.foreach$(Iterator.scala:937)
[error] at scala.collection.AbstractIterator.foreach(Iterator.scala:1425)
[error] at scala.collection.IterableLike.foreach(IterableLike.scala:70)
[error] at scala.collection.IterableLike.foreach$(IterableLike.scala:69)
[error] at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
[error] at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:787)
[error] at sbt.internal.librarymanagement.IvyActions$.publish(IvyActions.scala:496)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$publish$3(IvyActions.scala:144)
[error] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
[error] at sbt.internal.librarymanagement.IvyActions$.withChecksums(IvyActions.scala:157)
[error] at sbt.internal.librarymanagement.IvyActions$.withChecksums(IvyActions.scala:151)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$publish$1(IvyActions.scala:144)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$publish$1$adapted(IvyActions.scala:134)
[error] at sbt.internal.librarymanagement.IvySbt$Module.$anonfun$withModule$1(Ivy.scala:239)
[error] at sbt.internal.librarymanagement.IvySbt.$anonfun$withIvy$1(Ivy.scala:204)
[error] at sbt.internal.librarymanagement.IvySbt.sbt$internal$librarymanagement$IvySbt$$action$1(Ivy.scala:70)
[error] at sbt.internal.librarymanagement.IvySbt$$anon$3.call(Ivy.scala:77)
[error] at xsbt.boot.Locks$GlobalLock.withChannel$1(Locks.scala:95)
[error] at xsbt.boot.Locks$GlobalLock.xsbt$boot$Locks$GlobalLock$$withChannelRetries$1(Locks.scala:80)
[error] at xsbt.boot.Locks$GlobalLock$$anonfun$withFileLock$1.apply(Locks.scala:99)
[error] at xsbt.boot.Using$.withResource(Using.scala:10)
[error] at xsbt.boot.Using$.apply(Using.scala:9)
[error] at xsbt.boot.Locks$GlobalLock.ignoringDeadlockAvoided(Locks.scala:60)
[error] at xsbt.boot.Locks$GlobalLock.withLock(Locks.scala:50)
[error] at xsbt.boot.Locks$.apply0(Locks.scala:31)
[error] at xsbt.boot.Locks$.apply(Locks.scala:28)
[error] at sbt.internal.librarymanagement.IvySbt.withDefaultLogger(Ivy.scala:77)
[error] at sbt.internal.librarymanagement.IvySbt.withIvy(Ivy.scala:199)
[error] at sbt.internal.librarymanagement.IvySbt.withIvy(Ivy.scala:196)
[error] at sbt.internal.librarymanagement.IvySbt$Module.withModule(Ivy.scala:238)
[error] at sbt.internal.librarymanagement.IvyActions$.publish(IvyActions.scala:134)
[error] at sbt.Classpaths$.$anonfun$publishTask$4(Defaults.scala:2416)
[error] at sbt.Classpaths$.$anonfun$publishTask$4$adapted(Defaults.scala:2416)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:44)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:40)
[error] at sbt.std.Transform$$anon$4.work(System.scala:67)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:269)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:16)
[error] at sbt.Execute.work(Execute.scala:278)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:269)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:178)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:37)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] (publish) java.io.IOException: PUT operation to URL http://artifactory.svcs.opal.synacor.com/artifactory/synacor-local/technorati/tads-akka-cluster-events_2.11/0.0.3/tads-akka-cluster-events_2.11-0.0.3.jar failed with status code 400: Bad Request
[error] Total time: 2 s, completed Sep 3, 2019 4:12:46 PM
sbt:tads-akka-cluster-events>
When I checked the repository, I see the pom, pom.md5 and pom.sha1, but it is missing the jar, jar.md5 and jar.sha1.
Why am I getting this error? What am I missing, and how can I fix this issue?
I had to add the setting updateOptions := updateOptions.value.withGigahorse(false), or run sbt like sbt -Dsbt.gigahorse=false clean publish.
For more details please see this GitHub sbt issue.
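For placement, a minimal sketch of the first variant in build.sbt (it only swaps the Gigahorse HTTP backend for the plain Ivy URL handler; nothing else changes):

// build.sbt
updateOptions := updateOptions.value.withGigahorse(false)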

Why is sbt failing to find these jackson dependency

I'm trying to add the dependencies to support java.time for the Jackson library. Those dependencies are:
val jacksonVersion = "2.9.7"
libraryDependencies += "com.fasterxml.jackson.module" % "jackson-module-parameter-names" % jacksonVersion
libraryDependencies += "com.fasterxml.jackson.module" % "jackson-datatype-jdk8" % jacksonVersion
libraryDependencies += "com.fasterxml.jackson.module" % "jackson-datatype-jsr310" % jacksonVersion
as specified here
The problem is that sbt can't find them. Based on the Maven webpage here, they should be available at Maven Central and JCenter, so I added the resolvers to my dependencies:
resolvers += Resolver.JCenterRepository
resolvers += Resolver.DefaultMavenRepository
I know one should be enough, but they don't seem to work. The URLs that they resolve to are:
[warn] ==== jcenter: tried
[warn] https://jcenter.bintray.com/com/fasterxml/jackson/module/jackson-datatype-jsr310/2.9.7/jackson-datatype-jsr310-2.9.7.pom
[warn] ==== public: tried
[warn] https://repo1.maven.org/maven2/com/fasterxml/jackson/module/jackson-datatype-jsr310/2.9.7/jackson-datatype-jsr310-2.9.7.pom
but apparently they are not there. I don't know what I'm missing...
This is the error for the jackson-datatype-jdk8 one.
[warn] Note: Unresolved dependencies path:
[warn] com.fasterxml.jackson.module:jackson-datatype-jdk8:2.9.7 (/Users/overflow/MyDocuments/JaviOverflowDrive/_workspaces/RandomPlays/eurostar/pipeline/build.sbt#L15-16)
[warn] +- default:pipeline_2.12:0.1
[warn] com.fasterxml.jackson.module:jackson-datatype-jsr310:2.9.7 (/Users/overflow/MyDocuments/JaviOverflowDrive/_workspaces/RandomPlays/eurostar/pipeline/build.sbt#L16-17)
[warn] +- default:pipeline_2.12:0.1
[error] sbt.librarymanagement.ResolveException: unresolved dependency: com.fasterxml.jackson.module#jackson-datatype-jdk8;2.9.7: not found
[error] unresolved dependency: com.fasterxml.jackson.module#jackson-datatype-jsr310;2.9.7: not found
[error] at sbt.internal.librarymanagement.IvyActions$.resolveAndRetrieve(IvyActions.scala:332)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$updateEither$1(IvyActions.scala:208)
[error] at sbt.internal.librarymanagement.IvySbt$Module.$anonfun$withModule$1(Ivy.scala:239)
[error] at sbt.internal.librarymanagement.IvySbt.$anonfun$withIvy$1(Ivy.scala:204)
[error] at sbt.internal.librarymanagement.IvySbt.sbt$internal$librarymanagement$IvySbt$$action$1(Ivy.scala:70)
[error] at sbt.internal.librarymanagement.IvySbt$$anon$3.call(Ivy.scala:77)
[error] at xsbt.boot.Locks$GlobalLock.withChannel$1(Locks.scala:95)
[error] at xsbt.boot.Locks$GlobalLock.xsbt$boot$Locks$GlobalLock$$withChannelRetries$1(Locks.scala:80)
[error] at xsbt.boot.Locks$GlobalLock$$anonfun$withFileLock$1.apply(Locks.scala:99)
[error] at xsbt.boot.Using$.withResource(Using.scala:10)
[error] at xsbt.boot.Using$.apply(Using.scala:9)
[error] at xsbt.boot.Locks$GlobalLock.ignoringDeadlockAvoided(Locks.scala:60)
[error] at xsbt.boot.Locks$GlobalLock.withLock(Locks.scala:50)
[error] at xsbt.boot.Locks$.apply0(Locks.scala:31)
[error] at xsbt.boot.Locks$.apply(Locks.scala:28)
[error] at sbt.internal.librarymanagement.IvySbt.withDefaultLogger(Ivy.scala:77)
[error] at sbt.internal.librarymanagement.IvySbt.withIvy(Ivy.scala:199)
[error] at sbt.internal.librarymanagement.IvySbt.withIvy(Ivy.scala:196)
[error] at sbt.internal.librarymanagement.IvySbt$Module.withModule(Ivy.scala:238)
[error] at sbt.internal.librarymanagement.IvyActions$.updateEither(IvyActions.scala:193)
[error] at sbt.librarymanagement.ivy.IvyDependencyResolution.update(IvyDependencyResolution.scala:20)
[error] at sbt.librarymanagement.DependencyResolution.update(DependencyResolution.scala:56)
[error] at sbt.internal.LibraryManagement$.resolve$1(LibraryManagement.scala:45)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$12(LibraryManagement.scala:93)
[error] at sbt.util.Tracked$.$anonfun$lastOutput$1(Tracked.scala:68)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$19(LibraryManagement.scala:106)
[error] at scala.util.control.Exception$Catch.apply(Exception.scala:224)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$11(LibraryManagement.scala:106)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$11$adapted(LibraryManagement.scala:89)
[error] at sbt.util.Tracked$.$anonfun$inputChanged$1(Tracked.scala:149)
[error] at sbt.internal.LibraryManagement$.cachedUpdate(LibraryManagement.scala:120)
[error] at sbt.Classpaths$.$anonfun$updateTask$5(Defaults.scala:2561)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:44)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:40)
[error] at sbt.std.Transform$$anon$4.work(System.scala:67)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:269)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:16)
[error] at sbt.Execute.work(Execute.scala:278)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:269)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:178)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:37)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] (update) sbt.librarymanagement.ResolveException: unresolved dependency: com.fasterxml.jackson.module#jackson-datatype-jdk8;2.9.7: not found
[error] unresolved dependency: com.fasterxml.jackson.module#jackson-datatype-jsr310;2.9.7: not found
[error] (ssExtractDependencies) sbt.librarymanagement.ResolveException: unresolved dependency: com.fasterxml.jackson.module#jackson-datatype-jdk8;2.9.7: not found
[error] unresolved dependency: com.fasterxml.jackson.module#jackson-datatype-jsr310;2.9.7: not found
Any idea why the dependencies are not there?
According to the link you provided, your last two dependencies should use the datatype group instead of module:
val jacksonVersion = "2.9.7"
libraryDependencies += "com.fasterxml.jackson.module" % "jackson-module-parameter-names" % jacksonVersion
libraryDependencies += "com.fasterxml.jackson.datatype" % "jackson-datatype-jdk8" % jacksonVersion
libraryDependencies += "com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310" % jacksonVersion
BTW, there is already a newer version, 2.9.8; take a look here.
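As a hedged aside: the modules above are plain Java artifacts, hence the single %. Jackson's Scala-specific module, by contrast, is cross-built per Scala version and uses %%:

// cross-built, so %% appends the Scala binary version suffix
libraryDependencies += "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion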

sbt error ClassNotFoundException in WordCount

It might be a newbie question, but I'm trying to learn more about Scala in IntelliJ. I first created a simple HelloWorld project, which worked fine. Next I followed a tutorial to create a WordCount project, but I'm getting an exception that I can't figure out.
The error i get is:
Error: Error while importing SBT project:
...
[error] at sbt.MainLoop$.$anonfun$runWithNewLog$1(MainLoop.scala:107)
[error] at sbt.io.Using.apply(Using.scala:22)
[error] at sbt.MainLoop$.runWithNewLog(MainLoop.scala:101)
[error] at sbt.MainLoop$.runAndClearLast(MainLoop.scala:57)
[error] at sbt.MainLoop$.runLoggedLoop(MainLoop.scala:42)
[error] at sbt.MainLoop$.runLogged(MainLoop.scala:34)
[error] at sbt.StandardMain$.runManaged(Main.scala:113)
[error] at sbt.xMain.run(Main.scala:76)
[error] at xsbt.boot.Launch$$anonfun$run$1.apply(Launch.scala:109)
[error] at xsbt.boot.Launch$.withContextLoader(Launch.scala:128)
[error] at xsbt.boot.Launch$.run(Launch.scala:109)
[error] at xsbt.boot.Launch$$anonfun$apply$1.apply(Launch.scala:35)
[error] at xsbt.boot.Launch$.launch(Launch.scala:117)
[error] at xsbt.boot.Launch$.apply(Launch.scala:18)
[error] at xsbt.boot.Boot$.runImpl(Boot.scala:41)
[error] at xsbt.boot.Boot$.main(Boot.scala:17)
[error] at xsbt.boot.Boot.main(Boot.scala)
[error] java.lang.ClassNotFoundException: org.jetbrains.sbt.CreateTasks$
[error] Use 'last' for the full log.
[info] shutting down server
My build.sbt file is:
name := "Scalaprogramms"
version := "1.0"
scalaVersion:="2.11.8"
libraryDependencies += "org.apache.spark" %% "spark-core" % "2.2.0"
libraryDependencies += "org.apache.spark" %% "spark-streaming" % "2.2.0" % "provided"
My java version is 8
Finally, the last log file is:
OpenJDK 64-Bit Server VM warning: ignoring option MaxPermSize=384M; support was removed in 8.0
[info] Loading settings from plugins.sbt ...
[info] Loading project definition from /home/user/IdeaProjects/Scalaprogramms/project
[info] Loading settings from build.sbt ...
[info] Set current project to Scalaprogramms (in build file:/home/user/IdeaProjects/Scalaprogramms/)
[info] sbt server started at local:///home/user/.sbt/1.0/server/fc2d3f3386938f38b259/sock
sbt:Scalaprogramms>
[info] Defining Global / sbtStructureOptions, Global / sbtStructureOutputFile, shellPrompt
[info] The new values will be used by no settings or tasks.
[info] Reapplying settings...
[info] Set current project to Scalaprogramms (in build file:/home/user/IdeaProjects/Scalaprogramms/)
[info] Applying State transformations org.jetbrains.sbt.CreateTasks from /home/user/.IdeaIC2017.2/config/plugins/Scala/launcher/sbt-structure-1.1.jar
[error] java.lang.ClassNotFoundException: org.jetbrains.sbt.CreateTasks$
[error] at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
[error] at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
[error] at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
[error] at java.lang.Class.forName0(Native Method)
[error] at java.lang.Class.forName(Class.java:348)
[error] at sbt.internal.inc.ModuleUtilities$.getObject(ModuleUtilities.scala:20)
[error] at sbt.BasicCommands$.$anonfun$call$5(BasicCommands.scala:203)
[error] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:234)
[error] at scala.collection.immutable.List.foreach(List.scala:389)
[error] at scala.collection.TraversableLike.map(TraversableLike.scala:234)
[error] at scala.collection.TraversableLike.map$(TraversableLike.scala:227)
[error] at scala.collection.immutable.List.map(List.scala:295)
[error] at sbt.BasicCommands$.$anonfun$call$2(BasicCommands.scala:203)
[error] at sbt.Command$.$anonfun$applyEffect$4(Command.scala:134)
[error] at sbt.Command$.$anonfun$applyEffect$2(Command.scala:130)
[error] at sbt.MainLoop$.processCommand(MainLoop.scala:153)
[error] at sbt.MainLoop$.$anonfun$next$2(MainLoop.scala:136)
[error] at sbt.State$$anon$1.runCmd$1(State.scala:242)
[error] at sbt.State$$anon$1.process(State.scala:248)
[error] at sbt.MainLoop$.$anonfun$next$1(MainLoop.scala:136)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:16)
[error] at sbt.MainLoop$.next(MainLoop.scala:136)
[error] at sbt.MainLoop$.run(MainLoop.scala:129)
[error] at sbt.MainLoop$.$anonfun$runWithNewLog$1(MainLoop.scala:107)
[error] at sbt.io.Using.apply(Using.scala:22)
[error] at sbt.MainLoop$.runWithNewLog(MainLoop.scala:101)
[error] at sbt.MainLoop$.runAndClearLast(MainLoop.scala:57)
[error] at sbt.MainLoop$.runLoggedLoop(MainLoop.scala:42)
[error] at sbt.MainLoop$.runLogged(MainLoop.scala:34)
[error] at sbt.StandardMain$.runManaged(Main.scala:113)
[error] at sbt.xMain.run(Main.scala:76)
[error] at xsbt.boot.Launch$$anonfun$run$1.apply(Launch.scala:109)
[error] at xsbt.boot.Launch$.withContextLoader(Launch.scala:128)
[error] at xsbt.boot.Launch$.run(Launch.scala:109)
[error] at xsbt.boot.Launch$$anonfun$apply$1.apply(Launch.scala:35)
[error] at xsbt.boot.Launch$.launch(Launch.scala:117)
[error] at xsbt.boot.Launch$.apply(Launch.scala:18)
[error] at xsbt.boot.Boot$.runImpl(Boot.scala:41)
[error] at xsbt.boot.Boot$.main(Boot.scala:17)
[error] at xsbt.boot.Boot.main(Boot.scala)
[error] java.lang.ClassNotFoundException: org.jetbrains.sbt.CreateTasks$
[error] Use 'last' for the full log.
[info] shutting down server
Also, my code is:
package scala.SparkApps

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object WordCount {
  def main(args: Array[String]): Unit = {
    val inputFile = args(0)
    val outputFile = args(1)
    // master syntax is "local[*]", not "local(*)"
    val conf = new SparkConf().setMaster("local[*]").setAppName("wordcount")
    // the identifier is `conf`; `Conf` (capitalized) would not compile
    val sc = new SparkContext(conf)
    val input = sc.textFile(inputFile)
    val words = input.flatMap(_.split(" "))
    val counts = words.map(word => (word, 1)).reduceByKey(_ + _)
    counts.saveAsTextFile(outputFile)
  }
}
Any ideas?