Spark Streaming + Kafka Integration 0.8.2.1 - scala

I have problems integrating Spark with Kafka. I'm using spark-streaming-kafka-0-8 and compiling with SBT.
This is my code:
import org.apache.spark.SparkConf
import org.apache.spark.streaming._
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.kafka._

object sparkKafka {
  def main(args: Array[String]) {
    val sparkConf = new SparkConf().setAppName("KafkaWordCount").setMaster("local[*]")
    val ssc = new StreamingContext(sparkConf, Seconds(2))
    val kafkaStream = KafkaUtils.createStream(ssc,
      "localhost:2181", "spark stream", Map("customer" -> 2))
    kafkaStream.print()
    ssc.start()
    ssc.awaitTermination()
  }
}
I received this error:
[info] Running sparkKafka
[error] (run-main-0) java.lang.NoClassDefFoundError: scala/Product$class
[error] java.lang.NoClassDefFoundError: scala/Product$class
[error] at org.apache.spark.SparkConf$DeprecatedConfig.<init>(SparkConf.scala:723)
[error] at org.apache.spark.SparkConf$.<init>(SparkConf.scala:571)
[error] at org.apache.spark.SparkConf$.<clinit>(SparkConf.scala)
[error] at org.apache.spark.SparkConf.set(SparkConf.scala:92)
[error] at org.apache.spark.SparkConf.set(SparkConf.scala:81)
[error] at org.apache.spark.SparkConf.setAppName(SparkConf.scala:118)
[error] at sparkKafka$.main(sparkKafka.scala:15)
[error] at sparkKafka.main(sparkKafka.scala)
[error] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[error] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[error] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[error] at java.lang.reflect.Method.invoke(Method.java:498)
[error] Caused by: java.lang.ClassNotFoundException: scala.Product$class
[error] at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
[error] at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
[error] at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
[error] at org.apache.spark.SparkConf$DeprecatedConfig.<init>(SparkConf.scala:723)
[error] at org.apache.spark.SparkConf$.<init>(SparkConf.scala:571)
[error] at org.apache.spark.SparkConf$.<clinit>(SparkConf.scala)
[error] at org.apache.spark.SparkConf.set(SparkConf.scala:92)
[error] at org.apache.spark.SparkConf.set(SparkConf.scala:81)
[error] at org.apache.spark.SparkConf.setAppName(SparkConf.scala:118)
[error] at sparkKafka$.main(sparkKafka.scala:15)
[error] at sparkKafka.main(sparkKafka.scala)
[error] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[error] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[error] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[error] at java.lang.reflect.Method.invoke(Method.java:498)
[error] Nonzero exit code: 1
[error] (Compile / run) Nonzero exit code: 1
[error] Total time: 6 s, completed Jan 14, 2019 2:19:15 PM
This is my build.sbt file:
libraryDependencies += "org.apache.spark" % "spark-core_2.11" % "2.2.0"
libraryDependencies += "org.apache.spark" % "spark-streaming_2.11" % "2.2.0"
libraryDependencies += "org.apache.spark" % "spark-streaming-kafka-0-10_2.11" % "2.2.0"
libraryDependencies += "org.apache.spark" % "spark-streaming-kafka-0-8_2.11" % "2.2.0"
How can I integrate Spark Streaming with Kafka? I have a problem even with spark-streaming-kafka-0-10....
Thanks

This is a Scala version mismatch between your build and Spark. The missing class scala/Product$class exists in Scala 2.11 but was removed in 2.12 (the trait encoding changed), so this error means 2.11-built libraries are being run on a different Scala version. Make sure you are actually building with Scala 2.11 first; your build.sbt does not set a scalaVersion, so sbt falls back to its default.
If you are using Kafka 0.10 or higher (which, if you've set up Kafka recently and are only running it locally, you likely are), then you shouldn't be using the kafka-0-8 package.
Do not mix spark-streaming-kafka-0-8 with spark-streaming-kafka-0-10.
So, if you want to use 0-10, as answered previously, the package needs to be org.apache.spark.streaming.kafka010, not org.apache.spark.streaming.kafka.
Also, note that 0-8 uses Zookeeper (localhost:2181, for example) and 0-10 does not; it talks to the Kafka brokers directly.
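Putting that together, here is a minimal sketch of a 0-10 setup. The topic name "customer" and Spark 2.2.0 are taken from the question; the broker address localhost:9092 (Kafka's default port) and the group id "spark-stream" are assumptions for illustration. First the build.sbt, with an explicit scalaVersion and only the 0-10 artifact:

scalaVersion := "2.11.12"

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "2.2.0",
  "org.apache.spark" %% "spark-streaming" % "2.2.0",
  "org.apache.spark" %% "spark-streaming-kafka-0-10" % "2.2.0"
)

and then the stream itself, using the kafka010 package:

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent

object sparkKafka {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("KafkaWordCount").setMaster("local[*]")
    val ssc = new StreamingContext(sparkConf, Seconds(2))

    // 0-10 connects to the Kafka brokers directly, so this is the broker
    // list (assumed default localhost:9092 here), not Zookeeper
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "localhost:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "spark-stream",
      "auto.offset.reset" -> "latest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // Direct stream against the "customer" topic from the question
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](Array("customer"), kafkaParams)
    )
    stream.map(record => (record.key, record.value)).print()

    ssc.start()
    ssc.awaitTermination()
  }
}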

Related

scapegoat: [error] java.lang.NoSuchMethodError: scala.tools.nsc.Global.reporter()Lscala/tools/nsc/reporters/Reporter;

I am trying to use scapegoat with Scala 2.13.1.
I have added the sbt-scapegoat plugin in project/plugin.sbt:
addSbtPlugin("com.sksamuel.scapegoat" %% "sbt-scapegoat" % "1.1.0")
Here is my build.sbt:
lazy val scala212 = "2.12.8"
lazy val scala213 = "2.13.1"

scalaVersion in ThisBuild := scala213
lazy val supportedScalaVersions = List(scala213, scala212)
crossScalaVersions := supportedScalaVersions
scapegoatVersion in ThisBuild := "1.3.11"

libraryDependencies := {
  val VersionSchema = """(\d+)\.(\d+)\.(\d+)""".r
  scalaVersion.value match {
    case VersionSchema("2", "12", "8") =>
      libraryDependencies.value ++ Seq(
        "ch.qos.logback" % "logback-classic" % "1.2.3",
        "org.scalatest" %% "scalatest" % "3.1.0" % Test,
        "io.gatling.highcharts" % "gatling-charts-highcharts" % "3.3.1" % "test",
        "io.gatling" % "gatling-test-framework" % "3.3.1" % "test")
    case VersionSchema("2", "13", "1") =>
      libraryDependencies.value ++ Seq(
        "co.pragmati" %% "swagger-ui-akka-http" % "1.3.0",
        "com.sksamuel.scapegoat" %% "scalac-scapegoat-plugin" % "1.3.11"
      )
  }
}
When I run the scapegoat command in sbt, I get the following exception:
[error] ## Exception when compiling 46 sources to /home/sara/git/gitinterpret/interpret/target/scala-2.13/scapegoat-classes
[error] scala.tools.nsc.Global.reporter()Lscala/tools/nsc/reporters/Reporter;
[error] com.sksamuel.scapegoat.ScapegoatComponent$$anon$1.run(plugin.scala:172)
[error] scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1506)
[error] scala.tools.nsc.Global$Run.compileUnits(Global.scala:1490)
[error] scala.tools.nsc.Global$Run.compileSources(Global.scala:1482)
[error] scala.tools.nsc.Global$Run.compile(Global.scala:1614)
[error] xsbt.CachedCompiler0.run(CompilerInterface.scala:130)
[error] xsbt.CachedCompiler0.run(CompilerInterface.scala:105)
[error] xsbt.CompilerInterface.run(CompilerInterface.scala:31)
[error] java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[error] java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[error] java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[error] java.base/java.lang.reflect.Method.invoke(Method.java:566)
[error] sbt.internal.inc.AnalyzingCompiler.call(AnalyzingCompiler.scala:237)
[error] sbt.internal.inc.AnalyzingCompiler.compile(AnalyzingCompiler.scala:111)
[error] sbt.internal.inc.AnalyzingCompiler.compile(AnalyzingCompiler.scala:90)
[error] sbt.internal.inc.MixedAnalyzingCompiler.$anonfun$compile$3(MixedAnalyzingCompiler.scala:82)
[error] scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
[error] sbt.internal.inc.MixedAnalyzingCompiler.timed(MixedAnalyzingCompiler.scala:133)
[error] sbt.internal.inc.MixedAnalyzingCompiler.compileScala$1(MixedAnalyzingCompiler.scala:73)
[error] sbt.internal.inc.MixedAnalyzingCompiler.compile(MixedAnalyzingCompiler.scala:116)
[error] sbt.internal.inc.IncrementalCompilerImpl.$anonfun$compileInternal$1(IncrementalCompilerImpl.scala:307)
[error] sbt.internal.inc.IncrementalCompilerImpl.$anonfun$compileInternal$1$adapted(IncrementalCompilerImpl.scala:307)
[error] sbt.internal.inc.Incremental$.doCompile(Incremental.scala:106)
[error] sbt.internal.inc.Incremental$.$anonfun$compile$4(Incremental.scala:87)
[error] sbt.internal.inc.IncrementalCommon.recompileClasses(IncrementalCommon.scala:116)
[error] sbt.internal.inc.IncrementalCommon.cycle(IncrementalCommon.scala:63)
[error] sbt.internal.inc.Incremental$.$anonfun$compile$3(Incremental.scala:89)
[error] sbt.internal.inc.Incremental$.manageClassfiles(Incremental.scala:134)
[error] sbt.internal.inc.Incremental$.compile(Incremental.scala:80)
[error] sbt.internal.inc.IncrementalCompile$.apply(Compile.scala:67)
[error] sbt.internal.inc.IncrementalCompilerImpl.compileInternal(IncrementalCompilerImpl.scala:311)
[error] sbt.internal.inc.IncrementalCompilerImpl.$anonfun$compileIncrementally$1(IncrementalCompilerImpl.scala:269)
[error] sbt.internal.inc.IncrementalCompilerImpl.handleCompilationError(IncrementalCompilerImpl.scala:159)
[error] sbt.internal.inc.IncrementalCompilerImpl.compileIncrementally(IncrementalCompilerImpl.scala:238)
[error] sbt.internal.inc.IncrementalCompilerImpl.compile(IncrementalCompilerImpl.scala:69)
[error] sbt.Defaults$.compileIncrementalTaskImpl(Defaults.scala:1549)
[error] sbt.Defaults$.$anonfun$compileIncrementalTask$1(Defaults.scala:1523)
[error] scala.Function1.$anonfun$compose$1(Function1.scala:44)
[error] sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:40)
[error] sbt.std.Transform$$anon$4.work(System.scala:67)
[error] sbt.Execute.$anonfun$submit$2(Execute.scala:269)
[error] sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:16)
[error] sbt.Execute.work(Execute.scala:278)
[error] sbt.Execute.$anonfun$submit$1(Execute.scala:269)
[error] sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:178)
[error] sbt.CompletionService$$anon$2.call(CompletionService.scala:37)
[error] java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[error] java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
[error] java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[error] java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
[error] java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
[error] java.base/java.lang.Thread.run(Thread.java:834)
[error]
[error] java.lang.NoSuchMethodError: scala.tools.nsc.Global.reporter()Lscala/tools/nsc/reporters/Reporter;
[error] at com.sksamuel.scapegoat.ScapegoatComponent$$anon$1.run(plugin.scala:172)
[error] at scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1506)
[error] at scala.tools.nsc.Global$Run.compileUnits(Global.scala:1490)
[error] at scala.tools.nsc.Global$Run.compileSources(Global.scala:1482)
[error] at scala.tools.nsc.Global$Run.compile(Global.scala:1614)
[error] at xsbt.CachedCompiler0.run(CompilerInterface.scala:130)
[error] at xsbt.CachedCompiler0.run(CompilerInterface.scala:105)
[error] at xsbt.CompilerInterface.run(CompilerInterface.scala:31)
[error] at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[error] at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[error] at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[error] at java.base/java.lang.reflect.Method.invoke(Method.java:566)
[error] at sbt.internal.inc.AnalyzingCompiler.call(AnalyzingCompiler.scala:237)
[error] at sbt.internal.inc.AnalyzingCompiler.compile(AnalyzingCompiler.scala:111)
[error] at sbt.internal.inc.AnalyzingCompiler.compile(AnalyzingCompiler.scala:90)
[error] at sbt.internal.inc.MixedAnalyzingCompiler.$anonfun$compile$3(MixedAnalyzingCompiler.scala:82)
[error] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
[error] at sbt.internal.inc.MixedAnalyzingCompiler.timed(MixedAnalyzingCompiler.scala:133)
[error] at sbt.internal.inc.MixedAnalyzingCompiler.compileScala$1(MixedAnalyzingCompiler.scala:73)
[error] at sbt.internal.inc.MixedAnalyzingCompiler.compile(MixedAnalyzingCompiler.scala:116)

scala sbt libraryDependencies provided - Avoid downloading 3rd party library

I have the following Spark Scala code that references 3rd-party libraries:
package com.protegrity.spark

import org.apache.spark.sql.api.java.UDF2
import com.protegrity.spark.udf.ptyProtectStr
import com.protegrity.spark.udf.ptyProtectInt

class ptyProtectStr extends UDF2[String, String, String] {
  def call(input: String, dataElement: String): String = {
    return ptyProtectStr(input, dataElement);
  }
}

class ptyUnprotectStr extends UDF2[String, String, String] {
  def call(input: String, dataElement: String): String = {
    return ptyUnprotectStr(input, dataElement);
  }
}

class ptyProtectInt extends UDF2[Integer, String, Integer] {
  def call(input: Integer, dataElement: String): Integer = {
    return ptyProtectInt(input, dataElement);
  }
}

class ptyUnprotectInt extends UDF2[Integer, String, Integer] {
  def call(input: Integer, dataElement: String): Integer = {
    return ptyUnprotectInt(input, dataElement);
  }
}
I want to create a JAR file using SBT. My build.sbt looks like the following:
name := "Protegrity UDF"
version := "1.0"
scalaVersion := "2.11.8"
libraryDependencies ++= Seq(
"com.protegrity.spark" % "udf" % "2.3.2" % "provided",
"org.apache.spark" %% "spark-core" % "2.3.2" % "provided",
"org.apache.spark" %% "spark-sql" % "2.3.2" % "provided"
)
As you can see, I am trying to create a thin JAR using the "provided" option, as my Spark environment already contains those libraries.
In spite of "provided", sbt still tries to download the artifact from Maven and throws the error below:
[warn] Note: Unresolved dependencies path:
[error] sbt.librarymanagement.ResolveException: Error downloading com.protegrity.spark:udf:2.3.2
[error] Not found
[error] Not found
[error] not found: C:\Users\user1\.ivy2\local\com.protegrity.spark\udf\2.3.2\ivys\ivy.xml
[error] not found: https://repo1.maven.org/maven2/com/protegrity/spark/udf/2.3.2/udf-2.3.2.pom
[error] at lmcoursier.CoursierDependencyResolution.unresolvedWarningOrThrow(CoursierDependencyResolution.scala:249)
[error] at lmcoursier.CoursierDependencyResolution.$anonfun$update$35(CoursierDependencyResolution.scala:218)
[error] at scala.util.Either$LeftProjection.map(Either.scala:573)
[error] at lmcoursier.CoursierDependencyResolution.update(CoursierDependencyResolution.scala:218)
[error] at sbt.librarymanagement.DependencyResolution.update(DependencyResolution.scala:60)
[error] at sbt.internal.LibraryManagement$.resolve$1(LibraryManagement.scala:52)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$12(LibraryManagement.scala:102)
[error] at sbt.util.Tracked$.$anonfun$lastOutput$1(Tracked.scala:69)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$20(LibraryManagement.scala:115)
[error] at scala.util.control.Exception$Catch.apply(Exception.scala:228)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$11(LibraryManagement.scala:115)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$11$adapted(LibraryManagement.scala:96)
[error] at sbt.util.Tracked$.$anonfun$inputChanged$1(Tracked.scala:150)
[error] at sbt.internal.LibraryManagement$.cachedUpdate(LibraryManagement.scala:129)
[error] at sbt.Classpaths$.$anonfun$updateTask0$5(Defaults.scala:2950)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:67)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:281)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:19)
[error] at sbt.Execute.work(Execute.scala:290)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:281)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:178)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:37)
[error] at java.util.concurrent.FutureTask.run(Unknown Source)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
[error] at java.util.concurrent.FutureTask.run(Unknown Source)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
[error] at java.lang.Thread.run(Unknown Source)
[error] (update) sbt.librarymanagement.ResolveException: Error downloading com.protegrity.spark:udf:2.3.2
[error] Not found
[error] Not found
[error] not found: C:\Users\user1\.ivy2\local\com.protegrity.spark\udf\2.3.2\ivys\ivy.xml
[error] not found: https://repo1.maven.org/maven2/com/protegrity/spark/udf/2.3.2/udf-2.3.2.pom
What change should I make in build.sbt to skip the Maven download for "com.protegrity.spark"? Interestingly, I don't face this issue for "org.apache.spark" in the same build.
Note that "provided" only affects scoping (the dependency is excluded from packaging and the runtime classpath), not resolution; sbt still needs the artifact to compile against. Assuming that you have the JAR file available (but not through Maven or another artifact repository) wherever you're compiling the code, just place the JAR in (by default) the lib directory within your project (the path can be changed with the unmanagedBase setting in build.sbt if you need to do that for some reason).
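For example, a minimal sketch (the directory name is illustrative):

// build.sbt: look for unmanaged JARs in <project>/external-jars instead of the default <project>/lib
unmanagedBase := baseDirectory.value / "external-jars"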
Note that this will result in the unmanaged JAR being included in an assembly JAR. If you want to build a "slightly less fat" JAR that excludes the unmanaged JAR, you'll have to filter it out. One way to accomplish this is with
assemblyExcludedJars in assembly := {
  val cp = (fullClasspath in assembly).value
  cp.filter(_.data.getName == "name-of-unmanaged.jar")
}
If you don't have the JAR (or perhaps something very close to the JAR) handy, how exactly do you expect the compiler to typecheck your calls into the JAR?

Error compiling the sbt component 'compiler-bridge_2.11'

I got the following error compiling my Scala code. How can I fix this problem?
My Java version is "1.8.0_231".
My Scala version is: Scala code runner version 2.11.8 -- Copyright 2002-2016, LAMP/EPFL
[error] (commonsUtils / Compile / compileIncremental) Error compiling the sbt component 'compiler-bridge_2.11'
The trace is the following:
[info] Non-compiled module 'compiler-bridge_2.11' for Scala 2.11.8. Compiling...
error: scala.reflect.internal.MissingRequirementError: object java.lang.Object in compiler mirror not found.
at scala.reflect.internal.MissingRequirementError$.signal(MissingRequirementError.scala:17)
at scala.reflect.internal.MissingRequirementError$.notFound(MissingRequirementError.scala:18)
at scala.reflect.internal.Mirrors$RootsBase.getModuleOrClass(Mirrors.scala:53)
at scala.reflect.internal.Mirrors$RootsBase.getModuleOrClass(Mirrors.scala:45)
at scala.reflect.internal.Mirrors$RootsBase.getModuleOrClass(Mirrors.scala:45)
at scala.reflect.internal.Mirrors$RootsBase.getModuleOrClass(Mirrors.scala:66)
at scala.reflect.internal.Mirrors$RootsBase.getClassByName(Mirrors.scala:102)
at scala.reflect.internal.Mirrors$RootsBase.getRequiredClass(Mirrors.scala:105)
at scala.reflect.internal.Definitions$DefinitionsClass.ObjectClass$lzycompute(Definitions.scala:257)
at scala.reflect.internal.Definitions$DefinitionsClass.ObjectClass(Definitions.scala:257)
at scala.reflect.internal.Definitions$DefinitionsClass.init(Definitions.scala:1394)
at scala.tools.nsc.Global$Run.<init>(Global.scala:1215)
at scala.tools.nsc.Driver.doCompile(Driver.scala:31)
at scala.tools.nsc.MainClass.doCompile(Main.scala:23)
at scala.tools.nsc.Driver.process(Driver.scala:51)
at scala.tools.nsc.Main.process(Main.scala)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at sbt.internal.inc.RawCompiler.getReporter$1(RawCompiler.scala:56)
at sbt.internal.inc.RawCompiler.apply(RawCompiler.scala:77)
at sbt.internal.inc.AnalyzingCompiler$.$anonfun$compileSources$6(AnalyzingCompiler.scala:363)
at sbt.internal.inc.AnalyzingCompiler$.handleCompilationError$1(AnalyzingCompiler.scala:338)
at sbt.internal.inc.AnalyzingCompiler$.$anonfun$compileSources$4(AnalyzingCompiler.scala:359)
at sbt.internal.inc.AnalyzingCompiler$.$anonfun$compileSources$4$adapted(AnalyzingCompiler.scala:354)
at sbt.io.IO$.withTemporaryDirectory(IO.scala:489)
at sbt.io.IO$.withTemporaryDirectory(IO.scala:499)
at sbt.internal.inc.AnalyzingCompiler$.$anonfun$compileSources$2(AnalyzingCompiler.scala:354)
at sbt.internal.inc.AnalyzingCompiler$.$anonfun$compileSources$2$adapted(AnalyzingCompiler.scala:346)
at sbt.io.IO$.withTemporaryDirectory(IO.scala:489)
at sbt.io.IO$.withTemporaryDirectory(IO.scala:499)
at sbt.internal.inc.AnalyzingCompiler$.compileSources(AnalyzingCompiler.scala:346)
at sbt.internal.inc.ZincComponentCompiler.$anonfun$compileAndInstall$3(ZincComponentCompiler.scala:261)
at sbt.internal.inc.ZincComponentCompiler.$anonfun$compileAndInstall$3$adapted(ZincComponentCompiler.scala:250)
at sbt.io.IO$.withTemporaryDirectory(IO.scala:489)
at sbt.io.IO$.withTemporaryDirectory(IO.scala:499)
at sbt.internal.inc.ZincComponentCompiler.$anonfun$compileAndInstall$2(ZincComponentCompiler.scala:250)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at sbt.internal.util.BufferedLogger.bufferQuietly(BufferedLogger.scala:119)
at sbt.internal.inc.ZincComponentCompiler.$anonfun$compileAndInstall$1(ZincComponentCompiler.scala:250)
at sbt.internal.inc.ZincComponentCompiler.$anonfun$compileAndInstall$1$adapted(ZincComponentCompiler.scala:247)
at sbt.io.IO$.withTemporaryDirectory(IO.scala:489)
at sbt.io.IO$.withTemporaryDirectory(IO.scala:499)
at sbt.internal.inc.ZincComponentCompiler.compileAndInstall(ZincComponentCompiler.scala:247)
at sbt.internal.inc.ZincComponentCompiler.$anonfun$compiledBridgeJar$1(ZincComponentCompiler.scala:215)
at sbt.internal.inc.IfMissing$Define.run(IfMissing.scala:19)
at sbt.internal.inc.ZincComponentManager.createAndCache$1(ZincComponentManager.scala:49)
at sbt.internal.inc.ZincComponentManager.$anonfun$files$3(ZincComponentManager.scala:60)
at sbt.internal.inc.ZincComponentManager.getOrElse$1(ZincComponentManager.scala:42)
at sbt.internal.inc.ZincComponentManager.$anonfun$files$2(ZincComponentManager.scala:60)
at sbt.internal.inc.ZincComponentManager$$anon$1.call(ZincComponentManager.scala:89)
at xsbt.boot.Locks$GlobalLock.withChannel$1(Locks.scala:95)
at xsbt.boot.Locks$GlobalLock.xsbt$boot$Locks$GlobalLock$$withChannelRetries$1(Locks.scala:80)
at xsbt.boot.Locks$GlobalLock$$anonfun$withFileLock$1.apply(Locks.scala:99)
at xsbt.boot.Using$.withResource(Using.scala:10)
at xsbt.boot.Using$.apply(Using.scala:9)
at xsbt.boot.Locks$GlobalLock.ignoringDeadlockAvoided(Locks.scala:60)
at xsbt.boot.Locks$GlobalLock.withLock(Locks.scala:50)
at xsbt.boot.Locks$.apply0(Locks.scala:31)
at xsbt.boot.Locks$.apply(Locks.scala:28)
at sbt.internal.inc.ZincComponentManager.lock(ZincComponentManager.scala:89)
at sbt.internal.inc.ZincComponentManager.$anonfun$lockSecondaryCache$1(ZincComponentManager.scala:86)
at scala.Option.map(Option.scala:230)
at sbt.internal.inc.ZincComponentManager.lockSecondaryCache(ZincComponentManager.scala:86)
at sbt.internal.inc.ZincComponentManager.fromSecondary$1(ZincComponentManager.scala:58)
at sbt.internal.inc.ZincComponentManager.$anonfun$files$6(ZincComponentManager.scala:64)
at sbt.internal.inc.ZincComponentManager.getOrElse$1(ZincComponentManager.scala:42)
at sbt.internal.inc.ZincComponentManager.$anonfun$files$5(ZincComponentManager.scala:64)
at sbt.internal.inc.ZincComponentManager$$anon$1.call(ZincComponentManager.scala:89)
at xsbt.boot.Locks$GlobalLock.withChannel$1(Locks.scala:95)
at xsbt.boot.Locks$GlobalLock.xsbt$boot$Locks$GlobalLock$$withChannelRetries$1(Locks.scala:80)
at xsbt.boot.Locks$GlobalLock$$anonfun$withFileLock$1.apply(Locks.scala:99)
at xsbt.boot.Using$.withResource(Using.scala:10)
at xsbt.boot.Using$.apply(Using.scala:9)
at xsbt.boot.Locks$GlobalLock.ignoringDeadlockAvoided(Locks.scala:60)
at xsbt.boot.Locks$GlobalLock.withLock(Locks.scala:50)
at xsbt.boot.Locks$.apply0(Locks.scala:31)
at xsbt.boot.Locks$.apply(Locks.scala:28)
at sbt.internal.inc.ZincComponentManager.lock(ZincComponentManager.scala:89)
at sbt.internal.inc.ZincComponentManager.lockLocalCache(ZincComponentManager.scala:82)
at sbt.internal.inc.ZincComponentManager.files(ZincComponentManager.scala:64)
at sbt.internal.inc.ZincComponentManager.file(ZincComponentManager.scala:70)
at sbt.internal.inc.ZincComponentCompiler.compiledBridgeJar(ZincComponentCompiler.scala:215)
at sbt.internal.inc.ZincComponentCompiler$ZincCompilerBridgeProvider.compiledBridge(ZincComponentCompiler.scala:75)
at sbt.internal.inc.ZincComponentCompiler$ZincCompilerBridgeProvider.fetchCompiledBridge(ZincComponentCompiler.scala:81)
at sbt.internal.inc.AnalyzingCompiler.loader(AnalyzingCompiler.scala:260)
at sbt.internal.inc.AnalyzingCompiler.getInterfaceClass(AnalyzingCompiler.scala:278)
at sbt.internal.inc.AnalyzingCompiler.call(AnalyzingCompiler.scala:245)
at sbt.internal.inc.AnalyzingCompiler.newCachedCompiler(AnalyzingCompiler.scala:145)
at sbt.internal.inc.AnalyzingCompiler.newCachedCompiler(AnalyzingCompiler.scala:132)
at sbt.internal.inc.FreshCompilerCache.apply(CompilerCache.scala:102)
at sbt.internal.inc.AnalyzingCompiler.compile(AnalyzingCompiler.scala:92)
at sbt.internal.inc.MixedAnalyzingCompiler.$anonfun$compile$4(MixedAnalyzingCompiler.scala:91)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at sbt.internal.inc.MixedAnalyzingCompiler.timed(MixedAnalyzingCompiler.scala:186)
at sbt.internal.inc.MixedAnalyzingCompiler.$anonfun$compile$3(MixedAnalyzingCompiler.scala:82)
at sbt.internal.inc.MixedAnalyzingCompiler.$anonfun$compile$3$adapted(MixedAnalyzingCompiler.scala:77)
at sbt.internal.inc.JarUtils$.withPreviousJar(JarUtils.scala:215)
at sbt.internal.inc.MixedAnalyzingCompiler.compileScala$1(MixedAnalyzingCompiler.scala:77)
at sbt.internal.inc.MixedAnalyzingCompiler.compile(MixedAnalyzingCompiler.scala:146)
at sbt.internal.inc.IncrementalCompilerImpl.$anonfun$compileInternal$1(IncrementalCompilerImpl.scala:343)
at sbt.internal.inc.IncrementalCompilerImpl.$anonfun$compileInternal$1$adapted(IncrementalCompilerImpl.scala:343)
at sbt.internal.inc.Incremental$.doCompile(Incremental.scala:120)
at sbt.internal.inc.Incremental$.$anonfun$compile$4(Incremental.scala:100)
at sbt.internal.inc.IncrementalCommon.recompileClasses(IncrementalCommon.scala:180)
at sbt.internal.inc.IncrementalCommon.cycle(IncrementalCommon.scala:98)
at sbt.internal.inc.Incremental$.$anonfun$compile$3(Incremental.scala:102)
at sbt.internal.inc.Incremental$.manageClassfiles(Incremental.scala:155)
at sbt.internal.inc.Incremental$.compile(Incremental.scala:92)
at sbt.internal.inc.IncrementalCompile$.apply(Compile.scala:75)
at sbt.internal.inc.IncrementalCompilerImpl.compileInternal(IncrementalCompilerImpl.scala:348)
at sbt.internal.inc.IncrementalCompilerImpl.$anonfun$compileIncrementally$1(IncrementalCompilerImpl.scala:301)
at sbt.internal.inc.IncrementalCompilerImpl.handleCompilationError(IncrementalCompilerImpl.scala:168)
at sbt.internal.inc.IncrementalCompilerImpl.compileIncrementally(IncrementalCompilerImpl.scala:248)
at sbt.internal.inc.IncrementalCompilerImpl.compile(IncrementalCompilerImpl.scala:74)
at sbt.Defaults$.compileIncrementalTaskImpl(Defaults.scala:1761)
at sbt.Defaults$.$anonfun$compileIncrementalTask$1(Defaults.scala:1734)
at scala.Function1.$anonfun$compose$1(Function1.scala:49)
at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
at sbt.std.Transform$$anon$4.work(Transform.scala:67)
at sbt.Execute.$anonfun$submit$2(Execute.scala:281)
at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:19)
at sbt.Execute.work(Execute.scala:290)
at sbt.Execute.$anonfun$submit$1(Execute.scala:281)
at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:178)
at sbt.CompletionService$$anon$2.call(CompletionService.scala:37)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:834)
[info] Attempting to fetch org.scala-sbt:compiler-bridge_2.11:1.3.1.
[error] (commonsUtils / Compile / compileIncremental) Error compiling the sbt component 'compiler-bridge_2.11'
[error] Total time: 4 s, completed Feb 6, 2020, 6:57:02 PM
My build.sbt contains the following lines:
val sparkVersion = "2.4.3"
val scalaVersion_ = "2.11.8"

lazy val commonDependencies = Seq(
  "org.apache.spark" %% "spark-sql" % sparkVersion,
  "com.databricks" %% "dbutils-api" % "0.0.3",
  "com.microsoft.azure" % "applicationinsights-core" % "2.5.0",
  "com.microsoft.azure" % "applicationinsights-web" % "2.5.0"
)

lazy val commonSettings = Seq(
  scalaVersion := scalaVersion_
My java version is "1.8.0_231".
This is inconsistent with what the stack trace is showing:
at java.base/java.lang.Thread.run(Thread.java:834)
The "java.base/" prefix comes from the Java Platform Module System, which was introduced in Java 9, so the build is actually running on a newer JDK. Either change your IntelliJ configuration to use JDK 8, or use Scala 2.11.12, which works with JDK 11.
Change
val scalaVersion_ = "2.11.8"
to
val scalaVersion_ = "2.11.12"
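To double-check which JVM sbt itself is running on (independent of what IntelliJ displays), you can evaluate an expression from the sbt shell; a quick sketch:

// From the sbt shell; eval compiles and runs an arbitrary Scala expression
sbt> eval sys.props("java.version")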

Generate html report with ScalaTest 3.0

This question asks how to get an HTML report with sbt and ScalaTest. The answers reference ScalaTest 2.0 and do not appear to work for me with ScalaTest 3.0.
I declare ScalaTest with
lazy val scalaTest = Seq(
  "org.scalatest" %% "scalatest" % "3.0.8" % "test",
  "org.scalactic" %% "scalactic" % "3.0.8",
  "org.scalamock" %% "scalamock" % "4.4.0" % Test
)
and then use it with
ThisBuild / Test / testOptions += Tests.Argument(TestFrameworks.ScalaTest, "-h", "target/test-reports")
and
lazy val foo = (project in file("foo"))
  .settings(libraryDependencies ++= scalaTest)
This fails with
[error] java.lang.NoClassDefFoundError: org/pegdown/PegDownProcessor
[error] at org.scalatest.tools.HtmlReporter.<init>(HtmlReporter.scala:117)
[error] at org.scalatest.tools.ReporterFactory.createHtmlReporter(ReporterFactory.scala:192)
[error] at org.scalatest.tools.ReporterFactory.getReporterFromConfiguration(ReporterFactory.scala:239)
[error] at org.scalatest.tools.ReporterFactory.$anonfun$createReportersFromConfigurations$1(ReporterFactory.scala:248)
[error] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
[error] at scala.collection.Iterator.foreach(Iterator.scala:941)
[error] at scala.collection.Iterator.foreach$(Iterator.scala:941)
[error] at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
[error] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[error] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[error] at org.scalatest.tools.ReporterConfigurations.foreach(ReporterConfiguration.scala:42)
...
The question suggests using "test->*" for the test declaration. Trying
lazy val scalaTest = Seq(
  "org.scalatest" %% "scalatest" % "3.0.8" % "test->*" excludeAll (
    ExclusionRule(organization = "org.junit", name = "junit")),
  "org.scalamock" %% "scalamock" % "4.4.0" % Test
)
instead fails with
[info] Compiling 1 Scala source to /projects/foo/target/scala-2.12/test-classes ...
[error] /projects/foo/src/test/scala/com/example/FooTest.scala:17:73: Symbol 'type org.scalactic.TripleEquals' is missing from the classpath.
[error] This symbol is required by 'trait org.scalatest.Assertions'.
[error] Make sure that type TripleEquals is in your classpath and check for conflicting dependencies with `-Ylog-classpath`.
[error] A full rebuild may help if 'Assertions.class' was compiled against an incompatible version of org.scalactic.
[error] class InfoGainTest extends FlatSpec with Matchers with LoneElement with LazyLogging {
[error] ^
[error] /projects/foo/src/test/scala/com/example/FooTest.scala:17:42: Symbol 'type org.scalactic.Tolerance' is missing from the classpath.
[error] This symbol is required by 'trait org.scalatest.Matchers'.
[error] Make sure that type Tolerance is in your classpath and check for conflicting dependencies with `-Ylog-classpath`.
[error] A full rebuild may help if 'Matchers.class' was compiled against an incompatible version of org.scalactic.
[error] class InfoGainTest extends FlatSpec with Matchers with LoneElement with LazyLogging {
[error] ^
[error] /projects/foo/src/test/scala/com/example/FooTest.scala:22:54: Symbol 'term org.scalactic.source' is missing from the classpath.
[error] This symbol is required by 'value org.scalatest.Matchers.pos'.
[error] Make sure that term source is in your classpath and check for conflicting dependencies with `-Ylog-classpath`.
[error] A full rebuild may help if 'Matchers.class' was compiled against an incompatible version of org.scalactic.
[error] def printGain(gainByProbe: Map[Probe, Double]) = logger.info("Info gain: {}",
[error] ^
[error] /projects/foo/src/test/scala/com/example/FooTest.scala:27:17: value should is not a member of String
followed by errors that look like the implicit conversions for FlatSpec are not present and are probably follow-on errors from the above.
Is there a way to do this with ScalaTest 3.0?
Try adding the pegdown dependency like so:
libraryDependencies += "org.pegdown" % "pegdown" % "1.6.0" % Test
Note that in ScalaTest 3.1.x pegdown is replaced with flexmark-java, as per PR #1229 ("resolve #1201: replace pegdown with flexmark-java").
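Putting it together, a minimal sketch of the 3.0.x setup (versions taken from the question, report directory as above):

lazy val scalaTest = Seq(
  "org.scalatest" %% "scalatest" % "3.0.8" % Test,
  "org.scalactic" %% "scalactic" % "3.0.8",
  "org.pegdown" % "pegdown" % "1.6.0" % Test // required by ScalaTest 3.0.x's HtmlReporter
)

ThisBuild / Test / testOptions += Tests.Argument(TestFrameworks.ScalaTest, "-h", "target/test-reports")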

sbt error ClassNotFoundException in WordCount

It might be a newbie question, but I'm trying to learn more about Scala in IntelliJ. I first created a simple HelloWorld project that worked fine. Next I followed a tutorial to create a WordCount project, but I'm getting an exception error that I can't figure out.
The error I get is:
Error: Error while importing SBT project: ...
[error] at sbt.MainLoop$.$anonfun$runWithNewLog$1(MainLoop.scala:107)
[error] at sbt.io.Using.apply(Using.scala:22)
[error] at sbt.MainLoop$.runWithNewLog(MainLoop.scala:101)
[error] at sbt.MainLoop$.runAndClearLast(MainLoop.scala:57)
[error] at sbt.MainLoop$.runLoggedLoop(MainLoop.scala:42)
[error] at sbt.MainLoop$.runLogged(MainLoop.scala:34)
[error] at sbt.StandardMain$.runManaged(Main.scala:113)
[error] at sbt.xMain.run(Main.scala:76)
[error] at xsbt.boot.Launch$$anonfun$run$1.apply(Launch.scala:109)
[error] at xsbt.boot.Launch$.withContextLoader(Launch.scala:128)
[error] at xsbt.boot.Launch$.run(Launch.scala:109)
[error] at xsbt.boot.Launch$$anonfun$apply$1.apply(Launch.scala:35)
[error] at xsbt.boot.Launch$.launch(Launch.scala:117)
[error] at xsbt.boot.Launch$.apply(Launch.scala:18)
[error] at xsbt.boot.Boot$.runImpl(Boot.scala:41)
[error] at xsbt.boot.Boot$.main(Boot.scala:17)
[error] at xsbt.boot.Boot.main(Boot.scala)
[error] java.lang.ClassNotFoundException: org.jetbrains.sbt.CreateTasks$
[error] Use 'last' for the full log.
[info] shutting down server
My build.sbt file is:
name := "Scalaprogramms"
version := "1.0"
scalaVersion := "2.11.8"
libraryDependencies += "org.apache.spark" %% "spark-core" % "2.2.0"
libraryDependencies += "org.apache.spark" %% "spark-streaming" % "2.2.0" % "provided"
My Java version is 8.
Lastly, the last log file is:
OpenJDK 64-Bit Server VM warning: ignoring option MaxPermSize=384M; support was removed in 8.0
[info] Loading settings from plugins.sbt ...
[info] Loading project definition from /home/user/IdeaProjects/Scalaprogramms/project
[info] Loading settings from build.sbt ...
[info] Set current project to Scalaprogramms (in build file:/home/user/IdeaProjects/Scalaprogramms/)
[info] sbt server started at local:///home/user/.sbt/1.0/server/fc2d3f3386938f38b259/sock
sbt:Scalaprogramms>
[info] Defining Global / sbtStructureOptions, Global / sbtStructureOutputFile, shellPrompt
[info] The new values will be used by no settings or tasks.
[info] Reapplying settings...
[info] Set current project to Scalaprogramms (in build file:/home/user/IdeaProjects/Scalaprogramms/)
[info] Applying State transformations org.jetbrains.sbt.CreateTasks from /home/user/.IdeaIC2017.2/config/plugins/Scala/launcher/sbt-structure-1.1.jar
[error] java.lang.ClassNotFoundException: org.jetbrains.sbt.CreateTasks$
[error] at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
[error] at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
[error] at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
[error] at java.lang.Class.forName0(Native Method)
[error] at java.lang.Class.forName(Class.java:348)
[error] at sbt.internal.inc.ModuleUtilities$.getObject(ModuleUtilities.scala:20)
[error] at sbt.BasicCommands$.$anonfun$call$5(BasicCommands.scala:203)
[error] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:234)
[error] at scala.collection.immutable.List.foreach(List.scala:389)
[error] at scala.collection.TraversableLike.map(TraversableLike.scala:234)
[error] at scala.collection.TraversableLike.map$(TraversableLike.scala:227)
[error] at scala.collection.immutable.List.map(List.scala:295)
[error] at sbt.BasicCommands$.$anonfun$call$2(BasicCommands.scala:203)
[error] at sbt.Command$.$anonfun$applyEffect$4(Command.scala:134)
[error] at sbt.Command$.$anonfun$applyEffect$2(Command.scala:130)
[error] at sbt.MainLoop$.processCommand(MainLoop.scala:153)
[error] at sbt.MainLoop$.$anonfun$next$2(MainLoop.scala:136)
[error] at sbt.State$$anon$1.runCmd$1(State.scala:242)
[error] at sbt.State$$anon$1.process(State.scala:248)
[error] at sbt.MainLoop$.$anonfun$next$1(MainLoop.scala:136)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:16)
[error] at sbt.MainLoop$.next(MainLoop.scala:136)
[error] at sbt.MainLoop$.run(MainLoop.scala:129)
[error] at sbt.MainLoop$.$anonfun$runWithNewLog$1(MainLoop.scala:107)
[error] at sbt.io.Using.apply(Using.scala:22)
[error] at sbt.MainLoop$.runWithNewLog(MainLoop.scala:101)
[error] at sbt.MainLoop$.runAndClearLast(MainLoop.scala:57)
[error] at sbt.MainLoop$.runLoggedLoop(MainLoop.scala:42)
[error] at sbt.MainLoop$.runLogged(MainLoop.scala:34)
[error] at sbt.StandardMain$.runManaged(Main.scala:113)
[error] at sbt.xMain.run(Main.scala:76)
[error] at xsbt.boot.Launch$$anonfun$run$1.apply(Launch.scala:109)
[error] at xsbt.boot.Launch$.withContextLoader(Launch.scala:128)
[error] at xsbt.boot.Launch$.run(Launch.scala:109)
[error] at xsbt.boot.Launch$$anonfun$apply$1.apply(Launch.scala:35)
[error] at xsbt.boot.Launch$.launch(Launch.scala:117)
[error] at xsbt.boot.Launch$.apply(Launch.scala:18)
[error] at xsbt.boot.Boot$.runImpl(Boot.scala:41)
[error] at xsbt.boot.Boot$.main(Boot.scala:17)
[error] at xsbt.boot.Boot.main(Boot.scala)
[error] java.lang.ClassNotFoundException: org.jetbrains.sbt.CreateTasks$
[error] Use 'last' for the full log.
[info] shutting down server
Also, my code is:
package scala.SparkApps

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object WordCount {
  def main(args: Array[String]): Unit = {
    val inputFile = args(0)
    val outputFile = args(1)
    val conf = new SparkConf().setMaster("local[*]").setAppName("wordcount")
    val sc = new SparkContext(conf)
    val input = sc.textFile(inputFile)
    val words = input.flatMap(_.split(" "))
    val counts = words.map(word => (word, 1)).reduceByKey(_ + _)
    counts.saveAsTextFile(outputFile)
  }
}
Any ideas?