I have the following ultra-simple streaming app:
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

object Streaming {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("Simple Application").setMaster("local[*]")
    val streamingContext = new StreamingContext(sparkConf, Seconds(10))
    val lines = streamingContext.socketTextStream("localhost", 8888)
    lines.print()
    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
Running sbt run and sending some text to this host, I'm getting this amazing error:
java.lang.IllegalArgumentException: null
at org.apache.xbean.asm5.ClassReader.<init>(Unknown Source)
at org.apache.xbean.asm5.ClassReader.<init>(Unknown Source)
at org.apache.xbean.asm5.ClassReader.<init>(Unknown Source)
at org.apache.spark.util.ClosureCleaner$.getClassReader(ClosureCleaner.scala:46)
at org.apache.spark.util.FieldAccessFinder$$anon$3$$anonfun$visitMethodInsn$2.apply(ClosureCleaner.scala:449)
at org.apache.spark.util.FieldAccessFinder$$anon$3$$anonfun$visitMethodInsn$2.apply(ClosureCleaner.scala:432)
at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
at scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:134)
at scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:134)
at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:236)
at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
at scala.collection.mutable.HashMap$$anon$1.foreach(HashMap.scala:134)
at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
at org.apache.spark.util.FieldAccessFinder$$anon$3.visitMethodInsn(ClosureCleaner.scala:432)
at org.apache.xbean.asm5.ClassReader.a(Unknown Source)
at org.apache.xbean.asm5.ClassReader.b(Unknown Source)
at org.apache.xbean.asm5.ClassReader.accept(Unknown Source)
at org.apache.xbean.asm5.ClassReader.accept(Unknown Source)
at org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:262)
at org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:261)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:261)
at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:159)
at org.apache.spark.SparkContext.clean(SparkContext.scala:2292)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2066)
at org.apache.spark.rdd.RDD$$anonfun$take$1.apply(RDD.scala:1358)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.RDD.take(RDD.scala:1331)
at org.apache.spark.streaming.dstream.DStream$$anonfun$print$2$$anonfun$foreachFunc$3$1.apply(DStream.scala:735)
at org.apache.spark.streaming.dstream.DStream$$anonfun$print$2$$anonfun$foreachFunc$3$1.apply(DStream.scala:734)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ForEachDStream.scala:51)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:51)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:51)
at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:416)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:50)
at scala.util.Try$.apply(Try.scala:192)
at org.apache.spark.streaming.scheduler.Job.run(Job.scala:39)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply$mcV$sp(JobScheduler.scala:257)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:257)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:257)
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:256)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1135)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:844)
[error] (run-main-0) java.lang.IllegalArgumentException
15:25:30.859 [spark-listener-group-executorManagement] INFO org.apache.spark.scheduler.AsyncEventQueue - Stopping listener queue executorManagement.
java.lang.InterruptedException: null
at java.base/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.reportInterruptAfterWait(AbstractQueuedSynchronizer.java:2050)
at java.base/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2084)
at java.base/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
at org.apache.spark.scheduler.AsyncEventQueue$$anonfun$org$apache$spark$scheduler$AsyncEventQueue$$dispatch$1.apply(AsyncEventQueue.scala:94)
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
at org.apache.spark.scheduler.AsyncEventQueue.org$apache$spark$scheduler$AsyncEventQueue$$dispatch(AsyncEventQueue.scala:83)
at org.apache.spark.scheduler.AsyncEventQueue$$anon$1$$anonfun$run$1.apply$mcV$sp(AsyncEventQueue.scala:79)
at org.apache.spark.util.Utils$.tryOrStopSparkContext(Utils.scala:1319)
at org.apache.spark.scheduler.AsyncEventQueue$$anon$1.run(AsyncEventQueue.scala:78)
15:25:30.859 [spark-listener-group-shared] INFO org.apache.spark.scheduler.AsyncEventQueue - Stopping listener queue shared.
java.lang.InterruptedException: null
at java.base/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.reportInterruptAfterWait(AbstractQueuedSynchronizer.java:2050)
at java.base/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2084)
at java.base/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
at org.apache.spark.scheduler.AsyncEventQueue$$anonfun$org$apache$spark$scheduler$AsyncEventQueue$$dispatch$1.apply(AsyncEventQueue.scala:94)
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
at org.apache.spark.scheduler.AsyncEventQueue.org$apache$spark$scheduler$AsyncEventQueue$$dispatch(AsyncEventQueue.scala:83)
at org.apache.spark.scheduler.AsyncEventQueue$$anon$1$$anonfun$run$1.apply$mcV$sp(AsyncEventQueue.scala:79)
at org.apache.spark.util.Utils$.tryOrStopSparkContext(Utils.scala:1319)
at org.apache.spark.scheduler.AsyncEventQueue$$anon$1.run(AsyncEventQueue.scala:78)
15:25:30.860 [spark-listener-group-appStatus] INFO org.apache.spark.scheduler.AsyncEventQueue - Stopping listener queue appStatus.
java.lang.InterruptedException: null
at java.base/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.reportInterruptAfterWait(AbstractQueuedSynchronizer.java:2050)
at java.base/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2084)
at java.base/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435)
at org.apache.spark.scheduler.AsyncEventQueue$$anonfun$org$apache$spark$scheduler$AsyncEventQueue$$dispatch$1.apply(AsyncEventQueue.scala:94)
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
at org.apache.spark.scheduler.AsyncEventQueue.org$apache$spark$scheduler$AsyncEventQueue$$dispatch(AsyncEventQueue.scala:83)
at org.apache.spark.scheduler.AsyncEventQueue$$anon$1$$anonfun$run$1.apply$mcV$sp(AsyncEventQueue.scala:79)
at org.apache.spark.util.Utils$.tryOrStopSparkContext(Utils.scala:1319)
at org.apache.spark.scheduler.AsyncEventQueue$$anon$1.run(AsyncEventQueue.scala:78)
15:25:30.860 [org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner] WARN org.apache.hadoop.fs.FileSystem - exception in the cleaner thread but it will continue to run
java.lang.InterruptedException: null
at java.base/java.lang.Object.wait(Native Method)
at java.base/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:151)
at java.base/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:172)
at org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner.run(FileSystem.java:3063)
at java.base/java.lang.Thread.run(Thread.java:844)
15:25:30.862 [Spark Context Cleaner] ERROR org.apache.spark.ContextCleaner - Error in cleaning thread
java.lang.InterruptedException: null
at java.base/java.lang.Object.wait(Native Method)
at java.base/java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:151)
at org.apache.spark.ContextCleaner$$anonfun$org$apache$spark$ContextCleaner$$keepCleaning$1.apply$mcV$sp(ContextCleaner.scala:181)
at org.apache.spark.util.Utils$.tryOrStopSparkContext(Utils.scala:1319)
at org.apache.spark.ContextCleaner.org$apache$spark$ContextCleaner$$keepCleaning(ContextCleaner.scala:178)
at org.apache.spark.ContextCleaner$$anon$1.run(ContextCleaner.scala:73)
[error] java.lang.RuntimeException: Nonzero exit code: 1
[error] at sbt.Run$.executeTrapExit(Run.scala:124)
[error] at sbt.Run.run(Run.scala:77)
[error] at sbt.Defaults$.$anonfun$bgRunTask$5(Defaults.scala:1185)
[error] at sbt.Defaults$.$anonfun$bgRunTask$5$adapted(Defaults.scala:1180)
[error] at sbt.internal.BackgroundThreadPool.$anonfun$run$1(DefaultBackgroundJobService.scala:366)
[error] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
[error] at scala.util.Try$.apply(Try.scala:209)
[error] at sbt.internal.BackgroundThreadPool$BackgroundRunnable.run(DefaultBackgroundJobService.scala:289)
[error] at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1135)
[error] at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
[error] at java.base/java.lang.Thread.run(Thread.java:844)
What am I doing wrong here?
Here is the full project: https://github.com/joan38/spark-issue
I managed to run it in a Docker container, so I guess it's environmental, as gemelen commented.
Related
I am trying to build my sbt project. I am using sbt-assembly, but it is breaking at the AOP merge step. I used a custom MergeStrategy for the AOP files (AopMerge).
The following is the error log:
<!DOCTYPE aspectj PUBLIC "-//AspectJ//DTD//EN" "http://www.eclipse.org/aspectj/dtd/aspectj.dtd">
/home/puneet/repo/target/streams/_global/assemblyOption/_global/streams/assembly/sbtMergeTarget-e2e021ed2f7893685f6d16c35a11a6d2dcda6205.tmp
[error] org.xml.sax.SAXParseException; publicId: -//AspectJ//DTD//EN; systemId: http://www.eclipse.org/aspectj/dtd/aspectj.dtd; lineNumber: 1; columnNumber: 2; The markup declarations contained or pointed to by the document type declaration must be well-formed.
[error] at com.sun.org.apache.xerces.internal.util.ErrorHandlerWrapper.createSAXParseException(ErrorHandlerWrapper.java:203)
[error] at com.sun.org.apache.xerces.internal.util.ErrorHandlerWrapper.fatalError(ErrorHandlerWrapper.java:177)
[error] at com.sun.org.apache.xerces.internal.impl.XMLErrorReporter.reportError(XMLErrorReporter.java:400)
[error] at com.sun.org.apache.xerces.internal.impl.XMLErrorReporter.reportError(XMLErrorReporter.java:327)
[error] at com.sun.org.apache.xerces.internal.impl.XMLScanner.reportFatalError(XMLScanner.java:1473)
[error] at com.sun.org.apache.xerces.internal.impl.XMLDTDScannerImpl.scanDecls(XMLDTDScannerImpl.java:2044)
[error] at com.sun.org.apache.xerces.internal.impl.XMLDTDScannerImpl.scanDTDExternalSubset(XMLDTDScannerImpl.java:307)
[error] at com.sun.org.apache.xerces.internal.impl.XMLDocumentScannerImpl$DTDDriver.dispatch(XMLDocumentScannerImpl.java:1174)
[error] at com.sun.org.apache.xerces.internal.impl.XMLDocumentScannerImpl$DTDDriver.next(XMLDocumentScannerImpl.java:1045)
[error] at com.sun.org.apache.xerces.internal.impl.XMLDocumentScannerImpl$PrologDriver.next(XMLDocumentScannerImpl.java:959)
[error] at com.sun.org.apache.xerces.internal.impl.XMLDocumentScannerImpl.next(XMLDocumentScannerImpl.java:602)
[error] at com.sun.org.apache.xerces.internal.impl.XMLDocumentFragmentScannerImpl.scanDocument(XMLDocumentFragmentScannerImpl.java:505)
[error] at com.sun.org.apache.xerces.internal.parsers.XML11Configuration.parse(XML11Configuration.java:842)
[error] at com.sun.org.apache.xerces.internal.parsers.XML11Configuration.parse(XML11Configuration.java:771)
[error] at com.sun.org.apache.xerces.internal.parsers.XMLParser.parse(XMLParser.java:141)
[error] at com.sun.org.apache.xerces.internal.parsers.AbstractSAXParser.parse(AbstractSAXParser.java:1213)
[error] at com.sun.org.apache.xerces.internal.jaxp.SAXParserImpl$JAXPSAXParser.parse(SAXParserImpl.java:643)
[error] at com.sun.org.apache.xerces.internal.jaxp.SAXParserImpl.parse(SAXParserImpl.java:327)
[error] at scala.xml.factory.XMLLoader.loadXML(XMLLoader.scala:41)
[error] at scala.xml.factory.XMLLoader.loadXML$(XMLLoader.scala:37)
[error] at scala.xml.XML$.loadXML(XML.scala:60)
[error] at scala.xml.factory.XMLLoader.loadFile(XMLLoader.scala:48)
[error] at scala.xml.factory.XMLLoader.loadFile$(XMLLoader.scala:48)
[error] at scala.xml.XML$.loadFile(XML.scala:60)
[error] at AopMerge$.$anonfun$apply$1(AopMerge.scala:17)
[error] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
[error] at scala.collection.Iterator.foreach(Iterator.scala:941)
[error] at scala.collection.Iterator.foreach$(Iterator.scala:941)
[error] at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
[error] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[error] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[error] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[error] at scala.collection.TraversableLike.map(TraversableLike.scala:238)
[error] at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
[error] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[error] at AopMerge$.apply(AopMerge.scala:17)
[error] at sbtassembly.MergeStrategy.apply(MergeStrategy.scala:20)
[error] at sbtassembly.Assembly$.applyStrategy$1(Assembly.scala:110)
[error] at sbtassembly.Assembly$.$anonfun$applyStrategies$11(Assembly.scala:135)
[error] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
[error] at scala.collection.Iterator.foreach(Iterator.scala:941)
[error] at scala.collection.Iterator.foreach$(Iterator.scala:941)
[error] at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
[error] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[error] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[error] at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
[error] at scala.collection.TraversableLike.map(TraversableLike.scala:238)
[error] at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
[error] at scala.collection.AbstractTraversable.map(Traversable.scala:108)
[error] at sbtassembly.Assembly$.applyStrategies(Assembly.scala:132)
[error] at sbtassembly.Assembly$.x$1$lzycompute$1(Assembly.scala:25)
[error] at sbtassembly.Assembly$.x$1$1(Assembly.scala:23)
[error] at sbtassembly.Assembly$.stratMapping$lzycompute$1(Assembly.scala:23)
[error] at sbtassembly.Assembly$.stratMapping$1(Assembly.scala:23)
[error] at sbtassembly.Assembly$.inputs$lzycompute$1(Assembly.scala:68)
[error] at sbtassembly.Assembly$.inputs$1(Assembly.scala:58)
[error] at sbtassembly.Assembly$.apply(Assembly.scala:85)
[error] at sbtassembly.Assembly$.$anonfun$assemblyTask$1(Assembly.scala:244)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:67)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:281)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:19)
[error] at sbt.Execute.work(Execute.scala:290)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:281)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:178)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:37)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] (repo / assembly) org.xml.sax.SAXParseException; publicId: -//AspectJ//DTD//EN; systemId: http://www.eclipse.org/aspectj/dtd/aspectj.dtd; lineNumber: 1; columnNumber: 2; The markup declarations contained or pointed to by the document type declaration must be well-formed.
Last week the same changes were working, but now it is throwing this error. What could be the reason?
sbt version -> 1.3.10
I have created a new MergeStrategy for aop.xml files (part of the Kamon dependencies).
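Such a strategy usually follows the pattern from the Kamon documentation. Here is a sketch, not the exact code from this thread: it assumes the sbt-assembly 0.14 MergeStrategy API, and it points the DocType at the https URL of the AspectJ DTD, since the plain http URL now redirects, which is what typically produces the malformed-DTD error above:

import sbt._
import sbtassembly.MergeStrategy
import scala.xml._
import scala.xml.dtd.{DocType, PublicID}

object AopMerge extends MergeStrategy {
  val name = "aopMerge"

  // Merge all aop.xml files into one, concatenating their <aspects> and
  // <weaver> children, and emit a DocType pointing at the https DTD.
  def apply(tempDir: File, path: String, files: Seq[File]): Either[String, Seq[(File, String)]] = {
    val dt = DocType("aspectj", PublicID("-//AspectJ//DTD//EN", "https://www.eclipse.org/aspectj/dtd/aspectj.dtd"), Nil)
    val file = MergeStrategy.createMergeTarget(tempDir, path)
    val xmls: Seq[Elem] = files.map(XML.loadFile)
    val aspectsChildren: Seq[Node] = xmls.flatMap(_ \\ "aspectj" \ "aspects" \ "_")
    val weaverChildren: Seq[Node] = xmls.flatMap(_ \\ "aspectj" \ "weaver" \ "_")
    val options: String = xmls.map(x => (x \\ "aspectj" \ "weaver" \ "@options").text).mkString(" ").trim
    val weaverAttr = if (options.isEmpty) Null else new UnprefixedAttribute("options", options, Null)
    val aspects = new Elem(null, "aspects", Null, TopScope, false, aspectsChildren: _*)
    val weaver = new Elem(null, "weaver", weaverAttr, TopScope, false, weaverChildren: _*)
    val aspectj = new Elem(null, "aspectj", Null, TopScope, false, aspects, weaver)
    XML.save(file.toString, aspectj, "UTF-8", xmlDecl = false, dt)
    IO.append(file, IO.Newline.getBytes(IO.defaultCharset))
    Right(Seq(file -> path))
  }
}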
If replacing http with https does not work, it may also be worth disabling the loading of the external DTD when reading the files, as follows:
import java.io.FileInputStream
import org.xml.sax.InputSource
import scala.xml.{Elem, XML}

// A SAX parser that never fetches the external AspectJ DTD
val parser = {
  val factory = javax.xml.parsers.SAXParserFactory.newInstance()
  factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false)
  factory.newSAXParser()
}

val xmls: Seq[Elem] = files.map(f => XML.loadXML(new InputSource(new FileInputStream(f)), parser))
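For completeness, here is a sketch of how such a custom strategy is usually wired into build.sbt; this wiring is an assumption on my part, not code from the thread:

// build.sbt: route META-INF/aop.xml through the custom merge strategy
// and fall back to the default strategy for everything else.
assemblyMergeStrategy in assembly := {
  case PathList("META-INF", "aop.xml") => AopMerge
  case other =>
    val oldStrategy = (assemblyMergeStrategy in assembly).value
    oldStrategy(other)
}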
I am a beginner with Scala and Spark.
Scala version: 2.12.10
Spark version: 3.0.1
I'm trying a very simple Spark RDD function in Scala, but I get an error.
(1) build.sbt
scalaVersion := "2.12.10"
name := "hello-world"
organization := "ch.epfl.scala"
version := "1.0"
libraryDependencies += "org.scala-lang.modules" %% "scala-parser-combinators" % "1.1.2"
libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.0.1"
libraryDependencies += "org.apache.spark" %% "spark-core" % "3.0.1"
(2) Main.scala
import org.apache.spark.sql.SparkSession

object Main extends App {
  println("Hello, World!")

  implicit val spark = SparkSession.builder()
    .master("spark://centos-master:7077")
    // .master("local[*]")
    .appName("spark-api")
    .getOrCreate()

  val inputrdd = spark.sparkContext.parallelize(Seq(("arth", 10), ("arth", 20), ("samuel", 60), ("jack", 65)))
  println("inputrdd : ", inputrdd)

  val mapped = inputrdd.mapValues(x => (x, 1))
  println("mapped : ", mapped)

  mapped.collect.foreach(println)
}
(3) When the error occurred
It seems that an error occurs in the mapped.collect.foreach(println) part.
(4) Error content
21/04/17 20:54:19 INFO DAGScheduler: Job 0 failed: collect at Main.scala:16, took 6.083947 s
21/04/17 20:54:19 INFO TaskSetManager: Lost task 1.3 in stage 0.0 (TID 6) on 192.168.0.220, executor 0: java.lang.ClassCastException (cannot assign instance of java.lang.invoke.SerializedLambda to field org.apache.spark.rdd.MapPartitionsRDD.f of type scala.Function3 in instance of org.apache.spark.rdd.MapPartitionsRDD) [duplicate 7]
21/04/17 20:54:19 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
[error] org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 4 times, most recent failure: Lost task 0.3 in stage 0.0 (TID 7, 192.168.0.220, executor 0): java.lang.ClassCastException: cannot assign instance of java.lang.invoke.SerializedLambda to field org.apache.spark.rdd.MapPartitionsRDD.f of type scala.Function3 in instance of org.apache.spark.rdd.MapPartitionsRDD
[error] at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2301)
[error] at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1431)
[error] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2410)
[error] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2328)
[error] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2186)
[error] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1666)
[error] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2404)
[error] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2328)
[error] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2186)
[error] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1666)
[error] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:502)
[error] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:460)
[error] at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:76)
[error] at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:115)
[error] at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:83)
[error] at org.apache.spark.scheduler.Task.run(Task.scala:127)
[error] at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:446)
[error] at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
[error] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:449)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error]
[error] Driver stacktrace:
[error] at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2059)
[error] at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2008)
[error] at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2007)
[error] at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
[error] at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
[error] at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
[error] at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2007)
[error] at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:973)
[error] at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:973)
[error] at scala.Option.foreach(Option.scala:407)
[error] at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:973)
[error] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2239)
[error] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2188)
[error] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2177)
[error] at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
[error] at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:775)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:2099)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:2120)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:2139)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:2164)
[error] at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1004)
[error] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[error] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[error] at org.apache.spark.rdd.RDD.withScope(RDD.scala:388)
[error] at org.apache.spark.rdd.RDD.collect(RDD.scala:1003)
[error] at Main$.delayedEndpoint$Main$1(Main.scala:16)
[error] at Main$delayedInit$body.apply(Main.scala:2)
[error] at scala.Function0.apply$mcV$sp(Function0.scala:39)
[error] at scala.Function0.apply$mcV$sp$(Function0.scala:39)
[error] at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:17)
[error] at scala.App.$anonfun$main$1$adapted(App.scala:80)
[error] at scala.collection.immutable.List.foreach(List.scala:392)
[error] at scala.App.main(App.scala:80)
[error] at scala.App.main$(App.scala:78)
[error] at Main$.main(Main.scala:2)
[error] at Main.main(Main.scala)
[error] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[error] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[error] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[error] at java.lang.reflect.Method.invoke(Method.java:498)
[error] Caused by: java.lang.ClassCastException: cannot assign instance of java.lang.invoke.SerializedLambda to field org.apache.spark.rdd.MapPartitionsRDD.f of type scala.Function3 in instance of org.apache.spark.rdd.MapPartitionsRDD
[error] at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2301)
[error] at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1431)
[error] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2410)
[error] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2328)
[error] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2186)
[error] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1666)
[error] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2404)
[error] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2328)
[error] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2186)
[error] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1666)
[error] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:502)
[error] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:460)
[error] at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:76)
[error] at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:115)
[error] at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:83)
[error] at org.apache.spark.scheduler.Task.run(Task.scala:127)
[error] at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:446)
[error] at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
[error] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:449)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
Do I need more libraries, or is the code wrong? (It works fine in spark-shell.)
How can I solve it?
You need to submit your jars to Spark so that your code can run on the executors; spark-shell hides all of this from you behind the scenes.
This answer provides more detail and background: https://stackoverflow.com/a/28367602/1810962
As a workaround, you can use bin/spark-submit and provide your local classpath using --class, --jars, and --driver-class-path.
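For example, after running sbt package you could submit with bin/spark-submit --class Main --master spark://centos-master:7077 target/scala-2.12/hello-world_2.12-1.0.jar. Alternatively, if you want to keep launching with sbt run, a minimal sketch of mine (not part of the original answer) is to ship the packaged jar yourself via the spark.jars setting; the jar path below is what sbt package produces for the build.sbt above:

import org.apache.spark.sql.SparkSession

// Sketch: distribute the application jar to the executors via spark.jars,
// so that the lambdas compiled into it can be deserialized on the workers.
implicit val spark = SparkSession.builder()
  .master("spark://centos-master:7077")
  .appName("spark-api")
  .config("spark.jars", "target/scala-2.12/hello-world_2.12-1.0.jar")
  .getOrCreate()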
I have two DataFrames, df1 and df2, with the same column names. I want to run a for loop over the unique dates from df1 and apply the same date filter to df2. I created a list of unique dates and tried to iterate through it, but what I have throws errors.
Here is what I have:
val unique_weeks = df1.select(df1("date")).distinct

for (week <- unique_weeks) {
  val df1_filtered = df1.filter($"date" === week)
  val df2_filtered = df2.filter($"date" === week)
  // will run a join here and more code
}
I think the <- part may be incorrect, but I'm not sure how else I can filter the DataFrames.
Here is the error:
[error] (run-main-0) org.apache.spark.SparkException: Job aborted due to stage failure: Task 35 in stage 3.0 failed 1 times, most recent failure: Lost task 35.0 in stage 3.0 (TID 399, localhost, executor driver): java.lang.RuntimeException: Unsupported literal type class org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema [1591772400000]
[error] at org.apache.spark.sql.catalyst.expressions.Literal$.apply(literals.scala:75)
[error] at org.apache.spark.sql.functions$.lit(functions.scala:101)
[error] at org.apache.spark.sql.Column.$eq$eq$eq(Column.scala:267)
[error] at spark_pkg.SparkMain$$anonfun$main$1.apply(SparkMain.scala:880)
[error] at spark_pkg.SparkMain$$anonfun$main$1.apply(SparkMain.scala:878)
[error] at scala.collection.Iterator$class.foreach(Iterator.scala:893)
[error] at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
[error] at org.apache.spark.rdd.RDD$$anonfun$foreach$1$$anonfun$apply$28.apply(RDD.scala:917)
[error] at org.apache.spark.rdd.RDD$$anonfun$foreach$1$$anonfun$apply$28.apply(RDD.scala:917)
[error] at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1944)
[error] at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1944)
[error] at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
[error] at org.apache.spark.scheduler.Task.run(Task.scala:99)
[error] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error]
[error] Driver stacktrace:
[error] at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
[error] at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
[error] at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
[error] at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
[error] at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
[error] at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
[error] at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
[error] at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
[error] at scala.Option.foreach(Option.scala:257)
[error] at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
[error] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
[error] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
[error] at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
[error] at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
[error] at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1918)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1931)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1944)
[error] at org.apache.spark.SparkContext.runJob(SparkContext.scala:1958)
[error] at org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:917)
[error] at org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:915)
[error] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[error] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[error] at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
[error] at org.apache.spark.rdd.RDD.foreach(RDD.scala:915)
[error] at org.apache.spark.sql.Dataset$$anonfun$foreach$1.apply$mcV$sp(Dataset.scala:2286)
[error] at org.apache.spark.sql.Dataset$$anonfun$foreach$1.apply(Dataset.scala:2286)
[error] at org.apache.spark.sql.Dataset$$anonfun$foreach$1.apply(Dataset.scala:2286)
[error] at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
[error] at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2765)
[error] at org.apache.spark.sql.Dataset.foreach(Dataset.scala:2285)
[error] at spark_pkg.SparkMain$.main(SparkMain.scala:878)
[error] at spark_pkg.SparkMain.main(SparkMain.scala)
[error] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[error] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[error] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[error] at java.lang.reflect.Method.invoke(Method.java:498)
[error] Caused by: java.lang.RuntimeException: Unsupported literal type class org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema [1591772400000]
[error] at org.apache.spark.sql.catalyst.expressions.Literal$.apply(literals.scala:75)
[error] at org.apache.spark.sql.functions$.lit(functions.scala:101)
[error] at org.apache.spark.sql.Column.$eq$eq$eq(Column.scala:267)
[error] at spark_pkg.SparkMain$$anonfun$main$1.apply(SparkMain.scala:880)
[error] at spark_pkg.SparkMain$$anonfun$main$1.apply(SparkMain.scala:878)
[error] at scala.collection.Iterator$class.foreach(Iterator.scala:893)
[error] at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
[error] at org.apache.spark.rdd.RDD$$anonfun$foreach$1$$anonfun$apply$28.apply(RDD.scala:917)
[error] at org.apache.spark.rdd.RDD$$anonfun$foreach$1$$anonfun$apply$28.apply(RDD.scala:917)
[error] at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1944)
[error] at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1944)
[error] at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
[error] at org.apache.spark.scheduler.Task.run(Task.scala:99)
[error] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] stack trace is suppressed; run 'last Compile / bgRun' for the full output
[error] Nonzero exit code: 1
[error] (Compile / run) Nonzero exit code: 1
[error] Total time: 137 s (02:17), completed Aug 20, 2020 1:16:02 PM
A DataFrame is not an iterator, and therefore you cannot run a for loop over it. You can run something like the following, but I don't think it will do what you're hoping to achieve, based on your other code.
unique_weeks.foreachPartition { weeks: Iterator[YourData] =>
  for (week <- weeks) {
    // this body runs on the executors, not on the driver
  }
}
Your question suggests your mental model of what a DataFrame is and how Spark works is not quite complete. Think of a DataFrame more as a List[List[YourData]], except each inner List[YourData] sits on an independent piece of a machine and does not necessarily know about or interact with the other lists until you collect them back to the driver.
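If the goal is just to loop over the distinct dates on the driver, one way (a sketch of mine, assuming the date column is a DateType; use getTimestamp(0) for a TimestampType) is to collect the dates as plain Scala values first; the error above comes from comparing a column to a whole Row instead of to a value:

// Collect distinct dates to the driver as plain values. Comparing
// $"date" === row, where row is a whole Row, is what produced
// "Unsupported literal type ... GenericRowWithSchema" above.
val weeks: Array[java.sql.Date] = df1.select("date").distinct.collect().map(_.getDate(0))

for (week <- weeks) {
  val df1_filtered = df1.filter($"date" === week)
  val df2_filtered = df2.filter($"date" === week)
  // join df1_filtered and df2_filtered here
}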
I am currently working on a system where staff members in an organization can be informed by their managers, via devices, of pending tasks to be completed. This is modelled by the following classes:
Staff
Manager (Extends Staff)
Device (Receives tasks and removes them once completed)
Task
System (Assigns tasks)
Main
I am currently stuck: I am unable to determine why I cannot run my classes, as I receive a non-zero exit code of 1.
The problem currently seems to lie within the Main.scala file, at the moment:
object Main {
  def main(args: Array[String]): Unit = {
    var system = new System
    var s1 = new Staff(1, "John", "johndoe@outlook.com", "Brazil")
    system.addStaff(s1)
    var s2 = new Manager(2, "Reese", "reesecups@gmail.com", "Japan")
    system.addStaff(s2)
    s2.assignTask(system, 1, "PLEASE WORK")
  }
}
The code used to run as intended; however, when I added the following line to Main.scala:
s2.assignTask(system, 1, "PLEASE WORK")
I received the following error:
[error] java.lang.RuntimeException: No main class detected.
[error] at scala.sys.package$.error(package.scala:30)
[error] at sbt.Defaults$.$anonfun$bgRunTask$4(Defaults.scala:1477)
[error] at scala.Option.getOrElse(Option.scala:189)
[error] at sbt.Defaults$.$anonfun$bgRunTask$3(Defaults.scala:1477)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:67)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:281)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:19)
[error] at sbt.Execute.work(Execute.scala:290)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:281)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:178)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:37)
[error] at java.util.concurrent.FutureTask.run(Unknown Source)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
[error] at java.util.concurrent.FutureTask.run(Unknown Source)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
[error] at java.lang.Thread.run(Unknown Source)
[error] (Compile / bgRun) No main class detected.
Is there any indication of what the problem may be?
Using Scala 2.11.8, Guava 20.0, and sbt 0.13.13, the following snippet causes the compiler to crash:
import com.google.common.hash.BloomFilter

object Test {
  def test() {
    BloomFilter.create(null, 1, 1)
  }
}
with the error:
Error in Scala compiler: trying to do lub/glb of typevar ?T
SBT builder crashed while compiling. The error message is 'trying to do lub/glb of typevar ?T'
Unknown Scala Problem
Any idea what causes this problem?
The full log:
[error] trying to do lub/glb of typevar ?T
scala.reflect.internal.FatalError: trying to do lub/glb of typevar ?T
at scala.reflect.internal.Reporting$class.abort(Reporting.scala:59)
at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:16)
at scala.reflect.internal.tpe.GlbLubs$class.stripType$1(GlbLubs.scala:224)
at scala.reflect.internal.tpe.GlbLubs$$anonfun$18.apply(GlbLubs.scala:227)
at scala.reflect.internal.tpe.GlbLubs$$anonfun$18.apply(GlbLubs.scala:227)
at scala.collection.immutable.List.loop$1(List.scala:173)
at scala.collection.immutable.List.mapConserve(List.scala:189)
at scala.reflect.internal.tpe.GlbLubs$class.stripExistentialsAndTypeVars(GlbLubs.scala:227)
at scala.reflect.internal.tpe.GlbLubs$class.lub1$1(GlbLubs.scala:328)
at scala.reflect.internal.tpe.GlbLubs$class.lub0$1(GlbLubs.scala:322)
at scala.reflect.internal.tpe.GlbLubs$class.lub(GlbLubs.scala:419)
at scala.reflect.internal.SymbolTable.lub(SymbolTable.scala:16)
at scala.reflect.internal.tpe.GlbLubs$class.lub(GlbLubs.scala:279)
at scala.reflect.internal.SymbolTable.lub(SymbolTable.scala:16)
at scala.reflect.internal.tpe.TypeConstraints$class.solveOne$1(TypeConstraints.scala:248)
at scala.reflect.internal.tpe.TypeConstraints$$anonfun$solve$1.apply(TypeConstraints.scala:260)
at scala.reflect.internal.tpe.TypeConstraints$$anonfun$solve$1.apply(TypeConstraints.scala:260)
at scala.reflect.internal.util.Collections$class.foreach3(Collections.scala:231)
at scala.reflect.internal.SymbolTable.foreach3(SymbolTable.scala:16)
at scala.reflect.internal.tpe.TypeConstraints$class.solve(TypeConstraints.scala:260)
at scala.reflect.internal.SymbolTable.solve(SymbolTable.scala:16)
at scala.reflect.internal.Types$ExistentialType.withTypeVars(Types.scala:2699)
at scala.reflect.internal.tpe.TypeComparers$class.thirdTry$1(TypeComparers.scala:483)
at scala.reflect.internal.tpe.TypeComparers$class.secondTry$1(TypeComparers.scala:450)
at scala.reflect.internal.tpe.TypeComparers$class.firstTry$1(TypeComparers.scala:426)
at scala.reflect.internal.tpe.TypeComparers$class.isSubType2(TypeComparers.scala:552)
at scala.reflect.internal.tpe.TypeComparers$class.isSubType1(TypeComparers.scala:320)
at scala.reflect.internal.tpe.TypeComparers$class.isSubType(TypeComparers.scala:278)
at scala.reflect.internal.SymbolTable.isSubType(SymbolTable.scala:16)
at scala.reflect.internal.tpe.TypeComparers$class.secondTry$1(TypeComparers.scala:445)
at scala.reflect.internal.tpe.TypeComparers$class.firstTry$1(TypeComparers.scala:426)
at scala.reflect.internal.tpe.TypeComparers$class.isSubType2(TypeComparers.scala:552)
at scala.reflect.internal.tpe.TypeComparers$class.isSubType1(TypeComparers.scala:320)
at scala.reflect.internal.tpe.TypeComparers$class.isSubType(TypeComparers.scala:278)
at scala.reflect.internal.SymbolTable.isSubType(SymbolTable.scala:16)
at scala.reflect.internal.tpe.TypeComparers$class.isWeakSubType(TypeComparers.scala:575)
at scala.reflect.internal.SymbolTable.isWeakSubType(SymbolTable.scala:16)
at scala.reflect.internal.Types$Type.weak_$less$colon$less(Types.scala:825)
at scala.tools.nsc.typechecker.Infer$Inferencer.scala$tools$nsc$typechecker$Infer$Inferencer$$isCompatible(Infer.scala:305)
at scala.tools.nsc.typechecker.Infer$Inferencer$$anonfun$methTypeArgs$2.apply(Infer.scala:545)
at scala.tools.nsc.typechecker.Infer$Inferencer$$anonfun$methTypeArgs$2.apply(Infer.scala:539)
at scala.reflect.internal.util.Collections$class.map2(Collections.scala:79)
at scala.reflect.internal.SymbolTable.map2(SymbolTable.scala:16)
at scala.tools.nsc.typechecker.Infer$Inferencer.methTypeArgs(Infer.scala:539)
at scala.tools.nsc.typechecker.Infer$Inferencer$$anonfun$tryInstantiating$1$1.apply$mcZ$sp(Infer.scala:736)
at scala.tools.nsc.typechecker.Infer$class.falseIfNoInstance(Infer.scala:114)
at scala.tools.nsc.Global$$anon$1.falseIfNoInstance(Global.scala:462)
at scala.tools.nsc.typechecker.Infer$Inferencer.tryInstantiating$1(Infer.scala:734)
at scala.tools.nsc.typechecker.Infer$Inferencer.typesCompatible$1(Infer.scala:746)
at scala.tools.nsc.typechecker.Infer$Inferencer.isApplicableToMethod(Infer.scala:757)
at scala.tools.nsc.typechecker.Infer$Inferencer.scala$tools$nsc$typechecker$Infer$Inferencer$$isApplicable(Infer.scala:776)
at scala.tools.nsc.typechecker.Infer$Inferencer$$anonfun$scala$tools$nsc$typechecker$Infer$Inferencer$$isApplicable$2.apply(Infer.scala:778)
at scala.tools.nsc.typechecker.Infer$Inferencer$$anonfun$scala$tools$nsc$typechecker$Infer$Inferencer$$isApplicable$2.apply(Infer.scala:778)
at scala.reflect.internal.Symbols$class.createFromClonedSymbols(Symbols.scala:3662)
at scala.reflect.internal.SymbolTable.createFromClonedSymbols(SymbolTable.scala:16)
at scala.tools.nsc.typechecker.Infer$Inferencer.scala$tools$nsc$typechecker$Infer$Inferencer$$isApplicable(Infer.scala:778)
at scala.tools.nsc.typechecker.Infer$Inferencer.checkIsApplicable$1(Infer.scala:807)
at scala.tools.nsc.typechecker.Infer$Inferencer.isAsSpecific(Infer.scala:827)
at scala.tools.nsc.typechecker.Infer$Inferencer.isStrictlyMoreSpecific(Infer.scala:865)
at scala.tools.nsc.typechecker.Infer$Inferencer$InferMethodAlternativeTwice$1.scala$tools$nsc$typechecker$Infer$Inferencer$InferMethodAlternativeTwice$$rankAlternatives(Infer.scala:1401)
at scala.tools.nsc.typechecker.Infer$Inferencer$InferMethodAlternativeTwice$1$$anonfun$36.apply(Infer.scala:1404)
at scala.tools.nsc.typechecker.Infer$Inferencer$InferMethodAlternativeTwice$1$$anonfun$36.apply(Infer.scala:1404)
at scala.tools.nsc.typechecker.Infer$class.improves$1(Infer.scala:62)
at scala.tools.nsc.typechecker.Infer$$anonfun$4.apply(Infer.scala:65)
at scala.tools.nsc.typechecker.Infer$$anonfun$4.apply(Infer.scala:65)
at scala.math.Ordering$$anon$9.compare(Ordering.scala:200)
at java.util.TimSort.countRunAndMakeAscending(Unknown Source)
at java.util.TimSort.sort(Unknown Source)
at java.util.Arrays.sort(Unknown Source)
at scala.collection.SeqLike$class.sorted(SeqLike.scala:648)
at scala.collection.AbstractSeq.sorted(Seq.scala:41)
at scala.collection.SeqLike$class.sortWith(SeqLike.scala:601)
at scala.collection.AbstractSeq.sortWith(Seq.scala:41)
at scala.tools.nsc.typechecker.Infer$class.scala$tools$nsc$typechecker$Infer$$bestAlternatives(Infer.scala:65)
at scala.tools.nsc.typechecker.Infer$Inferencer$InferMethodAlternativeTwice$1.bestForExpectedType(Infer.scala:1404)
at scala.tools.nsc.typechecker.Infer$Inferencer$InferMethodAlternativeTwice$1.tryOnce(Infer.scala:1416)
at scala.tools.nsc.typechecker.Contexts$Context$TryTwice.apply(Contexts.scala:357)
at scala.tools.nsc.typechecker.Infer$Inferencer.inferMethodAlternative(Infer.scala:1420)
at scala.tools.nsc.typechecker.Typers$Typer.handleOverloaded$1(Typers.scala:3297)
at scala.tools.nsc.typechecker.Typers$Typer.doTypedApply(Typers.scala:3301)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$98.apply(Typers.scala:4496)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$98.apply(Typers.scala:4496)
at scala.tools.nsc.typechecker.Typers$Typer.silent(Typers.scala:680)
at scala.tools.nsc.typechecker.Typers$Typer.tryTypedApply$1(Typers.scala:4496)
at scala.tools.nsc.typechecker.Typers$Typer.normalTypedApply$1(Typers.scala:4544)
at scala.tools.nsc.typechecker.Typers$Typer.typedApply$1(Typers.scala:4580)
at scala.tools.nsc.typechecker.Typers$Typer.typedInAnyMode$1(Typers.scala:5343)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5360)
at scala.tools.nsc.typechecker.Typers$Typer.runTyper$1(Typers.scala:5396)
at scala.tools.nsc.typechecker.Typers$Typer.scala$tools$nsc$typechecker$Typers$Typer$$typedInternal(Typers.scala:5423)
at scala.tools.nsc.typechecker.Typers$Typer.body$2(Typers.scala:5370)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5374)
at scala.tools.nsc.typechecker.Typers$Typer.transformedOrTyped(Typers.scala:5605)
at scala.tools.nsc.typechecker.Typers$Typer.typedDefDef(Typers.scala:2208)
at scala.tools.nsc.typechecker.Typers$Typer.typedMemberDef$1(Typers.scala:5308)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5359)
at scala.tools.nsc.typechecker.Typers$Typer.runTyper$1(Typers.scala:5396)
at scala.tools.nsc.typechecker.Typers$Typer.scala$tools$nsc$typechecker$Typers$Typer$$typedInternal(Typers.scala:5423)
at scala.tools.nsc.typechecker.Typers$Typer.body$2(Typers.scala:5370)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5374)
at scala.tools.nsc.typechecker.Typers$Typer.typedByValueExpr(Typers.scala:5452)
at scala.tools.nsc.typechecker.Typers$Typer.scala$tools$nsc$typechecker$Typers$Typer$$typedStat$1(Typers.scala:3047)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$65.apply(Typers.scala:3151)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$65.apply(Typers.scala:3151)
at scala.collection.immutable.List.loop$1(List.scala:173)
at scala.collection.immutable.List.mapConserve(List.scala:189)
at scala.tools.nsc.typechecker.Typers$Typer.typedStats(Typers.scala:3151)
at scala.tools.nsc.typechecker.Typers$Typer.typedTemplate(Typers.scala:1921)
at scala.tools.nsc.typechecker.Typers$Typer.typedModuleDef(Typers.scala:1808)
at scala.tools.nsc.typechecker.Typers$Typer.typedMemberDef$1(Typers.scala:5310)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5359)
at scala.tools.nsc.typechecker.Typers$Typer.runTyper$1(Typers.scala:5396)
at scala.tools.nsc.typechecker.Typers$Typer.scala$tools$nsc$typechecker$Typers$Typer$$typedInternal(Typers.scala:5423)
at scala.tools.nsc.typechecker.Typers$Typer.body$2(Typers.scala:5370)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5374)
at scala.tools.nsc.typechecker.Typers$Typer.typedByValueExpr(Typers.scala:5452)
at scala.tools.nsc.typechecker.Typers$Typer.scala$tools$nsc$typechecker$Typers$Typer$$typedStat$1(Typers.scala:3047)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$65.apply(Typers.scala:3151)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$65.apply(Typers.scala:3151)
at scala.collection.immutable.List.loop$1(List.scala:173)
at scala.collection.immutable.List.mapConserve(List.scala:189)
at scala.tools.nsc.typechecker.Typers$Typer.typedStats(Typers.scala:3151)
at scala.tools.nsc.typechecker.Typers$Typer.typedPackageDef$1(Typers.scala:5015)
at scala.tools.nsc.typechecker.Typers$Typer.typedMemberDef$1(Typers.scala:5312)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:5359)
at scala.tools.nsc.typechecker.Typers$Typer.runTyper$1(Typers.scala:5396)
at scala.tools.nsc.typechecker.Typers$Typer.scala$tools$nsc$typechecker$Typers$Typer$$typedInternal(Typers.scala:5423)
at scala.tools.nsc.typechecker.Typers$Typer.body$2(Typers.scala:5370)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5374)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5448)
at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3.apply(Analyzer.scala:102)
at scala.tools.nsc.Global$GlobalPhase$$anonfun$applyPhase$1.apply$mcV$sp(Global.scala:440)
at scala.tools.nsc.Global$GlobalPhase.withCurrentUnit(Global.scala:431)
at scala.tools.nsc.Global$GlobalPhase.applyPhase(Global.scala:440)
at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3$$anonfun$run$1.apply(Analyzer.scala:94)
at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3$$anonfun$run$1.apply(Analyzer.scala:93)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3.run(Analyzer.scala:93)
at scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1501)
at scala.tools.nsc.Global$Run.compileUnits(Global.scala:1486)
at scala.tools.nsc.Global$Run.compileSources(Global.scala:1481)
at scala.tools.nsc.Global$Run.compile(Global.scala:1582)
at xsbt.CachedCompiler0.run(CompilerInterface.scala:116)
at xsbt.CachedCompiler0.run(CompilerInterface.scala:95)
at xsbt.CompilerInterface.run(CompilerInterface.scala:26)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at sbt.compiler.AnalyzingCompiler.call(AnalyzingCompiler.scala:107)
at sbt.compiler.AnalyzingCompiler.compile(AnalyzingCompiler.scala:53)
at sbt.compiler.AnalyzingCompiler.compile(AnalyzingCompiler.scala:47)
at sbt.compiler.MixedAnalyzingCompiler$$anonfun$compileScala$1$1.apply$mcV$sp(MixedAnalyzingCompiler.scala:50)
at sbt.compiler.MixedAnalyzingCompiler$$anonfun$compileScala$1$1.apply(MixedAnalyzingCompiler.scala:50)
at sbt.compiler.MixedAnalyzingCompiler$$anonfun$compileScala$1$1.apply(MixedAnalyzingCompiler.scala:50)
at sbt.compiler.MixedAnalyzingCompiler.timed(MixedAnalyzingCompiler.scala:74)
at sbt.compiler.MixedAnalyzingCompiler.compileScala$1(MixedAnalyzingCompiler.scala:49)
at sbt.compiler.MixedAnalyzingCompiler.compile(MixedAnalyzingCompiler.scala:64)
at sbt.compiler.IC$$anonfun$compileInternal$1.apply(IncrementalCompiler.scala:160)
at sbt.compiler.IC$$anonfun$compileInternal$1.apply(IncrementalCompiler.scala:160)
at sbt.inc.IncrementalCompile$$anonfun$doCompile$1.apply(Compile.scala:66)
at sbt.inc.IncrementalCompile$$anonfun$doCompile$1.apply(Compile.scala:64)
at sbt.inc.IncrementalCommon.cycle(IncrementalCommon.scala:32)
at sbt.inc.Incremental$$anonfun$1.apply(Incremental.scala:72)
at sbt.inc.Incremental$$anonfun$1.apply(Incremental.scala:71)
at sbt.inc.Incremental$.manageClassfiles(Incremental.scala:99)
at sbt.inc.Incremental$.compile(Incremental.scala:71)
at sbt.inc.IncrementalCompile$.apply(Compile.scala:54)
at sbt.compiler.IC$.compileInternal(IncrementalCompiler.scala:160)
at sbt.compiler.IC$.incrementalCompile(IncrementalCompiler.scala:138)
at sbt.Compiler$.compile(Compiler.scala:155)
at sbt.Compiler$.compile(Compiler.scala:141)
at sbt.Defaults$.sbt$Defaults$$compileIncrementalTaskImpl(Defaults.scala:879)
at sbt.Defaults$$anonfun$compileIncrementalTask$1.apply(Defaults.scala:870)
at sbt.Defaults$$anonfun$compileIncrementalTask$1.apply(Defaults.scala:868)
at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:40)
at sbt.std.Transform$$anon$4.work(System.scala:63)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
at sbt.Execute.work(Execute.scala:237)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
at java.util.concurrent.FutureTask.run(Unknown Source)
at java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
at java.util.concurrent.FutureTask.run(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
at java.lang.Thread.run(Unknown Source)
[error] (test:compileIncremental) scala.reflect.internal.FatalError: trying to do lub/glb of typevar ?T
[error] Total time: 3 s, completed Nov 14, 2016 10:05:52 AM
The create method has a generic type parameter T, and the Scala compiler fails to infer it from the null value you pass as the first argument. If you help it a little, it compiles fine:
import com.google.common.hash.BloomFilter

object Test {
  def test(): Unit = {
    BloomFilter.create[Nothing](null, 1, 1)
  }
}
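In real code you would pass an actual Funnel instead of null, and then T can be pinned down without resorting to Nothing. A small self-contained sketch of mine, not from the original post:

import com.google.common.hash.{BloomFilter, Funnels}

object BloomFilterExample {
  def main(args: Array[String]): Unit = {
    // Funnels.integerFunnel() is a Funnel[Integer], so T = Integer here;
    // 1000 expected insertions with a 1% false-positive probability.
    val filter = BloomFilter.create[Integer](Funnels.integerFunnel(), 1000, 0.01)
    filter.put(42)
    println(filter.mightContain(42)) // true
    println(filter.mightContain(7))  // false with high probability
  }
}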