Unable to verify subset of arguments passed to mocked method in specs2 - scala

Here's a simple specification reproducing the issue:
package ro.igstan.learning

import org.specs2.matcher.ThrownExpectations
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import org.specs2.specification.Scope

class LearningSpecs2 extends Specification {
  trait Logger {
    def error(a: String, b: String)
  }

  trait mocks extends Mockito with Scope with ThrownExpectations {
    val mockedLogger = mock[Logger]
  }

  "mocking" should {
    // passes
    "be able to verify arguments passed to mocks" in new mocks {
      mockedLogger.error("message", "foo")
      got {
        one(mockedLogger).error("message", "foo")
      }
    }

    // fails
    "be able to *partially* verify arguments passed to mocks" in new mocks {
      mockedLogger.error("message", "foo")
      got {
        one(mockedLogger).error(any, ===("foo"))
      }
    }
  }
}
This is the output:
[info] mocking should
[info] + be able to verify arguments passed to mocks
[error] ! Fragment evaluation error
[error] ThrowableException: org/hamcrest/TypeSafeMatcher (FutureTask.java:303)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2.argThat(LearningSpecs2.scala:27)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2$$anonfun$2.apply$mcV$sp(LearningSpecs2.scala:31)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2$$anonfun$2.apply(LearningSpecs2.scala:31)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2$$anonfun$2.apply(LearningSpecs2.scala:31)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2.got(LearningSpecs2.scala:27)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2.<init>(LearningSpecs2.scala:30)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3.apply(LearningSpecs2.scala:27)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3.apply(LearningSpecs2.scala:27)
[error] org/hamcrest/TypeSafeMatcher
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2.argThat(LearningSpecs2.scala:27)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2$$anonfun$2.apply$mcV$sp(LearningSpecs2.scala:31)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2$$anonfun$2.apply(LearningSpecs2.scala:31)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2$$anonfun$2.apply(LearningSpecs2.scala:31)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2.got(LearningSpecs2.scala:27)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2.<init>(LearningSpecs2.scala:30)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3.apply(LearningSpecs2.scala:27)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3.apply(LearningSpecs2.scala:27)
[error] org.hamcrest.TypeSafeMatcher
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2.argThat(LearningSpecs2.scala:27)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2$$anonfun$2.apply$mcV$sp(LearningSpecs2.scala:31)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2$$anonfun$2.apply(LearningSpecs2.scala:31)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2$$anonfun$2.apply(LearningSpecs2.scala:31)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2.got(LearningSpecs2.scala:27)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3$$anon$2.<init>(LearningSpecs2.scala:30)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3.apply(LearningSpecs2.scala:27)
[error] ro.igstan.learning.LearningSpecs2$$anonfun$3$$anonfun$apply$3.apply(LearningSpecs2.scala:27)
And here are the library versions as reported by the SBT dependency graph plugin:
[info] default:learning_2.9.1:1.0.0
[info] +-junit:junit:4.7
[info] +-org.mockito:mockito-all:1.9.0
[info] +-org.scala-lang:scala-library:2.9.1
[info] +-org.specs2:specs2_2.9.1:1.11
[info] +-org.specs2:specs2-scalaz-core_2.9.1:6.0.1
[info] +-org.scala-lang:scala-library:2.9.1
Any idea what's going on?

Fixed. The ThrowableException above boils down to a missing class, org/hamcrest/TypeSafeMatcher, which means Hamcrest wasn't on the test classpath. I had to explicitly specify the Hamcrest version:
libraryDependencies ++= Seq(
  "org.specs2" %% "specs2" % "1.11",
  "org.mockito" % "mockito-all" % "1.9.0",
  "org.hamcrest" % "hamcrest-all" % "1.3", // <- this was missing
  "junit" % "junit" % "4.7"
)
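With Hamcrest on the classpath, the partial verification above passes. As a side note, the same check can also be written with specs2's "there was" syntax instead of a got block; a minimal sketch, inside an example like the ones above:

  mockedLogger.error("message", "foo")
  // anyString matches any first argument; ===("foo") pins the second one to "foo".
  there was one(mockedLogger).error(anyString, ===("foo"))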

Related

org.scalajs.testing.adapter.JSEnvRPC$RunTerminatedException when trying to use scalatest with scalajs

I am trying to use ScalaTest for testing Scala.js, but it is throwing exceptions. I'm not sure what the issue could be. Any suggestions, please? TIA.
Another thing worth mentioning: the following exception was thrown even without any test file.
build.sbt
ThisBuild / scalaVersion := "2.12.10"

lazy val root = project.in(file(".")).aggregate(parser.js, parser.jvm).
  settings(
    publish := {},
    publishLocal := {},
  )

lazy val parser = crossProject(JSPlatform, JVMPlatform).in(file(".")).
  settings(
    name := "rules-parser",
    version := "0.1",
    libraryDependencies ++= Seq(
      "com.lihaoyi" %%% "fastparse" % "2.2.4",
      "com.chuusai" %%% "shapeless" % "2.4.0-M1",
      "org.scalactic" %%% "scalactic" % "3.2.2",
      "org.scalatest" %%% "scalatest" % "3.3.0-SNAP2" % Test
    )
  ).
  jsSettings(
    // scalaJSUseMainModuleInitializer := true,
    libraryDependencies ++= Seq(
      "org.scala-js" %%% "scalajs-dom" % "1.1.0",
      "com.lihaoyi" %%% "scalatags" % "0.9.2"
    )
  ).
  jvmSettings(
    libraryDependencies ++= Seq(
      "org.scala-js" %% "scalajs-stubs" % "1.0.0" % "provided"
    )
  )
js/test/scala/example/ExampleSpec.scala
package example

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.must.Matchers

// ScalaTest suites must be classes, not objects, or the runner won't discover them.
class ExampleSpec extends AnyFlatSpec with Matchers {
  "Simple test" should "pass" in {
    assert(1 == 1)
  }
}
Exception
sbt:root> last parserJS / Test / loadedTestFrameworks
[debug] Starting process: node
[error] org.scalajs.testing.common.RPCCore$ClosedException: org.scalajs.testing.adapter.JSEnvRPC$RunTerminatedException
[error] at org.scalajs.testing.common.RPCCore.helpClose(RPCCore.scala:223)
[error] at org.scalajs.testing.common.RPCCore.call(RPCCore.scala:164)
[error] at org.scalajs.testing.adapter.TestAdapter.loadFrameworks(TestAdapter.scala:57)
[error] at org.scalajs.sbtplugin.ScalaJSPluginInternal$.$anonfun$scalaJSTestSettings$4(ScalaJSPluginInternal.scala:597)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:68)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:282)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:23)
[error] at sbt.Execute.work(Execute.scala:291)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:282)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:265)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:64)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] Caused by: org.scalajs.testing.adapter.JSEnvRPC$RunTerminatedException
[error] at org.scalajs.testing.adapter.JSEnvRPC.$anonfun$new$1(JSEnvRPC.scala:38)
[error] at org.scalajs.testing.adapter.JSEnvRPC.$anonfun$new$1$adapted(JSEnvRPC.scala:38)
[error] at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
[error] at java.util.concurrent.ForkJoinTask$RunnableExecuteAction.exec(ForkJoinTask.java:1402)
[error] at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
[error] at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
[error] at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
[error] at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
[error] Caused by: java.io.IOException: Cannot run program "node": error=2, No such file or directory
[error] at java.lang.ProcessBuilder.start(ProcessBuilder.java:1048)
[error] at org.scalajs.jsenv.ExternalJSRun$.startProcess(ExternalJSRun.scala:143)
[error] at org.scalajs.jsenv.ExternalJSRun$.start(ExternalJSRun.scala:40)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.internalStart(NodeJSEnv.scala:63)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.$anonfun$startWithCom$1(NodeJSEnv.scala:47)
[error] at org.scalajs.jsenv.nodejs.ComRun$.start(ComSupport.scala:214)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.startWithCom(NodeJSEnv.scala:46)
[error] at org.scalajs.testing.adapter.JSEnvRPC.<init>(JSEnvRPC.scala:25)
[error] at org.scalajs.testing.adapter.TestAdapter.startManagedRunner(TestAdapter.scala:129)
[error] at org.scalajs.testing.adapter.TestAdapter.$anonfun$getRunnerForThread$1(TestAdapter.scala:120)
[error] at scala.collection.concurrent.TrieMap.getOrElseUpdate(TrieMap.scala:897)
[error] at org.scalajs.testing.adapter.TestAdapter.getRunnerForThread(TestAdapter.scala:120)
[error] at org.scalajs.testing.adapter.TestAdapter.loadFrameworks(TestAdapter.scala:56)
[error] at org.scalajs.sbtplugin.ScalaJSPluginInternal$.$anonfun$scalaJSTestSettings$4(ScalaJSPluginInternal.scala:597)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:68)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:282)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:23)
[error] at sbt.Execute.work(Execute.scala:291)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:282)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:265)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:64)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] Caused by: java.io.IOException: error=2, No such file or directory
[error] at java.lang.UNIXProcess.forkAndExec(Native Method)
[error] at java.lang.UNIXProcess.<init>(UNIXProcess.java:247)
[error] at java.lang.ProcessImpl.start(ProcessImpl.java:134)
[error] at java.lang.ProcessBuilder.start(ProcessBuilder.java:1029)
[error] at org.scalajs.jsenv.ExternalJSRun$.startProcess(ExternalJSRun.scala:143)
[error] at org.scalajs.jsenv.ExternalJSRun$.start(ExternalJSRun.scala:40)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.internalStart(NodeJSEnv.scala:63)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.$anonfun$startWithCom$1(NodeJSEnv.scala:47)
[error] at org.scalajs.jsenv.nodejs.ComRun$.start(ComSupport.scala:214)
[error] at org.scalajs.jsenv.nodejs.NodeJSEnv.startWithCom(NodeJSEnv.scala:46)
[error] at org.scalajs.testing.adapter.JSEnvRPC.<init>(JSEnvRPC.scala:25)
[error] at org.scalajs.testing.adapter.TestAdapter.startManagedRunner(TestAdapter.scala:129)
[error] at org.scalajs.testing.adapter.TestAdapter.$anonfun$getRunnerForThread$1(TestAdapter.scala:120)
[error] at scala.collection.concurrent.TrieMap.getOrElseUpdate(TrieMap.scala:897)
[error] at org.scalajs.testing.adapter.TestAdapter.getRunnerForThread(TestAdapter.scala:120)
[error] at org.scalajs.testing.adapter.TestAdapter.loadFrameworks(TestAdapter.scala:56)
[error] at org.scalajs.sbtplugin.ScalaJSPluginInternal$.$anonfun$scalaJSTestSettings$4(ScalaJSPluginInternal.scala:597)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:68)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:282)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:23)
[error] at sbt.Execute.work(Execute.scala:291)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:282)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:265)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:64)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] (parserJS / Test / loadedTestFrameworks) org.scalajs.testing.common.RPCCore$ClosedException: org.scalajs.testing.adapter.JSEnvRPC$RunTerminatedException
From this: Cannot run program "node": error=2, No such file or directory
it looks like you need to install Node.js. If it's already installed, make sure it is on your PATH.
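If node is installed in a non-standard location, the sbt-scalajs plugin can also be pointed at the binary explicitly instead of relying on the PATH lookup. A minimal build.sbt sketch for Scala.js 1.x (the executable path below is just a placeholder):

  import org.scalajs.jsenv.nodejs.NodeJSEnv

  // Launch this specific node binary when running the JS test suite.
  jsEnv := new NodeJSEnv(
    NodeJSEnv.Config().withExecutable("/usr/local/bin/node") // hypothetical path
  )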

scala sbt libraryDependencies provided - Avoid downloading 3rd party library

I have the following Spark Scala code, which references 3rd-party libraries:
package com.protegrity.spark

import org.apache.spark.sql.api.java.UDF2
import com.protegrity.spark.udf.ptyProtectStr
import com.protegrity.spark.udf.ptyProtectInt
// These two imports mirror the ones above; without them the calls below don't resolve.
import com.protegrity.spark.udf.ptyUnprotectStr
import com.protegrity.spark.udf.ptyUnprotectInt

class ptyProtectStr extends UDF2[String, String, String] {
  def call(input: String, dataElement: String): String =
    ptyProtectStr(input, dataElement)
}

class ptyUnprotectStr extends UDF2[String, String, String] {
  def call(input: String, dataElement: String): String =
    ptyUnprotectStr(input, dataElement)
}

class ptyProtectInt extends UDF2[Integer, String, Integer] {
  def call(input: Integer, dataElement: String): Integer =
    ptyProtectInt(input, dataElement)
}

class ptyUnprotectInt extends UDF2[Integer, String, Integer] {
  def call(input: Integer, dataElement: String): Integer =
    ptyUnprotectInt(input, dataElement)
}
I want to create a JAR file using SBT. My build.sbt looks like the following:
name := "Protegrity UDF"

version := "1.0"

scalaVersion := "2.11.8"

libraryDependencies ++= Seq(
  "com.protegrity.spark" % "udf" % "2.3.2" % "provided",
  "org.apache.spark" %% "spark-core" % "2.3.2" % "provided",
  "org.apache.spark" %% "spark-sql" % "2.3.2" % "provided"
)
As you can see, I'm trying to create a thin JAR using the "provided" option, since my Spark environment already contains those libraries.
In spite of using "provided", sbt still tries to download the artifact from Maven and throws the error below:
[warn] Note: Unresolved dependencies path:
[error] sbt.librarymanagement.ResolveException: Error downloading com.protegrity.spark:udf:2.3.2
[error] Not found
[error] Not found
[error] not found: C:\Users\user1\.ivy2\local\com.protegrity.spark\udf\2.3.2\ivys\ivy.xml
[error] not found: https://repo1.maven.org/maven2/com/protegrity/spark/udf/2.3.2/udf-2.3.2.pom
[error] at lmcoursier.CoursierDependencyResolution.unresolvedWarningOrThrow(CoursierDependencyResolution.scala:249)
[error] at lmcoursier.CoursierDependencyResolution.$anonfun$update$35(CoursierDependencyResolution.scala:218)
[error] at scala.util.Either$LeftProjection.map(Either.scala:573)
[error] at lmcoursier.CoursierDependencyResolution.update(CoursierDependencyResolution.scala:218)
[error] at sbt.librarymanagement.DependencyResolution.update(DependencyResolution.scala:60)
[error] at sbt.internal.LibraryManagement$.resolve$1(LibraryManagement.scala:52)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$12(LibraryManagement.scala:102)
[error] at sbt.util.Tracked$.$anonfun$lastOutput$1(Tracked.scala:69)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$20(LibraryManagement.scala:115)
[error] at scala.util.control.Exception$Catch.apply(Exception.scala:228)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$11(LibraryManagement.scala:115)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$11$adapted(LibraryManagement.scala:96)
[error] at sbt.util.Tracked$.$anonfun$inputChanged$1(Tracked.scala:150)
[error] at sbt.internal.LibraryManagement$.cachedUpdate(LibraryManagement.scala:129)
[error] at sbt.Classpaths$.$anonfun$updateTask0$5(Defaults.scala:2950)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:67)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:281)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:19)
[error] at sbt.Execute.work(Execute.scala:290)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:281)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:178)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:37)
[error] at java.util.concurrent.FutureTask.run(Unknown Source)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
[error] at java.util.concurrent.FutureTask.run(Unknown Source)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
[error] at java.lang.Thread.run(Unknown Source)
[error] (update) sbt.librarymanagement.ResolveException: Error downloading com.protegrity.spark:udf:2.3.2
[error] Not found
[error] Not found
[error] not found: C:\Users\user1\.ivy2\local\com.protegrity.spark\udf\2.3.2\ivys\ivy.xml
[error] not found: https://repo1.maven.org/maven2/com/protegrity/spark/udf/2.3.2/udf-2.3.2.pom
What change should I make in build.sbt to skip the Maven download for "com.protegrity.spark"? Interestingly, I don't face this issue for "org.apache.spark" in the same build.
Assuming you have the JAR file available wherever you're compiling the code (just not through Maven or another artifact repository), place the JAR in the lib directory within your project. That is the default location for unmanaged dependencies; the path can be changed with the unmanagedBase setting in build.sbt if you need to, as sketched below.
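A minimal sketch of that override, assuming you want the unmanaged JARs under a directory named custom_lib (the name is purely illustrative):

  // build.sbt: look for unmanaged JARs in <project root>/custom_lib instead of lib.
  unmanagedBase := baseDirectory.value / "custom_lib"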
Note that this will result in the unmanaged JAR being included in an assembly JAR. If you want to build a "slightly less fat" JAR that excludes the unmanaged JAR, you'll have to filter it out. One way to accomplish this is:
assemblyExcludedJars in assembly := {
  val cp = (fullClasspath in assembly).value
  cp.filter(_.data.getName == "name-of-unmanaged.jar")
}
If you don't have the JAR (or perhaps something very close to the JAR) handy, how exactly do you expect the compiler to typecheck your calls into the JAR?

SBT publish is not uploading jar file to artifactory (publish) java.io.IOException: PUT operation to URL failed with status code 400: Bad Request

I have the build.sbt file below:
name := "tads-akka-cluster-events"
organization := "technorati"
version := "0.0.3"
scalaVersion := "2.11.12"
crossScalaVersions := Seq("2.12.9", "2.13.0")
publishMavenStyle := true
PB.targets in Compile := Seq(
scalapb.gen() -> (sourceManaged in Compile).value
)
libraryDependencies += "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf"
credentials += Credentials("Artifactory Realm", "artifactory.svcs.opal.synacor.com", "admin", "password")
publishTo := {
val nexus = "http://artifactory.svcs.opal.synacor.com/"
if (isSnapshot.value)
Some("repository.synacor.com-snapshots" at nexus + "artifactory/synacor-local")
else
Some("repository.synacor.com-releases" at nexus + "artifactory/synacor-local")
}
When I ran sbt publish, I got the error below:
sbt:tads-akka-cluster-events> publish
[info] Packaging /Users/rajkumar.natarajan/Documents/Coding/misc/tads-akka-cluster-events/target/scala-2.11/tads-akka-cluster-events_2.11-0.0.3-sources.jar ...
[info] Done packaging.
[info] Wrote /Users/rajkumar.natarajan/Documents/Coding/misc/tads-akka-cluster-events/target/scala-2.11/tads-akka-cluster-events_2.11-0.0.3.pom
[info] Updating ...
[info] Done updating.
[info] Packaging /Users/rajkumar.natarajan/Documents/Coding/misc/tads-akka-cluster-events/target/scala-2.11/tads-akka-cluster-events_2.11-0.0.3.jar ...
[info] Packaging /Users/rajkumar.natarajan/Documents/Coding/misc/tads-akka-cluster-events/target/scala-2.11/tads-akka-cluster-events_2.11-0.0.3-javadoc.jar ...
[info] Done packaging.
[info] Done packaging.
[info] published tads-akka-cluster-events_2.11 to http://artifactory.svcs.opal.synacor.com/artifactory/synacor-local/technorati/tads-akka-cluster-events_2.11/0.0.3/tads-akka-cluster-events_2.11-0.0.3.pom
[error] java.io.IOException: PUT operation to URL http://artifactory.svcs.opal.synacor.com/artifactory/synacor-local/technorati/tads-akka-cluster-events_2.11/0.0.3/tads-akka-cluster-events_2.11-0.0.3.jar failed with status code 400: Bad Request
[error] at org.apache.ivy.util.url.AbstractURLHandler.validatePutStatusCode(AbstractURLHandler.java:82)
[error] at sbt.internal.librarymanagement.ivyint.GigahorseUrlHandler.upload(GigahorseUrlHandler.scala:191)
[error] at org.apache.ivy.util.url.URLHandlerDispatcher.upload(URLHandlerDispatcher.java:82)
[error] at org.apache.ivy.util.FileUtil.copy(FileUtil.java:150)
[error] at org.apache.ivy.plugins.repository.url.URLRepository.put(URLRepository.java:84)
[error] at sbt.internal.librarymanagement.ConvertResolver$LocalIfFileRepo.put(ConvertResolver.scala:366)
[error] at org.apache.ivy.plugins.repository.AbstractRepository.put(AbstractRepository.java:130)
[error] at sbt.internal.librarymanagement.ConvertResolver$ChecksumFriendlyURLResolver.put(ConvertResolver.scala:118)
[error] at sbt.internal.librarymanagement.ConvertResolver$ChecksumFriendlyURLResolver.put$(ConvertResolver.scala:105)
[error] at sbt.internal.librarymanagement.ConvertResolver$$anonfun$defaultConvert$lzycompute$1$PluginCapableResolver$1.put(ConvertResolver.scala:165)
[error] at org.apache.ivy.plugins.resolver.RepositoryResolver.publish(RepositoryResolver.java:216)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$publish$5(IvyActions.scala:497)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$publish$5$adapted(IvyActions.scala:496)
[error] at scala.collection.TraversableLike$WithFilter.$anonfun$foreach$1(TraversableLike.scala:788)
[error] at scala.collection.Iterator.foreach(Iterator.scala:937)
[error] at scala.collection.Iterator.foreach$(Iterator.scala:937)
[error] at scala.collection.AbstractIterator.foreach(Iterator.scala:1425)
[error] at scala.collection.IterableLike.foreach(IterableLike.scala:70)
[error] at scala.collection.IterableLike.foreach$(IterableLike.scala:69)
[error] at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
[error] at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:787)
[error] at sbt.internal.librarymanagement.IvyActions$.publish(IvyActions.scala:496)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$publish$3(IvyActions.scala:144)
[error] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
[error] at sbt.internal.librarymanagement.IvyActions$.withChecksums(IvyActions.scala:157)
[error] at sbt.internal.librarymanagement.IvyActions$.withChecksums(IvyActions.scala:151)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$publish$1(IvyActions.scala:144)
[error] at sbt.internal.librarymanagement.IvyActions$.$anonfun$publish$1$adapted(IvyActions.scala:134)
[error] at sbt.internal.librarymanagement.IvySbt$Module.$anonfun$withModule$1(Ivy.scala:239)
[error] at sbt.internal.librarymanagement.IvySbt.$anonfun$withIvy$1(Ivy.scala:204)
[error] at sbt.internal.librarymanagement.IvySbt.sbt$internal$librarymanagement$IvySbt$$action$1(Ivy.scala:70)
[error] at sbt.internal.librarymanagement.IvySbt$$anon$3.call(Ivy.scala:77)
[error] at xsbt.boot.Locks$GlobalLock.withChannel$1(Locks.scala:95)
[error] at xsbt.boot.Locks$GlobalLock.xsbt$boot$Locks$GlobalLock$$withChannelRetries$1(Locks.scala:80)
[error] at xsbt.boot.Locks$GlobalLock$$anonfun$withFileLock$1.apply(Locks.scala:99)
[error] at xsbt.boot.Using$.withResource(Using.scala:10)
[error] at xsbt.boot.Using$.apply(Using.scala:9)
[error] at xsbt.boot.Locks$GlobalLock.ignoringDeadlockAvoided(Locks.scala:60)
[error] at xsbt.boot.Locks$GlobalLock.withLock(Locks.scala:50)
[error] at xsbt.boot.Locks$.apply0(Locks.scala:31)
[error] at xsbt.boot.Locks$.apply(Locks.scala:28)
[error] at sbt.internal.librarymanagement.IvySbt.withDefaultLogger(Ivy.scala:77)
[error] at sbt.internal.librarymanagement.IvySbt.withIvy(Ivy.scala:199)
[error] at sbt.internal.librarymanagement.IvySbt.withIvy(Ivy.scala:196)
[error] at sbt.internal.librarymanagement.IvySbt$Module.withModule(Ivy.scala:238)
[error] at sbt.internal.librarymanagement.IvyActions$.publish(IvyActions.scala:134)
[error] at sbt.Classpaths$.$anonfun$publishTask$4(Defaults.scala:2416)
[error] at sbt.Classpaths$.$anonfun$publishTask$4$adapted(Defaults.scala:2416)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:44)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:40)
[error] at sbt.std.Transform$$anon$4.work(System.scala:67)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:269)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:16)
[error] at sbt.Execute.work(Execute.scala:278)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:269)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:178)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:37)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[error] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[error] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[error] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[error] at java.lang.Thread.run(Thread.java:748)
[error] (publish) java.io.IOException: PUT operation to URL http://artifactory.svcs.opal.synacor.com/artifactory/synacor-local/technorati/tads-akka-cluster-events_2.11/0.0.3/tads-akka-cluster-events_2.11-0.0.3.jar failed with status code 400: Bad Request
[error] Total time: 2 s, completed Sep 3, 2019 4:12:46 PM
sbt:tads-akka-cluster-events>
When I checked the repository, I see the pom, pom.md5, and pom.sha1, but the jar, jar.md5, and jar.sha1 are missing.
Why am I getting this error? What am I missing, and how can I fix this issue?
I had to add the setting updateOptions := updateOptions.value.withGigahorse(false), or run sbt as sbt -Dsbt.gigahorse=false clean publish.
For more details, please see this GitHub sbt issue.
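For context, a minimal sketch of where that workaround sits, appended to the build.sbt shown above:

  // Disable the Gigahorse HTTP backend for artifact transfers; sbt then falls
  // back to its legacy Ivy URL handler, which avoids the 400 on PUT.
  updateOptions := updateOptions.value.withGigahorse(false)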

sbt error ClassNotFoundException in WordCount

This might be a newbie question, but I'm trying to learn more about Scala in IntelliJ. I first created a simple HelloWorld project, which worked fine. Next I followed a tutorial to create a WordCount project, but I'm getting an exception error that I can't figure out.
The error I get is:
Error: Error while importing SBT project:
...
[error] at sbt.MainLoop$.$anonfun$runWithNewLog$1(MainLoop.scala:107)
[error] at sbt.io.Using.apply(Using.scala:22)
[error] at sbt.MainLoop$.runWithNewLog(MainLoop.scala:101)
[error] at sbt.MainLoop$.runAndClearLast(MainLoop.scala:57)
[error] at sbt.MainLoop$.runLoggedLoop(MainLoop.scala:42)
[error] at sbt.MainLoop$.runLogged(MainLoop.scala:34)
[error] at sbt.StandardMain$.runManaged(Main.scala:113)
[error] at sbt.xMain.run(Main.scala:76)
[error] at xsbt.boot.Launch$$anonfun$run$1.apply(Launch.scala:109)
[error] at xsbt.boot.Launch$.withContextLoader(Launch.scala:128)
[error] at xsbt.boot.Launch$.run(Launch.scala:109)
[error] at xsbt.boot.Launch$$anonfun$apply$1.apply(Launch.scala:35)
[error] at xsbt.boot.Launch$.launch(Launch.scala:117)
[error] at xsbt.boot.Launch$.apply(Launch.scala:18)
[error] at xsbt.boot.Boot$.runImpl(Boot.scala:41)
[error] at xsbt.boot.Boot$.main(Boot.scala:17)
[error] at xsbt.boot.Boot.main(Boot.scala)
[error] java.lang.ClassNotFoundException: org.jetbrains.sbt.CreateTasks$
[error] Use 'last' for the full log.
[info] shutting down server
My build.sbt file is:
name := "Scalaprogramms"
version := "1.0"
scalaVersion:="2.11.8"
libraryDependencies += "org.apache.spark" %% "spark-core" % "2.2.0"
libraryDependencies += "org.apache.spark" %% "spark-streaming" % "2.2.0" % "provided"
My Java version is 8.
Lastly, here is the last log file:
OpenJDK 64-Bit Server VM warning: ignoring option MaxPermSize=384M; support was removed in 8.0
[info] Loading settings from plugins.sbt ...
[info] Loading project definition from /home/user/IdeaProjects/Scalaprogramms/project
[info] Loading settings from build.sbt ...
[info] Set current project to Scalaprogramms (in build file:/home/user/IdeaProjects/Scalaprogramms/)
[info] sbt server started at local:///home/user/.sbt/1.0/server/fc2d3f3386938f38b259/sock
sbt:Scalaprogramms>
[info] Defining Global / sbtStructureOptions, Global / sbtStructureOutputFile, shellPrompt
[info] The new values will be used by no settings or tasks.
[info] Reapplying settings...
[info] Set current project to Scalaprogramms (in build file:/home/user/IdeaProjects/Scalaprogramms/)
[info] Applying State transformations org.jetbrains.sbt.CreateTasks from /home/user/.IdeaIC2017.2/config/plugins/Scala/launcher/sbt-structure-1.1.jar
[error] java.lang.ClassNotFoundException: org.jetbrains.sbt.CreateTasks$
[error] at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
[error] at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
[error] at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
[error] at java.lang.Class.forName0(Native Method)
[error] at java.lang.Class.forName(Class.java:348)
[error] at sbt.internal.inc.ModuleUtilities$.getObject(ModuleUtilities.scala:20)
[error] at sbt.BasicCommands$.$anonfun$call$5(BasicCommands.scala:203)
[error] at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:234)
[error] at scala.collection.immutable.List.foreach(List.scala:389)
[error] at scala.collection.TraversableLike.map(TraversableLike.scala:234)
[error] at scala.collection.TraversableLike.map$(TraversableLike.scala:227)
[error] at scala.collection.immutable.List.map(List.scala:295)
[error] at sbt.BasicCommands$.$anonfun$call$2(BasicCommands.scala:203)
[error] at sbt.Command$.$anonfun$applyEffect$4(Command.scala:134)
[error] at sbt.Command$.$anonfun$applyEffect$2(Command.scala:130)
[error] at sbt.MainLoop$.processCommand(MainLoop.scala:153)
[error] at sbt.MainLoop$.$anonfun$next$2(MainLoop.scala:136)
[error] at sbt.State$$anon$1.runCmd$1(State.scala:242)
[error] at sbt.State$$anon$1.process(State.scala:248)
[error] at sbt.MainLoop$.$anonfun$next$1(MainLoop.scala:136)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:16)
[error] at sbt.MainLoop$.next(MainLoop.scala:136)
[error] at sbt.MainLoop$.run(MainLoop.scala:129)
[error] at sbt.MainLoop$.$anonfun$runWithNewLog$1(MainLoop.scala:107)
[error] at sbt.io.Using.apply(Using.scala:22)
[error] at sbt.MainLoop$.runWithNewLog(MainLoop.scala:101)
[error] at sbt.MainLoop$.runAndClearLast(MainLoop.scala:57)
[error] at sbt.MainLoop$.runLoggedLoop(MainLoop.scala:42)
[error] at sbt.MainLoop$.runLogged(MainLoop.scala:34)
[error] at sbt.StandardMain$.runManaged(Main.scala:113)
[error] at sbt.xMain.run(Main.scala:76)
[error] at xsbt.boot.Launch$$anonfun$run$1.apply(Launch.scala:109)
[error] at xsbt.boot.Launch$.withContextLoader(Launch.scala:128)
[error] at xsbt.boot.Launch$.run(Launch.scala:109)
[error] at xsbt.boot.Launch$$anonfun$apply$1.apply(Launch.scala:35)
[error] at xsbt.boot.Launch$.launch(Launch.scala:117)
[error] at xsbt.boot.Launch$.apply(Launch.scala:18)
[error] at xsbt.boot.Boot$.runImpl(Boot.scala:41)
[error] at xsbt.boot.Boot$.main(Boot.scala:17)
[error] at xsbt.boot.Boot.main(Boot.scala)
[error] java.lang.ClassNotFoundException: org.jetbrains.sbt.CreateTasks$
[error] Use 'last' for the full log.
[info] shutting down server
Also, my code is:
package scala.SparkApps

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object WordCount {
  def main(args: Array[String]): Unit = {
    val inputFile = args(0)
    val outputFile = args(1)
    // "local[*]" (square brackets) is the correct master URL syntax.
    val conf = new SparkConf().setMaster("local[*]").setAppName("wordcount")
    val sc = new SparkContext(conf) // was `Conf`, which doesn't compile
    val input = sc.textFile(inputFile)
    val words = input.flatMap(_.split(" "))
    val counts = words.map(word => (word, 1)).reduceByKey(_ + _)
    counts.saveAsTextFile(outputFile)
  }
}
Any ideas?

Apache Spark Build error

I'm building the Apache Spark source code on Ubuntu 14.04.4 (Spark version 1.6.0, with Scala code runner version 2.10.4) with the command
sudo sbt/sbt assembly
and I'm getting the following error:
[warn] def deleteRecursively(dir: TachyonFile, client: TachyonFS) {
[warn] ^
[error]
[error] while compiling: /home/ashish/spark-apps/spark-1.6.1/core/src/main/scala/org/apache/spark/util/random/package.scala
[error] during phase: jvm
[error] library version: version 2.10.5
[error] compiler version: version 2.10.5
[error] reconstructed args: -deprecation -Xplugin:/home/ashish/.ivy2/cache/org.spark-project/genjavadoc-plugin_2.10.5/jars/genjavadoc-plugin_2.10.5-0.9-spark0.jar -feature -P:genjavadoc:out=/home/ashish/spark-apps/spark-1.6.1/core/target/java -classpath /home/ashish/spark-apps/spark-1.6.1/core/target/scala-2.10/classes:/home/ashish/spark-apps/spark-1.6.1/launcher/target/scala-2.10/classes:/home/ashish/spark-apps/spark-1.6.1/network/common/target/scala-2.10/classes:/home/ashish/spark-apps/spark-1.6.1/network/shuffle/target/scala-2.10/classes:/home/ashish/spark-apps/spark-1.6.1/unsafe/target/scala-2.10/classes:/home/ashish/.ivy2/cache/org.spark-project.spark/unused/jars/unused-1.0.0.jar:/home/ashish/.ivy2/cache/com.google.guava/guava/bundles/guava-14.0.1.jar:/home/ashish/.ivy2/cache/io.netty/netty-all/jars/netty-all-4.0.29.Final.jar:/home/ashish/.ivy2/cache/org.fusesource.leveldbjni/leveldbjni-all/bundles/leveldbjni-all-1.8.jar:/home/ashish/.ivy2/cache/com.fasterxml.jackson.core/jackson-databind/bundles/jackson-databind-2.4.4.jar:/home/ashish/.ivy2/cache/com.fasterxml.jackson.core/jackson-annotations/bundles/jackson-annotations-2.4.4.jar:/home/ashish/.ivy2/cache/com.fasterxml.jackson.core/jackson-core/bundles/jackson-......and many other jars...
[error]
[error] last tree to typer: Literal(Constant(collection.mutable.Map))
[error] symbol: null
[error] symbol definition: null
[error] tpe: Class(classOf[scala.collection.mutable.Map])
[error] symbol owners:
[error] context owners: package package -> package random
[error]
[error] == Enclosing template or block ==
[error]
[error] Template( // val <local package>: <notype> in package random, tree.tpe=org.apache.spark.util.random.package.type
[error]   "java.lang.Object" // parents
[error]   ValDef(
[error]     private
[error]     "_"
[error]     <tpt>
[error]     <empty>
[error]   )
[error]   DefDef( // def <init>(): org.apache.spark.util.random.package.type in package random
[error]     <method>
[error]     "<init>"
[error]     []
[error]     List(Nil)
[error]     <tpt> // tree.tpe=org.apache.spark.util.random.package.type
[error]     Block( // tree.tpe=Unit
[error]       Apply( // def <init>(): Object in class Object, tree.tpe=Object
[error]         package.super."<init>" // def <init>(): Object in class Object, tree.tpe=()Object
[error]         Nil
[error]       )
[error]       ()
[error]     )
[error]   )
[error] )
[error]
[error] == Expanded type of tree ==
[error]
[error] ConstantType(value = Constant(collection.mutable.Map))
[error]
[error] uncaught exception during compilation: java.io.IOException
[error] File name too long
[warn] 45 warnings found
[error] two errors found
[error] (core/compile:compile) Compilation failed
[error] Total time: 5598 s, completed 5 Apr, 2016 9:06:50 AM
Where am I going wrong?
You should build Spark with Maven...
download the source and run ./bin/mvn clean package
Probably similar to http://apache-spark-user-list.1001560.n3.nabble.com/spark-github-source-build-error-td10532.html
Try sudo sbt/sbt clean assembly