I am trying to build an sbt Scala application on AWS CodeBuild. I need to use the corporate Nexus to download packages. However, I keep getting unauthorised access errors. What am I setting up wrong?
buildspec.yaml, containing the UNIX commands I run to set up the Nexus credentials, sbt options, etc.:
version: 0.2
env:
  variables:
    # JAVA_HOME: "/usr/lib/jvm/java-8-openjdk-amd64"
    SBT_OPTS: "-Djavax.net.ssl.trustStore=nexusprod-all.jks -Dsbt.repository.config=repositories -Dsbt.repository.secure=true -Dsbt.override.build.repos=true -Dsbt.boot.credentials=/root/.sbt/.credentials"
phases:
  install:
    commands:
      - echo Entered the install phase ..
    finally:
      - echo This always runs even if the update or install command fails
  pre_build:
    commands:
      - echo Copying repositories and creating credentials
      - cp repositories /root/.sbt/
      - USERNAME=$(aws secretsmanager get-secret-value --secret-id nexus_secret --query SecretString --output text|jq -r .username)
      - PASSWORD=$(aws secretsmanager get-secret-value --secret-id nexus_secret --query SecretString --output text|jq -r .password)
      - echo "realm=Sonatype Nexus" >> /root/.sbt/.credentials
      - echo "host=nexus-eu.windmill.local" >> /root/.sbt/.credentials
      - echo "user=${USERNAME}" >> /root/.sbt/.credentials
      - echo "password=${PASSWORD}" >> /root/.sbt/.credentials
      - cat /root/.sbt/.credentials
      # NEXUS CONNECTION WORKS, USE BELOW TO TEST IT.
      # - export NEXUS_TOKEN=$(aws secretsmanager get-secret-value --secret-id nexus_secret --query SecretString --output text|jq -r .token)
      # - export USERNAME=$(aws secretsmanager get-secret-value --secret-id nexus_secret --query SecretString --output text|jq -r .username)
      # - export PASSWORD=$(aws secretsmanager get-secret-value --secret-id nexus_secret --query SecretString --output text|jq -r .password)
      # - npm config set registry=https://nexus-eu.windmill.local/repository/npm-proxy/
      # - npm set strict-ssl false
      # - npm set always-auth=true
      # - npm set email=email@email.email
      # - npm set _auth=$NEXUS_TOKEN
      # - pip config --user set global.index-url https://$USERNAME:$PASSWORD@nexus-eu.windmill.local/repository/pypi-proxy/simple
      # - pip config --user set global.index https://nexus-eu.windmill.local/repository/pypi-proxy/pypi
      # - pip config --user set global.trusted-host nexus-eu.windmill.local
      # - pip install black
  build:
    commands:
      - echo Build started on `date`
      - sbt build
  post_build:
    commands:
      - echo Build completed on `date`
      - echo "$(cat /root/.sbt/boot/update.log)"
artifacts:
  files:
    - aac-cdp-glue-bl/src/main/scala/daan/cdp/etl/glue/Main.scala
    - aac-cdp-glue-bl/target/*/aac-cdp-glue.jar
  discard-paths: yes
cache:
  paths:
    - '/root/.sbt/**/*'
As you can see from the commented-out npm commands, when I use those to connect to Nexus I am able to download packages.
build.sbt file
import Dependencies._
organization := "XYZ"
ThisBuild / version := "0.1"
ThisBuild / scalaVersion := "2.12.14"
credentials += Credentials("root" / ".sbt" / ".credentials")
lazy val testSettings = Seq(
Test / testOptions := Seq(
Tests.Argument(
TestFrameworks.ScalaTest,
"-oDS",
"-u",
"target/test-reports",
"-h",
(target.value / "test-reports").getAbsolutePath
)
)
)
lazy val root =
project
.in(file("."))
.settings(name := "aac-cdp-glue")
.aggregate(`aac-cdp-glue-bl`, `aac-cdp-glue-unit-tests`)
lazy val `aac-cdp-glue-bl` = project
.in(file("aac-cdp-glue-bl"))
.settings(
name := "aac-cdp-glue-bl",
libraryDependencies ++= Seq(
Aws.glueSdk,
Enumeratum.core,
Refined.core
) ++ Cats.all ++ Circe.all ++ Decline.all ++ Spark3.all,
resolvers += "Aws".at("https://aws-glue-etl-artifacts.s3.amazonaws.com/release"),
assembly / assemblyOption := (assembly / assemblyOption).value.copy(includeScala = false),
assembly / assemblyJarName := "aac-cdp-glue.jar",
assembly / assemblyMergeStrategy := {
case PathList("javax", "servlet", xs # _*) => MergeStrategy.last
case PathList("javax", "activation", xs # _*) => MergeStrategy.last
case PathList("org", "apache", xs # _*) => MergeStrategy.last
case PathList("com", "google", xs # _*) => MergeStrategy.last
case PathList("com", "esotericsoftware", xs # _*) => MergeStrategy.last
case PathList("com", "codahale", xs # _*) => MergeStrategy.last
case PathList("com", "yammer", xs # _*) => MergeStrategy.last
case "about.html" => MergeStrategy.rename
case "META-INF/ECLIPSEF.RSA" => MergeStrategy.last
case "META-INF/mailcap" => MergeStrategy.last
case "META-INF/mimetypes.default" => MergeStrategy.last
case "plugin.properties" => MergeStrategy.last
case "log4j.properties" => MergeStrategy.last
case x =>
val oldStrategy = (assembly / assemblyMergeStrategy).value
oldStrategy(x)
}
)
lazy val `aac-cdp-glue-unit-tests` = project
.in(file("aac-cdp-glue-unit-tests"))
.settings(
name := "aac-cdp-glue-unit-tests",
libraryDependencies ++= Seq(
Refined.core % Test,
Refined.scalaCheck
) ++ Cats.all ++ Circe.all ++ Spark3.all ++ TestLibs.all,
assembly / test := {}
)
.dependsOn(`aac-cdp-glue-bl` % "test -> compile")
addCommandAlias("build", "; clean; test; aac-cdp-glue-bl/assembly")
project/plugins.sbt
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.6")
repositories file
[repositories]
local
my-maven-proxy-releases: https://nexus-eu.windmill.local/repository/public/
my-maven-releases: https://nexus-eu.windmill.local/repository/public/,[organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
build.properties file
sbt.version=1.6.1
project/Dependencies.scala (I don't think this is related)
import sbt._
object Dependencies {
object Aws {
val glueSdk = "com.amazonaws" % "AWSGlueETL" % "3.0.0" % Provided
}
object Cats {
private val effectVersion =
"2.1.1" // Do not upgrade! Spark 3.1.1 depends on cats-kernel_2.12-2.0.0-M4 (compatible with cats-kernel_2.12-2.1.1)
val effect = "org.typelevel" %% "cats-effect" % effectVersion
val all = Seq(effect)
}
object Circe {
private val version = "0.13.0"
val core = "io.circe" %% "circe-core" % version
val parser = "io.circe" %% "circe-parser" % version
val generic = "io.circe" %% "circe-generic" % version
val all = Seq(core, generic, parser)
}
object Decline {
val version = "1.2.0" // Do not upgrade! This version depends on cats 2.1.1
val core = "com.monovore" %% "decline" % version
val effect = "com.monovore" %% "decline-effect" % version
val enumeratum = "com.monovore" %% "decline-enumeratum" % version
val refined = "com.monovore" %% "decline-refined" % version
val all = Seq(core, effect, enumeratum, refined)
}
object Enumeratum {
val core = "com.beachape" %% "enumeratum" % "1.7.0"
}
object Refined {
private val version = "0.9.21"
val core = "eu.timepit" %% "refined" % version
val scalaCheck = "eu.timepit" %% "refined-scalacheck" % version % Test
}
object Spark3 {
private val sparkVersion =
"3.1.1" // Beware! This version depends on cats-kernel_2.12-2.0.0-M4 (see above)
val core = "org.apache.spark" %% "spark-core" % sparkVersion % Provided
val sql = "org.apache.spark" %% "spark-sql" % sparkVersion % Provided
val all = Seq(core, sql)
}
object TestLibs {
val scalaTest = "org.scalatest" %% "scalatest" % "3.2.2" % Test
val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.15.4" % Test
val scalaTestPlus = "org.scalatestplus" %% "scalatestplus-scalacheck" % "3.1.0.0-RC2" % Test
val all = Seq(scalaTest, scalaCheck, scalaTestPlus)
}
}
I get an unauthorised error when trying to download the plugins (sbt-assembly). Note that if I remove the plugin from plugins.sbt I get the same unauthorised error on the subsequent Scala dependencies that I need to get from Nexus.
[Container] 2022/04/14 15:47:53 Waiting for agent ping
[Container] 2022/04/14 15:48:53 Waiting for DOWNLOAD_SOURCE
[Container] 2022/04/14 15:48:54 Phase is DOWNLOAD_SOURCE
[Container] 2022/04/14 15:48:54 CODEBUILD_SRC_DIR=/codebuild/output/src246710942/src
[Container] 2022/04/14 15:48:54 YAML location is /codebuild/output/src246710942/src/buildspec.yml
[Container] 2022/04/14 15:48:54 Processing environment variables
[Container] 2022/04/14 15:48:54 No runtime version selected in buildspec.
[Container] 2022/04/14 15:48:56 Moving to directory /codebuild/output/src246710942/src
[Container] 2022/04/14 15:48:56 Expanded cache path /root/.sbt/**/*
[Container] 2022/04/14 15:48:56 Configuring ssm agent with target id: codebuild:20483d62-bbbc-409d-b677-d6bbbe8149bb
[Container] 2022/04/14 15:48:56 Successfully updated ssm agent configuration
[Container] 2022/04/14 15:48:56 Registering with agent
[Container] 2022/04/14 15:48:56 Phases found in YAML: 4
[Container] 2022/04/14 15:48:56 INSTALL: 1 commands
[Container] 2022/04/14 15:48:56 PRE_BUILD: 9 commands
[Container] 2022/04/14 15:48:56 BUILD: 2 commands
[Container] 2022/04/14 15:48:56 POST_BUILD: 2 commands
[Container] 2022/04/14 15:48:56 Phase complete: DOWNLOAD_SOURCE State: SUCCEEDED
[Container] 2022/04/14 15:48:56 Phase context status code: Message:
[Container] 2022/04/14 15:48:56 Entering phase INSTALL
[Container] 2022/04/14 15:48:56 Running command echo Entered the install phase ..
Entered the install phase ..
[Container] 2022/04/14 15:48:56 Running command echo This always runs even if the update or install command fails
This always runs even if the update or install command fails
[Container] 2022/04/14 15:48:56 Phase complete: INSTALL State: SUCCEEDED
[Container] 2022/04/14 15:48:56 Phase context status code: Message:
[Container] 2022/04/14 15:48:56 Entering phase PRE_BUILD
[Container] 2022/04/14 15:48:56 Running command echo Copying repositories and creating credentials
Copying repositories and creating credentials
[Container] 2022/04/14 15:48:56 Running command cp repositories /root/.sbt/
[Container] 2022/04/14 15:48:56 Running command USERNAME=$(aws secretsmanager get-secret-value --secret-id nexus_secret --query SecretString --output text|jq -r .username)
[Container] 2022/04/14 15:49:00 Running command PASSWORD=$(aws secretsmanager get-secret-value --secret-id nexus_secret --query SecretString --output text|jq -r .password)
[Container] 2022/04/14 15:49:01 Running command echo "realm=Sonatype Nexus" >> /root/.sbt/.credentials
[Container] 2022/04/14 15:49:01 Running command echo "host=nexus-eu.windmill.local" >> /root/.sbt/.credentials
[Container] 2022/04/14 15:49:01 Running command echo "user=${USERNAME}" >> /root/.sbt/.credentials
[Container] 2022/04/14 15:49:01 Running command echo "password=${PASSWORD}" >> /root/.sbt/.credentials
[Container] 2022/04/14 15:49:01 Running command cat /root/.sbt/.credentials
realm=Sonatype Nexus
host=nexus-eu.windmill.local
user=<OBSCURED>
password=<OBSCURED>
[Container] 2022/04/14 15:49:01 Phase complete: PRE_BUILD State: SUCCEEDED
[Container] 2022/04/14 15:49:01 Phase context status code: Message:
[Container] 2022/04/14 15:49:01 Entering phase BUILD
[Container] 2022/04/14 15:49:01 Running command echo Build started on `date`
Build started on Thu Apr 14 15:49:01 UTC 2022
[Container] 2022/04/14 15:49:01 Running command sbt build
[info] welcome to sbt 1.6.1 (Amazon.com Inc. Java 11.0.14.1)
[info] loading settings for project src-build from plugins.sbt ...
[info] loading project definition from /codebuild/output/src246710942/src/project
[warn]
[warn] Note: Some unresolved dependencies have extra attributes. Check that these dependencies exist with the requested attributes.
[warn] com.eed3si9n:sbt-assembly:0.14.6 (sbtVersion=1.0, scalaVersion=2.12)
[warn]
[warn] Note: Unresolved dependencies path:
[error] sbt.librarymanagement.ResolveException: Error downloading com.eed3si9n:sbt-assembly;sbtVersion=1.0;scalaVersion=2.12:0.14.6
[error] Not found
[error] Not found
[error] not found: /root/.ivy2/localcom.eed3si9n/sbt-assembly/scala_2.12/sbt_1.0/0.14.6/ivys/ivy.xml
[error] unauthorized: https://nexus-eu.windmill.local/repository/public/com/eed3si9n/sbt-assembly_2.12_1.0/0.14.6/sbt-assembly-0.14.6.pom (Sonatype Nexus Repository Manager)
[error] unauthorized: https://nexus-eu.windmill.local/repository/public/com.eed3si9n/sbt-assembly/scala_2.12/sbt_1.0/0.14.6/ivys/ivy.xml (Sonatype Nexus Repository Manager)
[error] at lmcoursier.CoursierDependencyResolution.unresolvedWarningOrThrow(CoursierDependencyResolution.scala:345)
[error] at lmcoursier.CoursierDependencyResolution.$anonfun$update$38(CoursierDependencyResolution.scala:314)
[error] at scala.util.Either$LeftProjection.map(Either.scala:573)
[error] at lmcoursier.CoursierDependencyResolution.update(CoursierDependencyResolution.scala:314)
[error] at sbt.librarymanagement.DependencyResolution.update(DependencyResolution.scala:60)
[error] at sbt.internal.LibraryManagement$.resolve$1(LibraryManagement.scala:59)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$12(LibraryManagement.scala:133)
[error] at sbt.util.Tracked$.$anonfun$lastOutput$1(Tracked.scala:73)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$20(LibraryManagement.scala:146)
[error] at scala.util.control.Exception$Catch.apply(Exception.scala:228)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$11(LibraryManagement.scala:146)
[error] at sbt.internal.LibraryManagement$.$anonfun$cachedUpdate$11$adapted(LibraryManagement.scala:127)
[error] at sbt.util.Tracked$.$anonfun$inputChangedW$1(Tracked.scala:219)
[error] at sbt.internal.LibraryManagement$.cachedUpdate(LibraryManagement.scala:160)
[error] at sbt.Classpaths$.$anonfun$updateTask0$1(Defaults.scala:3690)
[error] at scala.Function1.$anonfun$compose$1(Function1.scala:49)
[error] at sbt.internal.util.$tilde$greater.$anonfun$$u2219$1(TypeFunctions.scala:62)
[error] at sbt.std.Transform$$anon$4.work(Transform.scala:68)
[error] at sbt.Execute.$anonfun$submit$2(Execute.scala:282)
[error] at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:23)
[error] at sbt.Execute.work(Execute.scala:291)
[error] at sbt.Execute.$anonfun$submit$1(Execute.scala:282)
[error] at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:265)
[error] at sbt.CompletionService$$anon$2.call(CompletionService.scala:64)
[error] at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[error] at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
[error] at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
[error] at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
[error] at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
[error] at java.base/java.lang.Thread.run(Thread.java:829)
[error] (update) sbt.librarymanagement.ResolveException: Error downloading com.eed3si9n:sbt-assembly;sbtVersion=1.0;scalaVersion=2.12:0.14.6
[error] Not found
[error] Not found
[error] not found: /root/.ivy2/localcom.eed3si9n/sbt-assembly/scala_2.12/sbt_1.0/0.14.6/ivys/ivy.xml
[error] unauthorized: https://nexus-eu.windmill.local/repository/public/com/eed3si9n/sbt-assembly_2.12_1.0/0.14.6/sbt-assembly-0.14.6.pom (Sonatype Nexus Repository Manager)
[error] unauthorized: https://nexus-eu.windmill.local/repository/public/com.eed3si9n/sbt-assembly/scala_2.12/sbt_1.0/0.14.6/ivys/ivy.xml (Sonatype Nexus Repository Manager)
[warn] Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? (default: r)
[Container] 2022/04/14 15:49:19 Command did not exit successfully sbt build exit status 1
[Container] 2022/04/14 15:49:19 Phase complete: BUILD State: FAILED
[Container] 2022/04/14 15:49:19 Phase context status code: COMMAND_EXECUTION_ERROR Message: Error while executing command: sbt build. Reason: exit status 1
[Container] 2022/04/14 15:49:19 Entering phase POST_BUILD
[Container] 2022/04/14 15:49:19 Running command echo Build completed on `date`
Build completed on Thu Apr 14 15:49:19 UTC 2022
[Container] 2022/04/14 15:49:19 Running command echo "$(cat /root/.sbt/boot/update.log)"
[Container] 2022/04/14 15:49:19 Phase complete: POST_BUILD State: SUCCEEDED
[Container] 2022/04/14 15:49:19 Phase context status code: Message:
[Container] 2022/04/14 15:49:19 Expanding base directory path: .
[Container] 2022/04/14 15:49:19 Assembling file list
[Container] 2022/04/14 15:49:19 Expanding .
[Container] 2022/04/14 15:49:19 Expanding file paths for base directory .
[Container] 2022/04/14 15:49:19 Assembling file list
[Container] 2022/04/14 15:49:19 Expanding aac-cdp-glue-bl/src/main/scala/daan/cdp/etl/glue/Main.scala
[Container] 2022/04/14 15:49:19 Expanding aac-cdp-glue-bl/target/*/aac-cdp-glue.jar
[Container] 2022/04/14 15:49:19 Found 1 file(s)
[Container] 2022/04/14 15:49:19 Phase complete: UPLOAD_ARTIFACTS State: SUCCEEDED
[Container] 2022/04/14 15:49:19 Phase context status code: Message:
Note that I have tried to set the credentials both inline in build.sbt and via the credentials file. Same error. I can reach Nexus (the npm proxy) though. What am I doing wrong?
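For reference, the inline form I tried is along these lines (just a sketch; realm and host mirror the credentials file above, and the NEXUS_USERNAME/NEXUS_PASSWORD environment variables are placeholders for illustration, not my real setup):
credentials += Credentials(
  "Sonatype Nexus",                        // realm: must match the realm the server reports
  "nexus-eu.windmill.local",               // host
  sys.env.getOrElse("NEXUS_USERNAME", ""), // placeholder env var, for illustration only
  sys.env.getOrElse("NEXUS_PASSWORD", "")  // placeholder env var, for illustration only
)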
Related
I keep accidentally publishing my internal project while it still references internal SNAPSHOTs, so it would be very helpful if there were an sbt plugin that failed the publish when you rely on any SNAPSHOT dependencies. Is anyone aware of such a plugin or feature in sbt?
Here's how you can write such a plugin.
output
> publish
[info] :: delivering :: com.example#b_2.10;0.1.0 :: 0.1.0 :: release :: Fri Jan 13 15:50:53 EST 2017
[info] delivering ivy file to /xxx/b/target/scala-2.10/ivy-0.1.0.xml
[info] Wrote /xxx/b/target/scala-2.10/b_2.10-0.1.0.pom
[info] Wrote /xxx/a/target/scala-2.10/a_2.10-0.1.0.pom
[info] :: delivering :: com.example#a_2.10;0.1.0 :: 0.1.0 :: release :: Fri Jan 13 15:50:53 EST 2017
[info] delivering ivy file to /xxx/a/target/scala-2.10/ivy-0.1.0.xml
[trace] Stack trace suppressed: run last b/*:publishConfiguration for the full output.
[trace] Stack trace suppressed: run last a/*:publishConfiguration for the full output.
[error] (b/*:publishConfiguration) SNAPSHOT found in classpath:
[error] com.eed3si9n:treehugger_2.10:0.2.4-SNAPSHOT:compile->default;compile->compile;compile->runtime;compile->default(compile);compile->master
[error] (a/*:publishConfiguration) SNAPSHOT found in classpath:
[error] com.eed3si9n:treehugger_2.10:0.2.4-SNAPSHOT:compile->default;compile->compile;compile->runtime;compile->default(compile);compile->master
[error] com.example:c_2.10:0.1.0-SNAPSHOT:compile->compile;compile->default(compile)
[error] io.netty:netty-all:4.1.8.Final-SNAPSHOT:compile->default;compile->compile;compile->runtime;compile->default(compile);compile->master
[error] Total time: 0 s, completed Jan 13, 2017 3:50:53 PM
project/build.properties
sbt.version = 0.13.13
project/DepsVerifyPlugin.scala
import sbt._
import Keys._
object DepsVerifyPlugin extends sbt.AutoPlugin {
override def requires = plugins.JvmPlugin
override def trigger = allRequirements
override def projectSettings = Seq(
publishConfiguration := {
val old = publishConfiguration.value
val ur = update.value
ur.configuration("compile") foreach { compileReport =>
val allModules = compileReport.allModules
val snapshotDeps = allModules filter { _.revision contains "SNAPSHOT" }
if (snapshotDeps.nonEmpty) {
sys.error(
"SNAPSHOT found in classpath:\n" +
snapshotDeps.mkString("\n")
)
}
}
old
}
)
}
build.sbt
val commonSettings: Seq[Setting[_]] = Seq(
organization in ThisBuild := "com.example",
scalaVersion in ThisBuild := "2.10.6",
version in ThisBuild := "0.1.0",
resolvers += Resolver.sonatypeRepo("public"),
publishTo := Some(Resolver.file("file", new File(Path.userHome.absolutePath+"/test-repo")))
)
val netty = "io.netty" % "netty-all" % "4.1.8.Final-SNAPSHOT"
val treehugger = "com.eed3si9n" %% "treehugger" % "0.2.4-SNAPSHOT"
lazy val root = (project in file("."))
.aggregate(a, b, c)
.settings(
commonSettings,
name := "Hello",
publish := ()
)
lazy val a = (project in file("a"))
.dependsOn(b, c)
.settings(
commonSettings,
libraryDependencies += netty
)
lazy val b = (project in file("b"))
.settings(
commonSettings,
libraryDependencies += treehugger
)
lazy val c = (project in file("c"))
.settings(
commonSettings,
version := "0.1.0-SNAPSHOT",
publish := ()
)
You could consider adopting sbt-release.
This is a more high-level 'workflow' plugin: 'publish' is used as one of the steps in a release, after a step that checks there are no SNAPSHOT dependencies.
It will not prevent you from running 'sbt publish', but once you make a habit of using 'sbt release' instead of 'sbt publish', it accomplishes what you're looking for.
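A minimal sketch of the wiring (the plugin version below is indicative, and the default release process already includes the snapshot check, shown explicitly here):
// project/plugins.sbt
addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.0")

// build.sbt
import ReleaseTransformations._

releaseProcess := Seq[ReleaseStep](
  checkSnapshotDependencies, // flags SNAPSHOT dependencies before anything is published
  inquireVersions,
  runClean,
  runTest,
  setReleaseVersion,
  commitReleaseVersion,
  tagRelease,
  publishArtifacts,
  setNextVersion,
  commitNextVersion,
  pushChanges
)
With this in place, 'sbt release' checks for SNAPSHOT dependencies as its very first step.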
I installed Spark on Ubuntu 14.04 following this tutorial http://blog.prabeeshk.com/blog/2014/10/31/install-apache-spark-on-ubuntu-14-dot-04/
I am able to run the examples provided with Spark and it seems to work.
The problem is that I am not able to create a Scala file and execute it with Spark. This is what I have done, following the guidelines at https://spark.apache.org/docs/latest/quick-start.html
My standalone app is:
/* SimpleApp.scala */
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.commons.math3.random.RandomDataGenerator
object SimpleApp {
def main(args: Array[String]) {
val logFile = "/home/donbeo/Applications/spark/spark-1.1.0/README.md" // Should be some file on your system
val conf = new SparkConf().setAppName("Simple Application")
val sc = new SparkContext(conf)
val logData = sc.textFile(logFile, 2).cache()
val numAs = logData.filter(line => line.contains("a")).count()
val numBs = logData.filter(line => line.contains("b")).count()
println("Lines with a: %s, Lines with b: %s".format(numAs, numBs))
println("A random number")
val randomData = new RandomDataGenerator()
println(randomData.nextLong(0, 100))
}
}
and my sbt file is:
name := "Simple Project"
version := "1.0"
scalaVersion := "2.10.4"
libraryDependencies += "org.apache.spark" %% "spark-core" % "1.1.0"
libraryDependencies += "org.apache.commons" % "commons-math3" % "3.3"
my project structure is:
donbeo@donbeo-HP-EliteBook-Folio-9470m:~/Documents/scala_code/simpleApp$ find .
.
./src
./src/main
./src/main/scala
./src/main/scala/SimpleApp.scala~
./src/main/scala/SimpleApp.scala
./simple.sbt
donbeo@donbeo-HP-EliteBook-Folio-9470m:~/Documents/scala_code/simpleApp$
and then I run
donbeo@donbeo-HP-EliteBook-Folio-9470m:~/Documents/scala_code/simpleApp$ sbt package
[info] Set current project to Simple Project (in build file:/home/donbeo/Documents/scala_code/simpleApp/)
[info] Updating {file:/home/donbeo/Documents/scala_code/simpleApp/}simpleapp...
[info] Resolving org.eclipse.jetty.orbit#javax.transaction;1.1.1.v201105210645 .[info] Resolving org.eclipse.jetty.orbit#javax.mail.glassfish;1.4.1.v20100508202[info] Resolving org.eclipse.jetty.orbit#javax.activation;1.1.0.v201105071233 ..[info] Resolving org.spark-project.akka#akka-remote_2.10;2.2.3-shaded-protobuf .[info] Resolving org.spark-project.akka#akka-actor_2.10;2.2.3-shaded-protobuf ..[info] Resolving org.spark-project.akka#akka-slf4j_2.10;2.2.3-shaded-protobuf ..[info] Resolving org.fusesource.jansi#jansi;1.4 ...
[info] Done updating.
[info] Compiling 1 Scala source to /home/donbeo/Documents/scala_code/simpleApp/target/scala-2.10/classes...
[info] Packaging /home/donbeo/Documents/scala_code/simpleApp/target/scala-2.10/simple-project_2.10-1.0.jar ...
[info] Done packaging.
[success] Total time: 8 s, completed 04-Feb-2015 15:20:09
donbeo@donbeo-HP-EliteBook-Folio-9470m:~/Documents/scala_code/simpleApp$
and at the final step I get an error
donbeo@donbeo-HP-EliteBook-Folio-9470m:~/Applications/spark/spark-1.1.0$ ./bin/spark-submit \ --class "SimpleApp" \ --master local[4] \ /home/donbeo/Documents/scala_code/simpleApp/target/scala-2.10/simple-project_2.10-1.0.jar
Exception in thread "main" java.net.URISyntaxException: Illegal character in path at index 0: --class
at java.net.URI$Parser.fail(URI.java:2829)
at java.net.URI$Parser.checkChars(URI.java:3002)
at java.net.URI$Parser.parseHierarchical(URI.java:3086)
at java.net.URI$Parser.parse(URI.java:3044)
at java.net.URI.<init>(URI.java:595)
at org.apache.spark.util.Utils$.resolveURI(Utils.scala:1343)
at org.apache.spark.deploy.SparkSubmitArguments.parse$1(SparkSubmitArguments.scala:338)
at org.apache.spark.deploy.SparkSubmitArguments.parseOpts(SparkSubmitArguments.scala:225)
at org.apache.spark.deploy.SparkSubmitArguments.<init>(SparkSubmitArguments.scala:60)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:70)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
donbeo@donbeo-HP-EliteBook-Folio-9470m:~/Applications/spark/spark-1.1.0$
Am I doing something wrong? How can I solve this?
You need to remove all the \ characters from the documentation's command-line example; they are only there because of the line breaks:
./bin/spark-submit --class "SimpleApp" --master local[4] /home/donbeo/Documents/scala_code/simpleApp/target/scala-2.10/simple-project_2.10-1.0.jar
I am trying out a simple HelloWorld example. Here is my directory structure:
hello
  build.sbt
  main
    scala
      Hello.scala
  test
    scala
      HelloTest.scala
Hello.scala contains a sayHello function that I am trying to call from a simple test in HelloTest.scala. Here is my build.sbt:
name := "Hello"
organization := "tycon"
scalaVersion := "2.11.2"
libraryDependencies += "org.scalatest" %% "scalatest" % "2.2.1" % "test"
And here is an sbt run that does not run any tests:
$ sbt
[info] Set current project to scala (in build
file:~/git/scala/hello/main/scala/)
> compile
[info] Updating
{file:~/git/scala/hello/main/scala/}scala...
[info] Resolving org.fusesource.jansi#jansi;1.4 ...
[info] Done updating.
[info] Compiling 1 Scala source to
~/git/scala/hello/main/scala/target/scala-2.10/classes...
[success] Total time: 3 s, completed Sep 17, 2014 9:04:00 AM
> test
[info] Passed: Total 0, Failed 0, Errors 0, Passed 0
[info] No tests to run for test:test
[success] Total time: 0 s, completed Sep 17, 2014 9:04:03 AM
I tried suggestions from other answers: replaced %% with % and substituted scalatest_2.10 for scalatest in libraryDependencies, and changed scalaVersion to 2.10.0. None of them worked. And yes, I was reloading each time build.sbt changed.
I believe that I am missing something very basic. I would appreciate any help. I am new to Scala.
Edit: For the sake of completeness, here are the two scala files:
Hello.scala:
trait Hello {
def sayHello () = {
println ("Hello, World!")
}
}
HelloTest.scala:
import org.scalatest.FunSuite
class HelloTest extends FunSuite with Hello {
test("say hello") {
sayHello()
}
}
Edit2: I changed the directory structure as suggested by ajozwik and Gabriele, but sbt still doesn't run the test:
~/git/scala/hello/src/main/scala$ sbt
[info] Set current project to scala (in build
file:~/git/scala/hello/src/main/scala/)
> test
[info] Passed: Total 0, Failed 0, Errors 0, Passed 0
[info] No tests to run for test:test
[success] Total time: 1 s, completed Sep 17, 2014 9:36:24 AM
The directory structure should be
hello
  build.sbt
  src
    main
      scala
        Hello.scala
    test
      scala
        HelloTest.scala
i.e. you're missing the outer src directory.
Also, you need to run sbt from the root of your project, and not from within a subfolder.
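If in doubt, you can ask sbt itself (started from the project root, hello/) where it expects the sources to live, for example at the sbt prompt:
> show compile:scalaSource
> show test:scalaSource
Both should point inside hello/src/, not directly under hello/.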
I am using xsbt-web-plugin with sbt 0.13.2. If I add the following to build.sbt, I can type "myTask" in the sbt console and it works:
val myTask = taskKey[Unit]("My task.")
myTask := {
val (art, file) = packagedArtifact.in(Compile, packageWar).value
println("Artifact definition: " + art)
println("Packaged file: " + file.getAbsolutePath)
}
But why does this return an error if I type it in the sbt console?
inspect compile:packageWar::packagedArtifact
Error message:
[error] Expected key
[error] Not a valid key: packageWar (similar: package, packageSrc, package-src)
[error] inspect compile:packageWar::packagedArtifact
[error] ^
For comparison, this one does work:
inspect compile:packageBin::packagedArtifact
Key parts of build.sbt:
tomcat()
name := "my-war"
scalaVersion := "2.10.4"
webappSrc in webapp := baseDirectory.value / "web"
webInfClasses in webapp := true
val myTask = taskKey[Unit]("My task.")
myTask := {
val (art, file) = packagedArtifact.in(Compile, packageWar).value
println("Artifact definition: " + art)
println("Packaged file: " + file.getAbsolutePath)
}
project/plugins.sbt:
addSbtPlugin("com.earldouglas" % "xsbt-web-plugin" % "1.0.0-M4")
(I'm only asking so that I can understand sbt better; it's not actually causing a problem.)
You can get this info from package rather than packageWar:
> inspect compile:package::packagedArtifact
[info] Task: scala.Tuple2[sbt.Artifact, java.io.File]
[info] Description:
[info] Generates a packaged artifact, returning the Artifact and the produced File.
The packageWar task is set up indirectly using packageTaskSettings.
Assume a multi-project sbt build with a foo-project and a bar-project, such that foo-project depends on bar-project for code etc.
I would like tests in foo-project to run iff the tests in bar-project pass.
How?
You may provide explicit dependencies between projects, for example root -> A -> B.
Test case on GitHub. Project definition:
val commonSettings = Seq(libraryDependencies += "org.scalatest" %% "scalatest" % "1.9.1")
lazy val a: Project = (project in file("a")) settings(commonSettings: _*) settings(
name := "a",
test in Test <<= test in Test dependsOn (test in Test in b)
)
lazy val b: Project = (project in file("b")) settings(commonSettings: _*) settings(
name := "b"
)
lazy val root: Project = (project in file(".")) settings(commonSettings: _*) settings(
name := "root",
test in Test <<= test in Test dependsOn (test in Test in a)
)
The tests begin with B and complete successfully:
ezh@mobile ZZZZZZ % sbt-0.13
[info] Set current project to root (in build file:/home/ezh/ZZZZZZ/)
> root/test
[info] Compiling 1 Scala source to /home/ezh/ZZZZZZ/b/target/scala-2.10/test-classes...
[info] TestSpecB:
[info] This test
[info] - should fail
[info] Passed: Total 1, Failed 0, Errors 0, Passed 1
[info] Compiling 1 Scala source to /home/ezh/ZZZZZZ/a/target/scala-2.10/test-classes...
[info] TestSpecA:
[info] This test
[info] - should succeed
[info] Passed: Total 1, Failed 0, Errors 0, Passed 1
[info] Passed: Total 0, Failed 0, Errors 0, Passed 0
[info] No tests to run for root/test:test
[success] Total time: 5 s, completed 28.11.2013 16:20:12
The tests again begin with B, but this time B fails, so A's tests never run:
ezh@mobile ZZZZZZ % sbt-0.13
[info] Set current project to root (in build file:/home/ezh/ZZZZZZ/)
> test
[info] Compiling 1 Scala source to /home/ezh/ZZZZZZ/b/target/scala-2.10/test-classes...
[info] TestSpecB:
[info] This test
[info] - should fail *** FAILED ***
[info] 2 did not equal 3 (Test.scala:5)
[error] Failed: Total 1, Failed 1, Errors 0, Passed 0
[error] Failed tests:
[error] TestSpecB
[error] (b/test:test) sbt.TestsFailedException: Tests unsuccessful
[error] Total time: 3 s, completed 28.11.2013 16:20:35
>
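For newer sbt versions, where the <<= operator has been removed, the equivalent wiring for project a would look roughly like this (a sketch in slash syntax, not tested against this build):
lazy val a: Project = (project in file("a"))
  .settings(commonSettings: _*)
  .settings(
    name := "a",
    // run b's tests first; a's tests only run if b's succeed
    Test / test := ((Test / test) dependsOn (b / Test / test)).value
  )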
As already noted, this is probably evil; however, this should work:
import sbt._
import sbt.Keys._
object Build extends Build {
lazy val projectA = project
lazy val myTest = taskKey[Seq[Option[Tests.Output]]]("my test")
lazy val root: Project = project in file(".") settings (myTest <<= myTestTask) dependsOn projectA
def myTestTask = Def.task {
val state: State = Keys.state.value
val log: Logger = streams.value.log
val extracted = Project.extract(state)
import extracted._
def noTestsMessage(scoped: ScopedKey[_])(implicit display: Show[ScopedKey[_]]): String =
"No tests to run for " + display(scoped)
def f(ref: ProjectReference) = for {
state Pair Value(r) <- Project.runTask(executeTests in(ref, Test), state)
_ = Tests.showResults(log, r, noTestsMessage(test in ref))
} yield r
val depsTests = currentProject.dependencies.map(_.project).map(f)
val passed = depsTests.forall(_.forall(_.overall == TestResult.Passed))
if (passed) depsTests :+ f(ThisProject) else depsTests
}
}
http://scastie.org/3319