Calling a TaskKey with different settings

I'm using the sbt-assembly plugin (version 0.13.0), and I would like to call assemblyPackageDependency with or without appendContentHash, depending on some InputKey.
Basically, I would like to do something like this:
lazy val isGlobalCached = InputKey[Boolean]("")
lazy val myTask = taskKey[sbt.File]("myTask")

myTask := {
  if (isGlobalCached.value)
    // run with the modified assemblyOption
    // assemblyOption in assemblyPackageDependency ~= { x => x.copy(appendContentHash = true) }
    assemblyPackageDependency.value
  else
    assemblyPackageDependency.value
}
but I can't figure out how to set the assemblyOption only when the condition is true, rather than globally.
Here are a couple of things I tried that didn't work:
lazy val isGlobalCached = InputKey[Boolean]("")
lazy val myTask = taskKey[sbt.File]("myTask")
lazy val assemblyPackageDependencyWithHash = taskKey[sbt.File]("assemblyPackageDependencyWithHash")

assemblyPackageDependencyWithHash <<= assemblyPackageDependency
assemblyOption in assemblyPackageDependencyWithHash ~= { x => x.copy(appendContentHash = true) }

myTask := {
  // run with the modified assemblyOption
  if (isGlobalCached.value)
    assemblyPackageDependencyWithHash.value
  else
    assemblyPackageDependency.value
}
and:
lazy val isGlobalCached = InputKey[Boolean]("")
lazy val myTask = taskKey[sbt.File]("myTask")
lazy val globalCacheConf = config("globalCacheConf")

assemblyOption in globalCacheConf := (assemblyOption in assemblyPackageDependency).value.copy(appendContentHash = true)

myTask := {
  // run with the modified assemblyOption
  if (isGlobalCached.value)
    assemblyPackageDependency.in(globalCacheConf).value
  else
    assemblyPackageDependency.value
}

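One way to do this is with a custom command: a command receives the build State, so it can append a modified assemblyOption to a transient copy of that state and run assemblyPackageDependency against it, without touching the globally configured setting: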
import complete.DefaultParsers._

val myCmd: Command = Command("myCmd")(_ => Space ~> Bool) { (state, globalCached) =>
  val extracted = Project extract state
  import extracted._
  // Append the override to a transient state and run the task there; returning
  // the original `state` means the override never persists across commands.
  Project.runTask(
    assemblyPackageDependency,
    append(Seq(assemblyOption in assemblyPackageDependency ~= {
      _.copy(appendContentHash = globalCached)
    }), state)
  )
  state
}

commands += myCmd
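If you would rather stay inside the task graph, Def.taskDyn can choose the task dependency at runtime. A minimal sketch, assuming isGlobalCached is reworked as a settingKey[Boolean] (an InputKey's value cannot be read from inside another task) and that assemblyPackageDependencyWithHash is a hypothetical task actually wired to the modified assemblyOption:

lazy val isGlobalCached = settingKey[Boolean]("use the content-hash cache")

myTask := Def.taskDyn {
  if (isGlobalCached.value)
    Def.task { assemblyPackageDependencyWithHash.value } // hypothetical task using appendContentHash = true
  else
    Def.task { assemblyPackageDependency.value }
}.value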

Related

Why does sbt try to pull my interproject dependency?

I have a multi-project build with a build.sbt that looks as follows:
import lmcoursier.CoursierConfiguration
import lmcoursier.definitions.Authentication

ThisBuild / version := "0.1.0-SNAPSHOT"
ThisBuild / scalaVersion := "2.12.12"

val adoMavenUsername = "."
val adoMavenPassword = "ADO_PAT"
val adoRepoIdWithView = "ADO-id"

val adoMavenRepos = Vector(
  MavenRepository(adoRepoIdWithView, s"https://adoMavenHost/adoOrganization/adoProject/_packaging/${adoRepoIdWithView.replace("#", "%40")}/maven/v1")
)

val adoAuthentication =
  Authentication(user = adoMavenUsername, password = adoMavenPassword)
    .withOptional(false)
    .withHttpsOnly(true)
    .withPassOnRedirect(false)

val coursierConfiguration = {
  val initial =
    CoursierConfiguration()
      .withResolvers(adoMavenRepos)
      .withClassifiers(Vector("", "sources"))
      .withHasClassifiers(true)
  adoMavenRepos.foldLeft(initial) {
    case (conf, repo) =>
      conf.addRepositoryAuthentication(repo.name, adoAuthentication)
  }
}

lazy val mainSettings = Seq(
  organization := "org.some",
  csrConfiguration := coursierConfiguration,
  updateClassifiers / csrConfiguration := coursierConfiguration
)

lazy val root = (project in file("."))
  .settings(mainSettings: _*)
  .settings(
    name := "sbt-test",
  ).aggregate(core, util)

lazy val core = (project in file("core"))
  .settings(mainSettings: _*)
  .settings(
    name := "core",
  ).dependsOn(util)

lazy val util = (project in file("util"))
  .settings(mainSettings: _*)
  .settings(
    name := "util"
  )
For some reason, coursier attempts to download the util package externally during the core/update task. This is not what I want, as it should resolve it internally as part of the project. The package is not added to libraryDependencies, so I'm baffled why it would attempt the download.
The above example will fail because the Azure DevOps credentials and Maven repository are incorrect, but it still shows the attempted download of util.
It seems somehow related to this Github issue.
The default CoursierConfiguration constructor sets the interProjectDependencies property to an empty Vector, so coursier no longer knows that util is built by this very build. The fix is to add the extra resolvers on top of sbt's existing csrConfiguration task value using .withResolvers, instead of constructing a fresh configuration.
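As a minimal sketch of the difference (key and value names as in the snippets above):

// Builds a configuration from scratch and loses interProjectDependencies:
csrConfiguration := CoursierConfiguration().withResolvers(adoMavenRepos)

// Extends the configuration sbt already computed, keeping them:
csrConfiguration := csrConfiguration.value
  .withResolvers((csrResolvers.value ++ adoMavenRepos).toVector)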
This is what the solution looks like applied to my question, largely based on this Github comment:
val adoMavenUsername = "."
val adoMavenPassword = "ADO_PAT"
val adoRepoIdWithView = "ADO-id"
val adoMavenHost = "pkgs.dev.azure.com"

val adoMavenRepos = Vector(
  MavenRepository(adoRepoIdWithView, s"https://$adoMavenHost/adoOrganization/adoProject/_packaging/$adoRepoIdWithView/maven/v1")
)

lazy val mainSettings = Seq(
  organization := "org.some",
  csrConfiguration := {
    val resolvers = csrResolvers.value ++ adoMavenRepos
    val conf = csrConfiguration.value.withResolvers(resolvers.toVector)
    val adoCredentialsOpt = credentials.value.collectFirst {
      case creds: DirectCredentials if creds.host == adoMavenHost => creds
    }
    val newConfOpt = adoCredentialsOpt.map { adoCredentials =>
      val auths =
        resolvers.collect {
          case repo: MavenRepository if repo.root.startsWith(s"https://$adoMavenHost/") =>
            repo.name -> Authentication(adoCredentials.userName, adoCredentials.passwd)
        }
      auths.foldLeft(conf) { case (conf, (repoId, auth)) =>
        conf.addRepositoryAuthentication(repoId, auth)
      }
    }
    newConfOpt.getOrElse(conf)
  },
  // share the configuration computed above with updateClassifiers
  updateClassifiers / csrConfiguration := csrConfiguration.value
)

sbt autoplugin: add javaagent for task

I have an sbt autoplugin, and when the user runs a task I want to fork a new JVM with a -javaagent. The task should measure memory using jamm.
import sbt._
import sbt.Keys._
import org.github.jamm.MemoryMeter

object SbtMemory extends AutoPlugin {
  object autoImport {
    val agentTest = inputKey[Unit]("Run task with javaagent")
  }

  // Build the -javaagent option from the first jamm jar on the classpath
  def makeAgentOptions(classpath: Classpath): String = {
    val jammJar = classpath.map(_.data).filter(_.toString.contains("jamm")).head
    s"-javaagent:$jammJar"
  }

  override lazy val projectSettings =
    Seq(
      agentTest := agentTask.value,
      fork in agentTest := true,
      javaOptions in agentTest += (dependencyClasspath in Test).map(makeAgentOptions).value
    )

  lazy val agentTask = Def.task {
    val o = new Array[Byte](1024 * 1024)
    val mm = new MemoryMeter()
    println("Size of new Array[Byte](1024*1024): " + mm.measureDeep(o))
  }
}
When I run the task from the command line, I get the following exception:
java.lang.IllegalStateException: Instrumentation is not set; Jamm must be set as -javaagent
I also tried printing the javaOptions and the -javaagent option was not set.
How can I add the -javaagent javaOption inside the plugin to run the task with jamm?
Thanks!
Apparently, fork is only honoured by the run and test tasks. I added my own forking code and moved the measuring code to a separate class MemoryMeasure:
val mainClass: String = "MemoryMeasure"

val forkOptions = ForkOptions(
  bootJars = (fullClasspath in Test).value.files,
  runJVMOptions = Seq(
    (dependencyClasspath in Test).map(makeAgentOptions).value
  )
)

// `arguments` are the program arguments passed to the forked main class
val process = Fork.java.fork(forkOptions, mainClass +: arguments)

def cancel() = {
  process.destroy()
  1
}

val exitCode = try process.exitValue() catch { case e: InterruptedException => cancel() }
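For completeness, here is a sketch of how that forking code might sit inside the input task, assuming the program arguments come from sbt's space-delimited parser (the MemoryMeasure class name and the error handling are illustrative):

import sbt.complete.DefaultParsers._

agentTest := {
  val arguments = spaceDelimited("<arg>").parsed            // program arguments typed after the task name
  val agentOption = makeAgentOptions((dependencyClasspath in Test).value)
  val forkOptions = ForkOptions(
    bootJars = (fullClasspath in Test).value.files,         // sbt 0.13-era ForkOptions
    runJVMOptions = Seq(agentOption)
  )
  val process = Fork.java.fork(forkOptions, "MemoryMeasure" +: arguments)
  val exitCode = process.exitValue()                        // blocks until the forked JVM exits
  if (exitCode != 0) sys.error(s"MemoryMeasure exited with code $exitCode")
}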

Akka.actor.dispatcher NoSuchMethod exception

I am trying to learn about Akka actors and I am running the following example. My problem is that when I run it through the IDEA IDE it works perfectly fine, but when I run it using the jar created by sbt assembly it throws java.lang.NoSuchMethodError: akka.actor.ActorSystem.dispatcher()Lscala/concurrent/ExecutionContextExecutor;, which I cannot debug because everything works fine in the IDE.
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.pattern.ask
import akka.util.Timeout
import scala.collection.mutable
import scala.concurrent.Future
import scala.concurrent.duration._

// Manager and Echo are defined elsewhere in the project
object Runner {
  def main(args: Array[String]) {
    run()
  }

  def run() = {
    val system = ActorSystem("my-system")
    import system.dispatcher
    val props = Props[Manager]
    val pool = mutable.ArrayBuffer.empty[(Int, ActorRef)]
    for (i <- 1 to 10) {
      pool += ((i, system.actorOf(props)))
    }
    val futures = pool.map {
      case (x: Int, y: ActorRef) =>
        val future = ask(y, Echo(x))(Timeout(100 seconds)).mapTo[Int]
        println(future.toString)
        future
    }
    /* Next line causes the Exception */
    val futureList = Future.sequence(futures)
    val result = futureList.map(x => {
      x.sum
    })
    result onSuccess {
      case sum => println(sum)
    }
    pool.foreach(x => system.stop(x._2))
    system.shutdown()
  }
}
The sbt file I am using is the following.
lazy val commonSettings = Seq(
  organization := "foobar",
  version := "1.0",
  scalaVersion := "2.10.6",
  test in assembly := {}
)

lazy val root = (project).aggregate(redis).settings(commonSettings: _*).
  settings(
    name := "scala_code_root",
    version := "1.0",
    scalaVersion := "2.10.6",
    exportJars := false
  )

lazy val myakka = (project in file("myakka")).settings(commonSettings: _*).settings(
  libraryDependencies += "com.typesafe.akka" % "akka-actor_2.10" % "2.3.15"
)
The exception is thrown at the line val futureList = Future.sequence(futures). Apparently the method is there, since both IDEA and sbt-assembly use the same sbt file. What could be the cause of the Exception?

How can I override tasks ``run`` and ``runMain`` in SBT to use my own ``ForkOptions``?

Problem
In a multimodule build, each module has its own baseDirectory, but I would like to launch applications defined in modules using the baseDirectory of the root project instead of the baseDirectory of the module involved.
This way, applications would always take relative file names from the root folder, which is a very common pattern.
The problem is that ForkOptions enforces the baseDirectory from the module and apparently there's no easy way to change that because forkOptions is private. I would like to pass a forkOptions populated with the baseDirectory from the root project instead.
Besides, some modules contain two or more applications, so I'd like to have a separate configuration for each application in such modules.
An example tells more than 1000 words:
build.sbt
import sbt._
import Keys._

lazy val buildSettings: Seq[Setting[_]] = Defaults.defaultSettings
lazy val forkRunOptions: Seq[Setting[_]] = Seq(fork := true)

addCommandAlias("r1", "ModuleA/RunnerR1:run")
addCommandAlias("r2", "ModuleA/RunnerR2:run")

lazy val RunnerR1 = sbt.config("RunnerR1").extend(Compile)
lazy val RunnerR2 = sbt.config("RunnerR2").extend(Compile)

lazy val root =
  project
    .in(file("."))
    .settings(buildSettings: _*)
    .aggregate(ModuleA)

lazy val ModuleA =
  project
    .in(file("ModuleA"))
    .settings(buildSettings: _*)
    .configs(RunnerR1, RunnerR2)
    .settings(inConfig(RunnerR1)(
      forkRunOptions ++
        Seq(
          mainClass in Compile := Option("sbt.tests.issueX.Application1"))): _*)
    .settings(inConfig(RunnerR2)(
      forkRunOptions ++
        Seq(
          mainClass in Compile := Option("sbt.tests.issueX.Application2"))): _*)
In SBT console, I would expect this:
> r1
This is Application1
> r2
This is Application2
But I see this:
> r1
This is Application2
> r2
This is Application2
What is the catch?
Not only that: SBT is running the applications in process, not forking them. Why is fork := true not taking any effect?
Explanation
see: https://github.com/frgomes/sbt-issue-2247
Turns out that configurations do not work the way one might think they work.
The problem is that, in the snippet below, configuration RunnerR1 does not inherit tasks from module ModuleA as you might expect. When you type r1 or r2 (i.e. ModuleA/RunnerR1:run or ModuleA/RunnerR2:run), SBT employs its delegation algorithm to find tasks and settings and, depending on how those tasks and settings were defined, ends up running tasks, or reading settings, from scopes you do not expect. Here, for instance, mainClass in Compile pins the configuration axis explicitly, so inConfig(RunnerR1) and inConfig(RunnerR2) both write to ModuleA/Compile/mainClass and the last assignment (Application2) wins; meanwhile RunnerR1:run delegates to the run task defined in Compile, which never looks at the fork := true set in the runner configurations.
lazy val ModuleA =
  project
    .in(file("ModuleA"))
    .settings(buildSettings: _*)
    .configs(RunnerR1, RunnerR2)
    .settings(inConfig(RunnerR1)(
      forkRunOptions ++
        Seq(
          mainClass in Compile := Option("sbt.tests.issueX.Application1"))): _*)
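Given those delegation rules, a sketch of just the rescoping part of a fix (the forking part still needs the machinery below) is to leave mainClass unscoped inside inConfig, so it lands on the runner configuration instead of Compile:

lazy val ModuleA =
  project
    .in(file("ModuleA"))
    .settings(buildSettings: _*)
    .configs(RunnerR1, RunnerR2)
    .settings(inConfig(RunnerR1)(forkRunOptions ++ Seq(
      mainClass := Option("sbt.tests.issueX.Application1"))): _*)  // ModuleA/RunnerR1/mainClass
    .settings(inConfig(RunnerR2)(forkRunOptions ++ Seq(
      mainClass := Option("sbt.tests.issueX.Application2"))): _*)  // ModuleA/RunnerR2/mainClass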
This issue is related to usability, since the API provided by SBT is misleading. Eventually this pattern can be improved or better documented, but it's more a usability problem than anything else.
Circumventing the difficulty
Please find below how this issue can be circumvented.
Since the forkOptions setting is private in SBT, we have to provide our own way of running applications, staying as close to SBT's own code as possible.
In a nutshell, we have to guarantee that we redefine run, runMain and runner in all configurations we have.
import sbt._
import Keys._

//-------------------------------------------------------------
// This file contains a solution for the problem presented by
// https://github.com/sbt/sbt/issues/2247
//-------------------------------------------------------------

lazy val buildSettings: Seq[Setting[_]] = Defaults.defaultSettings ++ runSettings

lazy val runSettings: Seq[Setting[_]] =
  Seq(
    fork in (Compile, run) := true)

def forkRunOptions(s: Scope): Seq[Setting[_]] =
  Seq(
    // see: https://github.com/sbt/sbt/issues/2247
    // see: https://github.com/sbt/sbt/issues/2244
    runner in run in s := {
      val forkOptions: ForkOptions =
        ForkOptions(
          workingDirectory = Some((baseDirectory in ThisBuild).value),
          bootJars = Nil,
          javaHome = (javaHome in s).value,
          connectInput = (connectInput in s).value,
          outputStrategy = (outputStrategy in s).value,
          runJVMOptions = (javaOptions in s).value,
          envVars = (envVars in s).value)
      new {
        val fork_ = (fork in run).value
        val config: ForkOptions = forkOptions
      } with ScalaRun {
        override def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Option[String] =
          javaRunner(
            Option(mainClass), Option(classpath), options,
            Some("java"), Option(log), fork_,
            config.runJVMOptions, config.javaHome, config.workingDirectory, config.envVars, config.connectInput, config.outputStrategy)
      }
    },
    runner in runMain in s := (runner in run in s).value,
    run in s <<= Defaults.runTask(fullClasspath in s, mainClass in run in s, runner in run in s),
    runMain in s <<= Defaults.runMainTask(fullClasspath in s, runner in runMain in s)
  )
def javaRunner(mainClass: Option[String] = None,
               classpath: Option[Seq[File]] = None,
               options: Seq[String],
               javaTool: Option[String] = None,
               log: Option[Logger] = None,
               fork: Boolean = false,
               jvmOptions: Seq[String] = Nil,
               javaHome: Option[File] = None,
               cwd: Option[File] = None,
               envVars: Map[String, String] = Map.empty,
               connectInput: Boolean = false,
               outputStrategy: Option[OutputStrategy] = Some(StdoutOutput)): Option[String] = {

  def runner(app: String,
             args: Seq[String],
             cwd: Option[File] = None,
             env: Map[String, String] = Map.empty): Int = {
    import scala.collection.JavaConverters._
    val cmd: Seq[String] = app +: args
    val pb = new java.lang.ProcessBuilder(cmd.asJava)
    if (cwd.isDefined) pb.directory(cwd.get)
    pb.inheritIO
    //FIXME: set environment
    val process = pb.start()
    if (fork) 0
    else {
      def cancel() = {
        if (log.isDefined) log.get.warn("Background process cancelled.")
        process.destroy()
        15
      }
      try process.waitFor catch {
        case e: InterruptedException => cancel()
      }
    }
  }

  val app: String = javaHome.fold("") { p => p.absolutePath + "/bin/" } + javaTool.getOrElse("java")
  val jvm: Seq[String] = jvmOptions.map(p => p.toString)
  val cp: Seq[String] =
    classpath
      .fold(Seq.empty[String]) { paths =>
        Seq(
          "-cp",
          paths
            .map(p => p.absolutePath)
            .mkString(java.io.File.pathSeparator))
      }
  val klass = mainClass.fold(Seq.empty[String]) { name => Seq(name) }
  val xargs: Seq[String] = jvm ++ cp ++ klass ++ options

  if (log.isDefined)
    if (fork) {
      log.get.info(s"Forking: ${app} " + xargs.mkString(" "))
    } else {
      log.get.info(s"Running: ${app} " + xargs.mkString(" "))
    }

  if (cwd.isDefined) IO.createDirectory(cwd.get)
  val exitCode = runner(app, xargs, cwd, envVars)
  if (exitCode == 0)
    None
  else
    Some("Nonzero exit code returned from " + app + ": " + exitCode)
}
addCommandAlias("r1", "ModuleA/RunnerR1:run")
addCommandAlias("r2", "ModuleA/RunnerR2:run")
lazy val RunnerR1 = sbt.config("RunnerR1").extend(Compile)
lazy val RunnerR2 = sbt.config("RunnerR2").extend(Compile)
lazy val root =
project
.in(file("."))
.settings(buildSettings:_*)
.aggregate(ModuleA)
lazy val ModuleA =
project
.in(file("ModuleA"))
.settings(buildSettings:_*)
.configs(RunnerR1,RunnerR2)
.settings(inConfig(RunnerR1)(
forkRunOptions(ThisScope) ++
Seq(
mainClass := Option("sbt.tests.issueX.Application1"))):_*)
.settings(inConfig(RunnerR2)(
forkRunOptions(ThisScope) ++
Seq(
mainClass := Option("sbt.tests.issueX.Application2"))):_*)
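With these definitions, r1 and r2 resolve mainClass in their own configurations (it is no longer pinned to Compile inside inConfig), and the redefined runner forks a JVM whose working directory is baseDirectory in ThisBuild, so applications resolve relative paths against the root project as intended.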

SBT triggering or detecting in a task if any sources have been recompiled

This snippet is wrong:
def bundleTo(dir: String) = Seq(
  mkBundles <<= (bundle, compile in Compile) map { (fl, anal) =>
    val flTarget = baseDirectory / s"app/$dir/${fl.getName}"
    if (!flTarget.exists()) {
      println("target did not exist copying over")
      IO.copyFile(fl, flTarget)
    } else if (anal.compilations.allCompilations.nonEmpty) {
      println("something was recompiled, copying over")
      IO.copyFile(fl, flTarget)
    }
  },
  mkBundles <<= mkBundles.triggeredBy(compile in Compile)
)
Specifically, anal.compilations.allCompilations.nonEmpty is not a reliable signal that anything was just recompiled. I'd like to move a plugin into a directory only if something has changed, since the copy triggers a bundle reload.
This snippet for SBT 0.13.7 will trigger the inner closure upon source change. There is probably pre-rolled logic for this in the SBT code base. You will probably need extra invalidation logic for SBT setting key changes and dependency updates.
myTask := {
  val us = (unmanagedSources in Compile).value
  val cd = streams.value.cacheDirectory / "osgi-recompile-cache"
  println("bam")
  val func = FileFunction.cached(cd, FilesInfo.lastModified) { par: Set[File] =>
    println("boom")
    par
  }
  func(us.toSet)
}

myTask <<= myTask.triggeredBy(compile in Compile)
I fleshed out a script to do what I need. Here it is:
import sbt._
import sbt.Keys._
import com.typesafe.sbt.osgi.OsgiKeys._

object OsgiDistUtils {
  // sbt key names must be valid identifiers; descriptions go in the second argument
  lazy val rootDirectory = SettingKey[File]("rootDirectory", "the root of the entire build")
  lazy val distDirectoryName = SettingKey[String]("distDirectoryName", "name for the dist directory")
  lazy val distdirectory = SettingKey[File]("distdirectory", "derived location where the OSGI dist will be constructed")
  lazy val bundleDirectory = SettingKey[File]("bundleDirectory", "location for the bundles")
  lazy val compileBundleAndMove = TaskKey[Unit]("compileBundleAndMove", "make bundles if needed")

  val osgiDistUtildefaults = Seq(
    distDirectoryName := "app",
    distdirectory := rootDirectory.value / distDirectoryName.value,
    compileBundleAndMove := {
      val targetDirectory = bundleDirectory.value
      val moduleName = name.value
      val bundleFile = bundle.value
      val s = streams.value
      val targetFile = targetDirectory / bundleFile.getName
      if (!targetDirectory.exists()) {
        IO.createDirectory(targetDirectory)
      } else if (!targetFile.exists()) {
        s.log.info(s"module $moduleName did not exist in dist, copying over.")
        IO.copyFile(bundleFile, targetFile)
      } else {
        val sources = (unmanagedSources in Compile).value
        val cp = (managedClasspath in Compile).value
        val cd = s.cacheDirectory / "osgi-recompile-cache"
        FileFunction.cached(cd, FilesInfo.lastModified) { sources: Set[File] =>
          s.log.info(s"Recompiling $moduleName as sources or classpath have changed.")
          IO.copyFile(bundleFile, targetFile)
          sources
        } (sources.toSet ++ cp.seq.map(_.data).toSet)
      }
    },
    compileBundleAndMove <<= compileBundleAndMove.triggeredBy(compile in Compile)
  )

  def createModuleGroup(base: File, name: String, aggregatorSettings: Seq[Def.Setting[_]], moduleSettings: Seq[Def.Setting[_]], projectDeps: Array[Project] = Array()) = {
    val moduleRoot = base / name
    val modules = for (x <- moduleRoot.listFiles if x.isDirectory && x.getName != "target") yield {
      Project(
        id = name + "-%s".format(x.getName).replace(".", "-"),
        base = x,
        settings = moduleSettings ++ osgiDistUtildefaults ++ Seq(
          bundleDirectory := distdirectory.value / name  // path under distdirectory, not key scoping
        )
      ).dependsOn(projectDeps.map(x => ClasspathDependency(x, Some("compile"))): _*)
    }
    val moduleRefs = modules.map { x =>
      x: ProjectReference
    }
    val aggregationNode = Project(
      id = name,
      base = moduleRoot,
      settings = aggregatorSettings
    ).aggregate(moduleRefs: _*)
    (aggregationNode, modules)
  }
}
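For illustration, a hypothetical project/Build.scala wiring the helper up might look like this (the directory layout, organization and Scala version are assumptions):

import sbt._
import sbt.Keys._

object MyBuild extends Build {
  lazy val common = Seq(
    organization := "org.example",                        // assumed
    scalaVersion := "2.11.8",                             // assumed
    OsgiDistUtils.rootDirectory := (baseDirectory in ThisBuild).value
  )

  // Turn every directory under ./bundles into a module of the "bundles" group.
  val (bundlesNode, bundleModules) =
    OsgiDistUtils.createModuleGroup(
      base = file("."),
      name = "bundles",
      aggregatorSettings = common,
      moduleSettings = common
    )

  lazy val root = Project("root", file(".")).aggregate(bundlesNode)
}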