I'm trying to write an sbt plugin for my project that will process resources; in a nutshell, it is Maven profiles re-implemented in sbt. When I inspect prod:dictionary I get the expected state of this Map; however, when I try prod:expandParameters I get an empty Map. How can I get the value of dictionary from the scope of the exact configuration the command is run with?
project/ResourceFiltering.scala
import sbt._
import sbt.Keys._
import sbt.internal.util.ManagedLogger
import scala.util.matching.Regex

object ResourceFiltering extends AutoPlugin {
  override def trigger = AllRequirements

  sealed trait Keys {
    lazy val expandParameters = taskKey[Unit]("")
    lazy val extensions = settingKey[Seq[String]]("")
    lazy val pattern = settingKey[Regex]("")
    lazy val dictionary = settingKey[Map[String, String]]("")
  }
  object autoImport extends Keys
  import autoImport._

  override val projectSettings: Seq[Def.Setting[_]] = Seq(
    Zero / extensions := Seq("conf", "properties", "xml"),
    Zero / pattern := """(\$\{()\})""".r,
    Zero / dictionary := Map.empty,
    expandParameters := {
      val log: ManagedLogger = streams.value.log
      log.info(s"""|Parameter expansion
                   |Configuration: $configuration
                   |Extensions: ${extensions.value}
                   |Pattern: ${pattern.value}
                   |Dictionary: ${dictionary.value}
                   """.stripMargin)
    }
  )
}
build.sbt
enablePlugins(ResourceFiltering)

lazy val Prod = config("prod") extend Compile describedAs "Scope to build production packages."
lazy val Stage = config("stage") extend Compile describedAs "Scope to build stage packages."
lazy val Local = config("local") extend Compile describedAs "Scope to build local packages."

lazy val root = (project in file("."))
  .configs(Prod, Stage, Local)
  .settings(sharedSettings)

lazy val sharedSettings =
  prodSettings ++ stageSettings ++ localSettings

lazy val defaults = Defaults.configSettings ++ Defaults.configTasks ++ Defaults.resourceConfigPaths

lazy val prodSettings = inConfig(Prod)(defaults ++ Seq(
  dictionary ++= Profiles.prod
))
lazy val stageSettings = inConfig(Stage)(defaults ++ Seq(
  dictionary ++= Profiles.stage
))
lazy val localSettings = inConfig(Local)(defaults ++ Seq(
  dictionary ++= Profiles.local
))
project/Profiles.scala
object Profiles {
  lazy val default: Map[String, String] = local
  lazy val local: Map[String, String] = Map("example" -> "local")
  lazy val stage: Map[String, String] = Map("example" -> "stage")
  lazy val prod: Map[String, String] = Map("example" -> "prod")
}
Analysing the Plugins Best Practices docs, I would make the following recommendations regarding configuration and scoping.
Provide default values in globalSettings instead of projectSettings, like so:
override lazy val globalSettings = Seq(
  dictionary := Map.empty
)
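This works because of sbt's scope delegation: when, say, Prod / dictionary is looked up and no value is defined for that configuration, sbt falls back to the configuration-less project scope and ultimately to Global, so the Map.empty default is always found, and a config-scoped dictionary ++= in build.sbt has an initial value to append to.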
Next, collect the base configuration of expandParameters into its own sequence, like so:
lazy val baseResourceFilteringSettings: Seq[Def.Setting[_]] = Seq(
  extensions := Seq("conf", "properties", "xml"),
  pattern := """(\$\{()\})""".r,
  expandParameters := {
    val log: ManagedLogger = streams.value.log
    log.info(
      s"""|Parameter expansion
          |Configuration: $configuration
          |Extensions: ${extensions.value}
          |Pattern: ${pattern.value}
          |Dictionary: ${dictionary.value}
          """.stripMargin
    )
  }
)
Note how dictionary is not initialised in baseResourceFilteringSettings; by default its value comes from globalSettings.
Now that the defaults are taken care of and we have our base configuration, we can "specialise" it by configuration scope using inConfig, like so:
lazy val localSettings = inConfig(Local)(defaults ++ Seq(
  dictionary ++= Profiles.local
) ++ baseResourceFilteringSettings)
Note how we have scoped baseResourceFilteringSettings to Local config, as well as dictionary ++= Profiles.local.
Now executing ;reload;local:expandParameters should output
[info] Parameter expansion
[info] Configuration: SettingKey(This / This / This / configuration)
[info] Extensions: List(conf, properties, xml)
[info] Pattern: (\$\{()\})
[info] Dictionary: Map(example -> local)
where we see Dictionary: Map(example -> local) as required.
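Similarly, prod:expandParameters and stage:expandParameters should report Dictionary: Map(example -> prod) and Dictionary: Map(example -> stage) respectively.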
Here is the complete code of ResourceFiltering
import sbt._
import sbt.Keys._
import sbt.internal.util.ManagedLogger
import scala.util.matching.Regex

object ResourceFiltering extends AutoPlugin {
  override def trigger = AllRequirements

  sealed trait Keys {
    lazy val expandParameters = taskKey[Unit]("")
    lazy val extensions = settingKey[Seq[String]]("")
    lazy val pattern = settingKey[Regex]("")
    lazy val dictionary = settingKey[Map[String, String]]("")

    lazy val baseResourceFilteringSettings: Seq[Def.Setting[_]] = Seq(
      extensions := Seq("conf", "properties", "xml"),
      pattern := """(\$\{()\})""".r,
      expandParameters := {
        val log: ManagedLogger = streams.value.log
        log.info(
          s"""|Parameter expansion
              |Configuration: $configuration
              |Extensions: ${extensions.value}
              |Pattern: ${pattern.value}
              |Dictionary: ${dictionary.value}
              """.stripMargin
        )
      }
    )
  }
  object autoImport extends Keys
  import autoImport._

  override lazy val globalSettings = Seq(
    dictionary := Map.empty
  )
}
Also consider moving the configuration definitions into the plugin, like so:
import sbt._
import sbt.Keys._
import sbt.internal.util.ManagedLogger
import scala.util.matching.Regex

object ResourceFiltering extends AutoPlugin {
  override def trigger = AllRequirements

  sealed trait Keys {
    lazy val Prod = config("prod") extend Compile describedAs "Scope to build production packages."
    lazy val Stage = config("stage") extend Compile describedAs "Scope to build stage packages."
    lazy val Local = config("local") extend Compile describedAs "Scope to build local packages."

    lazy val expandParameters = taskKey[Unit]("")
    lazy val extensions = settingKey[Seq[String]]("")
    lazy val pattern = settingKey[Regex]("")
    lazy val dictionary = settingKey[Map[String, String]]("")

    lazy val baseResourceFilteringSettings: Seq[Def.Setting[_]] = Seq(
      extensions := Seq("conf", "properties", "xml"),
      pattern := """(\$\{()\})""".r,
      expandParameters := {
        val log: ManagedLogger = streams.value.log
        log.info(
          s"""|Parameter expansion
              |Configuration: $configuration
              |Extensions: ${extensions.value}
              |Pattern: ${pattern.value}
              |Dictionary: ${dictionary.value}
              """.stripMargin
        )
      }
    )
  }
  object autoImport extends Keys
  import autoImport._

  override lazy val globalSettings = Seq(
    dictionary := Map.empty
  )

  override val projectSettings: Seq[Def.Setting[_]] =
    inConfig(Stage)(baseResourceFilteringSettings) ++
      inConfig(Prod)(baseResourceFilteringSettings) ++
      inConfig(Local)(baseResourceFilteringSettings)
}
This way we do not have to remember to add baseResourceFilteringSettings to each config scope and can simply write:
lazy val localSettings = inConfig(Local)(defaults ++ Seq(
  dictionary ++= Profiles.local
))
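For completeness, here is a rough sketch of what a real expandParameters implementation could look like once the logging stub is replaced by actual substitution. Everything below is illustrative, not part of the plugin above: it assumes a capturing pattern such as """\$\{([^}]+)\}""".r (so that group(1) yields the key) and rewrites matching files under the configuration's unmanagedResourceDirectories in place.

  expandParameters := {
    val log  = streams.value.log
    val dict = dictionary.value
    val exts = extensions.value.toSet
    // Hypothetical traversal: every file with a matching extension below
    // the unmanaged resource directories of the current configuration.
    val files = (unmanagedResourceDirectories.value ** "*").get
      .filter(f => f.isFile && exts.contains(f.ext))
    files.foreach { file =>
      val expanded = pattern.value.replaceAllIn(
        IO.read(file),
        m => Regex.quoteReplacement(dict.getOrElse(m.group(1), m.matched))
      )
      IO.write(file, expanded)
      log.info(s"Expanded parameters in $file")
    }
  }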
Related
I have a multi-project build with a build.sbt that looks as follows:
import lmcoursier.CoursierConfiguration
import lmcoursier.definitions.Authentication

ThisBuild / version := "0.1.0-SNAPSHOT"
ThisBuild / scalaVersion := "2.12.12"

val adoMavenUsername = "."
val adoMavenPassword = "ADO_PAT"
val adoRepoIdWithView = "ADO-id"

val adoMavenRepos = Vector(
  MavenRepository(adoRepoIdWithView, s"https://adoMavenHost/adoOrganization/adoProject/_packaging/${adoRepoIdWithView.replace("#", "%40")}/maven/v1")
)

val adoAuthentication =
  Authentication(user = adoMavenUsername, password = adoMavenPassword)
    .withOptional(false)
    .withHttpsOnly(true)
    .withPassOnRedirect(false)

val coursierConfiguration = {
  val initial =
    CoursierConfiguration()
      .withResolvers(adoMavenRepos)
      .withClassifiers(Vector("", "sources"))
      .withHasClassifiers(true)
  adoMavenRepos.foldLeft(initial) {
    case (conf, repo) =>
      conf.addRepositoryAuthentication(repo.name, adoAuthentication)
  }
}

lazy val mainSettings = Seq(
  organization := "org.some",
  csrConfiguration := coursierConfiguration,
  updateClassifiers / csrConfiguration := coursierConfiguration
)

lazy val root = (project in file("."))
  .settings(mainSettings: _*)
  .settings(
    name := "sbt-test",
  ).aggregate(core, util)

lazy val core = (project in file("core"))
  .settings(mainSettings: _*)
  .settings(
    name := "core",
  ).dependsOn(util)

lazy val util = (project in file("util"))
  .settings(mainSettings: _*)
  .settings(
    name := "util"
  )
For some reason, coursier attempts to download the util package externally during the core/update task. This is not what I want, as it should resolve it internally as part of the project. The package is not added to libraryDependencies, so I'm baffled why it would attempt the download.
The above example will fail because the Azure DevOps credentials and Maven repository are incorrect, but it shows the attempt to download util.
It seems somehow related to this GitHub issue.
The default CoursierConfiguration constructor sets the interProjectDependencies property to an empty Vector, so building the configuration from scratch discards the dependency of core on util, and coursier tries to resolve util externally. To fix this, start from sbt's own csrConfiguration task, which already carries the inter-project dependencies, and add the extra resolvers on top of it using .withResolvers.
This is what the solution looks like applied to my question, largely based on this GitHub comment:
val adoMavenUsername = "."
val adoMavenPassword = "ADO_PAT"
val adoRepoIdWithView = "ADO-id"
val adoMavenHost = "pkgs.dev.azure.com"

val adoMavenRepos = Vector(
  MavenRepository(adoRepoIdWithView, s"https://$adoMavenHost/adoOrganization/adoProject/_packaging/$adoRepoIdWithView/maven/v1")
)

lazy val mainSettings = Seq(
  organization := "org.some",
  csrConfiguration := {
    // Start from sbt's own configuration so that interProjectDependencies is preserved.
    val resolvers = csrResolvers.value ++ adoMavenRepos
    val conf = csrConfiguration.value.withResolvers(resolvers.toVector)
    val adoCredentialsOpt = credentials.value.collectFirst {
      case creds: DirectCredentials if creds.host == adoMavenHost => creds
    }
    val newConfOpt = adoCredentialsOpt.map { adoCredentials =>
      val auths =
        resolvers
          .collect {
            case repo: MavenRepository if repo.root.startsWith(s"https://$adoMavenHost/") =>
              repo.name -> Authentication(adoCredentials.userName, adoCredentials.passwd)
          }
      auths.foldLeft(conf) { case (conf, (repoId, auth)) =>
        conf.addRepositoryAuthentication(repoId, auth)
      }
    }
    newConfOpt.getOrElse(conf)
  },
  // The standalone coursierConfiguration val is gone in this version, so the
  // classifier configuration now also derives from the task defined above.
  updateClassifiers / csrConfiguration := csrConfiguration.value
)
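The essential difference from the original attempt is the starting point: csrConfiguration.value already lists core and util in interProjectDependencies, whereas a fresh CoursierConfiguration() leaves that property empty. With the inter-project dependencies intact, util resolves internally and no external download is attempted.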
I have written an SBT auto plugin MyPlugin.scala:
package com.abhi

import sbt._
import sbt.Keys._

object MyPlugin extends AutoPlugin {
  object autoImport {
    val helloTask = taskKey[Unit]("says hello")
    val byeTask = taskKey[Unit]("bye task")
  }
  import autoImport._

  override lazy val projectSettings = Seq(
    helloTask := {
      val dir = sourceManaged.value
      val cp = (dependencyClasspath in Compile).value
      val r = (runner in Compile).value
      val s = streams.value
      val rd = (resourceDirectory in Compile).value
      val sd = (sourceDirectory in Compile).value
      println(s"Here to say hello $dir $cp $r $s $rd $sd")
    },
    byeTask := {
      val dir = sourceManaged.value
      val cp = (dependencyClasspath in Compile).value
      val r = (runner in Compile).value
      val s = streams.value
      val rd = (resourceDirectory in Compile).value
      val sd = (sourceDirectory in Compile).value
      println(s"Here to say bye $dir $cp $r $s $rd $sd")
    }
  )
}
This works and I am able to use this plugin. However, the implementations of helloTask and byeTask will be a little long, so I don't want to write them inside MyPlugin.scala.
Instead, I want to create two separate files HelloTask.scala and ByeTask.scala and then write the respective implementations there.
I looked at the SBT documentation for Custom Settings and all examples always implement the tasks inside of the plugin itself.
How can I write the implementations of helloTask and byeTask outside of the MyPlugin.scala file? Also, how can I share some logic between HelloTask and ByeTask?
The following lines are common between the two tasks and I want to write these only once
val dir = sourceManaged.value
val cp = (dependencyClasspath in Compile).value
val r = (runner in Compile).value
val s = streams.value
val rd = (resourceDirectory in Compile).value
val sd = (sourceDirectory in Compile).value
Separating task implementations is common good practice; it is mentioned in the Tasks documentation. You can write an implementation using the Def.task macro:
def taskImpl(args: ...): Def.Initialize[Task[...]] = Def.task {
  ...
}
And then use it with different arguments to set different task keys:
override def projectSettings = Seq(
  taskA := taskImpl("A").value,
  taskB := taskImpl("B").value,
)
In your case you could do something like this:
def saySmthImpl(msg: String): Def.Initialize[Task[Unit]] = Def.task {
  val dir = sourceManaged.value
  val cp = (dependencyClasspath in Compile).value
  val r = (runner in Compile).value
  val s = streams.value
  val rd = (resourceDirectory in Compile).value
  val sd = (sourceDirectory in Compile).value
  println(s"$msg $dir $cp $r $s $rd $sd")
}
You can keep this implementation in a separate file if you want. Then in the plugin definition you can use it like this:
override def projectSettings = Seq(
  helloTask := saySmthImpl("Here to say hello").value,
  byeTask := saySmthImpl("Here to say bye").value,
)
You should keep in mind, though, that accessing other settings or tasks with .value can only be done in certain contexts, such as inside Def.task, Def.setting, or when setting keys with :=. This limits (or rather directs) the ways you can share logic between different task implementations.
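For instance, a plain helper method cannot call .value at all; this hypothetical variant fails to compile:

  def broken(msg: String): Unit = {
    // error: `value` can only be used within a task or setting macro,
    // such as :=, +=, ++=, Def.task, or Def.setting
    val dir = sourceManaged.value
    println(s"$msg $dir")
  }

Shared helpers therefore either return a Def.Initialize[Task[...]] as above, or take already-extracted values as ordinary parameters.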
I am trying to create an implicit encoder using Circe. This encoder will be generated by an annotation, hence I am using Scalameta. Here is my code; the compiler complains about having an override statement within the quasiquotes.
class HalResource extends StaticAnnotation {
  inline def apply(defn: Any): Any = meta {
    val q"..$mods class $tName (..$params) extends $template {..$stats}" = defn
    q"object $tName {${createApply(tName)}}"
  }

  private def createApply(className: Type.Name): Defn.Def = {
    q"""
      import _root_.io.circe.Json
      import _root_.io.circe.syntax._
      import _root_.io.circe.Encoder
      implicit def encoder = Encoder[$className] {
        override def apply(a: $className): Json = {
          val (simpleFields: Seq[Term.Param], nonSimpleFields: Seq[Term.Param]) =
            params.partition(field => field.decltpe.fold(false) {
              case _: Type.Name => true
              case _ => false
            })
          val embedded: Seq[(String, Json)] = nonSimpleFields.map(field => field.name.syntax -> field.name.value.asJson)
          val simpleJsonFields: Seq[(String, Json)] = simpleFields.map(field => field.name.syntax -> field.name.value.asJson)
          val baseSeq: Seq[(String, Json)] = Seq(
            "_links" -> Json.obj(
              "href" -> Json.obj(
                "self" -> Json.fromString("self_reference")
              )
            ),
            "_embedded" -> Json.fromFields(embedded),
          ) ++ simpleJsonFields
          val result: Seq[(String, Json)] = baseSeq ++ simpleJsonFields
          Json.fromFields(result)
        }
      }
    """
  }
}
The build file is as follows:
import sbt.Keys.{scalaVersion, scalacOptions}

val circeVersion = "0.8.0"

lazy val circeDependencies = Seq(
  "io.circe" %% "circe-core",
  "io.circe" %% "circe-generic",
  "io.circe" %% "circe-parser"
).map(_ % circeVersion)

lazy val commonSettings = Seq(
  name := "Annotation",
  version := "1.0",
  scalaVersion := "2.12.2",
  scalacOptions ++= Seq("-unchecked", "-deprecation", "-feature"),
  resolvers += Resolver.sonatypeRepo("releases")
)

lazy val macroAnnotationSettings = Seq(
  addCompilerPlugin("org.scalameta" % "paradise" % "3.0.0-M9" cross CrossVersion.full),
  scalacOptions += "-Xplugin-require:macroparadise",
  scalacOptions in (Compile, console) ~= (_ filterNot (_ contains "paradise"))
)

lazy val projectThatDefinesMacroAnnotations = project.in(file("annotation-definition"))
  .settings(commonSettings)
  .settings(
    name := "HalResource",
    libraryDependencies += "org.scalameta" %% "scalameta" % "1.8.0" % Provided,
    macroAnnotationSettings)

lazy val annotation = project.in(file("."))
  .settings(commonSettings)
  .settings(macroAnnotationSettings)
  .settings(
    libraryDependencies ++= circeDependencies
  ).dependsOn(projectThatDefinesMacroAnnotations)
As a result I still get:
macro annotation could not be expanded (the most common reason for that is that you need to enable the macro paradise plugin; another possibility is that you try to use macro annotation in the same compilation run that defines it)
You are just missing new before Encoder[$className] { (there may be other errors, but that's the immediate one).
Because of this, the compiler thinks you are trying to call a generic method Encoder with the block
{
  override def apply(a: $className): Json = ...
  ...
}
as the argument, and local methods can't be marked override.
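So the relevant part of the quasiquote should construct an anonymous instance; a minimal sketch of just the fix, with the body left as in the question:

  implicit def encoder: Encoder[$className] = new Encoder[$className] {
    override def apply(a: $className): Json = {
      // ... body unchanged ...
    }
  }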
Problem
In a multimodule build, each module has its own baseDirectory, but I would like to launch applications defined in modules using the baseDirectory of the root project instead of the baseDirectory of the module involved.
This way, applications would always resolve relative file names against the root folder, which is a very common pattern.
The problem is that ForkOptions enforces the baseDirectory from the module and apparently there's no easy way to change that because forkOptions is private. I would like to pass a forkOptions populated with the baseDirectory from the root project instead.
Besides, some modules contain two or more applications, so I'd like to have a separate configuration for each application in such modules.
An example tells more than 1000 words:
build.sbt
import sbt._
import Keys._

lazy val buildSettings: Seq[Setting[_]] = Defaults.defaultSettings
lazy val forkRunOptions: Seq[Setting[_]] = Seq(fork := true)

addCommandAlias("r1", "ModuleA/RunnerR1:run")
addCommandAlias("r2", "ModuleA/RunnerR2:run")

lazy val RunnerR1 = sbt.config("RunnerR1").extend(Compile)
lazy val RunnerR2 = sbt.config("RunnerR2").extend(Compile)

lazy val root =
  project
    .in(file("."))
    .settings(buildSettings:_*)
    .aggregate(ModuleA)

lazy val ModuleA =
  project
    .in(file("ModuleA"))
    .settings(buildSettings:_*)
    .configs(RunnerR1,RunnerR2)
    .settings(inConfig(RunnerR1)(
      forkRunOptions ++
        Seq(
          mainClass in Compile := Option("sbt.tests.issueX.Application1"))):_*)
    .settings(inConfig(RunnerR2)(
      forkRunOptions ++
        Seq(
          mainClass in Compile := Option("sbt.tests.issueX.Application2"))):_*)
In SBT console, I would expect this:
> r1
This is Application1
> r2
This is Application2
But I see this:
> r1
This is Application2
> r2
This is Application2
What is the catch?
Not only that... SBT is running the applications in process, not forking them. Why is fork := true not taking any effect?
Explanation
see: https://github.com/frgomes/sbt-issue-2247
It turns out that configurations do not work the way one might think they work.
The problem is that, in the snippet below, configuration RunnerR1 does not inherit tasks from module ModuleA the way you might expect. When you type r1 or r2 (i.e. ModuleA/RunnerR1:run or ModuleA/RunnerR2:run), SBT employs its delegation algorithm to find tasks and settings and, depending on how those were defined, it ends up running tasks or reading settings from scopes you do not expect. In particular, mainClass in Compile inside inConfig(RunnerR1)(...) remains scoped to Compile, because inConfig only fills in the configuration axis where it was left unspecified; the RunnerR2 block then overwrites the same Compile-scoped setting, and both r1 and r2 delegate to it, which is why both print "This is Application2".
lazy val ModuleA =
  project
    .in(file("ModuleA"))
    .settings(buildSettings:_*)
    .configs(RunnerR1,RunnerR2)
    .settings(inConfig(RunnerR1)(
      forkRunOptions ++
        Seq(
          mainClass in Compile := Option("sbt.tests.issueX.Application1"))):_*)
This issue is related to usability, since the API provided by SBT is misleading. Eventually this pattern can be improved or better documented, but it's more a usability problem than anything else.
Circumventing the difficulty
Please find below how this issue can be circumvented.
Since forkOptions is private, we have to provide our own way of running applications, based on SBT's own code as much as possible.
In a nutshell, we have to guarantee that we redefine run, runMain and runner in all configurations we have.
import sbt._
import Keys._

//-------------------------------------------------------------
// This file contains a solution for the problem presented by
// https://github.com/sbt/sbt/issues/2247
//-------------------------------------------------------------

lazy val buildSettings: Seq[Setting[_]] = Defaults.defaultSettings ++ runSettings

lazy val runSettings: Seq[Setting[_]] =
  Seq(
    fork in (Compile, run) := true)

def forkRunOptions(s: Scope): Seq[Setting[_]] =
  Seq(
    // see: https://github.com/sbt/sbt/issues/2247
    // see: https://github.com/sbt/sbt/issues/2244
    runner in run in s := {
      val forkOptions: ForkOptions =
        ForkOptions(
          workingDirectory = Some((baseDirectory in ThisBuild).value),
          bootJars = Nil,
          javaHome = (javaHome in s).value,
          connectInput = (connectInput in s).value,
          outputStrategy = (outputStrategy in s).value,
          runJVMOptions = (javaOptions in s).value,
          envVars = (envVars in s).value)
      new {
        val fork_ = (fork in run).value
        val config: ForkOptions = forkOptions
      } with ScalaRun {
        override def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Option[String] =
          javaRunner(
            Option(mainClass), Option(classpath), options,
            Some("java"), Option(log), fork_,
            config.runJVMOptions, config.javaHome, config.workingDirectory, config.envVars, config.connectInput, config.outputStrategy)
      }
    },
    runner in runMain in s := (runner in run in s).value,
    run in s <<= Defaults.runTask(fullClasspath in s, mainClass in run in s, runner in run in s),
    runMain in s <<= Defaults.runMainTask(fullClasspath in s, runner in runMain in s)
  )
def javaRunner(mainClass: Option[String] = None,
               classpath: Option[Seq[File]] = None,
               options: Seq[String],
               javaTool: Option[String] = None,
               log: Option[Logger] = None,
               fork: Boolean = false,
               jvmOptions: Seq[String] = Nil,
               javaHome: Option[File] = None,
               cwd: Option[File] = None,
               envVars: Map[String, String] = Map.empty,
               connectInput: Boolean = false,
               outputStrategy: Option[OutputStrategy] = Some(StdoutOutput)): Option[String] = {

  def runner(app: String,
             args: Seq[String],
             cwd: Option[File] = None,
             env: Map[String, String] = Map.empty): Int = {
    import scala.collection.JavaConverters._
    val cmd: Seq[String] = app +: args
    val pb = new java.lang.ProcessBuilder(cmd.asJava)
    if (cwd.isDefined) pb.directory(cwd.get)
    pb.inheritIO
    //FIXME: set environment
    val process = pb.start()
    if (fork) 0
    else {
      def cancel() = {
        if (log.isDefined) log.get.warn("Background process cancelled.")
        process.destroy()
        15
      }
      try process.waitFor catch {
        case e: InterruptedException => cancel()
      }
    }
  }

  val app: String = javaHome.fold("") { p => p.absolutePath + "/bin/" } + javaTool.getOrElse("java")
  val jvm: Seq[String] = jvmOptions.map(p => p.toString)
  val cp: Seq[String] =
    classpath
      .fold(Seq.empty[String]) { paths =>
        Seq(
          "-cp",
          paths
            .map(p => p.absolutePath)
            .mkString(java.io.File.pathSeparator))
      }
  val klass = mainClass.fold(Seq.empty[String]) { name => Seq(name) }
  val xargs: Seq[String] = jvm ++ cp ++ klass ++ options

  if (log.isDefined)
    if (fork) {
      log.get.info(s"Forking: ${app} " + xargs.mkString(" "))
    } else {
      log.get.info(s"Running: ${app} " + xargs.mkString(" "))
    }

  if (cwd.isDefined) IO.createDirectory(cwd.get)
  val exitCode = runner(app, xargs, cwd, envVars)
  if (exitCode == 0)
    None
  else
    Some("Nonzero exit code returned from " + app + ": " + exitCode)
}
addCommandAlias("r1", "ModuleA/RunnerR1:run")
addCommandAlias("r2", "ModuleA/RunnerR2:run")
lazy val RunnerR1 = sbt.config("RunnerR1").extend(Compile)
lazy val RunnerR2 = sbt.config("RunnerR2").extend(Compile)
lazy val root =
project
.in(file("."))
.settings(buildSettings:_*)
.aggregate(ModuleA)
lazy val ModuleA =
project
.in(file("ModuleA"))
.settings(buildSettings:_*)
.configs(RunnerR1,RunnerR2)
.settings(inConfig(RunnerR1)(
forkRunOptions(ThisScope) ++
Seq(
mainClass := Option("sbt.tests.issueX.Application1"))):_*)
.settings(inConfig(RunnerR2)(
forkRunOptions(ThisScope) ++
Seq(
mainClass := Option("sbt.tests.issueX.Application2"))):_*)
I'm moving an SBT plugin from 0.12 over to 0.13. At various points in my plugin I schedule a dynamic set of tasks onto the SBT build graph.
Below is my old code. Is this still the idiomatic way to express this, or is it possible to leverage the macros to make everything prettier?
import sbt._
import Keys._

object Toplevel extends Build
{
  lazy val ordinals = taskKey[Seq[String]]("A list of things")
  lazy val times = taskKey[Int]("Number of times to list things")
  lazy val inParallel = taskKey[Seq[String]]("Strings to log in parallel")

  lazy val Foo = Project( id="Foo", base=file("foo"),
    settings = Defaults.defaultSettings ++ Seq(
      scalaVersion := "2.10.2",
      ordinals := Seq( "First", "Second", "Third", "Four", "Five" ),
      times := 3,
      inParallel <<= (times, ordinals, streams) flatMap
      { case (t, os, s) =>
        os.map( o => toTask( () =>
        {
          (0 until t).map( _ => o ).mkString(",")
        } ) ).join
      }
    )
  )
}
Apologies for the entirely contrived example!
EDIT
So, taking Mark's advice into account, I have the following tidier code:
import sbt._
import Keys._

object Toplevel extends Build
{
  lazy val ordinals = taskKey[Seq[String]]("A list of things")
  lazy val times = taskKey[Int]("Number of times to list things")
  lazy val inParallel = taskKey[Seq[String]]("Strings to log in parallel")

  def parTask = Def.taskDyn
  {
    val t = times.value
    ordinals.value.map(o => ordinalTask(o, t)).join
  }

  def ordinalTask(o: String, t: Int) = Def.task
  {
    (0 until t).map(_ => o).mkString(",")
  }

  lazy val Foo = Project( id="Foo", base=file("foo"),
    settings = Defaults.defaultSettings ++ Seq(
      scalaVersion := "2.10.2",
      ordinals := Seq( "First", "Second", "Third", "Four", "Five" ),
      times := 3,
      inParallel := parTask.value
    )
  )
}
This seems to be nearly there, but fails the build with:
[error] /home/alex.wilson/tmp/sbt0.13/project/build.scala:13: type mismatch;
[error] found : sbt.Def.Initialize[Seq[sbt.Task[String]]]
[error] required: sbt.Def.Initialize[sbt.Task[?]]
[error] ordinals.value.map(o => ordinalTask(o, t)).join
You can use Def.taskDyn, which provides the new syntax for flatMap. The difference from Def.task is that the expected return type is an Initialize[Task[T]] rather than just T. Translating your example:
inParallel := parTask.value

def parTask = Def.taskDyn {
  val t = times.value
  ordinals.value.map(o => ordinalTask(o, t)).joinWith(_.join)
}

def ordinalTask(o: String, t: Int) = Def.task {
  (0 until t).map(_ => o).mkString(",")
}