The dummy_rocc_test runs fine on Spike with the custom0 instruction, but when I put it on the Zed board and run it with ./fesvr-zynq pk dummy_rocc_test it gives me an illegal instruction error.
I understand this is because the RoCC interface is not built in the default config of rocket-chip, but I'd like to know how to enable the RoCC interface so I can run dummy_rocc_test on the Zed board. I tried giving BuildRoCC the value shown in the snippet below
case BuildRoCC => Some(() => (Module(new AccumulatorExample, { case CoreName => "rocket" })))
but when running make rocket, I get the error below:
[error] /home/prashantravi/rocket-chip/src/main/scala/Configs.scala:100: could not find implicit value for parameter p: cde.Parameters
[error] case BuildRoCC => Some(() => (Module(new AccumulatorExample, { case CoreName => "rocket" })))
[error] ^
[error] one error found
[error] (rocketchip/compile:compileIncremental) Compilation failed
[error] Total time: 8 s, completed Oct 27, 2015 11:24:59 AM
The Configs.scala code is as follows.
// See LICENSE for license details.
package rocketchip
import Chisel._
import junctions._
import uncore._
import rocket._
import rocket.Util._
import zscale._
import scala.math.max
import DefaultTestSuites._
import cde.{Parameters, Config, Dump, Knob}
class DefaultConfig extends Config (
topDefinitions = { (pname,site,here) =>
type PF = PartialFunction[Any,Any]
def findBy(sname:Any):Any = here[PF](site[Any](sname))(pname)
def genCsrAddrMap: AddrMap = {
val csrSize = (1 << 12) * (site(XLen) / 8)
val csrs = (0 until site(NTiles)).map{ i =>
AddrMapEntry(s"csr$i", None, MemSize(csrSize, AddrMapConsts.RW))
}
val scrSize = site(HtifKey).nSCR * (site(XLen) / 8)
val scr = AddrMapEntry("scr", None, MemSize(scrSize, AddrMapConsts.RW))
new AddrMap(csrs :+ scr)
}
pname match {
case HtifKey => HtifParameters(
width = Dump("HTIF_WIDTH", 16),
nSCR = 64,
offsetBits = site(CacheBlockOffsetBits),
nCores = site(NTiles))
//Memory Parameters
case PAddrBits => 32
case PgIdxBits => 12
case PgLevels => if (site(XLen) == 64) 3 /* Sv39 */ else 2 /* Sv32 */
case PgLevelBits => site(PgIdxBits) - log2Up(site(XLen)/8)
case VPNBits => site(PgLevels) * site(PgLevelBits)
case PPNBits => site(PAddrBits) - site(PgIdxBits)
case VAddrBits => site(VPNBits) + site(PgIdxBits)
case ASIdBits => 7
case MIFTagBits => Dump("MEM_TAG_BITS",
log2Up(site(NAcquireTransactors)+2) +
log2Up(site(NBanksPerMemoryChannel)) +
log2Up(site(NMemoryChannels)))
case MIFDataBits => Dump("MEM_DATA_BITS", 128)
case MIFAddrBits => Dump("MEM_ADDR_BITS", site(PAddrBits) - site(CacheBlockOffsetBits))
case MIFDataBeats => site(CacheBlockBytes) * 8 / site(MIFDataBits)
case NastiKey => NastiParameters(
dataBits = site(MIFDataBits),
addrBits = site(PAddrBits),
idBits = site(MIFTagBits))
//Params used by all caches
case NSets => findBy(CacheName)
case NWays => findBy(CacheName)
case RowBits => findBy(CacheName)
case NTLBEntries => findBy(CacheName)
case "L1I" => {
case NSets => Knob("L1I_SETS") //64
case NWays => Knob("L1I_WAYS") //4
case RowBits => 4*site(CoreInstBits)
case NTLBEntries => 8
}:PF
case "L1D" => {
case NSets => Knob("L1D_SETS") //64
case NWays => Knob("L1D_WAYS") //4
case RowBits => 2*site(CoreDataBits)
case NTLBEntries => 8
}:PF
case ECCCode => None
case Replacer => () => new RandomReplacement(site(NWays))
case AmoAluOperandBits => site(XLen)
//L1InstCache
case BtbKey => BtbParameters()
//L1DataCache
case WordBits => site(XLen)
case StoreDataQueueDepth => 17
case ReplayQueueDepth => 16
case NMSHRs => Knob("L1D_MSHRS")
case NIOMSHRs => 1
case LRSCCycles => 32
//L2 Memory System Params
case NAcquireTransactors => 7
case L2StoreDataQueueDepth => 1
case L2DirectoryRepresentation => new NullRepresentation(site(NTiles))
case BuildL2CoherenceManager => (p: Parameters) =>
Module(new L2BroadcastHub()(p.alterPartial({
case InnerTLId => "L1toL2"
case OuterTLId => "L2toMC" })))
//Tile Constants
case BuildTiles => {
TestGeneration.addSuites(rv64i.map(_("p")))
TestGeneration.addSuites((if(site(UseVM)) List("pt","v") else List("pt")).flatMap(env => rv64u.map(_(env))))
TestGeneration.addSuites(if(site(NTiles) > 1) List(mtBmarks, bmarks) else List(bmarks))
List.fill(site(NTiles)){ (r: Bool, p: Parameters) =>
Module(new RocketTile(resetSignal = r)(p.alterPartial({case TLId => "L1toL2"})))
}
}
case BuildRoCC => Some(() => (Module(new AccumulatorExample, { case CoreName => "rocket" })))
case RoccNMemChannels => 1
//Rocket Core Constants
case FetchWidth => 1
case RetireWidth => 1
case UseVM => true
case UsePerfCounters => true
case FastLoadWord => true
case FastLoadByte => false
case FastMulDiv => true
case XLen => 64
case UseFPU => {
val env = if(site(UseVM)) List("p","pt","v") else List("p","pt")
if(site(FDivSqrt)) TestGeneration.addSuites(env.map(rv64uf))
else TestGeneration.addSuites(env.map(rv64ufNoDiv))
true
}
case FDivSqrt => true
case SFMALatency => 2
case DFMALatency => 3
case CoreInstBits => 32
case CoreDataBits => site(XLen)
case NCustomMRWCSRs => 0
//Uncore Paramters
case RTCPeriod => 100 // gives 10 MHz RTC assuming 1 GHz uncore clock
case LNEndpoints => site(TLKey(site(TLId))).nManagers + site(TLKey(site(TLId))).nClients
case LNHeaderBits => log2Ceil(site(TLKey(site(TLId))).nManagers) +
log2Up(site(TLKey(site(TLId))).nClients)
case TLKey("L1toL2") =>
TileLinkParameters(
coherencePolicy = new MESICoherence(site(L2DirectoryRepresentation)),
nManagers = site(NBanksPerMemoryChannel)*site(NMemoryChannels),
nCachingClients = site(NTiles),
nCachelessClients = 1 + site(NTiles) *
(1 + (if(site(BuildRoCC).isEmpty) 0 else site(RoccNMemChannels))),
maxClientXacts = max(site(NMSHRs) + site(NIOMSHRs),
if(site(BuildRoCC).isEmpty) 1 else site(RoccMaxTaggedMemXacts)),
maxClientsPerPort = if(site(BuildRoCC).isEmpty) 1 else 2,
maxManagerXacts = site(NAcquireTransactors) + 2,
dataBits = site(CacheBlockBytes)*8)
case TLKey("L2toMC") =>
TileLinkParameters(
coherencePolicy = new MEICoherence(new NullRepresentation(site(NBanksPerMemoryChannel))),
nManagers = 1,
nCachingClients = site(NBanksPerMemoryChannel),
nCachelessClients = 0,
maxClientXacts = 1,
maxClientsPerPort = site(NAcquireTransactors) + 2,
maxManagerXacts = 1,
dataBits = site(CacheBlockBytes)*8)
case TLKey("Outermost") => site(TLKey("L2toMC")).copy(dataBeats = site(MIFDataBeats))
case NTiles => Knob("NTILES")
case NMemoryChannels => 1
case NBanksPerMemoryChannel => Knob("NBANKS")
case NOutstandingMemReqsPerChannel => site(NBanksPerMemoryChannel)*(site(NAcquireTransactors)+2)
case BankIdLSB => 0
case CacheBlockBytes => 64
case CacheBlockOffsetBits => log2Up(here(CacheBlockBytes))
case UseBackupMemoryPort => true
case MMIOBase => BigInt(1 << 30) // 1 GB
case ExternalIOStart => 2 * site(MMIOBase)
case GlobalAddrMap => AddrMap(
AddrMapEntry("mem", None, MemSize(site(MMIOBase), AddrMapConsts.RWX)),
AddrMapEntry("conf", None, MemSubmap(site(ExternalIOStart) - site(MMIOBase), genCsrAddrMap)),
AddrMapEntry("io", Some(site(ExternalIOStart)), MemSize(2 * site(MMIOBase), AddrMapConsts.RW)))
}},
knobValues = {
case "NTILES" => 1
case "NBANKS" => 1
case "L1D_MSHRS" => 2
case "L1D_SETS" => 64
case "L1D_WAYS" => 4
case "L1I_SETS" => 64
case "L1I_WAYS" => 4
}
)
class DefaultVLSIConfig extends DefaultConfig
class DefaultCPPConfig extends DefaultConfig
class With2Cores extends Config(knobValues = { case "NTILES" => 2 })
class With4Cores extends Config(knobValues = { case "NTILES" => 4 })
class With8Cores extends Config(knobValues = { case "NTILES" => 8 })
class With2Banks extends Config(knobValues = { case "NBANKS" => 2 })
class With4Banks extends Config(knobValues = { case "NBANKS" => 4 })
class With8Banks extends Config(knobValues = { case "NBANKS" => 8 })
class WithL2Cache extends Config(
(pname,site,here) => pname match {
case "L2_CAPACITY_IN_KB" => Knob("L2_CAPACITY_IN_KB")
case "L2Bank" => {
case NSets => (((here[Int]("L2_CAPACITY_IN_KB")*1024) /
site(CacheBlockBytes)) /
site(NBanksPerMemoryChannel)*site(NMemoryChannels)) /
site(NWays)
case NWays => Knob("L2_WAYS")
case RowBits => site(TLKey(site(TLId))).dataBitsPerBeat
}: PartialFunction[Any,Any]
case NAcquireTransactors => 2
case NSecondaryMisses => 4
case L2DirectoryRepresentation => new FullRepresentation(site(NTiles))
case BuildL2CoherenceManager => (p: Parameters) =>
Module(new L2HellaCacheBank()(p.alterPartial({
case CacheName => "L2Bank"
case InnerTLId => "L1toL2"
case OuterTLId => "L2toMC"})))
},
knobValues = { case "L2_WAYS" => 8; case "L2_CAPACITY_IN_KB" => 2048 }
)
class WithL2Capacity2048 extends Config(knobValues = { case "L2_CAPACITY_IN_KB" => 2048 })
class WithL2Capacity1024 extends Config(knobValues = { case "L2_CAPACITY_IN_KB" => 1024 })
class WithL2Capacity512 extends Config(knobValues = { case "L2_CAPACITY_IN_KB" => 512 })
class WithL2Capacity256 extends Config(knobValues = { case "L2_CAPACITY_IN_KB" => 256 })
class WithL2Capacity128 extends Config(knobValues = { case "L2_CAPACITY_IN_KB" => 128 })
class WithL2Capacity64 extends Config(knobValues = { case "L2_CAPACITY_IN_KB" => 64 })
class DefaultL2Config extends Config(new WithL2Cache ++ new DefaultConfig)
class DefaultL2VLSIConfig extends Config(new WithL2Cache ++ new DefaultVLSIConfig)
class DefaultL2CPPConfig extends Config(new WithL2Cache ++ new DefaultCPPConfig)
class DefaultL2FPGAConfig extends Config(new WithL2Capacity64 ++ new WithL2Cache ++ new DefaultFPGAConfig)
class WithZscale extends Config(
(pname,site,here) => pname match {
case BuildZscale => {
TestGeneration.addSuites(List(rv32ui("p"), rv32um("p")))
TestGeneration.addSuites(List(zscaleBmarks))
(r: Bool, p: Parameters) => Module(new Zscale(r)(p))
}
case BootROMCapacity => Dump("BOOT_CAPACITY", 16*1024)
case DRAMCapacity => Dump("DRAM_CAPACITY", 64*1024*1024)
}
)
class ZscaleConfig extends Config(new WithZscale ++ new DefaultConfig)
class FPGAConfig extends Config (
(pname,site,here) => pname match {
case NAcquireTransactors => 4
case UseBackupMemoryPort => false
}
)
class DefaultFPGAConfig extends Config(new FPGAConfig ++ new DefaultConfig)
class SmallConfig extends Config (
topDefinitions = { (pname,site,here) => pname match {
case UseFPU => false
case FastMulDiv => false
case NTLBEntries => 4
case BtbKey => BtbParameters(nEntries = 8)
}},
knobValues = {
case "L1D_SETS" => 64
case "L1D_WAYS" => 1
case "L1I_SETS" => 64
case "L1I_WAYS" => 1
}
)
class DefaultFPGASmallConfig extends Config(new SmallConfig ++ new DefaultFPGAConfig)
class ExampleSmallConfig extends Config(new SmallConfig ++ new DefaultConfig)
class MultibankConfig extends Config(new With2Banks ++ new DefaultConfig)
class MultibankL2Config extends Config(
new With2Banks ++ new WithL2Cache ++ new DefaultConfig)
Rocc.scala, where the actual accumulator example is defined, is as follows:
// See LICENSE for license details.
package rocket
import Chisel._
import uncore._
import Util._
import cde.{Parameters, Field}
case object RoccMaxTaggedMemXacts extends Field[Int]
case object RoccNMemChannels extends Field[Int]
class RoCCInstruction extends Bundle
{
val funct = Bits(width = 7)
val rs2 = Bits(width = 5)
val rs1 = Bits(width = 5)
val xd = Bool()
val xs1 = Bool()
val xs2 = Bool()
val rd = Bits(width = 5)
val opcode = Bits(width = 7)
}
class RoCCCommand(implicit p: Parameters) extends CoreBundle()(p) {
val inst = new RoCCInstruction
val rs1 = Bits(width = xLen)
val rs2 = Bits(width = xLen)
}
class RoCCResponse(implicit p: Parameters) extends CoreBundle()(p) {
val rd = Bits(width = 5)
val data = Bits(width = xLen)
}
class RoCCInterface(implicit p: Parameters) extends Bundle {
val cmd = Decoupled(new RoCCCommand).flip
val resp = Decoupled(new RoCCResponse)
val mem = new HellaCacheIO()(p.alterPartial({ case CacheName => "L1D" }))
val busy = Bool(OUTPUT)
val s = Bool(INPUT)
val interrupt = Bool(OUTPUT)
// These should be handled differently, eventually
val imem = new ClientUncachedTileLinkIO
val dmem = Vec(p(RoccNMemChannels), new ClientUncachedTileLinkIO)
val iptw = new TLBPTWIO
val dptw = new TLBPTWIO
val pptw = new TLBPTWIO
val exception = Bool(INPUT)
}
abstract class RoCC(implicit p: Parameters) extends CoreModule()(p) {
val io = new RoCCInterface
io.mem.req.bits.phys := Bool(true) // don't perform address translation
}
class AccumulatorExample(n: Int = 4)(implicit p: Parameters) extends RoCC()(p) {
val regfile = Mem(UInt(width = xLen), n)
val busy = Reg(init=Vec(Bool(false), n))
val cmd = Queue(io.cmd)
val funct = cmd.bits.inst.funct
val addr = cmd.bits.inst.rs2(log2Up(n)-1,0)
val doWrite = funct === UInt(0)
val doRead = funct === UInt(1)
val doLoad = funct === UInt(2)
val doAccum = funct === UInt(3)
val memRespTag = io.mem.resp.bits.tag(log2Up(n)-1,0)
// datapath
val addend = cmd.bits.rs1
val accum = regfile(addr)
val wdata = Mux(doWrite, addend, accum + addend)
when (cmd.fire() && (doWrite || doAccum)) {
regfile(addr) := wdata
}
when (io.mem.resp.valid) {
regfile(memRespTag) := io.mem.resp.bits.data
}
// control
when (io.mem.req.fire()) {
busy(addr) := Bool(true)
}
when (io.mem.resp.valid) {
busy(memRespTag) := Bool(false)
}
val doResp = cmd.bits.inst.xd
val stallReg = busy(addr)
val stallLoad = doLoad && !io.mem.req.ready
val stallResp = doResp && !io.resp.ready
cmd.ready := !stallReg && !stallLoad && !stallResp
// command resolved if no stalls AND not issuing a load that will need a request
// PROC RESPONSE INTERFACE
io.resp.valid := cmd.valid && doResp && !stallReg && !stallLoad
// valid response if valid command, need a response, and no stalls
io.resp.bits.rd := cmd.bits.inst.rd
// Must respond with the appropriate tag or undefined behavior
io.resp.bits.data := accum
// Semantics is to always send out prior accumulator register value
io.busy := cmd.valid || busy.reduce(_||_)
// Be busy when have pending memory requests or committed possibility of pending requests
io.interrupt := Bool(false)
// Set this true to trigger an interrupt on the processor (please refer to supervisor documentation)
// MEMORY REQUEST INTERFACE
io.mem.req.valid := cmd.valid && doLoad && !stallReg && !stallResp
io.mem.req.bits.addr := addend
io.mem.req.bits.tag := addr
io.mem.req.bits.cmd := M_XRD // perform a load (M_XWR for stores)
io.mem.req.bits.typ := MT_D // D = 8 bytes, W = 4, H = 2, B = 1
io.mem.req.bits.data := Bits(0) // we're not performing any stores...
io.mem.invalidate_lr := false
io.imem.acquire.valid := false
io.imem.grant.ready := false
io.dmem.head.acquire.valid := false
io.dmem.head.grant.ready := false
io.iptw.req.valid := false
io.dptw.req.valid := false
io.pptw.req.valid := false
}
It would be better to see the complete source of Configs.scala, but it seems the Module constructor is missing an implicit parameter. If you add (implicit p: Parameters) to the method that contains the statement above (the one reported at line 100), the code should compile.
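For concreteness, a minimal sketch of that shape (the working config shown below follows the same pattern) is to make the BuildRoCC entry a function that receives the Parameters value explicitly, instead of relying on an implicit that is not in scope inside topDefinitions:
// Sketch only: pass the Parameters value explicitly to the RoCC builder.
case BuildRoCC => Some((p: Parameters) =>
  Module(new AccumulatorExample()(p.alterPartial({ case CoreName => "rocket" }))))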
The issue was solved by adding the following piece of code to Configs.scala:
class WithAccumRocc extends Config(
(pname,site,here) => pname match {
case RoccNMemChannels => 1
case RoccMaxTaggedMemXacts => 0
case BuildRoCC => {
Some((p: Parameters) =>
Module(new AccumulatorExample()(p.alterPartial({ case CoreName => "AccumRocc" }))))
}
}
)
class WithRoCCConfig extends Config(new WithAccumRocc ++ new DefaultFPGAConfig)
After this, build rocket with the new config, i.e. make rocket CONFIG=WithRoCCConfig.
Also, do not forget to regenerate the Vivado project and bitstream with the same config parameter.
I need to process a set of Ids and return the result as zio.Task[List[RelevantReadingRow]]
def getBaselinesForRequestIds(baseLineReqIds: Set[String]): Task[List[RelevantReadingRow]] =
dynamoConnection
.run(
table.getAll("baseline_req_id" in baseLineReqIds)
)
.flatMap(_.toList.separate match {
case (err :: _, _) => ZIO.fail(new Throwable(describe(err)))
case (Nil, relevantReadings) => ZIO.succeed(relevantReadings)
})
The code above works, but I need to process them in batches of at most 25 elements.
I have tried this, but then I get a List of zio.Task:
def getBaselinesForRequestIds(baseLineReqIds: Set[String]): Task[List[RelevantReadingRow]] = {
val subSet = baseLineReqIds.grouped(25).toList
val res = for {
rows <- subSet.map(reqIds => dynamoConnection
.run(
table.getAll("baseline_req_id" in reqIds)
).flatMap(e => e.toList.separate match {
case (err :: _, _) => ZIO.fail(new Throwable(describe(err)))
case (Nil, relevantReadings) => ZIO.succeed(relevantReadings)
}))
} yield rows
res // this is List[zio.Task[List[RelevantReadingRow]]]
}
I don't know how to convert that back to a zio.Task[List[RelevantReadingRow]].
Any suggestions?
You can use ZIO.collectAll to convert a List[Task] into a Task[List]. I thought it was ZIO.sequence... maybe I'm getting confused with cats.
The following example works with ZIO 2:
package sample
import zio._
object App extends ZIOAppDefault {
case class Result(value: Int) extends AnyVal
val data: List[Task[List[Result]]] = List(
ZIO.attempt(List(Result(1), Result(2))),
ZIO.attempt(List(Result(3)))
)
val flattenValues: Task[List[Result]] = for {
values <- ZIO.collectAll { data }
} yield values.flatten
val app = for {
values <- flattenValues
_ <- Console.printLine(values)
} yield ()
def run = app
}
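For what it's worth, ZIO also ships a parallel variant; with the same data value as in the example above, the batches can be collected concurrently (a small sketch):
// Runs the tasks in data in parallel, then flattens the nested lists.
val flattenValuesPar: Task[List[Result]] =
  ZIO.collectAllPar(data).map(_.flatten)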
In particular, for your sample, and assuming that 'separate' is just an extension method that collects errors (it returns a tuple of an error list and a result list), and ignoring the 'describe' method that turns an error into a Throwable:
https://scastie.scala-lang.org/jgoday/XKxVP2ECSFOv4chgSFCckg/7
package sample
import zio._
object App extends ZIOAppDefault {
class DynamoMock {
def run: Task[List[RelevantReadingRow]] = ZIO.attempt {
List(
RelevantReadingRow(1),
RelevantReadingRow(2),
)
}
}
case class RelevantReadingRow(value: Int) extends AnyVal
implicit class ListSeparate(list: List[RelevantReadingRow]) {
def separate: (List[String], List[RelevantReadingRow]) =
(Nil, list)
}
def getBaselinesForRequestIds(baseLineReqIds: Set[String]): Task[List[RelevantReadingRow]] = {
val dynamoConnection = new DynamoMock()
val subSet = baseLineReqIds.grouped(25).toList
val res: List[Task[List[RelevantReadingRow]]] = for {
rows <- subSet.map(reqIds => dynamoConnection
.run.flatMap(e => e.toList.separate match {
case (err :: _, _) => ZIO.fail(new Throwable(err))
case (Nil, relevantReadings) => ZIO.succeed(relevantReadings)
}))
} yield rows
for {
rows <- ZIO.collectAll(res)
} yield rows.flatten
}
val app = for {
values <- getBaselinesForRequestIds(Set("id1", "id2"))
_ <- Console.printLine(values)
} yield ()
def run = app
}
So, here is an alternative solution based on @jgoday's answer:
def getBaselinesForRequestIds(baseLineReqIds: Set[String]): Task[List[RelevantReadingRow]] =
for {
values <- ZIO.foreachPar(baseLineReqIds.grouped(25).toList) {
reqIds =>
dynamoConnection
.run(
table.getAll("baseline_req_id" in reqIds)
).flatMap(e => e.toList.separate match {
case (err :: _, _) => ZIO.fail(new Throwable(describe(err)))
case (Nil, relevantReadings) => ZIO.succeed(relevantReadings)
})
}
} yield values.flatten
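If you would rather not hit DynamoDB with all the batches in parallel at once, the same shape works with the sequential ZIO.foreach. This is only a sketch that reuses the dynamoConnection, table, and describe from the question:
def getBaselinesForRequestIds(baseLineReqIds: Set[String]): Task[List[RelevantReadingRow]] =
  ZIO.foreach(baseLineReqIds.grouped(25).toList) { reqIds =>
    dynamoConnection
      .run(table.getAll("baseline_req_id" in reqIds))
      .flatMap(e => e.toList.separate match {
        case (err :: _, _) => ZIO.fail(new Throwable(describe(err)))
        case (Nil, relevantReadings) => ZIO.succeed(relevantReadings)
      })
  }.map(_.flatten) // List[List[RelevantReadingRow]] -> List[RelevantReadingRow]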
What is the most elegant way to handle None and Some(...) in the same case? Something like:
val data: Option[Int] = getSomeData()
data match {
case None || Some(data) && data > 50 =>
case _ =>
}
You can use Option.forall as the condition.
def foo(data: Option[Int]): Unit =
if (data.forall(_ > 50)) println("OK")
else println("KO")
foo(None)
// => OK
foo(Some(1))
// => KO
foo(Some(51))
// OK
Normally such pattern matching can be written as follows
data match {
case None => doSomething()
case Some(data) if data > 50 => doSomething()
case _ => doOther()
}
If such a combination (None || Some(data) && data > 50) happens often, you can introduce a custom extractor
object GreaterThan50OrEmpty {
def unapply(arg: Option[Int]): Boolean = arg match {
case None => true
case Some(data) if data > 50 => true
case _ => false
}
}
data match {
case GreaterThan50OrEmpty() => println("matches pattern")
case _ => println("default")
}
You can even name it whatever you want
object `None || Some(data) && data > 50` {
def unapply(arg: Option[Int]): Boolean = arg match {
case None => true
case Some(data) if data > 50 => true
case _ => false
}
}
data match {
case `None || Some(data) && data > 50`() => println("matches pattern")
case _ => println("default")
}
A slightly more general approach:
class GreaterThanOrEmpty(dataBound: Int) {
def unapply(arg: Option[Int]): Boolean = arg match {
case None => true
case Some(data) if data > dataBound => true
case _ => false
}
}
val GreaterThan50OrEmpty = new GreaterThanOrEmpty(50)
data match {
case GreaterThan50OrEmpty() => println("matches pattern")
case _ => println("default")
}
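The same class can then be reused for other bounds, for example:
val GreaterThan100OrEmpty = new GreaterThanOrEmpty(100)
(Some(150): Option[Int]) match {
  case GreaterThan100OrEmpty() => println("matches pattern")
  case _ => println("default")
}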
You can even generate such an unapply automatically (although I guess it's not worth it)
import scala.annotation.{StaticAnnotation, compileTimeOnly}
import scala.language.experimental.macros
import scala.reflect.macros.blackbox
@compileTimeOnly("enable macro paradise")
class extractor[A] extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro ExtractorMacro.impl
}
object ExtractorMacro {
def impl(c: blackbox.Context)(annottees: c.Tree*): c.Tree = {
import c.universe._
val typA = c.prefix.tree match {
case q"new extractor[$a]" => a
}
annottees match {
case q"$mods object $tname extends { ..$earlydefns } with ..$parents { $self => ..$body }" :: Nil =>
val cases = tname.decoded.split('|').map(s => s"case $s => true").mkString("\n")
val casesTree = c.parse(
s"""arg match {
| $cases
| case _ => false
|}""".stripMargin)
q"""$mods object $tname extends { ..$earlydefns } with ..$parents { $self =>
..$body
def unapply(arg: $typA): Boolean = $casesTree
}"""
case _ => c.abort(c.enclosingPosition, "not object")
}
}
}
Usage:
@extractor[Option[Int]]
object `Some(x) if x < 25 | None | Some(data) if data > 50`
//Warning:scalac: object ... extends scala.AnyRef {
// ...
// def unapply(arg: Option[Int]): Boolean = arg match {
// case Some((x @ _)) if x.$less(25) => true
// case None => true
// case Some((data @ _)) if data.$greater(50) => true
// case _ => false
// }
//}
def test(arg: Any) = arg match {
case `Some(x) if x < 25 | None | Some(data) if data > 50`() =>
println("matches pattern")
case _ => println("default")
}
test(None) // matches pattern
test(Some(51)) // matches pattern
test(Some(24)) // matches pattern
test(Some(30)) // default
I've been looking for a way to do this without a queue, and to make it tail-recursive. I'm thinking LazyLists might also help. Would a queue be faster? I'm basically sending mutated state down through each function call with the next level of children.
case class Tree [A] (
value : A,
Right: Option[Tree[A]],
Left: Option[Tree[A]]
)
object Tree {
def liftChildren[A](t: Tree[A]) = {
List(t.Left, t.Right).flatten
}
def findChild[A](value: A, t: Tree[A]) : Option[Tree[A]] = {
var lvl = 0
def searchChildren(t: List[Tree[A]]): (Option[Tree[A]], List[Tree[A]]) = {
// could be removed, just for fun
lvl += 1
t.foreach(tt => println(s"Scanning Level ${lvl.toString} Value ${tt.value.toString}"))
//
val curfind = t.find(tt => {
tt.value == value
})
curfind match {
case Some(tr) => (Some(tr), t)
case None => {
val children: List[Tree[A]] = t.flatMap(tt => Tree.liftChildren(tt))
children.isEmpty match {
case true => (None, List.empty)
case false => searchChildren(children)
}
}
}
}
searchChildren(List(t))._1
}
}
object main extends App {
println("hello world")
val tree = Tree[Int](
1,
Some(
Tree[Int](2, None, Some(
Tree[Int](5,None, Some(Tree[Int](6, None,None))))
)
) ,
Some(
Tree[Int](
3,
Some(
Tree[Int](4, None, Some(Tree[Int](7, None,None)))
), None
)
)
)
val res = Tree.findChild(6, tree)
println("FoundIt" + res)
}
It's working as I expect. I'm just wondering whether this could be any better or more idiomatic FP. Would the cats library help at all?
Here is a tail-recursive implementation, using pattern matching.
final case class Tree[+A](value: A, left: Option[Tree[A]], right: Option[Tree[A]])
def find[A](value: A)(tree: Tree[A]): Option[Tree[A]] = {
import scala.collection.immutable.Queue
@annotation.tailrec
def bfs(queue: Queue[Tree[A]]): Option[Tree[A]] =
queue.dequeueOption match {
case None => None
case Some((tree, remaining)) => tree match {
case Tree(`value`, _, _) => Some(tree)
case Tree(_, Some(left), Some(right)) => bfs(queue = remaining.enqueue(left).enqueue(right))
case Tree(_, Some(left), None) => bfs(queue = remaining.enqueue(left))
case Tree(_, None, Some(right)) => bfs(queue = remaining.enqueue(right))
case Tree(_, None, None) => bfs(queue = remaining)
}
}
bfs(queue = Queue(tree))
}
def find[A](value: A)(tree: Tree[A]): Option[Tree[A]] = {
@annotation.tailrec
def dfs(stack: List[Tree[A]]): Option[Tree[A]] =
stack match {
case Nil => None
case tree :: remaining => tree match {
case Tree(`value`, _, _) => Some(tree)
case Tree(_, Some(left), Some(right)) => dfs(stack = left :: right :: remaining)
case Tree(_, Some(left), None) => dfs(stack = left :: remaining)
case Tree(_, None, Some(right)) => dfs(stack = right :: remaining)
case Tree(_, None, None) => dfs(stack = remaining)
}
}
dfs(stack = List(tree))
}
Here are some implementations using LazyList.
final case class Tree[+A](value: A, children: List[Tree[A]])
// DFS by right.
def find[A](value: A)(tree: Tree[A]): Option[Tree[A]] =
LazyList.unfold(List(tree)) {
case Nil => None
case tree :: remaining => Some((tree, tree.children reverse_::: remaining))
}.find(tree => tree.value == value)
// DFS by left.
def find[A](value: A)(tree: Tree[A]): Option[Tree[A]] =
LazyList.unfold(List(tree)) {
case Nil => None
case tree :: remaining => Some((tree, tree.children ::: remaining))
}.find(tree => tree.value == value)
// BFS
def find[A](value: A)(tree: Tree[A]): Option[Tree[A]] =
LazyList.unfold(Queue(tree)) { queue =>
queue.dequeueOption.map {
case (tree, remaining) => (tree, remaining.enqueueAll(tree.children))
}
}.find(tree => tree.value == value)
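The question also asked whether cats would help. There is no ready-made tree-search combinator to reach for, but the explicit recursion can be phrased with Monad[Id].tailRecM, which is tail-recursive by construction. This is only a sketch, assuming cats is on the classpath and using the children-list Tree from the last example:
import cats.{Id, Monad}
import scala.collection.immutable.Queue

// BFS as tailRecM: Left means keep searching with the updated queue,
// Right means done (the matching node, or None once the queue is empty).
def find[A](value: A)(tree: Tree[A]): Option[Tree[A]] =
  Monad[Id].tailRecM[Queue[Tree[A]], Option[Tree[A]]](Queue(tree)) { queue =>
    queue.dequeueOption match {
      case None                             => Right(None)
      case Some((t, _)) if t.value == value => Right(Some(t))
      case Some((t, remaining))             => Left(remaining.enqueueAll(t.children))
    }
  }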
I have implemented a Play! 2 QueryStringBindable in Scala for a Range type. A Range consists of either a min or a max value, or both (of type Float). In my QueryStringBindable implementation I use the underlying intBinder to convert the two possible parameters, min and max, to Option[Either[String, Float]], combine them in a tuple, pattern match over this, and finally return an Option[Either[String, Range]]. This works, but as you can see in the code below, the pattern match is very verbose. Is there a more concise way of doing this in Scala?
Maybe leverage higher order functions somehow to get the same result structure back?
import play.api.mvc.QueryStringBindable
case class Range(min: Option[Float], max: Option[Float])
object Range {
implicit def rangeQueryStringBindable(implicit intBinder: QueryStringBindable[Float]) = new QueryStringBindable[Range] {
override def bind(key: String, params: Map[String, Seq[String]]): Option[Either[String, Range]] = {
val minOpt = intBinder.bind("min", params)
val maxOpt = intBinder.bind("max", params)
(minOpt, maxOpt) match {
case (None, None) => None
case (Some(Right(min)), Some(Right(max))) => Some(Right(Range(Some(min), Some(max))))
case (None, Some(Right(max))) => Some(Right(Range(None, Some(max))))
case (Some(Right(min)), None) => Some(Right(Range(Some(min), None)))
case (Some(Left(minError)), Some(Left(maxError))) => Some(Left(minError))
case (Some(Left(minError)), None) => Some(Left(minError))
case (None, Some(Left(maxError))) => Some(Left(maxError))
case (Some(Right(_)), Some(Left(maxError))) => Some(Left(maxError))
case (Some(Left(minError)), Some(Right(_))) => Some(Left(minError))
}
}
override def unbind(key: String, range: Range): String = {
(range.min, range.max) match {
case (Some(min), Some(max)) => intBinder.unbind("min", min) + "&" + intBinder.unbind("max", max)
case (Some(min), None) => intBinder.unbind("min", min)
case (None, Some(max)) => intBinder.unbind("max", max)
case (None, None) => throw new IllegalArgumentException("Range without values makes no sense")
}
}
}
}
(minOpt,maxOpt) match {
case (None,None) => None
case (Some(Left(m)),_) => Some(Left(m))
case (_,Some(Left(m))) => Some(Left(m))
case (_,_) => Some(Right(Range(minOpt.map(_.right.get),maxOpt.map(_.right.get))))
}
With a couple of functions to convert an Option[Either[Error, A]] to an Either[Error, Option[A]], you can end up with something a bit cleaner in my view. I also recommend renaming Range, since it conflicts with the class of the same name in scala.collection.immutable.
import play.api.mvc.QueryStringBindable
case class RealRange(min: Option[Float], max: Option[Float])
object BindingEitherUtils {
implicit class OptionWithEitherFlatten[A, B](value: Option[Either[A, B]]) {
def flattenRight: Either[A, Option[B]] = {
value.map { either =>
either.right.map{ right => Some(right) }
}.getOrElse{ Right(None) }
}
}
implicit class EitherWithUnflatten[A, B](value: Either[A, Option[B]]) {
def unflattenRight: Option[Either[A, B]] = {
value.fold(left => Some(Left(left)), _.map{ right => Right(right) })
}
}
}
object RealRange {
import BindingEitherUtils._
val minError = "Invalid minimum value for RealRange"
val maxError = "Invalid maximum value for RealRange"
implicit def rangeQueryStringBindable(implicit floatBinder: QueryStringBindable[Float]) = new QueryStringBindable[RealRange] {
override def bind(key: String, params: Map[String, Seq[String]]): Option[Either[String, RealRange]] = {
val minOpt = floatBinder.bind("min", params).flattenRight
val maxOpt = floatBinder.bind("max", params).flattenRight
minOpt.left.map{ _ => minError }.right.flatMap { min =>
maxOpt.left.map{ _ => maxError }.right.flatMap { max =>
(min, max) match {
case (None, None ) =>
Right(None)
case (Some(minVal), Some(maxVal)) if minVal > maxVal =>
Left("Minimum value is larger than maximum value")
case _ =>
Right(Some(RealRange(min, max)))
}
}
}.unflattenRight
}
override def unbind(key: String, range: RealRange): String = {
(range.min, range.max) match {
case (Some(min), Some(max)) => floatBinder.unbind("min", min) + "&" + floatBinder.unbind("max", max)
case (Some(min), None) => floatBinder.unbind("min", min)
case (None, Some(max)) => floatBinder.unbind("max", max)
case (None, None) => throw new IllegalArgumentException("RealRange without values makes no sense")
}
}
}
def test(): Unit = {
val binder = rangeQueryStringBindable
Seq[(String, String)](
("10", "20"),
("10", null),
(null, "10"),
(null, null),
("asd", "asd"),
("10", "asd"),
("asd", "10"),
("asd", null),
(null, "asd"),
("20", "10")
).foreach{ case (min, max) =>
val params = Seq(
Option(min).map{ m => "min" -> Seq(m) },
Option(max).map{ m => "max" -> Seq(m) }
).flatten.toMap
val result = binder.bind("", params)
println(s"$params => $result" )
}
}
}
Which results in:
Map(min -> List(10), max -> List(20)) =>
Some(Right(RealRange(Some(10.0),Some(20.0))))
Map(min -> List(10)) =>
Some(Right(RealRange(Some(10.0),None)))
Map(max -> List(10)) =>
Some(Right(RealRange(None,Some(10.0))))
Map() =>
None
Map(min -> List(asd), max -> List(asd)) =>
Some(Left(Invalid minimum value for RealRange))
Map(min -> List(10), max -> List(asd)) =>
Some(Left(Invalid maximum value for RealRange))
Map(min -> List(asd), max -> List(10)) =>
Some(Left(Invalid minimum value for RealRange))
Map(min -> List(asd)) =>
Some(Left(Invalid minimum value for RealRange))
Map(max -> List(asd)) =>
Some(Left(Invalid maximum value for RealRange))
Map(min -> List(20), max -> List(10)) =>
Some(Left(Minimum value is larger than maximum value))
Yes, it can be simplified.
For the bind method you can use a few wildcards in the error cases to simplify it. That way you only have 4 permutations for the Range assembly logic. I wouldn't do too much magic here, as it would make your code harder to understand.
override def bind(key: String, params: Map[String, Seq[String]]): Option[Either[String, Range]] = {
val minOpt = intBinder.bind("min", params)
val maxOpt = intBinder.bind("max", params)
(minOpt, maxOpt) match {
case (None, None) => None
case (Some(Right(min)), Some(Right(max))) => Some(Right(Range(Some(min), Some(max))))
case (None, Some(Right(max))) => Some(Right(Range(None, Some(max))))
case (Some(Right(min)), None) => Some(Right(Range(Some(min), None)))
// Error handling
case (Some(Left(minError)), _) => Some(Left(minError))
case (_, Some(Left(maxError))) => Some(Left(maxError))
}
}
For the unbind I would use a different approach: by using Option's map function and then combining the results into an Iterable, you can call mkString, which leaves a single string alone and joins two strings with a &. The code example spells out the types, so it is easier to follow.
def unbind(key: String, range: Range): String = {
val minString: Option[String] = range.min.map(min => intBinder.unbind("min", min))
val maxString: Option[String] = range.max.map(max => intBinder.unbind("max", max))
val strings: Iterable[String] = minString ++ maxString
strings match {
case Nil => throw new IllegalArgumentException("Range without values makes no sense")
case _ => strings.mkString("&")
}
}
And if you're into short code:
def unbind(key: String, range: Range): String = {
val minString = range.min.map(min => intBinder.unbind("min", min))
val maxString = range.max.map(max => intBinder.unbind("max", max))
minString ++ maxString match {
case Nil => throw new IllegalArgumentException("Range without values makes no sense")
case strings => strings.mkString("&")
}
}
I have multiple Options. I want to check if they all hold a value. If any Option is None, I want to reply to the user about this; otherwise, proceed.
This is what I have done:
val name:Option[String]
val email:Option[String]
val pass:Option[String]
val i = List(name,email,pass).find(x => x match{
case None => true
case _ => false
})
i match{
case Some(x) => Ok("Bad Request")
case None => {
//move forward
}
}
Above, I can replace find with contains, but this is a very dirty way. How can I make it elegant and monadic?
Edit: I would also like to know which element was None.
Another way is as a for-comprehension:
val outcome = for {
nm <- name
em <- email
pwd <- pass
result = doSomething(nm, em, pwd) // where def doSomething(name: String, email: String, password: String): ResultType = ???
} yield (result)
This will generate outcome as a Some(result), which you can interrogate in various ways (all the methods available to the collection classes: map, filter, foreach, etc.). E.g.:
outcome.map(result => Ok(result)).getOrElse(Ok("Bad Request"))
val ok = Seq(name, email, pass).forall(_.isDefined)
If you want to reuse the code, you can do
def allFieldValueProvided(fields: Option[_]*): Boolean = fields.forall(_.isDefined)
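For example, with the helper above:
allFieldValueProvided(Some("Bob"), Some("bob@example.com"), Some("secret")) // true
allFieldValueProvided(Some("Bob"), None, Some("secret"))                    // false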
If you want to know all the missing values, you can collect them; if there are none, you are good to go:
def findMissingValues(v: (String, Option[_])*) = v.collect {
case (name, None) => name
}
val missingValues = findMissingValues(("name1", option1), ("name2", option2), ...)
if(missingValues.isEmpty) {
Ok(...)
} else {
BadRequest("Missing values for " + missingValues.mkString(", ")))
}
val response = for {
n <- name
e <- email
p <- pass
} yield {
/* do something with n, e, p */
}
response getOrElse { /* bad request */ }
Or, with Scalaz:
val response = (name |@| email |@| pass) { (n, e, p) =>
/* do something with n, e, p */
}
response getOrElse { /* bad request */ }
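For completeness, the cats equivalent uses mapN on the tuple of options. This is a sketch assuming cats is on the classpath, with placeholder values standing in for the three fields:
import cats.implicits._

val name:  Option[String] = Some("Bob")
val email: Option[String] = Some("bob@example.com")
val pass:  Option[String] = Some("secret")

// mapN only runs the function when all three options are defined,
// mirroring the Scalaz |@| version above.
val response: Option[String] = (name, email, pass).mapN { (n, e, p) =>
  s"$n / $e / ${p.length} chars" // stand-in for the real work
}
response getOrElse "bad request"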
if ((name :: email :: pass :: Nil) forall(!_.isEmpty)) {
} else {
// bad request
}
I think the most straightforward way would be this:
(name,email,pass) match {
case (Some(name), Some(email), Some(pass)) => // proceed
case _ => // Bad request
}
A version with stone knives and bear skins:
import util._
object Test extends App {
val zero: Either[List[Int], Tuple3[String,String,String]] = Right((null,null,null))
def verify(fields: List[Option[String]]) = {
(zero /: fields.zipWithIndex) { (acc, v) => v match {
case (Some(s), i) => acc match {
case Left(_) => acc
case Right(t) =>
val u = i match {
case 0 => t copy (_1 = s)
case 1 => t copy (_2 = s)
case 2 => t copy (_3 = s)
}
Right(u)
}
case (None, i) =>
val fails = acc match {
case Left(f) => f
case Right(_) => Nil
}
Left(i :: fails)
}
}
}
def consume(name: String, email: String, pass: String) = Console println s"$name/$email/$pass"
def fail(is: List[Int]) = is map List("name","email","pass") foreach (Console println "Missing: " + _)
val name:Option[String] = Some("Bob")
val email:Option[String]= None
val pass:Option[String] = Some("boB")
val res = verify(List(name,email,pass))
res.fold(fail, (consume _).tupled)
val res2 = verify(List(name, Some("bob@bob.org"),pass))
res2.fold(fail, (consume _).tupled)
}
The same thing, using reflection to generalize the tuple copy.
The downside is that you must tell it what tuple to expect back. In this form, reflection is like one of those Stone Age advances that were so magical they trended on Twitter for ten thousand years.
def verify[A <: Product](fields: List[Option[String]]) = {
import scala.reflect.runtime._
import universe._
val MaxTupleArity = 22
def tuple = {
require (fields.length <= MaxTupleArity)
val n = fields.length
val tupleN = typeOf[Tuple2[_,_]].typeSymbol.owner.typeSignature member TypeName(s"Tuple$n")
val init = tupleN.typeSignature member nme.CONSTRUCTOR
val ctor = currentMirror reflectClass tupleN.asClass reflectConstructor init.asMethod
val vs = Seq.fill(n)(null.asInstanceOf[String])
ctor(vs: _*).asInstanceOf[Product]
}
def zero: Either[List[Int], Product] = Right(tuple)
def nextProduct(p: Product, i: Int, s: String) = {
val im = currentMirror reflect p
val ts = im.symbol.typeSignature
val copy = (ts member TermName("copy")).asMethod
val args = copy.paramss.flatten map { x =>
val name = TermName(s"_$i")
if (x.name == name) s
else (im reflectMethod (ts member x.name).asMethod)()
}
(im reflectMethod copy)(args: _*).asInstanceOf[Product]
}
(zero /: fields.zipWithIndex) { (acc, v) => v match {
case (Some(s), i) => acc match {
case Left(_) => acc
case Right(t) => Right(nextProduct(t, i + 1, s))
}
case (None, i) =>
val fails = acc match {
case Left(f) => f
case Right(_) => Nil
}
Left(i :: fails)
}
}.asInstanceOf[Either[List[Int], A]]
}
def consume(name: String, email: String, pass: String) = Console println s"$name/$email/$pass"
def fail(is: List[Int]) = is map List("name","email","pass") foreach (Console println "Missing: " + _)
val name:Option[String] = Some("Bob")
val email:Option[String]= None
val pass:Option[String] = Some("boB")
type T3 = Tuple3[String,String,String]
val res = verify[T3](List(name,email,pass))
res.fold(fail, (consume _).tupled)
val res2 = verify[T3](List(name, Some("bob@bob.org"),pass))
res2.fold(fail, (consume _).tupled)
I know this doesn't scale well, but would this suffice?
(name, email, pass) match {
case (None, _, _) => "name"
case (_, None, _) => "email"
case (_, _, None) => "pass"
case _ => "Nothing to see here"
}