Scala: how to rewrite this response with for/yield instead of vals?
I have a GET method implementation written with vals; how can I refactor it into a for/yield structure?
case Method.GET -> !! / "leagues" =>
val openDotaResponse: ZIO[Client, Throwable, Response] = Client.request("https://api.opendota.com/api/leagues")
val bodyOfResponse: ZIO[Client, Throwable, String] = openDotaResponse.flatMap(_.body.asString)
val eitherListOfLeagues: ZIO[Client, Throwable, Either[String, List[League]]] = bodyOfResponse.map(_.fromJson[List[League]])
val listOfLeagues: ZIO[Client, Throwable, List[League]] = eitherListOfLeagues.map(eitherList => eitherList.toOption.getOrElse(Nil))
val result: ZIO[Client, Throwable, Response] = listOfLeagues.map(listLeagues => Response.json(listLeagues.toJson))
result
case Method.GET -> !! / "4leagues" =>
// val response1: ZIO[Client, Throwable, Response] = for {
// openDotaResponse: ZIO[Client, Throwable, Response] <- Client.request("https://api.opendota.com/api/leagues")
// bodyOfResponse: ZIO[Client, Throwable, String] <- openDotaResponse.flatMap(_.body.asString)
// eitherListOfLeagues: ZIO[Client, Throwable, Either[String, List[League]]] <- bodyOfResponse.map(_.fromJson[List[League]])
// listOfLeagues: ZIO[Client, Throwable, List[League]] <- eitherListOfLeagues.map(eitherList => eitherList.getOrElse(Nil))
// result: ZIO[Client, Throwable, Response] <- listOfLeagues.map(listLeagues => Response.json(listLeagues.toJson)).map(_.body)
// val res1: Response = result.map(_.body)
// res1
// } yield res1
//response1
Try
for {
openDotaResponse <- Client.request("https://api.opendota.com/api/leagues")
bodyOfResponse <- openDotaResponse.body.asString
eitherListOfLeagues = bodyOfResponse.fromJson[List[League]]
listOfLeagues = eitherListOfLeagues.toOption.getOrElse(Nil)
result = Response.json(listOfLeagues.toJson)
} yield result
or just
for {
openDotaResponse <- Client.request("https://api.opendota.com/api/leagues")
bodyOfResponse <- openDotaResponse.body.asString
eitherListOfLeagues = bodyOfResponse.fromJson[List[League]]
listOfLeagues = eitherListOfLeagues.toOption.getOrElse(Nil)
} yield Response.json(listOfLeagues.toJson)
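Note that inside the for comprehension `openDotaResponse <- Client.request(...)` binds the Response produced by the effect, not the ZIO value itself, and `eitherListOfLeagues = ...` is a plain value binding; that is why the commented-out attempt with the ZIO type ascriptions does not compile. Roughly speaking, the second version desugars to something like this sketch:
Client.request("https://api.opendota.com/api/leagues").flatMap { openDotaResponse =>
  openDotaResponse.body.asString.map { bodyOfResponse =>
    // pure bindings from the `=` lines become ordinary vals
    val listOfLeagues = bodyOfResponse.fromJson[List[League]].toOption.getOrElse(Nil)
    Response.json(listOfLeagues.toJson)
  }
}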
Suppose I have the following:
def f1: Int ( or def f1(): Int )
def f2 (x: Int): Int
def f3 (x: Int): Int
def f4: Int
...
...
note: 'Int' here is just an example
I would like to do something like this:
import scala.collection.mutable

class Container[T] {
val values = mutable.ListBuffer.empty[T => Int]
def addValue(value: T => Int): Unit = values += value
def doSome(t: T): Int = values.foldLeft[Int](0){ (complete, v) => complete + v(t) }
}
val ContainerWithParam = new Container[Int]
val ContainerWithoutParam = new Container[???]
ContainerWithParam.addValue(f2)
ContainerWithoutParam.addValue(f1)
val result = ContainerWithParam.doSome(1000) + ContainerWithoutParam.doSome(???)
One solution is to use Option[Nothing]
class Container[T] {
val values = mutable.ListBuffer.empty[T => Int]
def addValue(value: T => Int): Unit = values += value
def doSome(t: T): Int = values.foldLeft[Int](0){ (complete, v) => complete + v(t) }
}
def f1(nothing: Option[Nothing]): Int
val ContainerWithoutParam = new Container[Option[Nothing]]
ContainerWithoutParam.doSome(None)
but I don't think that is very clean or nice code...
If def f1: Int = ??? then ...
val containerWithParam = new Container[Int]
val containerWithoutParam = new Container[Unit]
containerWithParam.addValue(f2)
containerWithoutParam.addValue(_ => f1)
val result = containerWithParam.doSome(1000) +
containerWithoutParam.doSome(())
If def f1(): Int = ??? then .addValue(_ => f1()).
The answer from @jwvh is right, but as an alternative you can create a separate class for the case where the function takes no parameter. It can reuse the original implementation.
class ContainerNoParam {
private val container = new Container[Unit]
def addValue(value: => Int): Unit = container.addValue(_ => value)
def doSome(): Int = container.doSome(())
}
val ContainerWithParam = new Container[Int]
val ContainerWithoutParam = new ContainerNoParam
ContainerWithParam.addValue(f2)
ContainerWithoutParam.addValue(f1)
val result = ContainerWithParam.doSome(1000) + ContainerWithoutParam.doSome()
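For reference, here is a self-contained sketch combining the pieces above; the bodies of f1 and f2 are invented purely so the snippet runs:
import scala.collection.mutable

class Container[T] {
  val values = mutable.ListBuffer.empty[T => Int]
  def addValue(value: T => Int): Unit = values += value
  def doSome(t: T): Int = values.foldLeft(0)((total, f) => total + f(t))
}

class ContainerNoParam {
  private val container = new Container[Unit]
  def addValue(value: => Int): Unit = container.addValue(_ => value)
  def doSome(): Int = container.doSome(())
}

// Hypothetical bodies, just so the result is concrete:
def f1: Int = 7
def f2(x: Int): Int = x * 2

val containerWithParam = new Container[Int]
val containerWithoutParam = new ContainerNoParam
containerWithParam.addValue(f2)
containerWithoutParam.addValue(f1)
val result = containerWithParam.doSome(1000) + containerWithoutParam.doSome()  // 2000 + 7 == 2007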
Simplified code:
val one: Future[String] = Future("1")
val many: Future[List[String]] = Future(List("1","2","3"))
for {
a <- one
b <- many
} yield {
doSomething(a,b) // Type mismatch, expected String, actual: List[String]
}
What I want is to call doSomething for the value of one paired with each element of many, and get a list of the outputs:
List(doSomething("1","1"), doSomething("1","2"), doSomething("1","3"))
Can I get this to work with for comprehensions even when one is a Future[String] and the other a Future[List[String]]?
Try
val one: Future[String] = Future("1")
val many: Future[List[String]] = Future(List("1","2","3"))
def doSomething(a: String, b: String) = ???
for {
a <- one
b <- many
} yield {
b.map(v => doSomething(a, v))
}
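The yield above produces a List, so the whole expression is a Future[List[...]]. A small self-contained sketch, with a hypothetical body for doSomething so it actually runs:
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

val one: Future[String] = Future("1")
val many: Future[List[String]] = Future(List("1", "2", "3"))

// Hypothetical doSomething, purely for illustration:
def doSomething(a: String, b: String): String = a + "-" + b

val combined: Future[List[String]] = for {
  a <- one
  b <- many
} yield b.map(v => doSomething(a, v))

// Blocking only for demonstration:
Await.result(combined, 1.second)  // List("1-1", "1-2", "1-3")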
Alternatively, we could use scalaz's ListT transformer, like so:
import scalaz._
import ListT._
import scalaz.std.scalaFuture.futureInstance
val one: Future[String] = Future("1")
val many: Future[List[String]] = Future(List("1","2","3"))
def doSomething(a: String, b: String) = ???
for {
a <- listT(one.map(v => List(v)))
b <- listT(many)
} yield {
doSomething(a, b)
}
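With the ListT version, the for comprehension itself is a ListT[Future, String]. Assuming a scalaz version where ListT wraps a Future[List[A]] (as the listT(many) call above implies) and, again, a hypothetical doSomething body, you can unwrap it with .run:
def doSomething(a: String, b: String): String = a + "-" + b  // hypothetical body, just for the sketch

val combined: Future[List[String]] =
  (for {
    a <- listT(one.map(v => List(v)))
    b <- listT(many)
  } yield doSomething(a, b)).run  // .run gives back the underlying Future[List[String]]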
This is my code
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.SparkContext._
def main(args: Array[String]): Unit = {
val conf = new SparkConf().setMaster("local").setAppName("My app")
val sc = new SparkContext(conf)
val inputFile = "D:/test.txt"
val inputData = sc.textFile(inputFile)
val DupleRawData = inputData.map(_.split("\\<\\>").toList)
.map(s => (s(8),s(18)))
.map(s => (s, 1))
.reduceByKey(_ + _)
val UserShopCount = DupleRawData.groupBy(s => s._1._1)
.map(s => (s._1, s._2.toList.sortBy(z => z._2).reverse))
val ResultSet = UserShopCount.map(s => (s._1, s._2.take(1000).map(z => (z._1._2, z._2))))
ResultSet.foreach(println)
//(aaa,List((100,4), (200,4), (300,3), (800,1)))
//(bbb,List((100,6), (400,5), (500,4)))
//(ccc,List((300,7), (400,6), (700,3)))
// this is as far as I've gotten
}
and this is the result I'm getting:
(aaa,List((100,4), (200,4), (300,3), (800,1)))
(bbb,List((100,6), (400,5), (500,4)))
(ccc,List((300,7), (400,6), (700,3)))
The final result set RDD I want is:
// val ResultSet: org.apache.spark.rdd.RDD[(String, List[(String, Int)])]
(aaa, List((200,4), (800,1))) // 100 and 300 are dropped because they also appear in bbb and ccc
(bbb, List((500,4)))          // 100 and 400 are dropped because they also appear in aaa and ccc
(ccc, List((700,3)))          // 300 and 400 are dropped because they also appear in aaa and bbb
Please give me a solution or some advice.
Here is my attempt:
val data: Seq[(String, List[(Int, Int)])] = Seq(
("aaa",List((1,4), (2,4), (3,3), (8,1))),
("bbb",List((1,6), (4,5), (5,4))),
("ccc",List((3,7), (6,6), (7,3)))
)
val uniqKeys = data.flatMap {
case (_, v) => {
v.map(_._1)
}
} groupBy(identity(_)) filter (_._2.size == 1)
val result = data.map {
case (pk, v) => val finalValue = v.filter {
case (k, _) => uniqKeys.contains(k)
}
(pk, finalValue)
}
Output:
result: Seq[(String, List[(Int, Int)])] = List((aaa,List((2,4), (8,1))), (bbb,List((4,5), (5,4))), (ccc,List((6,6), (7,3))))
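The same filtering idea carries over to the RDD from the question. A rough sketch, assuming ResultSet has the type from the question's comment (RDD[(String, List[(String, Int)])]), that each user's list mentions a given shop at most once, and that the set of shop keys fits in driver memory:
import org.apache.spark.rdd.RDD

def keepUniqueShops(resultSet: RDD[(String, List[(String, Int)])]): RDD[(String, List[(String, Int)])] = {
  // Count how many users each shop key appears under, and keep only shops seen once overall.
  val shopCounts: Map[String, Long] = resultSet
    .flatMap { case (_, shops) => shops.map { case (shop, _) => (shop, 1L) } }
    .reduceByKey(_ + _)
    .collectAsMap()   // small map pulled to the driver and captured by the closure below
    .toMap

  resultSet.map { case (user, shops) =>
    (user, shops.filter { case (shop, _) => shopCounts.getOrElse(shop, 0L) == 1L })
  }
}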
I am assuming your ResultSet is an RDD[(String, List[(Int, Int)])].
val zeroVal1: (Long, String, (Int, Int)) = (Long.MaxValue, "", (0, 0))
val zeroVal2: List[(String, (Int, Int))] = List()
val yourNeededRdd = ResultSet
.zipWithIndex()
.flatMap({
case ((key, list), index) => list.map(t => (t._1, (index, key, t)))
})
.aggregateByKey(zeroVal1)(
(t1, t2) => { if (t1._1 <= t2._1) t1 else t2 },
(t1, t2) => { if (t1._1 <= t2._1) t1 else t2 }
)
.map({ case (t_1, (index, key, t)) => (key, t) })
.aggregateByKey(zeroVal2)(
(l, t) => { t :: l },
(l1, l2) => { l1 ++ l2 }
)
Having seen the answers that come out of questions like this one, involving horror shows like catching the NPE and dredging the mangled name out of the stack trace, I am asking this question so I can answer it.
Comments or further improvements welcome.
Like so:
case class ?:[T](x: T) {
def apply(): T = x
def apply[U >: Null](f: T => U): ?:[U] =
if (x == null) ?:[U](null)
else ?:[U](f(x))
}
And in action:
scala> val x = ?:("hel")(_ + "lo ")(_ * 2)(_ + "world")()
x: java.lang.String = hello hello world
scala> val x = ?:("hel")(_ + "lo ")(_ => (null: String))(_ + "world")()
x: java.lang.String = null
Added orElse
case class ?:[T](x: T) {
def apply(): T = x
def apply[U >: Null](f: T => U): ?:[U] =
if (x == null) ?:[U](null)
else ?:[U](f(x))
def orElse(y: T): T =
if (x == null) y
else x
}
scala> val x = ?:(obj)(_.subField)(_.subSubField).orElse("not found")
x: java.lang.String = not found
Or if you prefer named syntax as opposed to operator syntax
case class CoalesceNull[T](x: T) {
def apply(): T = x
def apply[U >: Null](f: T => U): CoalesceNull[U] =
if (x == null) CoalesceNull[U](null)
else CoalesceNull[U](f(x))
def orElse(y: T): T =
if (x == null) y
else x
}
scala> val x = CoalesceNull(obj)(_.subField)(_.subSubField).orElse("not found")
x: java.lang.String = not found
More examples
case class Obj[T](field: T)
test("last null") {
val obj: Obj[Obj[Obj[Obj[String]]]] = Obj(Obj(Obj(Obj(null))))
val res0 = CoalesceNull(obj)(_.field)(_.field)(_.field)(_.field)()
res0 should === (null)
val res1 = CoalesceNull(obj)(_.field)(_.field)(_.field)(_.field).orElse("not found")
res1 should === ("not found")
}
test("first null") {
val obj: Obj[Obj[Obj[Obj[String]]]] = null
val res0 = CoalesceNull(obj)(_.field)(_.field)(_.field)(_.field)()
res0 should === (null)
val res1 = CoalesceNull(obj)(_.field)(_.field)(_.field)(_.field).orElse("not found")
res1 should === ("not found")
}
test("middle null") {
val obj: Obj[Obj[Obj[Obj[String]]]] = Obj(Obj(null))
val res0 = CoalesceNull(obj)(_.field)(_.field)(_.field)(_.field)()
res0 should === (null)
val res1 = CoalesceNull(obj)(_.field)(_.field)(_.field)(_.field).orElse("not found")
res1 should === ("not found")
}
test("not null") {
val obj: Obj[Obj[Obj[Obj[String]]]] = Obj(Obj(Obj(Obj("something"))))
val res0 = CoalesceNull(obj)(_.field)(_.field)(_.field)(_.field)()
res0 should === ("something")
val res1 = CoalesceNull(obj)(_.field)(_.field)(_.field)(_.field).orElse("not found")
res1 should === ("something")
}