I am using ScalaTest and ScalaCheck. I wrote a custom generator and an Arbitrary instance as follows:
import java.time.LocalDateTime
import org.scalacheck._
import org.scalatest.PropSpec
import org.scalatest.prop.Checkers
import Gen._
import Arbitrary.arbitrary
class AuthJwtSpec extends PropSpec with Checkers {
  private val start = LocalDateTime.now.atZone(java.time.ZoneId.systemDefault()).toEpochSecond
  private val end = LocalDateTime.now.plusDays(2).atZone(java.time.ZoneId.systemDefault()).toEpochSecond
  private val pickTime = Gen.choose(start, end)

  private val authUser: Arbitrary[AuthUser] =
    Arbitrary {
      for {
        u <- arbitrary[String]
        p <- arbitrary[String]
      } yield AuthUser(u, p)
    }

  property("Generate JWT token.") {
    check(Prop.forAll(authUser, pickTime) { (r1: AuthUser, r2: Long) =>
      ???
    })
  }
}
The problem is that r1 has the type Arbitrary[AuthUser], but I need an AuthUser. How do I get it?
As I see it, the problem is with the authUser field: it should be a Gen[AuthUser], not an Arbitrary[AuthUser]:
import java.time.LocalDateTime
import org.scalacheck._
import org.scalatest._
import prop._
case class AuthUser(u: String, p: String)
class AuthJwtSpec extends PropSpec with Checkers with PropertyChecks {
  private val start = LocalDateTime.now.atZone(java.time.ZoneId.systemDefault()).toEpochSecond
  private val end = LocalDateTime.now.plusDays(2).atZone(java.time.ZoneId.systemDefault()).toEpochSecond
  private val pickTime: Gen[Long] = Gen.choose(start, end)

  // authUser should be a Gen[AuthUser], not an Arbitrary[AuthUser]
  private val authUser: Gen[AuthUser] =
    for {
      u <- Arbitrary.arbitrary[String]
      p <- Arbitrary.arbitrary[String]
    } yield AuthUser(u, p)

  property("Generate JWT token.") {
    val prop = Prop.forAll(authUser, pickTime) { (user: AuthUser, time: Long) =>
      println(s"User: $user")
      println(s"Time: $time")
      // Property checks must always return true or false
      true
    }
    check(prop)
  }
}
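Alternatively, you can keep the Arbitrary[AuthUser] from the question and still feed Prop.forAll, because every Arbitrary exposes its underlying generator through the .arbitrary member. A minimal sketch, assuming authUser is the Arbitrary[AuthUser] defined in the question (the property body is just a placeholder):

property("Generate JWT token (via Arbitrary).") {
  val prop = Prop.forAll(authUser.arbitrary, pickTime) { (user: AuthUser, time: Long) =>
    true // replace with the real JWT property
  }
  check(prop)
}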
I created a project with OpenAPI, then added an endpoint GET /product which is supposed to return a product (I am testing it, so I just want it to return any product).
When I run the project I get the following error:
[error] found : org.openapitools.server.model.Product.type
[error] required: (?, ?, ?) => ?
[error] def toEntityMarshallerProduct: ToEntityMarshaller[Product] = jsonFormat3(Product)
The logs point at Product inside jsonFormat3.
I noticed it's caused by the number of properties of the Product model: if I reduce them to 3, it works! This is weird! Does anyone know how to resolve this?
This is the product model file:
package org.openapitools.server.model
final case class Product(
  id: Int,
  name: String,
  isAvailable: Boolean,
  description: String,
  category: String
)
This is the ProductApi file:
package org.openapitools.server.api
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.marshalling.ToEntityMarshaller
import akka.http.scaladsl.unmarshalling.FromEntityUnmarshaller
import akka.http.scaladsl.unmarshalling.FromStringUnmarshaller
import org.openapitools.server.AkkaHttpHelper._
import org.openapitools.server.model.Product
class ProductApi(
  productService: ProductApiService,
  productMarshaller: ProductApiMarshaller
) {
  import productMarshaller._

  lazy val route: Route =
    path("product" / "all") {
      get {
        productService.productAllGet()
      }
    }
}

trait ProductApiService {
  def productAllGet200(responseProduct: Product)(implicit toEntityMarshallerProduct: ToEntityMarshaller[Product]): Route =
    complete((200, responseProduct))

  def productAllGet()(implicit toEntityMarshallerProduct: ToEntityMarshaller[Product]): Route
}

trait ProductApiMarshaller {
  implicit def toEntityMarshallerProduct: ToEntityMarshaller[Product]
}
And this is the main file:
import akka.actor.typed.{ActorSystem, ActorRef}
import akka.actor.typed.scaladsl.Behaviors
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.model.StatusCodes
// for JSON serialization/deserialization following dependency is required:
// "com.typesafe.akka" %% "akka-http-spray-json" % AkkaHttpVersion
import akka.http.scaladsl.marshalling.ToEntityMarshaller
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import spray.json.DefaultJsonProtocol._
import scala.io.StdIn
import akka.util.Timeout
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import org.openapitools.server.api._
import org.openapitools.server.model._
object Main extends App {
  // needed to run the route
  implicit val system = ActorSystem(Behaviors.empty, "product")
  // implicit val materializer = ActorMaterializer()
  // needed for the future map/flatmap in the end and future in fetchItem and saveOrder
  implicit val executionContext = system.executionContext

  object DefaultMarshaller extends ProductApiMarshaller {
    def toEntityMarshallerProduct: ToEntityMarshaller[Product] = jsonFormat3(Product)
  }

  object DefaultService extends ProductApiService {
    def productAllGet()(implicit toEntityMarshallerProduct: ToEntityMarshaller[Product]): Route = {
      val response = Future {
        Product(1, "product", false, "desc", "pizza")
      }
      requestcontext => {
        response.flatMap { (product: Product) =>
          productAllGet200(product)(toEntityMarshallerProduct)(requestcontext)
        }
      }
    }
  }

  val api = new ProductApi(DefaultService, DefaultMarshaller)

  val host = "localhost"
  val port = 3005

  val bindingFuture = Http().newServerAt(host, port).bind(pathPrefix("api") { api.route })
  println(s"Server online at http://${host}:${port}/\nPress RETURN to stop...")
  bindingFuture.failed.foreach { ex =>
    println(s"${ex} Failed to bind to ${host}:${port}!")
  }

  StdIn.readLine() // let it run until user presses return
  bindingFuture
    .flatMap(_.unbind()) // trigger unbinding from the port
    .onComplete(_ => system.terminate()) // and shutdown when done
}
The error occurs because jsonFormat3 expects a companion object whose apply takes exactly three parameters, while your Product case class has five fields; that is why the compiler reports required: (?, ?, ?) => ? and why the code compiles once you cut the model down to three properties. (spray-json provides jsonFormat1 through jsonFormat22, so jsonFormat5(Product) would match the five-field model.)
Alternatively, try using a Data Transfer Object (DTO), which is what your API exposes, alongside a domain model object that is handled internally. Then you use jsonFormat3 for your DTO and have no need for jsonFormat5. A simple mapper or helper method inside the case classes provides a smooth integration.
For example:
final case class Product(
  id: Int,
  name: String,
  isAvailable: Boolean,
  description: String,
  category: String
) {
  // keep only the fields the API should expose
  def toDto: ProductDTO = ProductDTO(id, name, isAvailable)
}

final case class ProductDTO(
  id: Int,
  name: String,
  isAvailable: Boolean
) {
  // the remaining domain fields must be supplied to rebuild the full model
  def toDomain(description: String, category: String): Product =
    Product(id, name, isAvailable, description, category)
}

object ProductSupport extends SprayJsonSupport with DefaultJsonProtocol {
  implicit val productDtoFormat = jsonFormat3(ProductDTO)
}
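To plug the DTO into the generated ProductApiMarshaller, one option is to marshal the domain Product by mapping it to its DTO first; akka-http's Marshaller has a compose method for exactly this. A minimal sketch, assuming the ProductDTO and toDto defined above (DtoMarshaller is a hypothetical name):

import akka.http.scaladsl.marshalling.ToEntityMarshaller
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import ProductSupport.productDtoFormat

object DtoMarshaller extends ProductApiMarshaller {
  // marshal a Product by converting it to the three-field DTO first
  def toEntityMarshallerProduct: ToEntityMarshaller[Product] =
    implicitly[ToEntityMarshaller[ProductDTO]].compose((p: Product) => p.toDto)
}

ProductApi can then be constructed with DtoMarshaller in place of DefaultMarshaller.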
I have a DataStream in Flink and I generate my own metrics using a gauge in a ProcessFunction.
As these metrics are important for my activity, I would like to unit test them once the flow has executed.
Unfortunately, I didn't find a way to implement a proper test reporter.
Here is a simple code example explaining my issue.
Two concerns with this code:
how do I trigger the gauge
how do I get the reporter instantiated by env.execute
Here is the sample:
import java.util.concurrent.atomic.AtomicInteger
import org.apache.flink.api.scala.metrics.ScalaGauge
import org.apache.flink.configuration.{ConfigConstants, Configuration}
import org.apache.flink.metrics.reporter.AbstractReporter
import org.apache.flink.metrics.{Gauge, Metric, MetricConfig}
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.streaming.api.functions.sink.SinkFunction
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.util.Collector
import org.scalatest.FunSuite
import org.scalatest.Matchers._
import org.scalatest.PartialFunctionValues._
import scala.collection.JavaConverters._
import scala.collection.mutable
/* Test based on Flink test example https://ci.apache.org/projects/flink/flink-docs-master/dev/stream/testing.html */
class MultiplyByTwo extends ProcessFunction[Long, Long] {
  override def processElement(data: Long, context: ProcessFunction[Long, Long]#Context, collector: Collector[Long]): Unit = {
    collector.collect(data * 2L)
  }

  val nbrCalls = new AtomicInteger(0)

  override def open(parameters: Configuration): Unit = {
    getRuntimeContext.getMetricGroup
      .addGroup("counter")
      .gauge[Int, ScalaGauge[Int]]("call", ScalaGauge[Int](() => nbrCalls.get()))
  }
}

// create a testing sink
class CollectSink extends SinkFunction[Long] {
  override def invoke(value: Long): Unit = {
    synchronized {
      CollectSink.values.add(value)
    }
  }
}

object CollectSink {
  val values: java.util.ArrayList[Long] = new java.util.ArrayList[Long]()
}

class StackOverflowTestReporter extends AbstractReporter {
  var gaugesMetrics: mutable.Map[String, String] = mutable.Map[String, String]()

  override def open(metricConfig: MetricConfig): Unit = {}
  override def close(): Unit = {}
  override def filterCharacters(s: String): String = s

  def report(): Unit = {
    gaugesMetrics = this.gauges.asScala.map(t => (metricValue(t._1), t._2))
  }

  private def metricValue(m: Metric): String = {
    m match {
      case g: Gauge[_] => g.getValue.toString
      case _ => ""
    }
  }
}
class StackOverflowTest extends FunSuite with StreamingMultipleProgramsTestBase {
  def createConfigForReporter(reporterName: String): Configuration = {
    val cfg: Configuration = new Configuration()
    cfg.setString(ConfigConstants.METRICS_REPORTER_PREFIX + reporterName + "." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, classOf[StackOverflowTestReporter].getName)
    cfg
  }

  test("test_metrics") {
    val env = StreamExecutionEnvironment.createLocalEnvironment(
      StreamExecutionEnvironment.getDefaultLocalParallelism,
      createConfigForReporter("reporter"))

    // configure your test environment
    env.setParallelism(1)
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)

    // values are collected in a static variable
    CollectSink.values.clear()

    // create a stream of custom elements and apply transformations
    env.fromElements[Long](1L, 21L, 22L)
      .process(new MultiplyByTwo())
      .addSink(new CollectSink())

    // execute
    env.execute()

    // verify your results
    CollectSink.values should have length 3
    CollectSink.values should contain (2L)
    CollectSink.values should contain (42L)
    CollectSink.values should contain (44L)

    // verify gauge counter
    // pseudo code ...
    val testReporter: StackOverflowTestReporter = _ // how to get the testReporter instantiated by env?
    testReporter.gaugesMetrics should have size 1
    testReporter.gaugesMetrics should contain key "count.call"
    testReporter.gaugesMetrics.valueAt("count.call") should be equals ("3")
  }
}
Solution, thanks to Chesnay Schepler:
import java.util.concurrent.atomic.AtomicInteger
import org.apache.flink.api.common.time.Time
import org.apache.flink.api.scala.metrics.ScalaGauge
import org.apache.flink.configuration.{ConfigConstants, Configuration}
import org.apache.flink.metrics.reporter.MetricReporter
import org.apache.flink.metrics.{Metric, MetricConfig, MetricGroup}
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.streaming.api.functions.sink.SinkFunction
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.test.util.MiniClusterResource
import org.apache.flink.util.Collector
import org.scalatest.Matchers._
import org.scalatest.PartialFunctionValues._
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import scala.collection.mutable
/* Test based on Flink test example https://ci.apache.org/projects/flink/flink-docs-master/dev/stream/testing.html */
class MultiplyByTwo extends ProcessFunction[Long, Long] {
  override def processElement(data: Long, context: ProcessFunction[Long, Long]#Context, collector: Collector[Long]): Unit = {
    nbrCalls.incrementAndGet()
    collector.collect(data * 2L)
  }

  val nbrCalls = new AtomicInteger(0)

  override def open(parameters: Configuration): Unit = {
    getRuntimeContext.getMetricGroup
      .addGroup("counter")
      .gauge[Int, ScalaGauge[Int]]("call", ScalaGauge[Int](() => nbrCalls.get()))
  }
}

// create a testing sink
class CollectSink extends SinkFunction[Long] {
  import CollectSink._

  override def invoke(value: Long): Unit = {
    synchronized {
      values.add(value)
    }
  }
}

object CollectSink {
  val values: java.util.ArrayList[Long] = new java.util.ArrayList[Long]()
}

class StackOverflowTestReporter extends MetricReporter {
  import StackOverflowTestReporter._

  override def open(metricConfig: MetricConfig): Unit = {}
  override def close(): Unit = {}

  override def notifyOfAddedMetric(metric: Metric, metricName: String, group: MetricGroup): Unit = {
    metric match {
      case gauge: ScalaGauge[_] =>
        // drop the scope components that are meaningless for the test; they seem to be the first 6 items
        val gaugeKey = group.getScopeComponents.toSeq.drop(6).mkString(".") + "." + metricName
        gaugesMetrics(gaugeKey) = gauge.asInstanceOf[ScalaGauge[Int]]
      case _ =>
    }
  }

  override def notifyOfRemovedMetric(metric: Metric, metricName: String, group: MetricGroup): Unit = {}
}

object StackOverflowTestReporter {
  var gaugesMetrics: mutable.Map[String, ScalaGauge[Int]] = mutable.Map[String, ScalaGauge[Int]]()
}
class StackOverflowTest extends FunSuite with BeforeAndAfterAll {
  val miniClusterResource: MiniClusterResource = buildMiniClusterResource()

  override def beforeAll(): Unit = {
    CollectSink.values.clear()
    StackOverflowTestReporter.gaugesMetrics.clear()
    miniClusterResource.before()
  }

  override def afterAll(): Unit = {
    miniClusterResource.after()
  }

  def createConfigForReporter(): Configuration = {
    val cfg: Configuration = new Configuration()
    cfg.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "reporter" + "." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, classOf[StackOverflowTestReporter].getName)
    cfg
  }

  def buildMiniClusterResource(): MiniClusterResource = new MiniClusterResource(
    new MiniClusterResource.MiniClusterResourceConfiguration(
      createConfigForReporter(), 1, 1, Time.milliseconds(50L)))

  test("test_metrics") {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.fromElements[Long](1L, 21L, 22L)
      .process(new MultiplyByTwo())
      .addSink(new CollectSink())
    env.execute()

    CollectSink.values should have length 3
    CollectSink.values should contain (2L)
    CollectSink.values should contain (42L)
    CollectSink.values should contain (44L)

    // verify gauge counter
    val gaugeValues = StackOverflowTestReporter.gaugesMetrics.map(t => (t._1, t._2.getValue()))
    gaugeValues should have size 1
    gaugeValues should contain ("counter.call" -> 3)
  }
}
Your best bet is to use a MiniClusterResource to explicitly start a cluster before the job, and to configure a reporter that checks for specific metrics and exposes them through static fields.
@Rule
public final MiniClusterResource clusterResource = new MiniClusterResource(
  new MiniClusterResourceConfiguration.Builder()
    .setConfiguration(getConfig())
    .build());

private static Configuration getConfig() {
  Configuration config = new Configuration();
  config.setString(
    ConfigConstants.METRICS_REPORTER_PREFIX +
      "myTestReporter." +
      ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX,
    MyTestReporter.class.getName());
  return config;
}

public static class MyTestReporter implements MetricReporter {
  static volatile Gauge<?> myGauge = null;

  @Override
  public void open(MetricConfig metricConfig) {
  }

  @Override
  public void close() {
  }

  @Override
  public void notifyOfAddedMetric(Metric metric, String name, MetricGroup metricGroup) {
    if ("myMetric".equals(name)) {
      myGauge = (Gauge<?>) metric;
    }
  }

  @Override
  public void notifyOfRemovedMetric(Metric metric, String s, MetricGroup metricGroup) {
  }
}
Sorry for my bad English!
I'm a newbie in the Scala language. I'm compiling a project with some modifications and getting the error "scala value is not member of"... Can somebody help me? Here is my code:
Command.scala
package com.bla.cms.domain
import scala.collection.Map
import redis.clients.jedis.Jedis
sealed trait CommandResult
object Success extends CommandResult
object Failure extends CommandResult
object Unauthorized extends CommandResult
trait Command {
  def captchaValidator: Validator
  def logger: Logging

  val properties = PropertiesHandler.readProperties(System.getProperty(PropertiesHandler.configFileNameKey))
}
RedisIcrementCommand.scala
package com.bla.cms
package domain
import scala.collection.Map
import redis.clients.jedis.JedisPool
import dispatch._
import org.apache.log4j.Logger
import redis.clients.jedis.Jedis
class RedisIncrementCommand(database: Jedis, val captchaValidator: Validator, val logger: Logging) extends Command {
  def execute(parameters: Map[String, String]): CommandResult = {
    captchaValidator.validate(parameters) match {
      case Right(params) =>
        executeInDatabase(params)
      case Left(status) =>
        logger.info("Failed to execute command. Invalid captcha. Status: " + status)
        Failure
    }
  }

  def executeInDatabase(parameters: Map[String, String]): CommandResult = {
    parameters.get("selectedAnswer") match {
      case Some(selectedAnswer) =>
        database.incr(selectedAnswer)
        database.sadd(Answers.key, selectedAnswer)
        logger.info("Increment received successfully. Option: " + selectedAnswer)
        Success
      case None =>
        logger.info("Failed to compute increment. No answer was given.")
        Failure
    }
  }
}
PollServlet.scala
package com.bla.cms
import javax.servlet.http._
import redis.clients.jedis.JedisPool
import scala.collection.JavaConverters._
import scala.collection.JavaConversions._
import com.bla.cms.domain.Logging
import org.apache.log4j.Logger
import scala.util.matching.Regex
class PollServlet extends HttpServlet with RequiresConnection {
  import com.bla.cms.domain._
  import URLHandler._

  override def doPost(request: HttpServletRequest, response: HttpServletResponse): Unit = {
    val parametersMap = new ServletApiAdapter(request, response).parameters
    val outcome = newIncrementCommand(request).execute(parametersMap)
    redirect(request, response, outcome)
  }
}
And when I compile, I get:
[error] PollServlet.scala:19: value execute is not a member of com.bla.cms.domain.Command
val outcome = newIncrementCommand(request).execute(parametersMap)
Can somebody help me, please?
You can:
1. Add def execute(parameters: Map[String,String]): CommandResult to the Command trait.
2. Change newIncrementCommand(request) to return RedisIncrementCommand instead of Command.
3. Cast the result of newIncrementCommand(request) to RedisIncrementCommand using asInstanceOf.
I prefer option 1, which would be:
trait Command {
  val captchaValidator: Validator
  val logger: Logging
  val properties = PropertiesHandler.readProperties(System.getProperty(PropertiesHandler.configFileNameKey))

  def execute(parameters: Map[String, String]): CommandResult
  def executeInDatabase(parameters: Map[String, String]): CommandResult
}
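With execute declared on the trait, the servlet's call site compiles even though it only sees the abstract Command type; dynamic dispatch picks the RedisIncrementCommand implementation at runtime. A hedged sketch of the factory (newIncrementCommand and its dependencies are hypothetical names, just to illustrate the shape):

def newIncrementCommand(request: HttpServletRequest): Command =
  new RedisIncrementCommand(jedis, captchaValidator, logger) // still typed as Command

// PollServlet.doPost now resolves execute through the Command trait:
val outcome: CommandResult = newIncrementCommand(request).execute(parametersMap)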
Pretty new to Scala and Play, and I have been assigned a task to test someone else's app, which is running fine, by the way. Please check whether my tests are right and what the error is.
This is the EmployeeEntry.scala file in models:
package models
import models.database.Employee
import play.api.db.slick.Config.driver.simple._
import play.api.db.slick._
import play.api.Play.current
case class EmployeeEntry(eid :Int, ename: String, eadd: String, emob: String)
object Employee {
  val DBemp = TableQuery[Employee]

  def savedat(value: EmployeeEntry): Long = {
    DB.withSession { implicit session =>
      DBemp += EmployeeEntry(eid = value.eid, ename = value.ename, eadd = value.eadd, emob = value.emob)
    }
  }

  /*val query = for (c <- Employee) yield c.ename
  val result = DB.withSession {
    implicit session =>
      query.list // <- takes session implicitly
  }*/
  //val query = for (c <- Employee) yield c.ename

  def getPersonList: List[EmployeeEntry] = DB.withSession { implicit session => DBemp.list }

  def Update: Int = DB.withSession { implicit session =>
    (DBemp filter (_.eid === 1) map (s => (s.ename, s.eadd))) update ("test", "khair")
  }

  def delet: Int = DB.withSession { implicit session =>
    (DBemp filter (_.eid === 1)).delete
  }
}
And this is the Employee.scala file in models/database:
package models.database
import models._
import models.EmployeeEntry
import play.api.db.slick.Config.driver.simple._
import scala.slick.lifted._
class Employee(tag: Tag) extends Table[EmployeeEntry](tag, "employee") {
  //val a = "hello"
  def eid = column[Int]("eid", O.PrimaryKey)
  def ename = column[String]("name", O.DBType("VARCHAR(50)"))
  def emob = column[String]("emob", O.DBType("VARCHAR(10)"))
  def eadd = column[String]("eadd", O.DBType("VARCHAR(100)"))

  // the projection must list the columns in the case-class field order (eid, ename, eadd, emob)
  def * = (eid, ename, eadd, emob) <> (EmployeeEntry.tupled, EmployeeEntry.unapply)
}
Finally, this is the test I am running, which is failing:
import play.api.db.slick.Config.driver.simple._
import play.api.db.slick._
import play.api.Play.current
import org.scalatest.FunSpec
import org.scalatest.matchers.ShouldMatchers
import models.database.Employee
import scala.slick.lifted._
import models._
import models.EmployeeEntry
//import scala.slick.driver.H2Driver.simple._
class databasetest extends FunSpec with ShouldMatchers {
  describe("this is to check database layer") {
    it("can save a row") {
      val a = EmployeeEntry(1006, "udit", "schd", "90909090")
      Employee.savedat(a) should be (1)
    }
    it("getpersonlist") {
      Employee.getPersonList.size should be (1)
    }
  }
}
The test is failing and the error is:
java.lang.RuntimeException: There is no started application
at scala.sys.package$.error(package.scala:27)
at play.api.Play$$anonfun$current$1.apply(Play.scala:71)
at play.api.Play$$anonfun$current$1.apply(Play.scala:71)
at scala.Option.getOrElse(Option.scala:120)
at play.api.Play$.current(Play.scala:71)
at models.Employee$.getPersonList(EmployeeEntry.scala:27)
at databasetest$$anonfun$1$$anonfun$apply$mcV$sp$2.apply$mcV$sp(databasetest.scala:39)
at databasetest$$anonfun$1$$anonfun$apply$mcV$sp$2.apply(databasetest.scala:39)
at databasetest$$anonfun$1$$anonfun$apply$mcV$sp$2.apply(databasetest.scala:39)
at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
By default Play provides the specs2 testing framework, so there is no need to add the ScalaTest framework for unit testing. Testing the database access layer requires a connection to the database, so start a fake application. (This is not running code, just an idea of how to write the unit tests.)
For more detail, take a look at the Play docs: https://www.playframework.com/documentation/2.3.x/ScalaFunctionalTestingWithSpecs2
import org.specs2.mutable.Specification
import models.database.Employee
import models._
import models.EmployeeEntry
import play.api.test.FakeApplication
import play.api.test.Helpers.running
import play.api.Play.current
class databasetest extends Specification {
  "database layer" should {
    "save a row" in {
      running(FakeApplication()) {
        val a = EmployeeEntry(1006, "udit", "schd", "90909090")
        Employee.savedat(a) must be equalTo (1)
      }
    }
    "get list" in {
      running(FakeApplication()) {
        Employee.getPersonList.size must be equalTo (1)
      }
    }
  }
}
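If you would rather not touch the real database from these tests, FakeApplication can also override the datasource; Play's test helpers ship an inMemoryDatabase() configuration map for this. A minimal sketch under that assumption:

import play.api.test.FakeApplication
import play.api.test.Helpers.{running, inMemoryDatabase}

"save a row against an in-memory database" in {
  // replaces db.default with an in-memory H2 database for the duration of the test
  running(FakeApplication(additionalConfiguration = inMemoryDatabase())) {
    Employee.savedat(EmployeeEntry(1006, "udit", "schd", "90909090")) must be equalTo (1)
  }
}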
I am using Play framework 2.1.1 with Scala. I query a database table, return the result to the controller as a list, and then convert the list to a string and return it to the AJAX call from the JavaScript code.
How do I return the query result as JSON to the AJAX call through the controller?
Application.scala
import play.api._
import play.api.mvc._
import play.api.data._
import views.html._
import models._
object Application extends Controller {

  def index = Action {
    Ok(views.html.index())
  }

  def getSummaryTable = Action {
    var sum = "Summary Table"
    Ok(ajax_result.render((Timesheet.getAll).mkString("\n")))
  }

  def javascriptRoutes = Action { implicit request =>
    import routes.javascript._
    Ok(
      Routes.javascriptRouter("jsRoutes")(
        // Routes
        controllers.routes.javascript.Application.getSummaryTable
      )
    ).as("text/javascript")
  }
}
TimeSheet.scala
// Use PostgresDriver to connect to a Postgres database
import scala.slick.driver.PostgresDriver.simple._
import scala.slick.lifted.{MappedTypeMapper,BaseTypeMapper,TypeMapperDelegate}
import scala.slick.driver.BasicProfile
import scala.slick.session.{PositionedParameters,PositionedResult}
// Use the implicit threadLocalSession
import Database.threadLocalSession
import java.sql.Date
import java.sql.Time
case class Timesheet(ID: Int, dateVal: String, entryTime: Time, exitTime: Time, someVal: String)
object Timesheet {
  //Definition of Timesheet table
  // object TS extends Table[(Int,String,Time,Time,String)]("timesheet"){
  val TSTable = new Table[Timesheet]("timesheet") {
    def ID = column[Int]("id")
    def dateVal = column[String]("date")
    def entryTime = column[Time]("entry_time")
    def exitTime = column[Time]("exit_time")
    def someVal = column[String]("someval")

    def * = ID ~ dateVal ~ entryTime ~ exitTime ~ someVal <> (Timesheet.apply _, Timesheet.unapply _)
  }

  def getAll: Seq[Timesheet] = {
    Database.forURL("jdbc:postgresql://localhost:5432/my_db", "postgres", "password", null, driver = "org.postgresql.Driver") withSession {
      val q = Query(TSTable)
      val qFiltered = q.filter(_.ID === 41)
      val qDateFilter = qFiltered.filter(_.dateVal === "01/03/2013")
      val qSorted = qDateFilter.sortBy(_.entryTime)
      qSorted.list
    }
  }
}
Also, don't forget to provide an implicit (or explicitly passed) JSON serializer, a Writes, for your model; otherwise the Scala compiler will yell at you :-). You can do something like:
def allTimesheet = Action {
  val timesheetWrites = Json.writes[Timesheet] // here is the serializer
  val listofTimeSheet = Timesheet.getAll
  // apply the serializer to each element explicitly, then serialize the resulting sequence of JsValues
  Ok(Json.toJson(listofTimeSheet.map(ts => Json.toJson(ts)(timesheetWrites))))
}
Or you can use implicits, like:
def allTimesheet = Action {
  implicit val timesheetWrites = Json.writes[Timesheet] // here is the serializer
  val listofTimeSheet = Timesheet.getAll
  Ok(Json.toJson(listofTimeSheet)) // the Writes for the list is derived from the implicit
}
You can even declare your serializer in your model's companion object:
object Timesheet {
  implicit val timesheetWrites = Json.writes[Timesheet] // here is the serializer
  ....
}
And in the controller:
import models.Timesheet.timesheetWrites

def allTimesheet = Action {
  val listofTimeSheet = Timesheet.getAll
  Ok(Json.toJson(listofTimeSheet))
}
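One caveat: Json.writes[Timesheet] can only be generated if every field already has a Writes in scope, and Play does not provide one for java.sql.Time out of the box. So for the Timesheet model above you would also need something like the following sketch (rendering the Time as a string is an arbitrary choice):

implicit val timeWrites: Writes[Time] = Writes(t => JsString(t.toString))
implicit val timesheetWrites = Json.writes[Timesheet] // now the macro can find Writes[Time]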
I recommend you use play.api.libs.json.Json.toJson.
Here's an example:
object Products extends Controller {
  def list = Action {
    val productCodes = Product.findAll.map(_.ean)
    Ok(Json.toJson(productCodes))
  }
}
Json.toJson returns a JsValue, for which Play will automatically set the application/json content type.
See Play For Scala chapter 8.