I have a problem using Play 2.5.x and ReactiveMongo Play. I am trying to create a generic repository, and I run into serious problems when serializing and deserializing objects to the database. It always gives me the following error: No Json deserializer found for type E. Try to implement an implicit Reads or Format for this type.
Here is my generic code:
package repositories.mongo
import javax.inject.Inject
import core.Entity
import play.modules.reactivemongo.ReactiveMongoApi
import reactivemongo.api.QueryOpts
import repositories.Repository
import scala.collection.Seq
import scala.concurrent.{ExecutionContext, Future}
import reactivemongo.play.json._
import play.api.libs.json._
import reactivemongo.play.json.collection.JSONCollection
class MongoRepository[K, E <: Entity[K]] @Inject()(reactiveMongo: ReactiveMongoApi) extends Repository[K, E] {
protected def collection(implicit ec: ExecutionContext) = reactiveMongo.database.map(_.collection[JSONCollection](this.getCollectionName))
protected def getCollectionName: String = {
"users"
}
def getAll(count: Int, skip: Int)(implicit ec: ExecutionContext): Future[Seq[E]] = {
this.collection.flatMap(_.find(Json.obj())
.options(QueryOpts(skipN = skip))
.cursor[E]().collect[Seq[E]](count))
}
def getFilter(count: Int, skip: Int, f: E => Boolean)(implicit ec: ExecutionContext): Future[Seq[E]] = {
this.collection.flatMap(_.find(f)
.options(QueryOpts(skipN = skip))
.cursor[E]().collect[Seq[E]](count))
}
def getById(id: K)(implicit ec: ExecutionContext): Future[Option[E]] = {
this.collection.flatMap(_.find(Json.obj("_id" -> id.toString)).one[E])
}
def create(entity: E)(implicit ec: ExecutionContext): Future[Option[E]] = {
this.collection.flatMap(_.insert(entity)).flatMap(_ => Future.successful(Option(entity)))
}
def updateById(id: K, entity: E)(implicit ec: ExecutionContext): Future[Option[E]] = {
this.collection.flatMap(_.findAndUpdate(Json.obj("_id" -> id.toString), entity)
.map(_.result[E]))
}
def deleteById(id: K)(implicit ec: ExecutionContext): Future[Option[E]] = {
this.collection.flatMap(_.findAndRemove(Json.obj("_id" -> id.toString))
.map(_.result[E]))
}
}
Here is my concrete class, which includes the JSON format serializer.
package core
import play.api.libs.json.Json
trait Entity[K] {
val id: K
}
case class User(
id: String,
name: String,
email: String
) extends Entity[String] {
}
object User {
implicit val jsonFormat = Json.format[User]
}
When you create your MongoRepository you need to say that E has a JSON Format. You can do it like this:
class MongoRepository[K, E <: Entity[K]: Format]
// this is the same as
class MongoRepository[K, E <: Entity[K]](implicit formatter: Format[E])
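Concretely, a minimal sketch of the repository with the context bound in place (the body is abbreviated; Repository and Entity are the traits from the question, and the only change is in the class header):
package repositories.mongo

import javax.inject.Inject
import core.Entity
import play.api.libs.json._
import play.modules.reactivemongo.ReactiveMongoApi
import reactivemongo.play.json._
import reactivemongo.play.json.collection.JSONCollection
import repositories.Repository
import scala.concurrent.{ExecutionContext, Future}

// The only change from the question is the `: Format` context bound on E.
class MongoRepository[K, E <: Entity[K]: Format] @Inject()(reactiveMongo: ReactiveMongoApi)
    extends Repository[K, E] {

  protected def getCollectionName: String = "users"

  protected def collection(implicit ec: ExecutionContext) =
    reactiveMongo.database.map(_.collection[JSONCollection](getCollectionName))

  // The implicit Format[E] brought in by the context bound is what one[E]
  // (and cursor[E]() in the other methods) resolve when reading documents.
  def getById(id: K)(implicit ec: ExecutionContext): Future[Option[E]] =
    collection.flatMap(_.find(Json.obj("_id" -> id.toString)).one[E])

  // ... the remaining methods stay exactly as in the question ...
}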
I have a Play Framework project using Scala 2.13 and the latest Play SBT plugin v2.8.16. I want to use a wrapper class to represent an ID type. I had been using a value class (AnyVal) but I want to migrate to newtype for performance reasons.
/model/package.scala:
import io.estatico.newtype.macros.newtype
import play.api.libs.json.{Json, Reads, Writes}
package object model {
@newtype case class FileId(raw: String)
object FileId {
implicit val reads: Reads[FileId] = json => json.validate[String].map(FileId(_))
implicit val writes: Writes[FileId] = fileId => Json.toJson(fileId.raw)
}
}
I want to be able to pass in a file ID as a path segment in a URL. Here is the relevant route:
GET /files/:id controllers.HomeController.getFileContents(id: model.FileId)
/controllers/HomeController.scala:
package controllers
import model.{FileContents, FileId}
import javax.inject._
import play.api.libs.json.Json
import play.api.mvc._
@Singleton
class HomeController @Inject()(val controllerComponents: ControllerComponents) extends BaseController {
def getFileContents(id: FileId) = Action { implicit request: Request[AnyContent] =>
val fileContents = FileContents(id, "file.txt", "this is the file contents")
Ok(Json.toJson(fileContents))
}
}
I wrote a custom binder for this.
/binders/CustomBinders.scala
package binders
import model.FileId
import play.api.mvc.PathBindable
object CustomBinders {
implicit def fileIdPathBindable(implicit stringBinder: PathBindable[String]): PathBindable[FileId] =
new PathBindable[FileId] {
override def bind(key: String, value: String): Either[String, FileId] =
stringBinder.bind(key, value).map(FileId(_))
override def unbind(key: String, value: FileId): String =
stringBinder.unbind(key, value.raw)
}
}
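For reference, the build.sbt wiring mentioned in the next sentence is presumably along these lines (an assumption, since the question does not show the file):
// Presumed build.sbt settings (not shown in the question):
scalacOptions += "-Ymacro-annotations"       // enables @newtype macro annotations on Scala 2.13
routesImport += "binders.CustomBinders._"    // exposes the PathBindable[FileId] to the routes compiler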
I configured build.sbt to enable macros and to use the custom binder. When I try to compile, I get an error:
[error] /play-scala-seed/conf/routes:8:1: class type required but model.FileId.Type found
[error] GET /files/:id controllers.HomeController.getFileContents(id: model.FileId)
Enabling debug output for the newtype macro shows the following, but I can't quite figure out what's needed to make this work:
Expanded @newtype FileId:
{
type FileId = FileId.Type;
object FileId extends scala.AnyRef {
def <init>() = {
super.<init>();
()
};
implicit val reads: Reads[FileId] = ((json) => json.validate[String].map(((x$1) => FileId(x$1))));
implicit val writes: Writes[FileId] = ((fileId) => Json.toJson(fileId.raw));
def apply(raw: String): FileId = raw.asInstanceOf[FileId];
final implicit class Ops$newtype extends AnyVal {
<paramaccessor> val $this$: Type = _;
def <init>($this$: Type) = {
super.<init>();
()
};
def raw: String = $this$.asInstanceOf[String]
};
implicit def opsThis(x: Ops$newtype): Type = x.$this$;
@new _root_.scala.inline() implicit def unsafeWrap: Coercible[Repr, Type] = Coercible.instance;
@new _root_.scala.inline() implicit def unsafeUnwrap: Coercible[Type, Repr] = Coercible.instance;
@new _root_.scala.inline() implicit def unsafeWrapM[M[_]]: Coercible[M[Repr], M[Type]] = Coercible.instance;
@new _root_.scala.inline() implicit def unsafeUnwrapM[M[_]]: Coercible[M[Type], M[Repr]] = Coercible.instance;
@new _root_.scala.inline() implicit def cannotWrapArrayAmbiguous1: Coercible[_root_.scala.Array[Repr], _root_.scala.Array[Type]] = Coercible.instance;
@new _root_.scala.inline() implicit def cannotWrapArrayAmbiguous2: Coercible[_root_.scala.Array[Repr], _root_.scala.Array[Type]] = Coercible.instance;
@new _root_.scala.inline() implicit def cannotUnwrapArrayAmbiguous1: Coercible[_root_.scala.Array[Type], _root_.scala.Array[Repr]] = Coercible.instance;
@new _root_.scala.inline() implicit def cannotUnwrapArrayAmbiguous2: Coercible[_root_.scala.Array[Type], _root_.scala.Array[Repr]] = Coercible.instance;
def deriving[TC[_]](implicit ev: TC[Repr]): TC[Type] = ev.asInstanceOf[TC[Type]];
type Repr = String;
type Base = _root_.scala.Any {
type __FileId__newtype
};
abstract trait Tag extends _root_.scala.Any;
type Type <: Base with Tag
};
()
}
I would like to use the newtype FileId as a path segment parameter in my URL. I have tried looking around for guides on this but have not found anything that works. What changes do I need to make?
I'm trying to create a generic class in Scala so I can create a repository for different collections without repeating myself.
The problem is that if I do it as a generic class (as in this example) I get a problem on this line:
val codecRegistry = fromRegistries(fromProviders(classOf[T]), DEFAULT_CODEC_REGISTRY)
Expected Class but Found [T]
But if I change T to a concrete class (let's say User) everywhere in the code, it works.
This is my class:
package persistence.repository.impl
import akka.stream.Materializer
import akka.stream.alpakka.mongodb.scaladsl.{MongoSink, MongoSource}
import akka.stream.scaladsl.{Sink, Source}
import akka.{Done, NotUsed}
import com.mongodb.reactivestreams.client.MongoClients
import constants.MongoConstants._
import org.bson.codecs.configuration.CodecRegistries.{fromProviders, fromRegistries}
import org.mongodb.scala.MongoClient.DEFAULT_CODEC_REGISTRY
import org.mongodb.scala.bson.codecs.Macros._
import org.mongodb.scala.model.Filters
import persistence.entity.{ProductItem}
import persistence.repository.Repository
import scala.concurrent.{ExecutionContext, Future}
class UserMongoDatabase[T](implicit materializer: Materializer,
executionContext: ExecutionContext)
extends Repository[T] {
val codecRegistry = fromRegistries(fromProviders(classOf[T]), DEFAULT_CODEC_REGISTRY)
val client = MongoClients.create(HOST)
val db = client.getDatabase(DATABASE)
val requestedCollection = db
.getCollection(USER_COLLECTION, classOf[T])
.withCodecRegistry(codecRegistry)
val source: Source[T, NotUsed] =
MongoSource(requestedCollection.find(classOf[T]))
val rows: Future[Seq[T]] = source.runWith(Sink.seq)
override def getAll: Future[Seq[T]] = rows
override def getById(id: AnyVal): Future[Option[T]] = rows.map {
list =>
list.filter {
user => user.asInstanceOf[ {def _id: AnyVal}]._id == id
}.headOption
}
override def getByEmail(email: String): Future[Option[T]] = rows.map {
list =>
list.filter {
user => user.asInstanceOf[ {def email: AnyVal}].email == email
}.headOption
}
override def save(obj: T): Future[T] = {
val source = Source.single(obj)
source.runWith(MongoSink.insertOne(requestedCollection)).map(_ => obj)
}
override def delete(id: AnyVal): Future[Done] = {
val source = Source.single(id).map(i => Filters.eq("_id", id))
source.runWith(MongoSink.deleteOne(requestedCollection))
}
}
This is my repository trait:
package persistence.repository
import akka.Done
import scala.concurrent.Future
trait Repository[T]{
def getAll: Future[Seq[T]]
def getById(id: AnyVal): Future[Option[T]]
def save(user: T): Future[T]
def delete(id: AnyVal): Future[Done]
def getByEmail(email:String): Future[Option[T]]
}
As said in the comments, this is a perfect example of a use case for ClassTag in Scala. It allows you to retain the actual class of a generic/parameterized type at runtime.
class DefaultMongoDatabase[T](implicit ..., ct: ClassTag[T])
extends Repository[T] {
val codecRegistry = fromRegistries(fromProviders(ct.runtimeClass), ...)
(You can move the ClassTag logic into the trait if you want.)
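As a tiny, self-contained illustration of the mechanism itself, separate from the Mongo codec registry (RuntimeClassOf is a hypothetical name used only for this sketch):
import scala.reflect.ClassTag

// A ClassTag lets a generic class recover the erased runtime class of its type parameter.
class RuntimeClassOf[T](implicit ct: ClassTag[T]) {
  val runtimeClass: Class[_] = ct.runtimeClass
}

object RuntimeClassOfDemo extends App {
  println(new RuntimeClassOf[String].runtimeClass) // prints: class java.lang.String
}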
I want to build an interface that doesn't have any dependencies other than the Scala library.
Let's imagine this is what I want:
iface.jar
trait jsonIface[JsValue] {
def turnJsonIntoClass[T](t: JsValue)
}
As you can see, it doesn't contain any imports.
Let's move on to an implementation:
iface_implementation1.jar
import play.api.libs.json._
trait myPlayJsonImpl extends jsonIface[JsValue] {
def turnJsonIntoClass[T](t: JsValue) { t.as[T] }
}
But this wouldn't compile, because as[T] needs an implicit Reads[T].
So I rewrote my iface like this:
trait jsonIface[JsValue] {
type metaInfo[T]
def turnJsonIntoClass[T](t: JsValue)(implicit meta: metaInfo[T])
}
and the Play JSON impl looks like this:
import play.api.libs.json._
trait myPlayJsonImpl extends jsonIface[JsValue] {
type conv[M] = Reads[M]
def turnJsonIntoClass[T](t: JsValue)(implicit reads: Reads[T]) { t.as[T] }
}
and the json4s impl looks like this:
import org.json4s.JsonAST._
trait json4sImpl extends jsonIface[JValue] {
type conv[M] = Manifest[M]
def turnJsonIntoClass[T](t: JValue)(implicit reads: Manifest[T]) { t.extract[T] }
}
This compiles, but it looks cumbersome.
Normally, once you start working with type-class-style traits, you keep going in that direction rather than mixing them with OOP-style traits:
import org.json4s.Formats
import org.json4s.JsonAST.JValue
import play.api.libs.json.{JsValue, Reads}
trait jsonIface[JsValue, T] {
def turnJsonIntoClass(t: JsValue): T
}
object jsonIface {
implicit def json4sImpl[T](implicit formats: Formats, manifest: Manifest[T]): jsonIface[JValue, T] = new jsonIface[JValue, T] {
def turnJsonIntoClass(t: JValue): T = t.extract[T]
}
implicit def myPlayJsonImpl[T](implicit reads: Reads[T]): jsonIface[JsValue, T] = new jsonIface[JsValue, T] {
def turnJsonIntoClass(t: JsValue): T = t.as[T]
}
}
or
object jsonIface {
implicit def json4sImpl[T](implicit formats: Formats, manifest: Manifest[T]): jsonIface[JValue, T] = (t: JValue) => t.extract[T]
implicit def myPlayJsonImpl[T](implicit reads: Reads[T]): jsonIface[JsValue, T] = (t: JsValue) => t.as[T]
}
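With these instances in implicit scope, call sites can stay generic over the JSON AST. A usage sketch (JsonParsing, parse and Person are illustrative names that are not part of the original code, and a Reads for the target type is assumed):
import play.api.libs.json.{JsValue, Json, Reads}

object JsonParsing {
  // Generic entry point: works for any JSON AST J that has a jsonIface instance in scope.
  def parse[J, T](json: J)(implicit iface: jsonIface[J, T]): T =
    iface.turnJsonIntoClass(json)
}

object JsonParsingDemo extends App {
  case class Person(name: String)
  implicit val personReads: Reads[Person] = Json.reads[Person]

  // Resolves jsonIface.myPlayJsonImpl from the companion object above.
  val person = JsonParsing.parse[JsValue, Person](Json.parse("""{"name":"Ann"}"""))
  println(person) // Person(Ann)
}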
I'm a Scala/Play/Slick newbie, so please don't be too mad if I ask a dumb question.
Here goes the question.
I have several slick table definitions, here is one of them:
import javax.inject.Inject
import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfigProvider}
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.db.NamedDatabase
import slick.driver.JdbcProfile
import scala.concurrent.Future
case class User(id: Int, login: String, password: String) extends Identifiable
class UserDAO @Inject()(@NamedDatabase protected val dbConfigProvider: DatabaseConfigProvider) extends HasDatabaseConfigProvider[JdbcProfile] {
import driver.api._
private val users = TableQuery[UsersTable]
def all(): Future[Seq[User]] = db.run(users.result)
def insert(dog: User): Future[Unit] = db.run(users += dog).map { _ => () }
def delete(id: Int): Future[Int] = db.run(users.filter(_.id === id).delete)
private class UsersTable(tag: Tag) extends Table[User](tag, "USER") {
def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
def email = column[String]("email")
def password = column[String]("password")
def * = (id, email, password) <> (User.tupled, User.unapply)
}
}
Imagine I have many more tables which all have def id = column[Int]("id", O.PrimaryKey, O.AutoInc). To eliminate this duplication I need to write something like:
trait Identifiable {
this: Table[_] =>
def id = column[String]("id", O.PrimaryKey)
}
But how do I import Table here in a database-agnostic manner? Moreover, there is more room for enhancement: all DAO objects providing access to Identifiable tables could inherit from a common abstract class containing the all, insert, find and delete methods. Something like this (I was unable to compile it):
abstract class BaseDAO[E <: Identifiable] extends DAO[E] with HasDatabaseConfigProvider[JdbcProfile] {
import driver.api._
private val entities = TableQuery[BaseTable]
def all(): Future[Seq[E]] = db.run(entities.result)
def insert(entity: E): Future[Unit] = db.run(entities += entity).map { _ => () }
def delete(entity: E): Future[Int] = db.run(entities.filter(_.id === entity.id).delete)
def find(id: Int): Future[E] = db.run(entities.filter(_.id === entities.id))
trait BaseTable { this: Table[_] =>
def id = column[String]("id", O.PrimaryKey, O.AutoInc)
}
}
Could somebody please point me to my mistakes? Thanks.
Database agnostic, and the code is highly reusable.
I am using Slick with Play Framework, and this is how I achieved a database-agnostic, generic repository.
Note that this work is inspired by Active Slick.
I want basic CRUD operations like the following to be defined for my case class: I should be able to count, update, delete and create. I want to write the CRUD code just once and reuse it forever.
Here is a snippet which demonstrates this.
case class Dog(name: String, id: Option[Long] = None)
Dog("some_dog").save()
Dog("some_dog").insert()
Dog("some_dog", Some(1)).delete()
CrudActions.scala
import slick.backend.DatabaseConfig
import slick.driver.JdbcProfile
import scala.concurrent.ExecutionContext
trait CrudActions {
val dbConfig: DatabaseConfig[JdbcProfile]
import dbConfig.driver.api._
type Model
def count: DBIO[Int]
def save(model: Model)(implicit ec: ExecutionContext): DBIO[Model]
def update(model: Model)(implicit ec: ExecutionContext): DBIO[Model]
def delete(model: Model)(implicit ec: ExecutionContext): DBIO[Int]
def fetchAll(fetchSize: Int = 100)(implicit ec: ExecutionContext): StreamingDBIO[Seq[Model], Model]
}
Now let's bring our Entity into the picture. Note that Entity is nothing but our case class.
Entity is the case class on which we perform CRUD operations. For locating our entity, let's also have an Id in place. The Id is important for locating and operating on an entity or record in the database; it also uniquely identifies the entity.
EntityActionsLike.scala
import slick.backend.DatabaseConfig
import slick.driver.JdbcProfile
import scala.concurrent.ExecutionContext
trait EntityActionsLike extends CrudActions {
val dbConfig: DatabaseConfig[JdbcProfile]
import dbConfig.driver.api._
type Entity
type Id
type Model = Entity
def insert(entity: Entity)(implicit ec: ExecutionContext): DBIO[Id]
def deleteById(id: Id)(implicit ec: ExecutionContext): DBIO[Int]
def findById(id: Id)(implicit ec: ExecutionContext): DBIO[Entity]
def findOptionById(id: Id)(implicit ec: ExecutionContext): DBIO[Option[Entity]]
}
Now let's implement these methods. For these operations we need a Table and a TableQuery. Let's say we have table and tableQuery. The good thing about traits is that we can declare a contract and leave the implementation details to subclasses or subtypes.
EntityActions.scala
import slick.ast.BaseTypedType
import slick.backend.DatabaseConfig
import slick.driver.JdbcProfile
import scala.concurrent.ExecutionContext
trait EntityActions extends EntityActionsLike {
val dbConfig: DatabaseConfig[JdbcProfile]
import dbConfig.driver.api._
type EntityTable <: Table[Entity]
def tableQuery: TableQuery[EntityTable]
def $id(table: EntityTable): Rep[Id]
def modelIdContract: ModelIdContract[Entity,Id]
override def count: DBIO[Int] = tableQuery.size.result
override def insert(entity: Entity)(implicit ec: ExecutionContext): DBIO[Id] = {
tableQuery.returning(tableQuery.map($id(_))) += entity
}
override def deleteById(id: Id)(implicit ec: ExecutionContext): DBIO[Int] = {
filterById(id).delete
}
override def findById(id: Id)(implicit ec: ExecutionContext): DBIO[Entity] = {
filterById(id).result.head
}
override def findOptionById(id: Id)(implicit ec: ExecutionContext): DBIO[Option[Entity]] = {
filterById(id).result.headOption
}
override def save(model: Entity)(implicit ec: ExecutionContext): DBIO[Entity] = {
insert(model).flatMap { id =>
filterById(id).result.head
}.transactionally
}
override def update(model: Entity)(implicit ec: ExecutionContext): DBIO[Entity] = {
filterById(modelIdContract.get(model)).update(model).map { _ => model }.transactionally
}
override def delete(model: Entity)(implicit ec: ExecutionContext): DBIO[Int] = {
filterById(modelIdContract.get(model)).delete
}
override def fetchAll(fetchSize: Int)(implicit ec: ExecutionContext): StreamingDBIO[Seq[Entity], Entity] = {
tableQuery.result.transactionally.withStatementParameters(fetchSize = fetchSize)
}
def filterById(id: Id) = tableQuery.filter($id(_) === id)
def baseTypedType: BaseTypedType[Id]
protected implicit lazy val btt: BaseTypedType[Id] = baseTypedType
}
ActiveRecord.scala
import slick.dbio.DBIO
import scala.concurrent.ExecutionContext
abstract class ActiveRecord[R <: CrudActions](val repo: R) {
def model: repo.Model
def save()(implicit ec: ExecutionContext): DBIO[repo.Model] = repo.save(model)
def update()(implicit ec: ExecutionContext): DBIO[repo.Model] = repo.update(model)
def delete()(implicit ec: ExecutionContext): DBIO[Int] = repo.delete(model)
}
ModelContract.scala
case class ModelIdContract[A, B](get: A => B, set: (A, B) => A)
How to Use
Sample.scala
import com.google.inject.{Inject, Singleton}
import play.api.db.slick.DatabaseConfigProvider
import slick.ast.BaseTypedType
import slick.backend.DatabaseConfig
import slick.driver.JdbcProfile
import slick.{ActiveRecord, EntityActions, ModelIdContract}
case class Dog(name: String, id: Option[Long] = None)
@Singleton
class DogActiveRecord @Inject() (databaseConfigProvider: DatabaseConfigProvider) extends EntityActions {
override val dbConfig: DatabaseConfig[JdbcProfile] = databaseConfigProvider.get[JdbcProfile]
import dbConfig.driver.api._
override def tableQuery = TableQuery(new Dogs(_))
override def $id(table: Dogs): Rep[Id] = table.id
override def modelIdContract: ModelIdContract[Dog, Id] = ModelIdContract(dog => dog.id.get, (dog, id) => dog.copy(id = Some(id)))
override def baseTypedType: BaseTypedType[Id] = implicitly[BaseTypedType[Id]]
override type Entity = Dog
override type Id = Long
override type EntityTable = Dogs
class Dogs(tag: Tag) extends Table[Dog](tag, "DogsTable") {
def name = column[String]("name")
def id = column[Long]("id", O.PrimaryKey)
def * = (name, id.?) <> (Dog.tupled, Dog.unapply)
}
implicit class ActiveRecordImplicit(val model: Entity) extends ActiveRecord(this)
import scala.concurrent.ExecutionContext.Implicits.global
val result = Dog("some_dog").save()
val res2 = Dog("some_other_dog", Some(1)).delete()
val res3 = Dog("some_crazy_dog", Some(1)).update()
}
Now we can do operations on Dog directly like this
Dog("some_dog").save()
This implicit does the magic for us
implicit class ActiveRecordImplicit(val model: Entity) extends ActiveRecord(this)
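Note that save(), update() and delete() return DBIO actions rather than results; they still have to be executed against the database. A small sketch, assuming it is placed inside DogActiveRecord after the existing ExecutionContext import:
// db.run turns the composed DBIO action into a Future that actually hits the database.
val savedDog = dbConfig.db.run(Dog("some_dog").save()) // Future[Dog]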
You can also add schema creation and dropping logic in EntityActions:
tableQuery.schema.create
tableQuery.schema.drop
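For example, a sketch of what those helpers could look like when added to EntityActions (createSchema and dropSchema are illustrative names):
// Inside EntityActions, where `import dbConfig.driver.api._` is already in scope:
def createSchema: DBIO[Unit] = tableQuery.schema.create
def dropSchema: DBIO[Unit] = tableQuery.schema.drop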
Basically what I want to achieve is a combination of:
Slick 3.0.0 database agnosticism
and
Slick 3 reusable generic repository
I tried a lot, actually, but I can't get this to work at all.
abstract class BaseModel[T <: slick.lifted.AbstractTable[_]](query: TableQuery[T], val driver: JdbcProfile, val dbTableName: String)
{
lazy val all: TableQuery[T] = TableQuery[T]
import driver.api._
def createTable = all.schema.create
def dropTable = all.schema.drop
abstract class BaseTable[B](val tag: Tag) extends Table[B](tag, dbTableName)
{
def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
}
}
Now here we have a problem already:
def createTable = all.schema.create and the same with dropTable... -> schema cannot be resolved here, although I import the driver beforehand.
But an even bigger problem comes in when I subclass this:
Here is the code
class NodeModel(driver: JdbcProfile, dbTableName: String) extends BaseModel[NodeTable](TableQuery[NodeTable], driver, dbTableName) {
val dbDriver = driver
import dbDriver.api._
class NodeTable(tag: Tag) extends BaseTable[Node](tag)
{
override def * = id.? <> (Node, Node.unapply)
}
//lazy val all: TableQuery[NodeTable] = TableQuery[NodeTable]
def createTable: DBIO[Unit] = all.schema.create
def dropTable: DBIO[Unit] = all.schema.drop
def insert(node: Node) = all += node
}
This obviously won't compile because I cannot pass NodeTable as T, but it gives an idea of what I want to achieve.
Do you have any idea how to solve this? I also tried with companion objects, moving the BaseTable out of the BaseModel, and trying to load a simpleDriver... but it looks like that functionality was removed from Slick in a recent version :(