I'm trying to select the coupled user by getting the correct LinkedAccount.
The query that is created is correct, but when I try to use a property
on dbuser, e.g. dbuser.lastName, I get a compile error, since dbuser is not
of type User but Query1 size=?
It's probably something really simple, but I can't figure it out since I'm
a Scala and Squeryl noob!
Why doesn't it return the correct value, and what have I done wrong in my query?
Also, saving works without any issues.
User:
class User(
#Column("id") val id: Long,
#Column("first_name") val firstName : String,
#Column("last_name") val lastName : String,
#Column("email") val email : String,
#Column("email_validated") val emailValidated: Boolean = false,
#Column("last_login") val lastLogin: Timestamp = null,
val created: Timestamp,
val modified: Timestamp,
val active: Boolean = false
) extends KeyedEntity[Long] {
lazy val linkedAccounts: OneToMany[LinkedAccount] = AppDB.usersToLinkedAccounts.left(this)
}
LinkedAccount:
class LinkedAccount(
#Column("id") val id: Long,
#Column("user_id") val userId: Long,
#Column("provider_user_id") val providerUserId: String,
#Column("salt") val salt: String,
#Column("provider_key") val providerKey: String) extends KeyedEntity[Long] {
lazy val user: ManyToOne[User] = AppDB.usersToLinkedAccounts.right(this)
}
AppDB:
object AppDB extends Schema {
val users = table[User]("users")
val linkedAccounts = table[LinkedAccount]("linked_account")
val usersToLinkedAccounts = oneToManyRelation(users, linkedAccounts).via((u, l) => u.id === l.userId)
def userByLinkedAccount(providerKey: String, providerUserId: String) = {
from(AppDB.users)(u =>
where(u.id in
from(AppDB.linkedAccounts)(la =>
where(la.userId === u.id and la.providerKey === providerKey and la.providerUserId === providerUserId)
select (la.userId)
)
)
select (u)
)
}
}
The call:
val dbuser = inTransaction {
  val u2 = AppDB.userByLinkedAccount(id.providerId, id.id)
  println(u2.statement)
  u2
}
println(dbuser.lastName) // compile error: dbuser is a Query1, not a User
The generated SQL:
Select
users10.last_login as users10_last_login,
users10.email as users10_email,
users10.modified as users10_modified,
users10.last_name as users10_last_name,
users10.first_name as users10_first_name,
users10.id as users10_id,
users10.created as users10_created,
users10.email_validated as users10_email_validated,
users10.active as users10_active
From
users users10
Where
(users10.id in ((Select
linked_account13.user_id as linked_account13_user_id
From
linked_account linked_account13
Where
(((linked_account13.user_id = users10.id) and (linked_account13.provider_key = 'facebook')) and (linked_account13.provider_user_id = 'XXXXXXXXXX'))
) ))
BTW, the documentation for @Column and @ColumnBase says:
The preferred way to define column metadata is to not define them (!)
So, you can define columns just as
val id: Long,
instead of
@Column("id") val id: Long
OK, figured it out. I need to materialize the query, in this case by calling:
.headOption
I also fixed the query after some tips from Per:
def userByLinkedAccount(providerKey : String, providerUserId : String) = {
inTransaction {
from(AppDB.users, AppDB.linkedAccounts)((u,la) =>
where (u.id === la.userId and la.providerKey === providerKey and la.providerUserId === providerUserId)
select(u)
).headOption
}
}
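For reference: since the fixed method wraps its own inTransaction and materializes the result with .headOption, the call site becomes roughly the following (a sketch; the id fields are taken from the original call, the Option handling is illustrative):
val dbuser: Option[User] = AppDB.userByLinkedAccount(id.providerId, id.id)
dbuser match {
  case Some(u) => println(u.lastName) // compiles now: u is a User
  case None => println("no user found for that linked account")
}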
I am using the Lagom (Scala) framework and I couldn't find any way to save a Scala case class object that has a complex type in Cassandra. How do I insert a Cassandra UDT in Lagom Scala? And can anyone explain how to use the BoundStatement.setUDTValue() method?
I have tried using com.datastax.driver.mapping.annotations.UDT, but it does not
work for me. I have also tried the com.datastax.driver.core
Session interface, but again it does not.
case class LeadProperties(
name: String,
label: String,
description: String,
groupName: String,
fieldDataType: String,
options: Seq[OptionalData]
)
object LeadProperties{
implicit val format: Format[LeadProperties] = Json.format[LeadProperties]
}
#UDT(keyspace = "leadpropertieskeyspace", name="optiontabletype")
case class OptionalData(label: String)
object OptionalData {
implicit val format: Format[OptionalData] = Json.format[OptionalData]
}
My query:
val optiontabletype= """
|CREATE TYPE IF NOT EXISTS optiontabletype(
|value text
|);
""".stripMargin
val createLeadPropertiesTable: String = """
|CREATE TABLE IF NOT EXISTS leadpropertiestable(
|name text Primary Key,
|label text,
|description text,
|groupname text,
|fielddatatype text,
|options List<frozen<optiontabletype>>
);
""".stripMargin
def createLeadProperties(obj: LeadProperties): Future[List[BoundStatement]] = {
val bindCreateLeadProperties: BoundStatement = createLeadProperties.bind()
bindCreateLeadProperties.setString("name", obj.name)
bindCreateLeadProperties.setString("label", obj.label)
bindCreateLeadProperties.setString("description", obj.description)
bindCreateLeadProperties.setString("groupname", obj.groupName)
bindCreateLeadProperties.setString("fielddatatype", obj.fieldDataType)
// Here is the problem: I am not getting any method for the Cassandra UDT.
Future.successful(List(bindCreateLeadProperties))
}
override def buildHandler(): ReadSideProcessor.ReadSideHandler[PropertiesEvent] = {
readSide.builder[PropertiesEvent]("PropertiesOffset")
.setGlobalPrepare(() => PropertiesRepository.createTable)
.setPrepare(_ => PropertiesRepository.prepareStatements)
.setEventHandler[PropertiesCreated](ese ⇒
PropertiesRepository.createLeadProperties(ese.event.obj))
.build()
}
I faced the same issue and solved it the following way:
Define the type and table:
def createTable(): Future[Done] = {
session.executeCreateTable("CREATE TYPE IF NOT EXISTS optiontabletype(filed1 text, field2 text)")
.flatMap(_ => session.executeCreateTable(
"CREATE TABLE IF NOT EXISTS leadpropertiestable ( " +
"id TEXT, options list<frozen <optiontabletype>>, PRIMARY KEY (id))"
))
}
Call this method in buildHandler() like this:
override def buildHandler(): ReadSideProcessor.ReadSideHandler[PropertiesEvent] =
readSide.builder[PropertiesEvent]("PropertiesOffset")
.setPrepare(_ => prepare())
.setGlobalPrepare(() => {
createTable()
})
.setEventHandler[PropertiesCreated](processPropertiesCreated)
.build()
Then in processPropertiesCreated() I used it like:
private val writePromise = Promise[PreparedStatement]() // completed in prepare()
private def writeF: Future[PreparedStatement] = writePromise.future
private def processPropertiesCreated(eventElement: EventStreamElement[PropertiesCreated]): Future[List[BoundStatement]] = {
writeF.map { ps =>
val userType = ps.getVariables.getType("options").getTypeArguments.get(0).asInstanceOf[UserType]
val newValue = userType.newValue().setString("field1", "1").setString("field2", "2")
val bindWriteTitle = ps.bind()
bindWriteTitle.setString("id", eventElement.event.id)
bindWriteTitle.setList("options", eventElement.event.keys.map(_ => newValue).toList.asJava) // todo need to convert, now only stub
List(bindWriteTitle)
}
}
And read it like this:
def toFacility(r: Row): LeadPropertiesTable = {
LeadPropertiesTable(
id = r.getString(fId),
options = r.getList("options", classOf[UDTValue]).asScala.map(udt =>
  OptiontableType(field1 = udt.getString("field1"), field2 = udt.getString("field2")))
)
}
My prepare() function:
private def prepare(): Future[Done] = {
val f = session.prepare("INSERT INTO leadpropertiestable (id, options) VALUES (?, ?)")
writePromise.completeWith(f)
f.map(_ => Done)
}
This is not very well written code, but I think it will help you proceed.
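As for the setUDTValue() part of the question: if the column were a single UDT rather than a list, the same UserType lookup could be bound directly with BoundStatement.setUDTValue (a hedged sketch; the single-UDT column named "option" is hypothetical):
writeF.map { ps =>
  // hypothetical column "option" of type frozen<optiontabletype>
  val userType = ps.getVariables.getType("option").asInstanceOf[UserType]
  val value = userType.newValue().setString("field1", "1").setString("field2", "2")
  ps.bind().setString("id", "some-id").setUDTValue("option", value)
}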
I am trying to define a nullable date field in Postgres, using Anorm to connect to the database.
I am trying to update an entry:
def update(id: Long, startDate: Option[LocalDate]){
  SQL("""UPDATE my_table
        |SET start_date = {start_date}
        |WHERE id = {id}
      """.stripMargin)
    .on(
      'id -> id,
      'start_date -> startDate
    ).executeUpdate()
}
But I get a compilation error; it looks like Anorm can't handle Option[LocalDate] as a parameter, although the parser I configured works for me:
val parser: RowParser[Info] = {
  get[Long]("id") ~
  get[Option[DateTime]]("start_date") map {
    case id ~ startDate => Info(id, startDate)
  }
}
What am I missing here?
Thanks!
I added my own implicit definitions:
// fmt is a Joda-Time date formatter, e.g.:
// val fmt = org.joda.time.format.ISODateTimeFormat.date()
implicit def rowToLocalDate: Column[LocalDate] = Column.nonNull { (value, meta) =>
  val MetaDataItem(qualified, nullable, clazz) = meta
  value match {
    case ts: java.sql.Timestamp => Right(new LocalDate(ts.getTime))
    case d: java.sql.Date => Right(new LocalDate(d.getTime))
    case str: java.lang.String => Right(fmt.parseLocalDate(str))
    case _ => Left(TypeDoesNotMatch("Cannot convert " + value + ":" + value.asInstanceOf[AnyRef].getClass))
  }
}
implicit val localDateToStatement = new ToStatement[LocalDate] {
def set(s: java.sql.PreparedStatement, index: Int, aValue: LocalDate): Unit = {
s.setTimestamp(index, new java.sql.Timestamp(aValue.toDateTimeAtStartOfDay().getMillis()))
}
}
And the relevant ParameterMetaData:
implicit object LocalDateClassMetaData extends ParameterMetaData[LocalDate] {
val sqlType = ParameterMetaData.DateParameterMetaData.sqlType
val jdbcType = ParameterMetaData.DateParameterMetaData.jdbcType
}
That did the trick.
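With those implicits in scope, Anorm can derive the Option[LocalDate] handling for parameters (it combines ToStatement with ParameterMetaData), so the update from the question compiles. A sketch, assuming an implicit java.sql.Connection:
def update(id: Long, startDate: Option[LocalDate])(implicit c: java.sql.Connection): Int =
  SQL("""UPDATE my_table
        |SET start_date = {start_date}
        |WHERE id = {id}
      """.stripMargin)
    .on(
      'id -> id,
      'start_date -> startDate
    ).executeUpdate()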
Related question: Anorm compare/search by java.time LocalDateTime. What worked for me there was simply updating to a newer (not-yet-released) version.
Is there a way to have Slick's code generation generate code for only a single schema? Say, public? I have extensions that create a whole ton of tables (e.g. postgis, pg_jobman) that make the code that Slick generates gigantic.
Use this code with appropriate values and schema names:
object CodeGenerator {
def outputDir :String =""
def pkg:String =""
def schemaList:String = "schema1, schema2"
def url:String = "dburl"
def fileName:String =""
val user = "dbUsername"
val password = "dbPassword"
val slickDriver="scala.slick.driver.PostgresDriver"
val JdbcDriver = "org.postgresql.Driver"
val container = "Tables"
def generate() = {
val driver: JdbcProfile = buildJdbcProfile
val schemas = createSchemaList
val model = createModel(driver,schemas)
val codegen = new SourceCodeGenerator(model){
// customize Scala table name (table class, table values, ...)
override def tableName = dbTableName => dbTableName match {
case _ => dbTableName+"Table"
}
override def code = {
//imports is copied right out of
//scala.slick.model.codegen.AbstractSourceCodeGenerator
val imports = {
"import scala.slick.model.ForeignKeyAction\n" +
(if (tables.exists(_.hlistEnabled)) {
"import scala.slick.collection.heterogenous._\n" +
"import scala.slick.collection.heterogenous.syntax._\n"
} else ""
) +
(if (tables.exists(_.PlainSqlMapper.enabled)) {
"import scala.slick.jdbc.{GetResult => GR}\n" +
"// NOTE: GetResult mappers for plain SQL are only generated for tables where Slick knows how to map the types of all columns.\n"
} else ""
) + "\n\n" //+ tables.map(t => s"implicit val ${t.model.name.table}Format = Json.format[${t.model.name.table}]").mkString("\n")+"\n\n"
}
val bySchema = tables.groupBy(t => {
t.model.name.schema
})
val schemaFor = (schema: Option[String]) => {
bySchema(schema).sortBy(_.model.name.table).map(
_.code.mkString("\n")
).mkString("\n\n")
}
// assemble the result: imports followed by the generated code per schema
imports + bySchema.keys.toList.sortBy(_.getOrElse("")).map(schemaFor).mkString("\n\n")
}
val joins = tables.flatMap( _.foreignKeys.map{ foreignKey =>
import foreignKey._
val fkt = referencingTable.TableClass.name
val pkt = referencedTable.TableClass.name
val columns = referencingColumns.map(_.name) zip
referencedColumns.map(_.name)
s"implicit def autojoin${fkt + name.toString} = (left:${fkt} ,right:${pkt}) => " +
columns.map{
case (lcol,rcol) =>
"left."+lcol + " === " + "right."+rcol
}.mkString(" && ")
})
override def entityName = dbTableName => dbTableName match {
case _ => dbTableName
}
override def Table = new Table(_) {
table =>
// customize table value (TableQuery) name (uses tableName as a basis)
override def TableValue = new TableValue {
override def rawName = super.rawName.uncapitalize
}
// override generator responsible for columns
override def Column = new Column(_){
// customize Scala column names
override def rawName = (table.model.name.table,this.model.name) match {
case _ => super.rawName
}
}
}
}
println(outputDir+"\\"+fileName)
(new File(outputDir)).mkdirs()
val fw = new FileWriter(outputDir+File.separator+fileName)
fw.write(codegen.packageCode(slickDriver, pkg, container))
fw.close()
}
def createModel(driver: JdbcProfile, schemas:Set[Option[String]]): Model = {
driver.simple.Database
.forURL(url, user = user, password = password, driver = JdbcDriver)
.withSession { implicit session =>
val filteredTables = driver.defaultTables.filter(
(t: MTable) => schemas.contains(t.name.schema)
)
PostgresDriver.createModel(Some(filteredTables))
}
}
def createSchemaList: Set[Option[String]] = {
schemaList.split(",").map({
case "" => None
case (name: String) => Some(name)
}).toSet
}
def buildJdbcProfile: JdbcProfile = {
val module = currentMirror.staticModule(slickDriver)
val reflectedModule = currentMirror.reflectModule(module)
val driver = reflectedModule.instance.asInstanceOf[JdbcProfile]
driver
}
}
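Presumably the generator is then run like this, once the connection values above are filled in (a sketch):
object Main extends App {
  CodeGenerator.generate()
}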
I encountered the same problem and found this question. The answer by S.Karthik sent me in the right direction. However, the code in that answer is slightly outdated and, I think, a bit over-complicated, so I crafted my own solution:
import slick.codegen.SourceCodeGenerator
import slick.driver.JdbcProfile
import slick.model.Model
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext}
val slickDriver = "slick.driver.PostgresDriver"
val jdbcDriver = "org.postgresql.Driver"
val url = "jdbc:postgresql://localhost:5432/mydb"
val outputFolder = "/path/to/src/test/scala"
val pkg = "com.mycompany"
val user = "user"
val password = "password"
object MySourceCodeGenerator {
def run(slickDriver: String, jdbcDriver: String, url: String, outputDir: String,
pkg: String, user: Option[String], password: Option[String]): Unit = {
val driver: JdbcProfile =
Class.forName(slickDriver + "$").getField("MODULE$").get(null).asInstanceOf[JdbcProfile]
val dbFactory = driver.api.Database
val db = dbFactory.forURL(url, driver = jdbcDriver, user = user.orNull,
password = password.orNull, keepAliveConnection = true)
try {
// **1**
val allSchemas = Await.result(db.run(
driver.createModel(None, ignoreInvalidDefaults = false)(ExecutionContext.global).withPinnedSession), Duration.Inf)
// **2**
val publicSchema = new Model(allSchemas.tables.filter(_.name.schema.isEmpty), allSchemas.options)
// **3**
new SourceCodeGenerator(publicSchema).writeToFile(slickDriver, outputDir, pkg)
} finally db.close
}
}
MySourceCodeGenerator.run(slickDriver, jdbcDriver, url, outputFolder, pkg, Some(user), Some(password))
I'll explain what's going on here:
I copied the run function from the SourceCodeGenerator class that's in the slick-codegen library. (I used version slick-codegen_2.10-3.1.1.)
// **1**: In the original code, the generated Model was referenced in a val called m. I renamed that to allSchemas.
// **2**: I created a new Model (publicSchema), using the options from the original model and a filtered version of its tables set. It turns out tables from the public schema don't get a schema name in the model, hence the isEmpty. Should you need tables from one or more other schemas, you can easily create a different filter expression (see the sketch after this list).
// **3**: I create a SourceCodeGenerator with the created publicSchema model.
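For example, a hedged sketch of such a filter, keeping the public schema plus a hypothetical "audit" schema:
// Keep public (schema == None in the model) plus a hypothetical "audit" schema:
val wanted: Set[Option[String]] = Set(None, Some("audit"))
val filteredModel = new Model(
  allSchemas.tables.filter(t => wanted.contains(t.name.schema)),
  allSchemas.options)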
Of course, it would even be better if the Slick codegenerator could incorporate an option to select one or more schemas.
I'm trying to do an eager fetch with Anorm; what I have now is a lazy fetch, like here: https://gist.github.com/guillaumebort/2788715
package models
import java.util.Date
import anorm._
import anorm.SqlParser._
import play.api.db.DB
import play.api.Play.current
// table users
case class User(id: Option[Long] = None, firstName: Option[String], lastName: Option[String]) {
// * returns a List, so convert to a Set to remove duplicates
lazy val nickNames: Set[Nickname] = DB.withConnection { implicit connection =>
  SQL"""
    SELECT * FROM nicknames
    JOIN types t ON nicknames.type_id = t.id
    JOIN events e ON nicknames.events_id = e.id
    WHERE nicknames.user_id = $id
  """.as(Nickname.withTypeAndEvents.*).toSet
}
}
object User {
val simple = {
get[Option[Long]]("users.id") ~
get[Option[String]]("users.first_name") ~
get[Option[String]]("users.last_name") map {
case id ~ firstName ~ lastName => User(id, firstName, lastName)
}
}
def findById(id: Long): Option[User] = DB.withConnection { implicit connection =>
SQL"SELECT * FROM users WHERE id = $id".as(simple.singleOpt)
}
}
// table nicknames
case class Nickname(id: Option[Long] = None, name: Option[String], startDate: Option[Date],
nType: Option[NickType] = None,
user: Option[User] = None,
events: Option[Set[Event]] = None) // I use Set here to remove duplicates
object Nickname {
val simple = {
get[Option[Long]]("nicknames.id") ~
get[Option[String]]("nicknames.name") ~
get[Option[Date]]("nicknames.start_date") map {
case id ~ name ~ startDate => Nickname(id, name, startDate)
}
}
val withTypeAndUser = simple ~ NickType.simple ~ User.simple map {
case nick ~ nType ~ user => nick.copy(nType = Some(nType), user = Some(user))
}
val withTypeAndEvents = withTypeAndUser ~ Event.simple map {
// it's wrong I think
case nick ~ event => nick.copy(events = Some(Set(event)))
}
}
// table types
case class NickType(id: Option[Long] = None, name: Option[String])
object NickType {
val simple = get[Option[Long]]("types.id") ~ get[Option[String]]("types.name") map {
case id ~ name => NickType(id, name)
}
}
// table events
case class Event(id: Option[Long] = None, regDate: Option[Date], description: Option[String],
user: Option[User] = None)
object Event {
val simple = {
get[Option[Long]]("events.id") ~
get[Option[Date]]("events.reg_date") ~
get[Option[String]]("events.description") map {
case id ~ regDate ~ description => Event(id, regDate, description)
}
}
}
I need findById to return my User with nicknames using an eager fetch, not a lazy one.
User -> Nicknames: one-to-many relation
Nicknames -> User: many-to-one relation, join column user_id
Nicknames -> Events: one-to-many relation
Events -> Nicknames: many-to-one relation, join column event_id
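One possible approach (my own sketch, reusing the parsers above): run a single joined query and group the flat rows in memory. Since User has no nicknames field, this sketch returns the user together with its nicknames:
def findByIdEager(id: Long): Option[(User, Set[Nickname])] =
  DB.withConnection { implicit connection =>
    val rows: List[(Nickname, Event)] = SQL"""
      SELECT * FROM users
      JOIN nicknames ON nicknames.user_id = users.id
      JOIN types t ON nicknames.type_id = t.id
      JOIN events e ON nicknames.events_id = e.id
      WHERE users.id = $id
    """.as((Nickname.withTypeAndUser ~ Event.simple map {
      case nick ~ event => (nick, event)
    }).*)
    rows.headOption.flatMap(_._1.user).map { user =>
      // attach each nickname's Set of events, deduplicating via groupBy
      val nicks = rows.groupBy(_._1.id).values.map { grp =>
        grp.head._1.copy(events = Some(grp.map(_._2).toSet))
      }.toSet
      (user, nicks)
    }
  }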
I have created a method saveInDB which takes 3 parameters that are passed to the imgData class, where all the values are set on an object picDetail of that class. When I push picDetail to the Riak database, only the keys are stored; the picDetail objects are not. I can't figure out what's happening.
val riakClient = RiakFactory.pbcClient
val riakBucketName = riakClient.createBucket("bucket_name").execute
def saveInDB(title: String, desc: String, imageName: String ): Boolean = {
val picDetail = new imgData()
picDetail.title = title
picDetail.desc = desc
val s = imageName.replace(".png", "")
picDetail.imageName = imageName
try{
riakBucketName.store(s, picDetail).execute
true
}catch{
case e: Exception => false
}
}
Update: Riak version 1.3.2 and Riak Java client 1.1.4.
Any idea would be greatly appreciated.
Thanks in advance!
I figured out the solution: when I tried to pass the picDetail object to Riak it could not be stored, so I converted the object into a JSON string. Now it is working fine. My code is like this:
case class ImgData(
title: String,
desc: String,
imageName: String
)
def getJsonString(title: String, desc: String, imageName: String): String = {
  import play.api.libs.json._
  import play.api.libs.functional.syntax._
  implicit val imgDataWrites: Writes[ImgData] = (
    (__ \ "title").write[String] and
    (__ \ "desc").write[String] and
    (__ \ "imageName").write[String]
  )(unlift(ImgData.unapply))
  val jObject = ImgData(title, desc, imageName)
  Json.toJson(jObject).toString
}
def saveInDB(title: String, desc: String, imageName: String): Boolean = {
  val obj: String = getJsonString(title, desc, imageName)
  val s: String = imageName.replace(".png", "")
  try {
    riakBucketName.store(s, obj).execute
    true
  } catch {
    case e: Exception => false
  }
}
Thank you!
Just found another easy way to store it.
def saveInDB(title: String, desc: String, imageName: String): Boolean = {
  val obj = ImgData(title, desc, imageName)
  val s: String = imageName.replace(".png", "")
  val jsonString: String = generate(obj) // serialize the case class to JSON
  val riakObject = RiakObjectBuilder.
    newBuilder("bucket_name", s).
    withContentType("application/json").
    withValue(jsonString).
    build
  try {
    riakBucketName.store(s, riakObject).returnBody(true).execute
    fetchUserData(userID) // defined elsewhere in the original code
    true
  } catch {
    case e: Exception => false
  }
}
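For completeness, reading the JSON back with the same legacy (1.x) Riak Java client API would look roughly like this (a sketch; fetchFromDB is my own name):
def fetchFromDB(imageName: String): Option[String] = {
  val s: String = imageName.replace(".png", "")
  try {
    val fetched = riakBucketName.fetch(s).execute
    Option(fetched).map(_.getValueAsString) // the JSON written by saveInDB
  } catch {
    case e: Exception => None
  }
}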