@transient lazy val field serialization - Scala

I have a problem in Scala: I serialize an instance of a class with a @transient lazy val field, and when I deserialize it, the field is null. I expected lazy evaluation to kick in again after deserialization. What should I do?
Here is a sample:
object Test {
  def main(args: Array[String]) {
    //----------------
    // ClassA - with @transient
    //----------------
    val objA1 = ClassA("world")
    println(objA1)
    // This works as expected:
    // "Good morning."
    // "Hello, world"
    saveObject("testA.dat", objA1)
    val objA2 = loadObject("testA.dat").asInstanceOf[ClassA]
    println(objA2)
    // I expect this to print:
    // "Good morning."
    // "Hello, world"
    // but it actually prints:
    // "null"

    //----------------
    // ClassB - without @transient
    // this works as expected
    //----------------
    val objB1 = ClassB("world")
    println(objB1)
    // This works as expected:
    // "Good morning."
    // "Hello, world"
    saveObject("testB.dat", objB1)
    val objB2 = loadObject("testB.dat").asInstanceOf[ClassB]
    println(objB2)
    // This works as expected:
    // "Hello, world"
  }

  case class ClassA(name: String) {
    @transient private lazy val msg = {
      println("Good morning.")
      "Hello, " + name
    }
    override def toString = msg
  }

  case class ClassB(name: String) {
    private lazy val msg = {
      println("Good morning.")
      "Hello, " + name
    }
    override def toString = msg
  }

  import java.io.FileInputStream
  import java.io.FileOutputStream
  import java.io.ObjectInputStream
  import java.io.ObjectOutputStream

  def saveObject(fname: String, obj: AnyRef) {
    val fop = new FileOutputStream(fname)
    val oop = new ObjectOutputStream(fop)
    try {
      oop.writeObject(obj)
    } finally {
      oop.close()
    }
  }

  def loadObject(fname: String): AnyRef = {
    val fip = new FileInputStream(fname)
    val oip = new ObjectInputStream(fip)
    try {
      oip.readObject()
    } finally {
      oip.close()
    }
  }
}

There are a couple of tickets on this in Scala's Trac: 1573 and 1574.
I'd advise you to test against a trunk build of 2.9, as it may already be fixed.
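If upgrading isn't an option, a common workaround is to memoize by hand into a @transient var, which is simply recomputed on first access after deserialization. A minimal sketch (my own, not from the tickets):

case class ClassA(name: String) {
  // A @transient var is null after deserialization, so the message
  // is recomputed lazily on the first access, like a lazy val would be.
  @transient private var _msg: String = _
  private def msg: String = {
    if (_msg == null) {
      println("Good morning.")
      _msg = "Hello, " + name
    }
    _msg
  }
  override def toString = msg
}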

Related

Unable to Analyse data

val patterns = ctx.getBroadcastState(patternStateDescriptor)
The imports I made:
import java.util.Properties // needed for the Properties used below
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.common.state.{MapStateDescriptor, ValueState, ValueStateDescriptor}
import org.apache.flink.api.scala.typeutils.Types
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.datastream.BroadcastStream
import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector
Here's the code:
val env = StreamExecutionEnvironment.getExecutionEnvironment

val properties = new Properties()
properties.setProperty("bootstrap.servers", "localhost:9092")

val patternStream = new FlinkKafkaConsumer010("patterns", new SimpleStringSchema, properties)
val patterns = env.addSource(patternStream)
var patternData = patterns.map {
  str =>
    val splitted_str = str.split(",")
    PatternStream(splitted_str(0).trim, splitted_str(1).trim, splitted_str(2).trim)
}

val logsStream = new FlinkKafkaConsumer010("logs", new SimpleStringSchema, properties)
// logsStream.setStartFromEarliest()
val logs = env.addSource(logsStream)
var data = logs.map {
  str =>
    val splitted_str = str.split(",")
    LogsTest(splitted_str.head.trim, splitted_str(1).trim, splitted_str(2).trim)
}
val keyedData: KeyedStream[LogsTest, String] = data.keyBy(_.metric)

// first type defined is for the key and second data type defined is for the value
val bcStateDescriptor = new MapStateDescriptor[Unit, PatternStream]("patterns", Types.UNIT, Types.of[PatternStream])
val broadcastPatterns: BroadcastStream[PatternStream] = patternData.broadcast(bcStateDescriptor)

val alerts = keyedData
  .connect(broadcastPatterns)
  .process(new PatternEvaluator())

alerts.print()
// println(alerts.getClass)
// val sinkProducer = new FlinkKafkaProducer010("output", new SimpleStringSchema(), properties)

env.execute("Flink Broadcast State Job")
} // closes the enclosing main method (opening not shown in the question)
class PatternEvaluator()
  extends KeyedBroadcastProcessFunction[String, LogsTest, PatternStream, (String, String, String)] {

  private lazy val patternStateDescriptor =
    new MapStateDescriptor("patterns", classOf[String], classOf[String])

  private var lastMetricState: ValueState[String] = _

  override def open(parameters: Configuration): Unit = {
    val lastMetricDescriptor = new ValueStateDescriptor("last-metric", classOf[String])
    lastMetricState = getRuntimeContext.getState(lastMetricDescriptor)
  }

  override def processElement(reading: LogsTest,
                              readOnlyCtx: KeyedBroadcastProcessFunction[String, LogsTest, PatternStream, (String, String, String)]#ReadOnlyContext,
                              out: Collector[(String, String, String)]): Unit = {
    val metrics = readOnlyCtx.getBroadcastState(patternStateDescriptor)
    if (metrics.contains(reading.metric)) {
      val metricPattern: String = metrics.get(reading.metric)
      val metricPatternValue: String = metrics.get(reading.value)
      val lastMetric = lastMetricState.value()
      val logsMetric = reading.metric
      val logsValue = reading.value
      if (logsMetric == metricPattern) {
        if (metricPatternValue == logsValue) {
          out.collect((reading.timestamp, reading.value, reading.metric))
        }
      }
    }
  }

  override def processBroadcastElement(
      update: PatternStream,
      ctx: KeyedBroadcastProcessFunction[String, LogsTest, PatternStream, (String, String, String)]#Context,
      out: Collector[(String, String, String)]
  ): Unit = {
    val patterns = ctx.getBroadcastState(patternStateDescriptor)
    if (update.metric == "IP") {
      patterns.put(update.metric /*, update.operator*/ , update.value)
    }
    // else if (update.metric == "username") {
    //   patterns.put(update.metric, update.value)
    // }
    // else {
    //   println("No required data found")
    // }
  }
}
Sample data:
Logs stream
"21/09/98","IP","5.5.5.5"
Pattern stream
"IP","==","5.5.5.5"
I'm unable to analyze the data and get the desired result, i.e. 21/09/98,IP,5.5.5.5.
There's no error as of now; the job is reading both streams (checked), it's just not analyzing the data.
One common source of trouble in cases like this is that the API offers no control over the order in which the patterns and the data are ingested. It could be that processElement is being called before processBroadcastElement.
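If that ordering is indeed the problem, one common mitigation is to buffer events that arrive before any matching pattern is known, and re-evaluate them when a broadcast update comes in. A minimal sketch along those lines, reusing the poster's LogsTest/PatternStream types (illustrative and untested, not the poster's code):

import org.apache.flink.api.common.state.{KeyedStateFunction, ListState, ListStateDescriptor, MapStateDescriptor}
import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction
import org.apache.flink.util.Collector
import scala.collection.JavaConverters._

class BufferingPatternEvaluator
  extends KeyedBroadcastProcessFunction[String, LogsTest, PatternStream, (String, String, String)] {

  private val patternStateDescriptor =
    new MapStateDescriptor("patterns", classOf[String], classOf[String])
  private val pendingDescriptor =
    new ListStateDescriptor("pending-logs", classOf[LogsTest])

  override def processElement(reading: LogsTest,
      ctx: KeyedBroadcastProcessFunction[String, LogsTest, PatternStream, (String, String, String)]#ReadOnlyContext,
      out: Collector[(String, String, String)]): Unit = {
    val patterns = ctx.getBroadcastState(patternStateDescriptor)
    if (patterns.contains(reading.metric) && patterns.get(reading.metric) == reading.value) {
      out.collect((reading.timestamp, reading.value, reading.metric))
    } else {
      // No matching pattern seen yet -- buffer the event instead of dropping it.
      getRuntimeContext.getListState(pendingDescriptor).add(reading)
    }
  }

  override def processBroadcastElement(update: PatternStream,
      ctx: KeyedBroadcastProcessFunction[String, LogsTest, PatternStream, (String, String, String)]#Context,
      out: Collector[(String, String, String)]): Unit = {
    ctx.getBroadcastState(patternStateDescriptor).put(update.metric, update.value)
    // Re-check everything buffered so far, for every key, now that a pattern exists.
    ctx.applyToKeyedState(pendingDescriptor,
      new KeyedStateFunction[String, ListState[LogsTest]] {
        override def process(key: String, state: ListState[LogsTest]): Unit = {
          state.get().asScala.foreach { reading =>
            if (reading.metric == update.metric && reading.value == update.value)
              out.collect((reading.timestamp, reading.value, reading.metric))
          }
          state.clear()
        }
      })
  }
}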

How to eval code that uses InterfaceStability annotation (that fails with "illegal cyclic reference involving class InterfaceStability")?

I want to dynamically generate some Kafka Streams code while the program is running, but I get an exception when I compile the following code with the Scala toolbox eval:
import scala.reflect.runtime.universe.runtimeMirror
import scala.tools.reflect.ToolBox // provides mkToolBox

val toolbox = runtimeMirror(getClass.getClassLoader).mkToolBox()
val code =
"""
|import org.apache.kafka.streams.scala.kstream.KStream
|import org.apache.kafka.streams.scala.StreamsBuilder
|
|class ClassA {
| def createStream(): KStream[String, String] = {
| import org.apache.kafka.streams.scala.ImplicitConversions._
| import org.apache.kafka.streams.scala.Serdes._
| new StreamsBuilder().stream("TestTopic")
| }
|}
|scala.reflect.classTag[ClassA].runtimeClass
""".stripMargin
toolbox.eval(toolbox.parse(code))
errors:
reflective compilation has failed:
illegal cyclic reference involving class InterfaceStability
scala.tools.reflect.ToolBoxError: reflective compilation has failed:
illegal cyclic reference involving class InterfaceStability
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl$ToolBoxGlobal.throwIfErrors(ToolBoxFactory.scala:331)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl$ToolBoxGlobal.wrapInPackageAndCompile(ToolBoxFactory.scala:213)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl$ToolBoxGlobal.compile(ToolBoxFactory.scala:267)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl.$anonfun$compile$13(ToolBoxFactory.scala:444)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl$withCompilerApi$.apply(ToolBoxFactory.scala:370)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl.compile(ToolBoxFactory.scala:437)
at scala.tools.reflect.ToolBoxFactory$ToolBoxImpl.eval(ToolBoxFactory.scala:459)
It seems that the Kafka InterfaceStability class is annotated with itself:
package org.apache.kafka.common.annotation;

@InterfaceStability.Evolving
public class InterfaceStability {
    ...
}
Scala version: 2.12.8
Kafka version: 2.1.0
sbt version: 1.2.7
After switching to the API in the scala.tools.nsc package, I can compile successfully.
Refer to https://eknet.org/main/dev/runtimecompilescala.html [broken link]
import scala.reflect.internal.util.{AbstractFileClassLoader, BatchSourceFile}
import scala.reflect.io.{AbstractFile, VirtualDirectory}
import scala.reflect.runtime
import scala.reflect.runtime.universe
import scala.reflect.runtime.universe._
import scala.tools.nsc.{Global, Settings}
import scala.collection.mutable
import java.security.MessageDigest
import java.math.BigInteger
import java.io.File // needed for the constructor parameter below

class Compiler(targetDir: Option[File]) {

  val target = targetDir match {
    case Some(dir) => AbstractFile.getDirectory(dir)
    case None      => new VirtualDirectory("(memory)", None)
  }

  val classCache = mutable.Map[String, Class[_]]()

  private val settings = new Settings()
  settings.deprecation.value = true // enable detailed deprecation warnings
  settings.unchecked.value = true   // enable detailed unchecked warnings
  settings.outputDirs.setSingleOutput(target)
  settings.usejavacp.value = true

  private val global = new Global(settings)
  private lazy val run = new global.Run

  val classLoader = new AbstractFileClassLoader(target, this.getClass.getClassLoader)

  /** Compiles the code as a class into the class loader of this compiler.
    *
    * @param code
    * @return
    */
  def compile(code: String) = {
    val className = classNameForCode(code)
    findClass(className).getOrElse {
      val sourceFiles = List(new BatchSourceFile("(inline)", wrapCodeInClass(className, code)))
      run.compileSources(sourceFiles)
      findClass(className).get
    }
  }

  /** Compiles the source string into the class loader and
    * evaluates it.
    *
    * @param code
    * @tparam T
    * @return
    */
  def eval[T](code: String): T = {
    val cls = compile(code)
    cls.getConstructor().newInstance().asInstanceOf[() => Any].apply().asInstanceOf[T]
  }

  def findClass(className: String): Option[Class[_]] = {
    synchronized {
      classCache.get(className).orElse {
        try {
          val cls = classLoader.loadClass(className)
          classCache(className) = cls
          Some(cls)
        } catch {
          case e: ClassNotFoundException => None
        }
      }
    }
  }

  protected def classNameForCode(code: String): String = {
    val digest = MessageDigest.getInstance("SHA-1").digest(code.getBytes)
    "sha" + new BigInteger(1, digest).toString(16)
  }

  /*
   * Wrap source code in a new class with an apply method.
   */
  private def wrapCodeInClass(className: String, code: String) = {
    "class " + className + " extends (() => Any) {\n" +
    "  def apply() = {\n" +
    code + "\n" +
    "  }\n" +
    "}\n"
  }
}
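For completeness, a hypothetical usage of this Compiler (assuming scala-compiler is on the classpath, as above):

val compiler = new Compiler(None) // None => compile into an in-memory VirtualDirectory
val greeting = compiler.eval[String](""" "Hello, " + "toolbox-free world" """)
println(greeting) // Hello, toolbox-free world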

What is the best way to get the name of the caller class in an object?

I could get this working using this:
scala> object LOGGER {
| def warning(msg: String)(implicit className:String) = {
| className
| }
| }
defined object LOGGER
scala> class testing {
| lazy implicit val className = this.getClass.getName
| def test = LOGGER.warning("Testing")
| }
defined class testing
scala> val obj = new testing()
obj: testing = testing@11fb4f69
scala> obj.test
res51: String = testing <=======
scala> class testing2 {
| lazy implicit val className = this.getClass.getName
| def test = LOGGER.warning("Testing")
| }
defined class testing2
scala> val obj2 = new testing2()
obj2: testing2 = testing2@2ca3a203
scala> obj2.test
res53: String = testing2 <=====
I also tried using Thread.currentThread.getStackTrace in the object LOGGER but couldn't get it to print the calling class testing in the warning function.
Any other ways to do this?
Dynamic variable
One way to do it is with a DynamicVariable:
import java.util.Date
import scala.util.DynamicVariable

object LOGGER {
  val caller = new DynamicVariable[String]("---")
  def time = new Date().toString
  def warning(msg: String) = println(s"[${caller.value} : $time] $msg")
}

trait Logging {
  def logged[T](action: => T) = LOGGER.caller.withValue(this.getClass.getName)(action)
}

class testing extends Logging {
  def test = logged {
    //some actions
    LOGGER.warning("test something")
    //some other actions
  }
}

val t = new testing
t.test
will print something like
[testing : Wed Nov 25 11:29:23 MSK 2015] test something
Or instead of mixing in Logging you can use it directly:
class testing {
  def test = LOGGER.caller.withValue(this.getClass.getName) {
    //some actions
    LOGGER.warning("test something")
    //some other actions
  }
}
Macro
Another more powerful, but harder to maintain, approach is to build a simple macro. You would define it in a separate source file, preferably in a separate subproject:
import scala.reflect.macros.blackbox.Context
import scala.language.experimental.macros

class LoggerImpl(val c: Context) {
  import c.universe._

  def getClassSymbol(s: Symbol): Symbol = if (s.isClass) s else getClassSymbol(s.owner)

  def logImpl(msg: Expr[String]): Expr[Unit] = {
    val cl = getClassSymbol(c.internal.enclosingOwner).toString
    val time = c.Expr[String](q"new java.util.Date().toString")
    val logline = c.Expr[String](q""" "[" + $cl + " : " + $time + "]" + $msg """)
    c.Expr[Unit](q"println($logline)")
  }
}

object Logger {
  def warning(msg: String): Unit = macro LoggerImpl.logImpl
}
Now you don't need to change the testing class:
class testing {
  def test = {
    //some actions
    Logger.warning("something happened")
    //some other actions
  }
}
and you will see the desired output. This can be a very performant alternative to runtime stack introspection.
I use this technique in my custom classloader project to get the name of the first class up the stack that is not in my package. The general idea is copied from URLClassLoader.
String callerClassName = "";
StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
for (int i = 1; i < stackTrace.length; i++) {
    String candidateClassName = stackTrace[i].getClassName();
    if (!candidateClassName.startsWith("to.be.ignored") &&
        !candidateClassName.startsWith("java")) {
        callerClassName = candidateClassName;
        break;
    }
}
The approach has its drawbacks, since it only gets the name of the class, not the actual class or, even better, the object.
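If you can assume Java 9 or newer, the JDK's StackWalker addresses exactly that drawback: with RETAIN_CLASS_REFERENCE it hands you the actual caller Class, not just its name. A minimal Scala sketch (my own, not from the answers above):

object LOGGER {
  // Java 9+ API; RETAIN_CLASS_REFERENCE is required for getCallerClass to work.
  private val walker =
    StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE)

  def warning(msg: String): Unit = {
    // getCallerClass returns the Class object of whoever invoked warning.
    val caller: Class[_] = walker.getCallerClass
    println(s"[${caller.getName}] $msg")
  }
}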

Creating serializable objects from Scala source code at runtime

To embed Scala as a "scripting language", I need to be able to compile text fragments to simple objects, such as a Function0[Unit], that can be serialised to and deserialised from disk, loaded into the current runtime, and executed.
How would I go about this?
Say for example, my text fragment is (purely hypothetical):
Document.current.elements.headOption.foreach(_.open())
This might be wrapped into the following complete text:
package myapp.userscripts

import myapp.DSL._

object UserFunction1234 extends Function0[Unit] {
  def apply(): Unit = {
    Document.current.elements.headOption.foreach(_.open())
  }
}
What comes next? Should I use IMain to compile this code? I don't want to use the normal interpreter mode, because the compilation should be "context-free" and not accumulate requests.
What I need to get hold of from the compilation is, I guess, the binary class file? In that case, serialisation is straightforward (a byte array). How would I then load that class into the runtime and invoke the apply method?
What happens if the code compiles to multiple auxiliary classes? The example above contains a closure _.open(). How do I make sure I "package" all those auxiliary things into one object to serialize and class-load?
Note: Given that Scala 2.11 is imminent and the compiler API probably changed, I am happy to receive hints as how to approach this problem on Scala 2.11
Here is one idea: use a regular Scala compiler instance. Unfortunately it seems to require hard-disk files both for input and output, so we use temporary files. The output is zipped up into a JAR, which is stored as a byte array (that is what would go into the hypothetical serialization process). We need a special class loader to retrieve the classes again from the extracted JAR.
The following assumes Scala 2.10.3 with the scala-compiler library on the class path:
import scala.tools.nsc
import java.io._
import scala.annotation.tailrec
Wrapping the user-provided code in a function class with a synthetic name that is incremented for each new fragment:
val packageName = "myapp"

var userCount = 0

def mkFunName(): String = {
  val c = userCount
  userCount += 1
  s"Fun$c"
}

def wrapSource(source: String): (String, String) = {
  val fun = mkFunName()
  val code = s"""package $packageName
                |
                |class $fun extends Function0[Unit] {
                |  def apply(): Unit = {
                |    $source
                |  }
                |}
                |""".stripMargin
  (fun, code)
}
A function to compile a source fragment and return the byte array of the resulting jar:
/** Compiles a source code consisting of a body which is wrapped in a `Function0`
  * apply method, and returns the function's class name (without package) and the
  * raw jar file produced in the compilation.
  */
def compile(source: String): (String, Array[Byte]) = {
  val set = new nsc.Settings
  val d = File.createTempFile("temp", ".out")
  d.delete(); d.mkdir()
  set.d.value = d.getPath
  set.usejavacp.value = true
  val compiler = new nsc.Global(set)
  val f = File.createTempFile("temp", ".scala")
  val out = new BufferedOutputStream(new FileOutputStream(f))
  val (fun, code) = wrapSource(source)
  out.write(code.getBytes("UTF-8"))
  out.flush(); out.close()
  val run = new compiler.Run()
  run.compile(List(f.getPath))
  f.delete()
  val bytes = packJar(d)
  deleteDir(d)
  (fun, bytes)
}

def deleteDir(base: File): Unit = {
  base.listFiles().foreach { f =>
    if (f.isFile) f.delete()
    else deleteDir(f)
  }
  base.delete()
}
Note: Doesn't handle compiler errors yet!
The packJar method uses the compiler output directory and produces an in-memory jar file from it:
// cf. http://stackoverflow.com/questions/1281229
def packJar(base: File): Array[Byte] = {
  import java.util.jar._
  val mf = new Manifest
  mf.getMainAttributes.put(Attributes.Name.MANIFEST_VERSION, "1.0")
  val bs  = new java.io.ByteArrayOutputStream
  val out = new JarOutputStream(bs, mf)

  def add(prefix: String, f: File): Unit = {
    val name0 = prefix + f.getName
    val name  = if (f.isDirectory) name0 + "/" else name0
    val entry = new JarEntry(name)
    entry.setTime(f.lastModified())
    out.putNextEntry(entry)
    if (f.isFile) {
      val in = new BufferedInputStream(new FileInputStream(f))
      try {
        val buf = new Array[Byte](1024)
        @tailrec def loop(): Unit = {
          val count = in.read(buf)
          if (count >= 0) {
            out.write(buf, 0, count)
            loop()
          }
        }
        loop()
      } finally {
        in.close()
      }
    }
    out.closeEntry()
    if (f.isDirectory) f.listFiles.foreach(add(name, _))
  }

  base.listFiles().foreach(add("", _))
  out.close()
  bs.toByteArray
}
A utility function that takes the byte array found in deserialization and creates a map from class names to class byte code:
def unpackJar(bytes: Array[Byte]): Map[String, Array[Byte]] = {
  import java.util.jar._
  import scala.annotation.tailrec

  val in = new JarInputStream(new ByteArrayInputStream(bytes))
  val b  = Map.newBuilder[String, Array[Byte]]

  @tailrec def loop(): Unit = {
    val entry = in.getNextJarEntry
    if (entry != null) {
      if (!entry.isDirectory) {
        val name = entry.getName
        // cf. http://stackoverflow.com/questions/8909743
        val bs = new ByteArrayOutputStream
        var i = 0
        while (i >= 0) {
          i = in.read()
          if (i >= 0) bs.write(i)
        }
        val bytes = bs.toByteArray
        b += mkClassName(name) -> bytes
      }
      loop()
    }
  }
  loop()
  in.close()
  b.result()
}

def mkClassName(path: String): String = {
  require(path.endsWith(".class"))
  path.substring(0, path.length - 6).replace("/", ".")
}
A suitable class loader:
class MemoryClassLoader(map: Map[String, Array[Byte]]) extends ClassLoader {
  override protected def findClass(name: String): Class[_] =
    map.get(name).map { bytes =>
      println(s"defineClass($name, ...)")
      defineClass(name, bytes, 0, bytes.length)
    } .getOrElse(super.findClass(name)) // throws exception
}
And a test case which contains additional classes (closures):
val exampleSource =
  """val xs = List("hello", "world")
    |println(xs.map(_.capitalize).mkString(" "))
    |""".stripMargin

def test(fun: String, cl: ClassLoader): Unit = {
  val clName = s"$packageName.$fun"
  println(s"Resolving class '$clName'...")
  val clazz = Class.forName(clName, true, cl)
  println("Instantiating...")
  val x = clazz.newInstance().asInstanceOf[() => Unit]
  println("Invoking 'apply':")
  x()
}

locally {
  println("Compiling...")
  val (fun, bytes) = compile(exampleSource)
  val map = unpackJar(bytes)
  println("Classes found:")
  map.keys.foreach(k => println(s"  '$k'"))
  val cl = new MemoryClassLoader(map)
  test(fun, cl) // should call `defineClass`
  test(fun, cl) // should find the cached class
}
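To complete the picture, here is a minimal sketch (my own, not part of the answer above) of the "hypothetical serialization process": persisting and restoring the (function name, jar bytes) pair with plain Java serialization:

import java.io._

// Persist the pair returned by compile(...)
def save(fname: String, fun: String, bytes: Array[Byte]): Unit = {
  val out = new ObjectOutputStream(new FileOutputStream(fname))
  try out.writeObject((fun, bytes)) finally out.close()
}

// Restore it later; feed the bytes to unpackJar and MemoryClassLoader as above
def load(fname: String): (String, Array[Byte]) = {
  val in = new ObjectInputStream(new FileInputStream(fname))
  try in.readObject().asInstanceOf[(String, Array[Byte])] finally in.close()
}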

Looking for a good example of polymorphic serialization/deserialization using Jackson with Scala

I'm looking for a good example of polymorphic serialization/deserialization using Jackson with Scala. I got this exception:

Exception in thread "main" org.codehaus.jackson.map.exc.UnrecognizedPropertyException: Unrecognized field "animals" (Class Zoo), not marked as ignorable

after trying the following code:
import org.codehaus.jackson.annotate.{JsonTypeInfo, JsonSubTypes}
import org.codehaus.jackson.annotate.JsonSubTypes.Type

@JsonTypeInfo(
  use = JsonTypeInfo.Id.NAME,
  include = JsonTypeInfo.As.PROPERTY,
  property = "type"
)
@JsonSubTypes(Array(
  new Type(value = classOf[Cat], name = "cat"),
  new Type(value = classOf[Dog], name = "dog")
))
abstract class Animal {
  val name: String = "NoName"
}

class Cat extends Animal {
  val favoriteToy = "edi"
}

class Dog extends Animal {
  val breed = "German Shepherd"
  val color = "brown"
}

class Zoo {
  val animals = new scala.collection.mutable.ListBuffer[Animal]
}
import org.codehaus.jackson.map.ObjectMapper

object Foo {
  def main(args: Array[String]) {
    val mapper = new ObjectMapper()
    mapper.setPropertyNamingStrategy(CamelCaseNamingStrategy)
    val source = scala.io.Source.fromFile("input.json")
    val input = source.mkString
    source.close
    val zoo = mapper.readValue(input, classOf[Zoo])
    println(mapper.writeValueAsString(zoo))
  }

  import org.codehaus.jackson.map.introspect.{AnnotatedField, AnnotatedMethod}
  import org.codehaus.jackson.map.{MapperConfig, PropertyNamingStrategy}

  object CamelCaseNamingStrategy extends PropertyNamingStrategy {
    override def nameForGetterMethod(config: MapperConfig[_], method: AnnotatedMethod, defaultName: String) =
      translate(defaultName)

    override def nameForSetterMethod(config: MapperConfig[_], method: AnnotatedMethod, defaultName: String) =
      translate(defaultName)

    override def nameForField(config: MapperConfig[_], field: AnnotatedField, defaultName: String) =
      translate(defaultName)

    def translate(defaultName: String) = {
      val nameChars = defaultName.toCharArray
      val nameTranslated = new StringBuilder(nameChars.length * 2)
      for (c <- nameChars) {
        if (Character.isUpperCase(c)) {
          nameTranslated.append("_")
        }
        nameTranslated.append(Character.toLowerCase(c))
      }
      nameTranslated.toString
    }
  }
} // closes object Foo (missing in the original snippet)
file: input.json
{
"animals":
[
{"type":"dog","name":"Spike","breed":"mutt","color":"red"},
{"type":"cat","name":"Fluffy","favoriteToy":"spider ring"}
]
}
If you're doing polymorphic deserialization in Scala, I'd strongly recommend using case classes and Jackson's Scala module.
object Test {
  import com.fasterxml.jackson.annotation.JsonSubTypes.Type
  import com.fasterxml.jackson.annotation.{JsonSubTypes, JsonTypeInfo}
  import com.fasterxml.jackson.databind.ObjectMapper
  import com.fasterxml.jackson.module.scala.DefaultScalaModule
  import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper

  @JsonTypeInfo(
    use = JsonTypeInfo.Id.NAME,
    include = JsonTypeInfo.As.PROPERTY,
    property = "type"
  )
  @JsonSubTypes(Array(
    new Type(value = classOf[Cat], name = "cat"),
    new Type(value = classOf[Dog], name = "dog")
  ))
  trait Animal

  case class Dog(name: String, breed: String, leash_color: String) extends Animal
  case class Cat(name: String, favorite_toy: String) extends Animal
  case class Zoo(animals: Iterable[Animal])

  def main(args: Array[String]): Unit = {
    val objectMapper = new ObjectMapper with ScalaObjectMapper
    objectMapper.registerModule(DefaultScalaModule)

    val dogStr = """{"type": "dog", "name": "Spike", "breed": "mutt", "leash_color": "red"}"""
    val catStr = """{"type": "cat", "name": "Fluffy", "favorite_toy": "spider ring"}"""
    val zooStr = s"""{"animals":[$dogStr, $catStr]}"""

    val zoo = objectMapper.readValue[Zoo](zooStr)
    println(zoo)
    // Prints: Zoo(List(Dog(Spike,mutt,red), Cat(Fluffy,spider ring)))
  }
}
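The same annotations drive the serialization direction too: writing a Zoo back out includes the "type" discriminator. A quick hedged check using the mapper defined above (my addition, not from the answer):

val json = objectMapper.writeValueAsString(Zoo(List(Cat("Fluffy", "spider ring"))))
println(json)
// Expected shape (assumption): {"animals":[{"type":"cat","name":"Fluffy","favorite_toy":"spider ring"}]}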
OK, got it. Here is a working example with Scala, based on "Deserialize JSON with Jackson into Polymorphic" by Programmer Bruce:
import org.codehaus.jackson.annotate.JsonSubTypes.Type
import org.codehaus.jackson.annotate.{JsonSubTypes, JsonTypeInfo}

@JsonTypeInfo(
  use = JsonTypeInfo.Id.NAME,
  include = JsonTypeInfo.As.PROPERTY,
  property = "type"
)
@JsonSubTypes(Array(
  new Type(value = classOf[Cat], name = "cat"),
  new Type(value = classOf[Dog], name = "dog")
))
abstract class Animal {
  var name: String = ""
}

class Dog extends Animal {
  var breed = "German Shepherd"
  var color = "brown"
}

class Cat extends Animal {
  var favoriteToy: String = "nothing"
}

class Zoo {
  var animals = new Array[Animal](5)
}

import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility
import org.codehaus.jackson.annotate.JsonMethod
import org.codehaus.jackson.map.{DeserializationConfig, ObjectMapper}

object Foo {
  def main(args: Array[String]) {
    val mapper = new ObjectMapper().setVisibility(JsonMethod.FIELD, Visibility.ANY)
    mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    val source = scala.io.Source.fromFile("/input.json")
    val input = source.mkString
    println("input " + input)
    source.close
    val zoo = mapper.readValue(input, classOf[Zoo])
    println(mapper.writeValueAsString(zoo))
  }
}
file: input.json
{
  "animals": [
    {"type":"dog","name":"Spike","breed":"mutt","color":"red"},
    {"type":"cat","name":"Fluffy","favoriteToy":"spider ring"}
  ]
}