WebSockets with embedded Jetty - Scala

I implemented the example explained in this link, but rewrote it in Scala. Now I am getting a java.lang.NoClassDefFoundError: org/eclipse/jetty/io/nio/AsyncConnection error.
Here is my RoomWebSocketHandler class:
import java.io.IOException
import javax.servlet.http.HttpServletRequest

import org.eclipse.jetty.websocket.WebSocket.Connection
import org.eclipse.jetty.websocket.{WebSocket, WebSocketHandler}

import scala.collection.mutable

class RoomWebSocketHandler extends WebSocketHandler {
  private val webSockets = new mutable.ArrayBuffer[StateWebSocket]()

  override def doWebSocketConnect(request: HttpServletRequest, protocol: String): WebSocket = {
    new StateWebSocket()
  }

  private class StateWebSocket extends WebSocket.OnTextMessage {
    var connection: Connection = _

    def onOpen(connection: Connection) {
      this.connection = connection
      webSockets += this
    }

    def onMessage(data: String) {
      try {
        for (webSocket <- webSockets) {
          webSocket.connection.sendMessage(data)
        }
      } catch {
        case x: IOException => this.connection.close()
      }
    }

    def onClose(closeCode: Int, message: String) {
      webSockets -= this
    }
  }
}
and this is my Main class:
import java.net.InetSocketAddress
import grizzled.slf4j.Logger
import org.eclipse.jetty.server.{Handler, Server}
import org.eclipse.jetty.server.handler.{DefaultHandler, HandlerList, ResourceHandler}
import org.eclipse.jetty.servlet.ServletContextHandler
object Main {
  // assumed logger declaration: the original snippet used `logger` without defining it
  private val logger = Logger(getClass)

  var jettyServer: Option[Server] = None

  def startServer(): Unit = {
    val LOCAL_PORT = 4041
    logger.debug("startServer begin")
    jettyServer match {
      case Some(s) =>
        logger.info("Server is already running")
        logger.debug("startServer end")
        return
      case None =>
        logger.info("Server is not running")
    }
    val server = new Server(new InetSocketAddress("127.0.0.1", LOCAL_PORT))
    server.setStopAtShutdown(true)

    val handlers = new HandlerList()
    val roomWebSocketHandler = new RoomWebSocketHandler()
    roomWebSocketHandler.setHandler(new DefaultHandler())
    handlers.setHandlers(Array[Handler](roomWebSocketHandler, new DefaultHandler()))
    server.setHandler(handlers)

    logger.debug("Starting jetty-server")
    jettyServer = Some(server)
    server.start()
    logger.info("Server started on port: " + LOCAL_PORT)
    logger.debug("startServer end")
  }

  def main(args: Array[String]): Unit = {
    startServer()
  }
}
and here are my dependencies:
dependencies {
    compile 'org.slf4j:slf4j-api:1.7.12'
    compile 'ch.qos.logback:logback-classic:1.1.3'
    compile 'com.h2database:h2:1.4.188'
    compile 'org.clapper:grizzled-slf4j_2.11:1.0.2'
    compile 'org.eclipse.jetty:jetty-webapp:9.3.3.v20150827'
    compile 'org.eclipse.jetty:jetty-websocket:8.1.17.v20150415'
    compile 'org.eclipse.jetty:jetty-http:8.1.17.v20150415'
    compile 'org.eclipse.jetty:jetty-io:8.1.17.v20150415'
    compile 'org.eclipse.jetty:jetty-util:8.1.17.v20150415'
    compile 'org.eclipse.jetty:jetty-continuation:8.1.17.v20150415'
    compile 'org.eclipse.jetty:jetty-server:8.1.17.v20150415'
    compile 'org.eclipse.jetty:jetty-jmx:8.1.17.v20150415'
    compile 'commons-cli:commons-cli:1.3.1'
    compile 'org.scala-lang:scala-library:2.11.7'
    runtime 'javax.servlet:javax.servlet-api:3.1.0'
    testCompile 'org.scalacheck:scalacheck_2.11:1.12.4'
}
Error:
Exception in thread "main" java.lang.NoClassDefFoundError: org/eclipse/jetty/io/nio/AsyncConnection
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:760)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:760)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at org.eclipse.jetty.websocket.WebSocketHandler.<init>(WebSocketHandler.java:32)
at mypackage.RoomWebSocketHandler.<init>(RoomStateWebSocketHandler.scala:15)
at mypackage.Main$.startServer(Main.scala:48)
at mypackage.Main$.main(Main.scala:109)
at mypackage.Main.main(Main.scala)
Caused by: java.lang.ClassNotFoundException: org.eclipse.jetty.io.nio.AsyncConnection
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 29 more

Update: I resolved this by updating all my dependencies to Jetty 9.x and using the examples shown in this link. The root cause appears to be the version mix above: jetty-webapp is on 9.3.3 while the websocket artifacts are on 8.1.17, and Gradle resolves the jetty-io conflict in favour of the newer 9.x jar, which no longer contains the org.eclipse.jetty.io.nio.AsyncConnection class that the Jetty 8 websocket module links against. A sketch of the handler against the Jetty 9 API follows.
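In case it helps anyone, here is a minimal sketch of the Jetty 9 equivalent, assuming org.eclipse.jetty.websocket:websocket-server:9.3.3.v20150827 replaces the 8.x jetty-websocket artifact. The RoomSocket/RoomSockets names are mine, and the broadcast logic is my adaptation of the original handler, so treat this as a starting point rather than the canonical version:

import java.io.IOException
import java.util.concurrent.CopyOnWriteArrayList

import org.eclipse.jetty.websocket.api.{Session, WebSocketAdapter}
import org.eclipse.jetty.websocket.server.WebSocketHandler
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory

import scala.collection.JavaConverters._

object RoomSockets {
  // thread-safe registry of open sessions, shared by all socket instances
  val sessions = new CopyOnWriteArrayList[Session]()
}

class RoomSocket extends WebSocketAdapter {
  override def onWebSocketConnect(sess: Session): Unit = {
    super.onWebSocketConnect(sess)
    RoomSockets.sessions.add(sess)
  }

  override def onWebSocketText(message: String): Unit = {
    try {
      // broadcast to every open session, as the Jetty 8 version did
      for (s <- RoomSockets.sessions.asScala) s.getRemote.sendString(message)
    } catch {
      case _: IOException => getSession.close()
    }
  }

  override def onWebSocketClose(statusCode: Int, reason: String): Unit = {
    RoomSockets.sessions.remove(getSession)
    super.onWebSocketClose(statusCode, reason)
  }
}

class RoomWebSocketHandler extends WebSocketHandler {
  override def configure(factory: WebSocketServletFactory): Unit =
    factory.register(classOf[RoomSocket])
}

Since WebSocketHandler still extends HandlerWrapper in Jetty 9, the Main class can keep wiring it into the HandlerList exactly as before.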

Related

Small number causes java.lang.ClassCastException when snakeyaml deserialized object is passed to Gatling feeder

I'm running a Gatling simulation that uses numeric input from a YAML file to feed its scenario. Everything works when my numeric inputs are large enough that they cannot be parsed as instances of java.lang.Integer, but small numeric values are apparently parsed to Integers and result in a ClassCastException.
import java.io.FileInputStream

import io.gatling.core.Predef.{Feeder, Simulation}
import org.yaml.snakeyaml.Yaml
import org.yaml.snakeyaml.constructor.Constructor
import io.gatling.core.Predef.{scenario, _}

import scala.collection.JavaConversions

class TestClass extends Simulation {
  val yaml = new Yaml(new Constructor(classOf[Holder]))
  val holder = yaml.load(new FileInputStream("src/test/resources/data.yml")).asInstanceOf[Holder]

  scenario("sim").feed(getUserEmulationFeeder(holder))

  def getUserEmulationFeeder(holder: Holder): Feeder[Long] = {
    val iterable = JavaConversions.iterableAsScalaIterable(holder.numbers)
    iterable.map(l => Map("userToEmulate" -> l)).iterator
  }
}
data.yml has the following data:
numbers:
  - 30687965369
  - 31415388869
  - 2
and is being deserialized into:
import scala.beans.BeanProperty

class Holder {
  @BeanProperty var numbers = new java.util.ArrayList[Long]()
}
Removing the 2 fixes the ClassCastException.
The full stacktrace is:
java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at io.gatling.mojo.MainWithArgsInFile.runMain(MainWithArgsInFile.java:50)
at io.gatling.mojo.MainWithArgsInFile.main(MainWithArgsInFile.java:33)
Caused by: java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.Long
at scala.runtime.BoxesRunTime.unboxToLong(BoxesRunTime.java:105)
at com.mercurygate.TestClass$$anonfun$getUserEmulationFeeder$1.apply(TestClass.scala:25)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
at com.mercurygate.TestClass.getUserEmulationFeeder(TestClass.scala:25)
at com.mercurygate.TestClass.<init>(TestClass.scala:21)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at java.lang.Class.newInstance(Class.java:442)
at io.gatling.app.Gatling$.io$gatling$app$Gatling$$$anonfun$1(Gatling.scala:41)
at io.gatling.app.Gatling$lambda$1.apply(Gatling.scala:41)
at io.gatling.app.Gatling$lambda$1.apply(Gatling.scala:41)
at io.gatling.app.Gatling.run(Gatling.scala:92)
at io.gatling.app.Gatling.runIfNecessary(Gatling.scala:75)
at io.gatling.app.Gatling.start(Gatling.scala:65)
at io.gatling.app.Gatling$.start(Gatling.scala:57)
at io.gatling.app.Gatling$.fromArgs(Gatling.scala:49)
at io.gatling.app.Gatling$.main(Gatling.scala:43)
at io.gatling.app.Gatling.main(Gatling.scala)
... 6 more
P.S. Sorry for the complexity of the example. It's only when I combine snakeyaml, gatling, and the small input that I get the error.
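For what it's worth, the usual explanation is type erasure: the Long in java.util.ArrayList[Long] is erased, so SnakeYAML is free to put whatever it parses into the list, and YAML integers that fit in 32 bits become java.lang.Integer while larger ones become java.lang.Long. The feeder then unboxes each element as Long and the Integer blows up. A sketch of a workaround under that diagnosis, declaring the list over java.lang.Number and converting explicitly (FeederFix is a hypothetical name, and Gatling's Feeder[Long] is, as far as I recall, an alias for Iterator[Map[String, Long]]):

import scala.beans.BeanProperty
import scala.collection.JavaConversions

class Holder {
  // Number accepts whatever SnakeYAML produced, Integer or Long
  @BeanProperty var numbers = new java.util.ArrayList[Number]()
}

object FeederFix {
  // mirrors getUserEmulationFeeder from the question
  def getUserEmulationFeeder(holder: Holder): Iterator[Map[String, Long]] = {
    val iterable = JavaConversions.iterableAsScalaIterable(holder.numbers)
    // longValue() works for both Integer and Long elements
    iterable.map(n => Map("userToEmulate" -> n.longValue())).iterator
  }
}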

java.lang.NoClassDefFoundError while running Scala code for a UDF

I am trying to write a UDF in Scala that returns all the months between two given dates. This is what I have written:
package com.company.datediff

import org.apache.hadoop.hive.ql.exec.UDF
import java.time._

class hive_udf extends UDF {
  def evaluate(date1: String, date2: String): String = {
    val s1 = LocalDate.parse(date1)
    val s2 = LocalDate.parse(date2)
    val p = Period.between(s1, s2)
    val l = p.getMonths()
    val min1 = s1.getMonthValue()
    val max1 = s2.getMonthValue()
    var arr1 = ""
    for (i <- min1 to max1) {
      arr1 = arr1.concat("," + i)
    }
    /* var i = min1
       while (i <= max1) {
         arr1 = arr1.concat("," + i)
       } */
    return arr1
  }
}
When running this code without the for loop, it runs perfectly fine. After including the for loop, I am getting 'java.lang.NoClassDefFoundError' and
Execution Error, return code -101 from
'org.apache.hadoop.hive.ql.exec.FunctionTask. scala/Function1'
Below are the details of the full error:
java.lang.NoClassDefFoundError: scala/Function1
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.hadoop.hive.ql.exec.Registry.registerToSessionRegistry(Registry.java:518)
at org.apache.hadoop.hive.ql.exec.Registry.registerPermanentFunction(Registry.java:207)
at org.apache.hadoop.hive.ql.exec.FunctionRegistry.registerPermanentFunction(FunctionRegistry.java:1536)
at org.apache.hadoop.hive.ql.exec.FunctionTask.createPermanentFunction(FunctionTask.java:136)
at org.apache.hadoop.hive.ql.exec.FunctionTask.execute(FunctionTask.java:75)
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:89)
at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1748)
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1494)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1291)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1158)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1148)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:217)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:169)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:380)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:740)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:685)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:233)
at org.apache.hadoop.util.RunJar.main(RunJar.java:148)
Caused by: java.lang.ClassNotFoundException: scala.Function1
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 26 more
FAILED: Execution Error, return code -101 from org.apache.hadoop.hive.ql.exec.FunctionTask. scala/Function1
I have limited exposure to Java and Scala, so I'm not sure where I am going wrong. Any help is appreciated. Thanks.
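The stack trace itself points at the cause: `for (i <- min1 to max1)` compiles to a `Range.foreach` call taking a `scala.Function1`, so the generated class file references scala/Function1, and Hive's classpath has no scala-library jar. The robust fix is to ship scala-library with the UDF, either as a fat jar or via `ADD JAR /path/to/scala-library.jar;` in the Hive session (path hypothetical). A sketch that simply avoids the closure instead is a plain while loop, which the commented-out attempt in the question was close to, it was only missing the increment:

package com.company.datediff

import org.apache.hadoop.hive.ql.exec.UDF
import java.time._

class hive_udf extends UDF {
  def evaluate(date1: String, date2: String): String = {
    val min1 = LocalDate.parse(date1).getMonthValue()
    val max1 = LocalDate.parse(date2).getMonthValue()
    var arr1 = ""  // keeps the original leading-comma output, e.g. ",1,2,3"
    var i = min1
    // a plain while loop compiles to a jump, not to a Function1 closure
    while (i <= max1) {
      arr1 = arr1.concat("," + i)
      i += 1  // the commented-out loop in the question lacked this, so it never terminated
    }
    arr1
  }
}

Even so, other Scala constructs can reintroduce references to the runtime, so shipping scala-library alongside the UDF jar is the safer habit.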

Failed to read data from CSV in Spark using the Spark session

I am new to Spark. While trying to read data from a CSV file using the Spark session, I am facing this error:
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/sql/sources/HadoopFsRelationProvider
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at java.util.ServiceLoader$LazyIterator.nextService(ServiceLoader.java:370)
at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:404)
at java.util.ServiceLoader$1.next(ServiceLoader.java:480)
at scala.collection.convert.Wrappers$JIteratorWrapper.next(Wrappers.scala:42)
at scala.collection.Iterator$class.foreach(Iterator.scala:727)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
at scala.collection.TraversableLike$class.filter(TraversableLike.scala:263)
at scala.collection.AbstractTraversable.filter(Traversable.scala:105)
at org.apache.spark.sql.execution.datasources.DataSource.lookupDataSource(DataSource.scala:126)
at org.apache.spark.sql.execution.datasources.DataSource.providingClass$lzycompute(DataSource.scala:78)
at org.apache.spark.sql.execution.datasources.DataSource.providingClass(DataSource.scala:78)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:310)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:149)
at org.apache.spark.sql.DataFrameReader.csv(DataFrameReader.scala:401)
at org.apache.spark.sql.DataFrameReader.csv(DataFrameReader.scala:342)
at ReadFromCsv$.main(ReadFromCsv.scala:22)
at ReadFromCsv.main(ReadFromCsv.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:144)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.sql.sources.HadoopFsRelationProvider
Code:
object ReadFromCsv {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("InputFromFile").setMaster("local")
    val session = SparkSession.builder()
      .config(conf = conf)
      .master("local")
      .appName("Spark CSV Reader")
      .getOrCreate

    val df = session.read
      .format("com.databricks.spark.csv")
      .option("header", "true")
      .csv("file.csv")

    df.createOrReplaceTempView("train")
    df.cache()

    val resultsCsvDF = session.sql("SELECT * from train ")
    resultsCsvDF.show()
  }
}
Library dependencies used:
- org.apache.spark :: spark-sql_2.10, spark-hive_2.10, spark-core_2.10
- com.databricks :: spark-csv_2.10
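HadoopFsRelationProvider existed in Spark 1.x and was removed in Spark 2.0, so this error typically means a Spark 1.x-era data-source package (here com.databricks:spark-csv) is mixed with a Spark 2.x runtime, the one providing SparkSession. Spark 2.x ships its own CSV source, so a minimal sketch, assuming a consistent Spark 2.x spark-sql dependency and the databricks artifact dropped, would be:

import org.apache.spark.sql.SparkSession

object ReadFromCsv {
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .master("local")
      .appName("Spark CSV Reader")
      .getOrCreate()

    // built-in CSV source: no format("com.databricks.spark.csv") needed on Spark 2.x
    val df = session.read
      .option("header", "true")
      .csv("file.csv")

    df.createOrReplaceTempView("train")
    val resultsCsvDF = session.sql("SELECT * FROM train")
    resultsCsvDF.show()
  }
}

Also worth checking that spark-sql, spark-hive, and spark-core are all pinned to the same version.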

Getting NoClassDefFoundError for HBaseContext

Even after adding the dependencies at runtime, I'm getting the following error:
Exception in thread "main" java.lang.NoClassDefFoundError: com/cloudera/spark/hbase/HBaseContext
at Spark_HBase.SparkHBaseExample$.main(SparkHBaseExample.scala:36)
at Spark_HBase.SparkHBaseExample.main(SparkHBaseExample.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:358)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.ClassNotFoundException: com.cloudera.spark.hbase.HBaseContext
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
Code:
import com.cloudera.spark.hbase.HBaseContext
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{Scan, HTable, Get}
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkContext, SparkConf}

object SparkHBaseExample {
  def main(args: Array[String]) {
    val sparkConf = new SparkConf().setAppName("HBaseSpark").setMaster("local[2]")
    val sc = new SparkContext(sparkConf)

    val conf = HBaseConfiguration.create()
    conf.set(TableInputFormat.INPUT_TABLE, "tmp")

    var table = new HTable(conf, "tmp")
    var hBaseRDD = sc.newAPIHadoopRDD(
      conf,
      classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])

    var g = new Get(Bytes.toBytes("abc"))
    var result = table.get(g)
    var value = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("val"))
    var name = Bytes.toString(value)
    println("############################################################" + name)

    val hbaseContext = new HBaseContext(sc, conf)
    var scan = new Scan()
    scan.setCaching(100)
    var getRdd = hbaseContext.hbaseRDD("tmp", scan)
    println("############################GETRDD################################")
    getRdd.foreach(println(_))
  }
}
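A NoClassDefFoundError out of spark-submit usually means the class was on the compile classpath but its jar was not shipped with the application at runtime: com.cloudera.spark.hbase.HBaseContext lives in Cloudera's SparkOnHBase jar, which the driver and executors both need to see. A sketch of the submit command, with hypothetical jar paths:

# ship the SparkOnHBase jar (and HBase client jars) with the app; paths are hypothetical
spark-submit \
  --class Spark_HBase.SparkHBaseExample \
  --master local[2] \
  --jars /path/to/spark-hbase.jar,/path/to/hbase-client.jar \
  your-application.jar

Building a fat jar (e.g. with sbt-assembly) that bundles the dependency is an equivalent alternative.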

Why is this not giving a compilation error?

I have the below piece of code:
object SubClass extends MyTrait {
  private[this] val a = 10

  def main(args: Array[String]) {
    println(a)
  }
}

trait MyTrait {
  protected val a = 5
}
And it gives the following runtime error. Can somebody explain why this isn't caught at compile time?
Exception in thread "main" java.lang.ClassFormatError: Duplicate field name&signature in class file SubClass$
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
at SubClass.main(TraitTest.scala)
Because software has bugs?
https://issues.scala-lang.org/browse/SI-7475
That would be my guess.
The related ticket has received recent attention:
https://issues.scala-lang.org/browse/SI-2568
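Until the compiler bug is fixed, a sketch of a workaround is to stop shadowing the trait field with a same-named private[this] val: either rename it, or override the trait member so that only one field named a is emitted into SubClass$:

object SubClass extends MyTrait {
  // overriding replaces the inherited field rather than duplicating its name
  override protected val a = 10

  def main(args: Array[String]): Unit = {
    println(a)  // prints 10
  }
}

trait MyTrait {
  protected val a = 5
}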