java.lang.NoClassDefFoundError: org/apache/flink/streaming/api/scala/StreamExecutionEnvironment

package com.knoldus

import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time

object SocketWindowWordCount {

  def main(args: Array[String]): Unit = {
    var hostname: String = "localhost"
    var port: Int = 9000

    try {
      val params = ParameterTool.fromArgs(args)
      hostname = if (params.has("hostname")) params.get("hostname") else "localhost"
      port = params.getInt("port")
    } catch {
      case e: Exception => {
        System.err.println("No port specified. Please run 'SocketWindowWordCount " +
          "--hostname <hostname> --port <port>', where hostname (localhost by default) and port " +
          "is the address of the text server")
        System.err.println("To start a simple text server, run 'netcat -l <port>' " +
          "and type the input text into the command line")
        return
      }
    }

    // get the execution environment
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // get input data by connecting to the socket
    val text: DataStream[String] = env.socketTextStream(hostname, port, '\n')

    // parse the data, group it, window it, and aggregate the counts
    val windowCounts = text
      .flatMap { w => w.split("\\s") }
      .map { w => WordWithCount(w, 1) }
      .keyBy("word")
      .timeWindow(Time.seconds(5))
      .sum("count")

    // print the results with a single thread, rather than in parallel
    windowCounts.print().setParallelism(1)

    env.execute("Socket Window WordCount")
  }

  /** Data type for words with count */
  case class WordWithCount(word: String, count: Long)
}
When I run this code in IntelliJ, I get the following error:
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/flink/streaming/api/scala/StreamExecutionEnvironment$
at com.knoldus.SocketWindowWordCount$.main(SocketWindowWordCount.scala:43)
at com.knoldus.SocketWindowWordCount.main(SocketWindowWordCount.scala)
Caused by: java.lang.ClassNotFoundException: org.apache.flink.streaming.api.scala.StreamExecutionEnvironment$
at java.base/jdk.internal.loader.BuiltinClassLoader.loadClass(BuiltinClassLoader.java:581)
at java.base/jdk.internal.loader.ClassLoaders$AppClassLoader.loadClass(ClassLoaders.java:178)
at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:521)
... 2 more

Select the menu item "Run" => "Edit Configurations...",
then in the "Build and run" section select "Modify options" => Java => "Add dependencies with 'Provided' scope to classpath" in your local run configuration.
This way you don't have to remove the <scope>provided</scope>.

I resolved this by removing the <scope>provided</scope> that was present on the dependency in the Maven pom.
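For reference, the dependency in question typically looks like the fragment below in the pom.xml; the artifact name and version here are illustrative guesses at a standard Flink Scala setup, not taken from the question:
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-streaming-scala_2.12</artifactId>
    <version>1.13.2</version>
    <!-- Either remove the line below, or keep it and enable the IDE option described above -->
    <scope>provided</scope>
</dependency>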

Related

Problems with database connection in Ktor!!! I just don't understand why Ktor does not see the application.conf jbdc path

I'm having a problem with my web project. It is my first time connecting to a database, and I want to connect just to see it working. I use Postgres and have set up a database. I try to connect using the following code:
Database settings
package com.testreftul

import com.testreftul.model.Orders
import com.testreftul.model.Products
import com.testreftul.model.User
import com.typesafe.config.ConfigFactory
import io.ktor.server.config.*
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.SchemaUtils
import org.jetbrains.exposed.sql.transactions.transaction

object DbSettings {
    private val appConfig = HoconApplicationConfig(ConfigFactory.load())
    private var dbUrl = appConfig.property("jbdc.url").getString()
    private var dbUser = appConfig.property("jbdc.username").getString()
    private var dbPassword = appConfig.property("jbdc.password").getString()

    fun init(dbUrl: String, dbUser: String, dbPassword: String) {
        this.dbUrl = dbUrl
        this.dbUser = dbUser
        this.dbPassword = dbPassword
        pgConnection()
        transaction {
            SchemaUtils.create(User, Products, Orders)
        }
    }

    private fun pgConnection() = Database.connect(
        url = dbUrl,
        driver = "org.postgresql.Driver",
        user = dbUser,
        password = dbPassword
    )

    suspend fun <T> dbQuery(block: () -> T): T =
        withContext(Dispatchers.IO) {
            transaction {
                block()
            }
        }
}
Database plugin
package com.testreftul.plugins

import com.testreftul.DbSettings
import io.ktor.server.application.*

fun Application.connectDatabase() {
    val url = environment.config.property("jdbc.url").getString()
    val username = environment.config.property("jdbc.username").getString()
    val password = environment.config.property("jdbc.password").getString()
    DbSettings.init(url, username, password)
}
Application.conf
ktor {
    deployment {
        port = 8080
        port = ${?PORT}
    }
    application {
        modules = [ com.testreftul.ApplicationKt.module ]
    }
}
jdbc {
    url = "jdbc:postgresql://localhost:5432/restest"
    username = "postgres"
    password = "admin"
}
build.gradle.kts
val ktor_version: String by project
val kotlin_version: String by project
val logback_version: String by project
val exposed_version: String by project
val postgresql_jdbc: String by project

plugins {
    application
    kotlin("jvm") version "1.6.20"
    id("org.jetbrains.kotlin.plugin.serialization") version "1.6.20"
}

group = "com.testreftul"
version = "0.0.1"

application {
    mainClass.set("io.ktor.server.netty.EngineMain")
    val isDevelopment: Boolean = project.ext.has("development")
    applicationDefaultJvmArgs = listOf("-Dio.ktor.development=$isDevelopment")
}

repositories {
    mavenCentral()
    maven { url = uri("https://maven.pkg.jetbrains.space/public/p/ktor/eap") }
}

dependencies {
    implementation("io.ktor:ktor-server-core-jvm:$ktor_version")
    implementation("io.ktor:ktor-server-content-negotiation-jvm:$ktor_version")
    implementation("io.ktor:ktor-serialization-kotlinx-json-jvm:$ktor_version")
    implementation("io.ktor:ktor-server-netty-jvm:$ktor_version")
    implementation("ch.qos.logback:logback-classic:$logback_version")
    implementation("org.jetbrains.exposed:exposed-core:$exposed_version")
    implementation("org.jetbrains.exposed:exposed-dao:$exposed_version")
    implementation("org.jetbrains.exposed:exposed-jdbc:$exposed_version")
    implementation("org.jetbrains.exposed:exposed-java-time:$exposed_version")
    implementation("org.postgresql:postgresql:$postgresql_jdbc")
    testImplementation("io.ktor:ktor-server-tests-jvm:$ktor_version")
    testImplementation("org.jetbrains.kotlin:kotlin-test-junit:$kotlin_version")
}
gradle.properties
ktor_version=2.0.0-beta-1
kotlin_version=1.6.20
logback_version=1.2.3
kotlin.code.style=official
exposed_version=0.37.3
postgresql_jdbc=42.3.3
Application.kt
package com.testreftul

import io.ktor.server.application.*
import com.testreftul.plugins.*

fun main(args: Array<String>): Unit =
    io.ktor.server.netty.EngineMain.main(args)

@Suppress("unused") // application.conf references the main function. This annotation prevents the IDE from marking it as unused.
fun Application.module() {
    connectDatabase()
    configureRouting()
    configureSerialization()
}
I installed all the plugins that were needed and created a database; I also configured my IDE and connected to the database with the Postgres dialect. When I run it, the console returns Caused by: io.ktor.server.config.ApplicationConfigurationException: Property jbdc.url not found.
> Task :run FAILED
2022-04-06 01:55:22.805 [main] TRACE Application - {
# application.conf # file:/C:/Users/eljan/IdeaProjects/ktor-restful1/build/resources/main/application.conf: 6
"application" : {
# application.conf # file:/C:/Users/eljan/IdeaProjects/ktor-restful1/build/resources/main/application.conf: 7
"modules" : [
# application.conf # file:/C:/Users/eljan/IdeaProjects/ktor-restful1/build/resources/main/application.conf: 7
"com.testreftul.ApplicationKt.module"
]
},
# application.conf # file:/C:/Users/eljan/IdeaProjects/ktor-restful1/build/resources/main/application.conf: 2
"deployment" : {
# application.conf # file:/C:/Users/eljan/IdeaProjects/ktor-restful1/build/resources/main/application.conf: 3
"port" : 8080
},
# Content hidden
"security" : "***"
}
2022-04-06 01:55:22.976 [main] INFO Application - Autoreload is disabled because the development mode is off.
Exception in thread "main" java.lang.ExceptionInInitializerError
at com.testreftul.plugins.DatabaseKt.connectDatabase(Database.kt:11)
at com.testreftul.ApplicationKt.module(Application.kt:11)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:78)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:567)
at kotlin.reflect.jvm.internal.calls.CallerImpl$Method.callMethod(CallerImpl.kt:97)
at kotlin.reflect.jvm.internal.calls.CallerImpl$Method$Static.call(CallerImpl.kt:106)
at kotlin.reflect.jvm.internal.KCallableImpl.call(KCallableImpl.kt:108)
at kotlin.reflect.jvm.internal.KCallableImpl.callDefaultMethod$kotlin_reflection(KCallableImpl.kt:159)
at kotlin.reflect.jvm.internal.KCallableImpl.callBy(KCallableImpl.kt:112)
at io.ktor.server.engine.internal.CallableUtilsKt.callFunctionWithInjection(CallableUtils.kt:119)
at io.ktor.server.engine.internal.CallableUtilsKt.executeModuleFunction(CallableUtils.kt:36)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading$launchModuleByName$1.invoke(ApplicationEngineEnvironmentReloading.kt:333)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading$launchModuleByName$1.invoke(ApplicationEngineEnvironmentReloading.kt:332)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading.avoidingDoubleStartupFor(ApplicationEngineEnvironmentReloading.kt:357)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading.launchModuleByName(ApplicationEngineEnvironmentReloading.kt:332)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading.access$launchModuleByName(ApplicationEngineEnvironmentReloading.kt:32)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading$instantiateAndConfigureApplication$1.invoke(ApplicationEngineEnvironmentReloading.kt:313)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading$instantiateAndConfigureApplication$1.invoke(ApplicationEngineEnvironmentReloading.kt:311)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading.avoidingDoubleStartup(ApplicationEngineEnvironmentReloading.kt:339)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading.instantiateAndConfigureApplication(ApplicationEngineEnvironmentReloading.kt:311)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading.createApplication(ApplicationEngineEnvironmentReloading.kt:144)
at io.ktor.server.engine.ApplicationEngineEnvironmentReloading.start(ApplicationEngineEnvironmentReloading.kt:278)
at io.ktor.server.netty.NettyApplicationEngine.start(NettyApplicationEngine.kt:183)
at io.ktor.server.netty.EngineMain.main(EngineMain.kt:26)
Caused by: io.ktor.server.config.ApplicationConfigurationException: Property jbdc.url not found.
at io.ktor.server.config.HoconApplicationConfig.property(HoconApplicationConfig.kt:15)
at com.testreftul.DbSettings.<clinit>(DbSettings.kt:16)
... 26 more
Caused by: io.ktor.server.config.ApplicationConfigurationException: Property jbdc.url not found.
Execution failed for task ':run'.
> Process 'command 'C:\Users\eljan\.jdks\corretto-16.0.2\bin\java.exe'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
You have a typo in the config property names where the default values for the private variables are defined. It should be jdbc instead of jbdc:
private var dbUrl = appConfig.property("jbdc.url").getString()
private var dbUser = appConfig.property("jbdc.username").getString()
private var dbPassword = appConfig.property("jbdc.password").getString()
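With the typo fixed, those defaults read:
private var dbUrl = appConfig.property("jdbc.url").getString()
private var dbUser = appConfig.property("jdbc.username").getString()
private var dbPassword = appConfig.property("jdbc.password").getString()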

testcontainer initializationError while running a test suite

I have multiple test classes running the same docker-compose file with Testcontainers.
The suite fails with initializationError, although each test passes when run separately.
Here is the relevant part of the stack trace, occurring during the second test.
./gradlew e2e:test -i
io.foo.e2e.AuthTest > initializationError FAILED
org.testcontainers.containers.ContainerLaunchException: Container startup failed
at org.testcontainers.containers.GenericContainer.doStart(GenericContainer.java:330)
at org.testcontainers.containers.GenericContainer.start(GenericContainer.java:311)
at org.testcontainers.containers.DockerComposeContainer.startAmbassadorContainers(DockerComposeContainer.java:331)
at org.testcontainers.containers.DockerComposeContainer.start(DockerComposeContainer.java:178)
at io.foo.e2e.bases.BaseE2eTest$Companion.beforeAll$e2e(BaseE2eTest.kt:62)
at io.foo.e2e.bases.BaseE2eTest.beforeAll$e2e(BaseE2eTest.kt)
...
Caused by:
org.rnorth.ducttape.RetryCountExceededException: Retry limit hit with exception
at org.rnorth.ducttape.unreliables.Unreliables.retryUntilSuccess(Unreliables.java:88)
at org.testcontainers.containers.GenericContainer.doStart(GenericContainer.java:323)
... 83 more
Caused by:
org.testcontainers.containers.ContainerLaunchException: Could not create/start container
at org.testcontainers.containers.GenericContainer.tryStart(GenericContainer.java:497)
at org.testcontainers.containers.GenericContainer.lambda$doStart$0(GenericContainer.java:325)
at org.rnorth.ducttape.unreliables.Unreliables.retryUntilSuccess(Unreliables.java:81)
... 84 more
Caused by:
org.testcontainers.containers.ContainerLaunchException: Aborting attempt to link to container btraq5fzahac_worker_1 as it is not running
at org.testcontainers.containers.GenericContainer.applyConfiguration(GenericContainer.java:779)
at org.testcontainers.containers.GenericContainer.tryStart(GenericContainer.java:359)
... 86 more
It seems to me that the second test doesn't wait for the first to shut down the previous containers.
Here is the base class that all tests inherit from. It is responsible for spinning up the containers.
open class BaseE2eTest {
    ...
    companion object {
        const val A = "containera_1"
        const val B = "containerb_1"
        const val C = "containerc_1"

        val dockerCompose: KDockerComposeContainer by lazy {
            defineDockerCompose()
                .withLocalCompose(true)
                .withExposedService(A, 8080, Wait.forListeningPort())
                .withExposedService(B, 8081)
                .withExposedService(C, 5672, Wait.forListeningPort())
        }

        class KDockerComposeContainer(file: File) : DockerComposeContainer<KDockerComposeContainer>(file)

        private fun defineDockerCompose() = KDockerComposeContainer(File("../docker-compose.yml"))

        @BeforeAll
        @JvmStatic
        internal fun beforeAll() {
            dockerCompose.start()
        }

        @AfterAll
        @JvmStatic
        internal fun afterAll() {
            dockerCompose.stop()
        }
    }
}
docker-compose version 1.27.4, build 40524192
testcontainer 1.15.2
testcontainers:junit-jupiter:1.15.2
After watching this talk, I realized that my Testcontainers instantiation approach with JUnit 5 was wrong. In the version above, dockerCompose lives in the companion object of the base class, so the same lazily created container object is shared by every test class: once the first class's @AfterAll stops it, the next class tries to restart the already-stopped instance. With TestInstance.Lifecycle.PER_CLASS, @BeforeAll/@AfterAll can be instance methods, so each test class gets its own container object.
Here is the working code:
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
open class BaseE2eTest {
    ...
    val A = "containera_1"
    val B = "containerb_1"
    val C = "containerc_1"

    val dockerCompose: KDockerComposeContainer by lazy {
        defineDockerCompose()
            .withLocalCompose(true)
            .withExposedService(A, 8080, Wait.forListeningPort())
            .withExposedService(B, 8081)
            .withExposedService(C, 5672, Wait.forListeningPort())
    }

    class KDockerComposeContainer(file: File) : DockerComposeContainer<KDockerComposeContainer>(file)

    private fun defineDockerCompose() = KDockerComposeContainer(File("../docker-compose.yml"))

    @BeforeAll
    fun beforeAll() {
        dockerCompose.start()
    }

    @AfterAll
    fun afterAll() {
        dockerCompose.stop()
    }
}
Now the test suite passes.
For anyone stumbling upon this: I had a similar problem that suddenly occurred after months of everything working perfectly fine. It was caused by this problem: Docker "ERROR: could not find an available, non-overlapping IPv4 address pool among the defaults to assign to the network".
After removing all unused networks, the problem was fixed.
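If it saves someone a search, unused networks can be removed with Docker's standard cleanup command:
docker network prune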

How to stream downloads using Scalaj-Http and Hadoop HttpFs

My question is how to use a buffered stream with Scalaj-Http.
I have written the following code, which is a complete working example that downloads a file from Hadoop HDFS using HttpFS. My goal is to handle very large files, and that will require a buffered approach with multiple I/O writes to a local file.
I have not been able to find documentation on how to use a stream with the Scalaj-Http interface. I am interested in an example for both download and upload that can handle large multi-GB files. My code below uses in-memory buffering, which is appropriate only for prototyping.
import scalaj.http._
import ujson.Js
import java.text.SimpleDateFormat
import java.net.SocketTimeoutException
import java.io.InputStream
import java.io.BufferedOutputStream
import java.io.FileOutputStream
import java.io.FileNotFoundException

object CopyFileFromHdfs {
  def main(args: Array[String]) {
    val host = "hadoop.example.com"
    val user = "root"
    var dstFile = ""
    var srcFile = ""
    val operation = "OPEN"
    val port = 14000

    System.setProperty("sun.net.http.allowRestrictedHeaders", "true")

    if (args.length != 2) {
      println("Error: Missing or too many arguments")
      println("Usage: CopyFileFromHdfs <srcfile> <dstfile>")
      System.exit(1)
    }

    srcFile = args(0)
    dstFile = args(1)

    // ********************************************************************************
    // Create the URL string that we will use to connect to Hadoop HttpFS
    //
    // The string will look like this:
    // http://root@123.456.789.012:14000/webhdfs/v1/?user.name=root&op=OPEN
    // ********************************************************************************
    val url = makeHttpfsUrl(host, user, srcFile, operation, port)

    // ********************************************************************************
    // Using HTTP, call the HttpFS server
    //
    // Exceptions:
    //   java.net.SocketTimeoutException
    //   java.net.UnknownHostException
    //   java.lang.IllegalArgumentException
    // Remote Exceptions:
    //   java.io.FileNotFoundException
    //   com.sun.jersey.api.NotFoundException
    // ********************************************************************************
    try {
      var response = Http(url)
        .timeout(connTimeoutMs = 1000, readTimeoutMs = 5000)
        .asBytes

      // ********************************************************************************
      // Check for an error. We are expecting an HTTP 200 response
      // ********************************************************************************
      if (response.code < 200 || response.code > 299) {
        val data = ujson.read(response.body)
        printf("Error: Cannot download file: %s\n", dstFile)
        println(removeQuotes(data("RemoteException")("message").str))
        println(removeQuotes(data("RemoteException")("exception").str))
        System.exit(1)
      }

      val fos = new FileOutputStream(dstFile)
      val bs = new BufferedOutputStream(fos)
      bs.write(response.body, 0, response.body.length)
      bs.close()
      fos.close()
    } catch {
      case e: SocketTimeoutException => {
        printf("Error: Cannot connect to host %s on port %d\n", host, port)
        println(e)
        System.exit(1)
      }
      case e: Exception => {
        printf("Error (other): Cannot download file %s\n", srcFile)
        println(e)
        System.exit(1)
      }
    }

    printf("Success: File downloaded. %s -> %s\n", srcFile, dstFile)
    System.exit(0)
  }

  // ********************************************************************************
  // The Json strings are surrounded by quotes.
  // This function will remove them (only at the start and the end).
  // ********************************************************************************
  def removeQuotes(str: String): String = {
    // This expression will delete quotes at the beginning and end of a string
    return str.replaceAll("^\"|\"$", "")
  }

  // ********************************************************************************
  // Create the URL string that we will use to connect to Hadoop HttpFS
  //
  // The string will look like this:
  // http://root@123.456.789.012:14000/webhdfs/v1/?user.name=root&op=LISTSTATUS
  // ********************************************************************************
  def makeHttpfsUrl(
      host: String,
      user: String,
      hdfsPath: String,
      operation: String,
      port: Integer): String = {
    var url = "http://" + user + "@" + host + ":" + port.toString + "/webhdfs/v1"

    if (hdfsPath(0) == '/')
      url += hdfsPath
    else
      url += "/" + hdfsPath

    url += "?user.name=" + user + "&op=" + operation
    return url
  }
}
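For the download side, here is a minimal sketch of a buffered, streaming approach, assuming Scalaj-Http's execute(parser) overload, which hands the response body to a parser function as a java.io.InputStream instead of materializing it in memory (the chunk size and timeouts below are arbitrary choices, not requirements):
import scalaj.http._
import java.io.{BufferedOutputStream, FileOutputStream, InputStream}

object StreamingDownload {
  // Copy the HTTP response body to dstFile in 64 KB chunks, so only one
  // buffer's worth of data is held in memory at a time.
  def downloadToFile(url: String, dstFile: String): Int = {
    val response = Http(url)
      .timeout(connTimeoutMs = 1000, readTimeoutMs = 60000)
      .execute { inputStream: InputStream =>
        val out = new BufferedOutputStream(new FileOutputStream(dstFile))
        try {
          val buffer = new Array[Byte](65536)
          var n = inputStream.read(buffer)
          while (n != -1) {
            out.write(buffer, 0, n)
            n = inputStream.read(buffer)
          }
        } finally {
          out.close()
        }
      }
    response.code
  }
}
A streaming upload would need the request body fed from an InputStream as well (e.g. via a multipart request); the sketch above only replaces the in-memory asBytes call on the download path.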

How to publish data to a topic using MQTT

I used this docker image to install Mosquitto MQTT.
Now it's running and showing the following message in the terminal:
1515680808: mosquitto version 1.4.14 (build date Mon, 10 Jul 2017 23:48:43 +0100) starting
1515680808: Config loaded from /mqtt/config/mosquitto.conf.
1515680808: Opening websockets listen socket on port 9001.
1515680808: Opening ipv4 listen socket on port 1883.
1515680808: Opening ipv6 listen socket on port 1883.
Then I created a simple Maven project:
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming-mqtt_2.11</artifactId>
    <version>1.6.3</version>
</dependency>
I tried to publish some data to a topic using the code shown below, pointing to localhost:1883 as the MqttBrokerUrl and to the topic test. However, I get this error:
Exception in thread "main" java.lang.NullPointerException
    at org.eclipse.paho.client.mqttv3.MqttConnectOptions.validateURI(MqttConnectOptions.java:457)
    at org.eclipse.paho.client.mqttv3.MqttAsyncClient.<init>(MqttAsyncClient.java:273)
    at org.eclipse.paho.client.mqttv3.MqttAsyncClient.<init>(MqttAsyncClient.java:167)
    at org.eclipse.paho.client.mqttv3.MqttClient.<init>(MqttClient.java:224)
    at org.test.MQTTPublisher$.main(MQTTPublisher.scala:37)
    at org.test.MQTTPublisher.main(MQTTPublisher.scala)
Code:
package org.test

import org.apache.log4j.{Level, Logger}
import org.eclipse.paho.client.mqttv3._
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.mqtt._
import org.apache.spark.SparkConf

object MQTTPublisher {
  def main(args: Array[String]) {
    if (args.length < 2) {
      System.err.println("Usage: MQTTPublisher <MqttBrokerUrl> <topic>")
      System.exit(1)
    }

    // Set logging level if log4j not configured (override by adding log4j.properties to classpath)
    if (!Logger.getRootLogger.getAllAppenders.hasMoreElements) {
      Logger.getRootLogger.setLevel(Level.WARN)
    }

    val Seq(brokerUrl, topic) = args.toSeq

    var client: MqttClient = null

    try {
      val persistence = new MemoryPersistence()
      client = new MqttClient("localhost:1883", MqttClient.generateClientId(), persistence)
      client.connect()

      val msgtopic = client.getTopic(topic)
      val msgContent = "test test test"
      val message = new MqttMessage(msgContent.getBytes("utf-8"))

      while (true) {
        try {
          msgtopic.publish(message)
          println(s"Published data. topic: ${msgtopic.getName()}; Message: $message")
        } catch {
          case e: MqttException if e.getReasonCode == MqttException.REASON_CODE_MAX_INFLIGHT =>
            Thread.sleep(10)
            println("Queue is full, wait for to consume data from the message queue")
        }
      }
    } catch {
      case e: MqttException => println("Exception Caught: " + e)
    } finally {
      if (client != null) {
        client.disconnect()
      }
    }
  }
}
The MqttClient() constructor takes a URI.
What you have provided is just a hostname and port number (localhost:1883); it's missing the protocol section, which should be tcp://. That is what the library is expecting, and it gets null back when it parses the URI. (This really should throw a better error.)
You need to change the line to be:
client = new MqttClient("tcp://localhost:1883", MqttClient.generateClientId(), persistence)
I think you are giving the wrong URL, i.e. you are not specifying the protocol over which it has to connect; that is my hunch.
Try changing the URL to:
tcp://localhost:1883
I think it will work! Everything else looks fine to me.
For a working example, see: https://github.com/shiv4nsh/scala-mqtt-client-rasberrypi-starter-kit/blob/master/src/main/scala/com/knoldus/MQTTPublisher.scala
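Putting the fix in context, a minimal standalone publisher might look like this; the broker URL and topic are placeholders, and the Paho calls mirror the question's own code:
import org.eclipse.paho.client.mqttv3.{MqttClient, MqttMessage}
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence

object MinimalPublisher extends App {
  // tcp:// is the scheme MqttConnectOptions.validateURI expects for a plain connection
  val client = new MqttClient("tcp://localhost:1883", MqttClient.generateClientId(), new MemoryPersistence())
  client.connect()
  client.publish("test", new MqttMessage("hello mqtt".getBytes("utf-8")))
  client.disconnect()
}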

Play 2.1 Scala SQLException Connection Timed out waiting for a free available connection

I have been working on this issue for quite a while now and I cannot find a solution...
A web app built with Play Framework 2.2.1 using an H2 db (for dev) and a simple Model package.
I am trying to implement a REST JSON endpoint, and the code works... but only once per server instance.
def createOtherModel() = Action(parse.json) { request =>
  request.body \ "name" match {
    case _: JsUndefined => BadRequest(Json.obj("error" -> true,
      "message" -> "Could not match name =(")).as("application/json")
    case name: JsValue =>
      request.body \ "value" match {
        case _: JsUndefined => BadRequest(Json.obj("error" -> true,
          "message" -> "Could not match value =(")).as("application/json")
        case value: JsValue =>
          if (name.toString contains "content") { // condition reconstructed from the else branch below; an if around this block was evidently lost in transcription
            // this breaks the second time
            val session = ThinkingSession.dummy
            val json = Json.obj(
              "content" -> value,
              "thinkingSession" -> session.id
            )
            Ok(Json.obj("content" -> json)).as("application/json")
          } else {
            BadRequest(Json.obj("error" -> true,
              "message" -> "Name was not content =(")).as("application/json")
          }
      }
  }
}
So basically I read the JSON, echo the "value" value, create a model object and send its id.
The ThinkingSession.dummy function does this:
def all(): List[ThinkingSession] = {
  // Tried explicitly closing connection, no difference
  //val conn = DB.getConnection()
  //try {
  //  DB.withConnection { implicit conn =>
  //    SQL("select * from thinking_session").as(ThinkingSession.DBParser *)
  //  }
  //} finally {
  //  conn.close()
  //}
  DB.withConnection { implicit conn =>
    SQL("select * from thinking_session").as(ThinkingSession.DBParser *)
  }
}

def dummy: ThinkingSession = {
  all().head
}
So this should do a SELECT * FROM thinking_session, create a model object list from the result, and return the first element of the list.
This works fine the first time after server start, but the second time I get a
play.api.Application$$anon$1: Execution exception[[SQLException: Timed out waiting for a free available connection.]]
at play.api.Application$class.handleError(Application.scala:293) ~[play_2.10.jar:2.2.1]
at play.api.DefaultApplication.handleError(Application.scala:399) [play_2.10.jar:2.2.1]
at play.core.server.netty.PlayDefaultUpstreamHandler$$anonfun$2$$anonfun$applyOrElse$3.apply(PlayDefaultUpstreamHandler.scala:261) [play_2.10.jar:2.2.1]
at play.core.server.netty.PlayDefaultUpstreamHandler$$anonfun$2$$anonfun$applyOrElse$3.apply(PlayDefaultUpstreamHandler.scala:261) [play_2.10.jar:2.2.1]
at scala.Option.map(Option.scala:145) [scala-library.jar:na]
at play.core.server.netty.PlayDefaultUpstreamHandler$$anonfun$2.applyOrElse(PlayDefaultUpstreamHandler.scala:261) [play_2.10.jar:2.2.1]
Caused by: java.sql.SQLException: Timed out waiting for a free available connection.
at com.jolbox.bonecp.DefaultConnectionStrategy.getConnectionInternal(DefaultConnectionStrategy.java:88) ~[bonecp.jar:na]
at com.jolbox.bonecp.AbstractConnectionStrategy.getConnection(AbstractConnectionStrategy.java:90) ~[bonecp.jar:na]
at com.jolbox.bonecp.BoneCP.getConnection(BoneCP.java:553) ~[bonecp.jar:na]
at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:131) ~[bonecp.jar:na]
at play.api.db.DBApi$class.getConnection(DB.scala:67) ~[play-jdbc_2.10.jar:2.2.1]
at play.api.db.BoneCPApi.getConnection(DB.scala:276) ~[play-jdbc_2.10.jar:2.2.1]
My application.conf (db section)
db.default.driver=org.h2.Driver
db.default.url="jdbc:h2:file:database/[my_db]"
db.default.logStatements=true
db.default.idleConnectionTestPeriod=5 minutes
db.default.connectionTestStatement="SELECT 1"
db.default.maxConnectionAge=0
db.default.connectionTimeout=10000
Initially the only thing set in my config was the connection, and the error occurred. I added all the other settings while reading up on the issue on the web.
What is interesting is that when I use the H2 in-memory db, it works once after server start and fails after that; when I use the H2 file-system db, it only works once, regardless of the server instance.
Can anyone give me some insight into this issue? I have found some material on BoneCP problems and tried upgrading to 0.8.0-rc1, but nothing changed... I am at a loss =(
Try setting a maxConnectionAge and an idle timeout.
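For example (a sketch only; these are BoneCP keys as documented for Play's JDBC plugin, with arbitrary values):
db.default.idleMaxAge=10 minutes
db.default.maxConnectionAge=30 minutes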
Turns out the error was somewhere else entirely... it was a good ol' stack overflow (a StackOverflowError); I have not seen one in a long time. I tried down-voting my own question, but that's not possible. ^^