Flink with Kafka connection

Environment:
Ubuntu 16.04.1 LTS
Flink 1.1.3
Kafka 0.10.1.1
I'm trying to connect Flink with Kafka (Flink 1.1.3, Kafka 0.10.1.1).
I have already tried all the fixes I could find, but none of them work.
pom.xml:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ux</groupId>
<artifactId>logs</artifactId>
<version>1.3-SNAPSHOT</version>
<packaging>jar</packaging>
<name>Flink Quickstart Job</name>
<url>http://www.myorganization.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<flink.version>1.3-SNAPSHOT</flink.version>
<slf4j.version>1.7.7</slf4j.version>
<log4j.version>1.2.17</log4j.version>
</properties>
<repositories>
<repository>
<id>apache.snapshots</id>
<name>Apache Development Snapshot Repository</name>
<url>https://repository.apache.org/content/repositories/snapshots/</url>
<releases>
<enabled>false</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.10_2.10</artifactId>
<version>1.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
</dependency>
</dependencies>
<profiles>
<profile>
<id>build-jar</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.11</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.11</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<artifactSet>
<excludes combine.self="override"></excludes>
</artifactSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<artifactSet>
<excludes>
<exclude>org.apache.flink:flink-annotations</exclude>
<exclude>org.apache.flink:flink-shaded-hadoop2</exclude>
<exclude>org.apache.flink:flink-shaded-curator-recipes</exclude>
<exclude>org.apache.flink:flink-core</exclude>
<exclude>org.apache.flink:flink-scala_2.11</exclude>
<exclude>org.apache.flink:flink-runtime_2.11</exclude>
<exclude>org.apache.flink:flink-optimizer_2.11</exclude>
<exclude>org.apache.flink:flink-clients_2.11</exclude>
<exclude>org.apache.flink:flink-avro_2.11</exclude>
<exclude>org.apache.flink:flink-examples-batch_2.11</exclude>
<exclude>org.apache.flink:flink-examples-streaming_2.11</exclude>
<exclude>org.apache.flink:flink-streaming-scala_2.11</exclude>
<exclude>org.apache.flink:flink-scala-shell_2.11</exclude>
<exclude>org.apache.flink:flink-python</exclude>
<exclude>org.apache.flink:flink-metrics-core</exclude>
<exclude>org.apache.flink:flink-metrics-jmx</exclude>
<exclude>org.apache.flink:flink-statebackend-rocksdb_2.11</exclude>
<exclude>log4j:log4j</exclude>
<exclude>org.scala-lang:scala-library</exclude>
<exclude>org.scala-lang:scala-compiler</exclude>
<exclude>org.scala-lang:scala-reflect</exclude>
<exclude>com.data-artisans:flakka-actor_*</exclude>
<exclude>com.data-artisans:flakka-remote_*</exclude>
<exclude>com.data-artisans:flakka-slf4j_*</exclude>
<exclude>io.netty:netty-all</exclude>
<exclude>io.netty:netty</exclude>
<exclude>commons-fileupload:commons-fileupload</exclude>
<exclude>org.apache.avro:avro</exclude>
<exclude>commons-collections:commons-collections</exclude>
<exclude>org.codehaus.jackson:jackson-core-asl</exclude>
<exclude>org.codehaus.jackson:jackson-mapper-asl</exclude>
<exclude>com.thoughtworks.paranamer:paranamer</exclude>
<exclude>org.xerial.snappy:snappy-java</exclude>
<exclude>org.apache.commons:commons-compress</exclude>
<exclude>org.tukaani:xz</exclude>
<exclude>com.esotericsoftware.kryo:kryo</exclude>
<exclude>com.esotericsoftware.minlog:minlog</exclude>
<exclude>org.objenesis:objenesis</exclude>
<exclude>com.twitter:chill_*</exclude>
<exclude>com.twitter:chill-java</exclude>
<exclude>commons-lang:commons-lang</exclude>
<exclude>junit:junit</exclude>
<exclude>org.apache.commons:commons-lang3</exclude>
<exclude>org.slf4j:slf4j-api</exclude>
<exclude>org.slf4j:slf4j-log4j12</exclude>
<exclude>log4j:log4j</exclude>
<exclude>org.apache.commons:commons-math</exclude>
<exclude>org.apache.sling:org.apache.sling.commons.json</exclude>
<exclude>commons-logging:commons-logging</exclude>
<exclude>commons-codec:commons-codec</exclude>
<exclude>com.fasterxml.jackson.core:jackson-core</exclude>
<exclude>com.fasterxml.jackson.core:jackson-databind</exclude>
<exclude>com.fasterxml.jackson.core:jackson-annotations</exclude>
<exclude>stax:stax-api</exclude>
<exclude>com.typesafe:config</exclude>
<exclude>org.uncommons.maths:uncommons-maths</exclude>
<exclude>com.github.scopt:scopt_*</exclude>
<exclude>commons-io:commons-io</exclude>
<exclude>commons-cli:commons-cli</exclude>
</excludes>
</artifactSet>
<filters>
<filter>
<artifact>org.apache.flink:*</artifact>
<excludes>
<exclude>org/apache/flink/shaded/com/**</exclude>
<exclude>web-docs/**</exclude>
</excludes>
</filter>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
<createDependencyReducedPom>false</createDependencyReducedPom>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
My Java code:
import java.util.Properties;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
public class App
{
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
Properties properties = new Properties();
properties.setProperty("bootstrap.servers", "localhost:9092");
properties.setProperty("zookeeper.connect", "localhost:2181");
properties.setProperty("group.id", "flink_consumer");
DataStream<String> messageStream = env.addSource(new FlinkKafkaConsumer010<>
("ux_logs", new SimpleStringSchema(), properties));
messageStream.rebalance().map(new MapFunction<String, String>() {
private static final long serialVersionUID = -6867736771747690202L;
public String map(String value) throws Exception {
return "Kafka and Flink says: " + value;
}
}).print();
env.execute();
}
}
But I get this error:
java.lang.NoClassDefFoundError: org/apache/flink/streaming/api/checkpoint/CheckpointedFunction
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at ux.App.main(App.java:27)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:509)
at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:403)
at org.apache.flink.client.program.ClusterClient.run(ClusterClient.java:320)
at org.apache.flink.client.CliFrontend.executeProgram(CliFrontend.java:777)
at org.apache.flink.client.CliFrontend.run(CliFrontend.java:253)
at org.apache.flink.client.CliFrontend.parseParameters(CliFrontend.java:1005)
at org.apache.flink.client.CliFrontend.main(CliFrontend.java:1048)
Caused by: java.lang.ClassNotFoundException: org.apache.flink.streaming.api.checkpoint.CheckpointedFunction
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
Do I need to remove my Kafka and run an older version?
Is my Flink Kafka connector wrong?
I tried to use the plugin setup from this page, but it didn't work. (https://ci.apache.org/projects/flink/flink-docs-release-1.3/dev/linking.html)
Thanks.

You have to downgrade your connector:
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.8_2.10</artifactId>
<version>1.1.2</version>
</dependency>
Here is a similar question: https://stackoverflow.com/a/40037895/1252056

If you want to connect to Kafka 0.10+, you will have to move to Flink 1.2; otherwise, as @streetturte mentioned, you will have to downgrade your Kafka connector.
Have a look at the reference table here:
https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/connectors/kafka.html
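For reference, a pom fragment for the Flink 1.2 route might look like the following sketch (the exact 1.2.x version is an assumption; note the _2.11 Scala suffix, which matches the other Flink dependencies, unlike the _2.10 connector in the question's pom):
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.10_2.11</artifactId>
<version>1.2.0</version>
</dependency>
With this route, flink.version should also be bumped to the same 1.2.x release as the cluster.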

Kafka 0.9 and newer versions don't need ZooKeeper.
You need to upgrade your Flink version or downgrade your Kafka version. For example, with the consumer properties passed on the command line and read via ParameterTool:
--topic myTopic --bootstrap.servers 10.123.34.56:9092 --group.id myGroup
FlinkKafkaConsumer09<CarDB> consumerBinden = new FlinkKafkaConsumer09<CarDB>(
kafkaTopic,
new ClassClassSchema(),
parameterTool.getProperties());
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.9_2.10</artifactId>
<version>${flink.version}</version>
</dependency>
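Putting that together for the stay-on-Flink-1.1.3 route, a minimal sketch of a Kafka 0.9 consumer job (assuming ${flink.version} resolves to 1.1.3 to match the cluster, and reusing the topic and group from the question; note there is no zookeeper.connect property):
import java.util.Properties;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer09;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
public class Kafka09Example {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        Properties properties = new Properties();
        // Kafka 0.9+ consumers talk to the brokers directly; no zookeeper.connect is needed
        properties.setProperty("bootstrap.servers", "localhost:9092");
        properties.setProperty("group.id", "flink_consumer");
        DataStream<String> stream = env.addSource(
                new FlinkKafkaConsumer09<>("ux_logs", new SimpleStringSchema(), properties));
        stream.print();
        env.execute("Kafka 0.9 consumer");
    }
}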

Perhaps the Flink Kafka consumer dependency in your pom file is built against a different Scala version (_2.10) than your other Flink dependencies (_2.11). Make the Scala suffix consistent across all Flink artifacts. Hope it helps.

Related

java.lang.NoClassDefFoundError: scala/Function1

I tried to start a Scala application from a jar.
The following code:
object Starter {
def main(args: Array[String]): Unit = {
args.foreach(path => {
val (minX, maxX, minY, maxY) = MainStage.run(path)
println(minX)
println(maxX)
println(minY)
println(maxY)
})
}
}
produces an error on the same computer (though it works inside IntelliJ IDEA):
$ java -cp foo-1.0-SNAPSHOT.jar ogr.bar.system.Starter
Exception in thread "main" java.lang.NoClassDefFoundError: scala/Function1
at ogr.bar.system.Starter.main(Starter.scala)
Caused by: java.lang.ClassNotFoundException: scala.Function1
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:338)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 1 more
Then I tried to make a simple hello world application:
object Starter {
def main(args: Array[String]): Unit = {
println("hello world")
//args.foreach(path => {
// val (minX, maxX, minY, maxY) = MainStage.run(path)
// println(minX)
// println(maxX)
// println(minY)
// println(maxY)
//})
}
}
And it raises an exception:
$ java -cp foo-1.0-SNAPSHOT.jar ogr.bar.system.Starter
Exception in thread "main" java.lang.NoClassDefFoundError: scala/Predef$
at ogr.bar.system.Starter$.main(Starter.scala:7)
at ogr.bar.system.Starter.main(Starter.scala)
Caused by: java.lang.ClassNotFoundException: scala.Predef$
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:338)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 2 more
The following is a fragment of my pom.xml
<properties>
<javaVersion>1.8</javaVersion>
</properties>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.11.8</version>
</dependency>
<dependency>
<groupId>org.scala-lang.modules</groupId>
<artifactId>scala-xml_2.11</artifactId>
<version>1.0.4</version>
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_2.11</artifactId>
<version>3.0.1</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.3</version>
<configuration>
<source>${javaVersion}</source>
<target>${javaVersion}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.15.2</version>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
<configuration>
<sourceDir>src/main/scala</sourceDir>
<testSourceDir>src/test/scala</testSourceDir>
</configuration>
</plugin>
</plugins>
</build>
What should I do to make my app work?
The problem is that you probably built a thin jar of your application. It does not contain its dependencies, such as scala-library; hence the error
java.lang.NoClassDefFoundError: scala/Predef$
The fix is to build a fat jar in Maven; a sketch follows.
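One way is the maven-shade-plugin, which bundles all compile-scope dependencies (including scala-library) into the jar. A minimal sketch; the mainClass is taken from the question's stack trace:
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>ogr.bar.system.Starter</mainClass>
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
After mvn package, the shaded jar runs with a plain java -jar foo-1.0-SNAPSHOT.jar, with no Scala installation on the classpath.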

Spark + Cassandra - Just one record getting inserted

I am trying to get data from Hive and insert it into Cassandra using Spark. Very surprisingly, I see only one record inserted into Cassandra despite the DataFrame having 4000+ records.
import org.apache.spark.sql.SparkSession
import com.datastax.spark.connector.cql.CassandraConnector
import com.typesafe.config.ConfigFactory
import org.apache.spark.sql.cassandra._
import com.datastax.spark.connector._
import java.math.BigDecimal
case class sales(wk_nbr: Int,
store_nbr: Int,
sales_amt: BigDecimal)
object HiveConnector extends App {
val cassandraConfig = ConfigFactory.load("cassandra.conf")
println("cassandraConfig loaded = " + cassandraConfig)
val spark = SparkSession.builder().appName("HiveConnector")
.config("spark.sql.warehouse.dir", "file:/data/raw/historical/tables")
.config("hive.exec.dynamic.partition.mode", "nonstrict")
.config("mapred.input.dir.recursive","true")
.config("mapreduce.input.fileinputformat.input.dir.recursive","true")
.config("spark.cassandra.connection.host", "***********")
.config("spark.cassandra.auth.username", "*****un****")
.config("spark.cassandra.auth.password", "******pw*****")
.enableHiveSupport()
.master("yarn").getOrCreate()
import spark.implicits._
val query = "select wk_nbr,store_nbr,sum(sales_amt) as sales_amt from scan where visit_dt between '2018-05-08' and '2018-05-11' group by wm_yr_wk_nbr,store_nbr"
val resDF = spark.sql(query)
resDF.persist()
println("RESDF size = " + resDF.count()) //prints the record count
println("RESDF sample rec = " + resDF.show(2)) //see 2 records in the log
CassandraConnector(spark.sparkContext).withSessionDo { spark =>
spark.execute("CREATE TABLE raaspoc.sales_data (wk_nbr INT PRIMARY KEY, store_nbr INT, sales_amt DOUBLE)")
}
/*
None of the following saveToCassandra variants work - each inserts only one record instead of all
*/
resDF.map { x => sales.apply(x.get(0).asInstanceOf[Int], x.get(1).asInstanceOf[Int],x.get(2).asInstanceOf[BigDecimal])
}.rdd.saveToCassandra("raaspoc","sales_data") // Not working
resDF.rdd.saveToCassandra("raaspoc","sales_data") // Not working
resDF.write.format("org.apache.spark.sql.cassandra").options(Map("table" -> "sales_data", "keyspace" -> "raaspoc")).save() // Not working
resDF.write.cassandraFormat("sales_data","raaspoc").save() // Not working
/*
When the data frame is written to HDFS, i see all 4000+ records in the sales.csv
*/
resDF.write.format("csv").save("hdfs:/dev/test/sales.csv")
println("RESDF size after write to cassandra = " + resDF.count()) //prints 4732 (record count)
spark.close()
}
I don't see any errors in the log, and spark-submit completes without any errors, but only one record is inserted. The following is my pom.xml:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.test.raas</groupId>
<artifactId>RaasDataPipelines</artifactId>
<version>1.0-SNAPSHOT</version>
<inceptionYear>2008</inceptionYear>
<properties>
<scala.version>2.11.0</scala.version>
<spark.version>2.2.0</spark.version>
</properties>
<repositories>
<repository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</pluginRepository>
</pluginRepositories>
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>${spark.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>${spark.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.specs</groupId>
<artifactId>specs</artifactId>
<version>1.2.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.datastax.spark</groupId>
<artifactId>spark-cassandra-connector_2.11</artifactId>
<version>2.0.7</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.11</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_2.10</artifactId>
<version>1.0.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
<version>3.5.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-mapping</artifactId>
<version>3.5.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-extras</artifactId>
<version>3.5.0</version>
<scope>compile</scope>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
<args>
<arg>-target:jvm-1.5</arg>
</args>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.3</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>
shade
</goal>
</goals>
</execution>
</executions>
<configuration>
<minimizeJar>true</minimizeJar>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>fat</shadedClassifierName>
<relocations>
<relocation>
<pattern>com.google</pattern>
<shadedPattern>shaded.guava</shadedPattern>
<includes>
<include>com.google.**</include>
</includes>
<excludes>
<exclude>com.google.common.base.Optional</exclude>
<exclude>com.google.common.base.Absent</exclude>
<exclude>com.google.common.base.Present</exclude>
</excludes>
</relocation>
</relocations>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</plugin>
</plugins>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
</plugin>
</plugins>
</reporting>
</project>
Is there a possibility that your primary key (wk_nbr) is the same on all the 4000+ rows? Cassandra inserts are upserts: every write with an existing primary key overwrites the previous row, so distinct rows need distinct keys.
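If so, a sketch of a fix is to widen the primary key so each (week, store) pair gets its own row (this assumes every (wk_nbr, store_nbr) pair is unique in the aggregated result, which the GROUP BY suggests):
import com.datastax.spark.connector.cql.CassandraConnector

// With wk_nbr as the sole PRIMARY KEY, all rows sharing a week number
// collapse into one. A composite key keeps one row per (week, store).
CassandraConnector(spark.sparkContext).withSessionDo { session =>
  session.execute(
    """CREATE TABLE IF NOT EXISTS raaspoc.sales_data (
      |  wk_nbr INT,
      |  store_nbr INT,
      |  sales_amt DOUBLE,
      |  PRIMARY KEY (wk_nbr, store_nbr)
      |)""".stripMargin)
}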

ArrayOutOfBoundIndex on xls Rules

[screenshot: ArrayIndexOutOfBounds error]
I have an xls rule [1 & 2]; when I run it as a JUnit test, it works fine, but when I run it as a Maven test, an ArrayIndexOutOfBounds error [3] appears, and I couldn't find any explanation. Any hint?
The pom.xml:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.ibm.cio.cloud.cost</groupId>
<artifactId>bluecost</artifactId>
<packaging>jar</packaging>
<version>1.0-SNAPSHOT</version>
<name>bluecost</name>
<url>http://maven.apache.org</url>
<profiles>
<profile>
<id>dev</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<build.profile.id>dev</build.profile.id>
</properties>
</profile>
<profile>
<id>test</id>
<properties>
<build.profile.id>test</build.profile.id>
</properties>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<phase>install</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/lib</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>prod</id>
<properties>
<build.profile.id>prod</build.profile.id>
</properties>
</profile>
</profiles>
<properties>
<jdk.version>1.8</jdk.version>
<spring.version>4.3.9.RELEASE</spring.version>
<spring.batch.version>3.0.8.RELEASE</spring.batch.version>
<mysql.driver.version>5.1.44</mysql.driver.version>
<db2.driver.version>10.1.0</db2.driver.version>
</properties>
<dependencies>
<!-- Spring Core -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<!-- Spring Core -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
<!-- Spring Test -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
</dependency>
<!-- Spring Batch dependencies -->
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-core</artifactId>
<version>${spring.batch.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-infrastructure</artifactId>
<version>${spring.batch.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-test</artifactId>
<version>${spring.batch.version}</version>
<scope>test</scope>
</dependency>
<!-- MySQL database driver -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.driver.version}</version>
</dependency>
<dependency>
<groupId>com.ibm.db2.jcc</groupId>
<artifactId>db2jcc_license_cisuz</artifactId>
<version>${db2.driver.version}</version>
</dependency>
<dependency>
<groupId>com.ibm.db2.jcc</groupId>
<artifactId>db2jcc</artifactId>
<version>${db2.driver.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<version>1.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>2.4.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-pool</groupId>
<artifactId>commons-pool</artifactId>
<version>1.6</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.drools</groupId>
<artifactId>drools-spring</artifactId>
<version>5.4.0.Final</version>
</dependency>
<dependency>
<groupId>org.eclipse.jdt</groupId>
<artifactId>org.eclipse.jdt.core</artifactId>
<version>3.7.1</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.14</version>
</dependency>
</dependencies>
<build>
<finalName>bluecost-batch</finalName>
<filters>
<filter>profiles/${build.profile.id}/config.properties</filter>
</filters>
<resources>
<resource>
<filtering>true</filtering>
<directory>src/main/resources</directory>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<version>2.9</version>
<configuration>
<downloadSources>true</downloadSources>
<downloadJavadocs>false</downloadJavadocs>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>${jdk.version}</source>
<target>${jdk.version}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.20.1</version>
<configuration>
<excludes>
<exclude></exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<organization>
<name>IBM Corporation</name>
</organization>
</project>
And the JUnit test I created:
@Before
public void setUp() {
final Logger logger = Logger.getLogger("setup()");
logger.info("-----------------------------");
}
@Test
public void testDefaultValuesbyChannelName(){
BlueReport testBlueReport = new BlueReport();
// Arbitrary value, this should be "SOFTLYER"
testBlueReport.setAccount_id("29302"); // Must exist
testBlueReport.setOrg_co("SOFTLYER");
SSCData testSSCdataOutput = new SSCData();
testSSCdataOutput.setORIG_LOC_CD(null); // values will be filled by the rule
testSSCdataOutput.setSERVICE_CD(null); // this too will be filled by the rule
// values will be filled by the rules
String expectedOrigLocCd = "SLR";
//SSCDataOutput.setORIG_LOC_CD("SLR");
String expectedServiceTypCd = "SLR";
//SSCDataOutput.setSERVICE_TYP_CD("SLR");
String expectedServiceCd = "SLIC";
//SSCDataOutput.setSERVICE_CD("SLIC");
String expectedServiceGroupId = "BASE";
//SSCDataOutput.setSERVICE_GROUP_ID("BASE");
String expectedRateClasCd = "OGS";
//SSCDataOutput.setRATECLAS_CD("OGS");
String expectedLocalField1 = "INVCE ID";
//SSCDataOutput.setLOCAL_FIELD_1("INVCE ID");
//SSCDataOutput.setLOCAL_FIELD_2(null);//Not anymore at the Invoice level. can be Nulled.
String expectedLocalField3 = "ACCT ID";
//SSCDataOutput.setLOCAL_FIELD_3("ACCT ID");
//SSCDataOutput.setLOCAL_FIELD_5(null);
//SSCDataOutput.setLOCAL_FIELD_6(null);
final Logger logger = Logger.getLogger("testDefaultValuesbyChannelName");
dtSession.execute( Arrays.asList(new Object[] { testSSCdataOutput, testBlueReport }) );
logger.info("Rules that activated: " + testSSCdataOutput.getRuleAudit());
dtSession.setGlobal("logger", logger);
logger.info("Expecting ORIG_LOC_CD to be SLR");
assertTrue("I expected SLR", testSSCdataOutput.getORIG_LOC_CD().contains(expectedOrigLocCd));
logger.info("Expecting SERVICE_CD to be SLIC");
assertTrue("I expected SLIC", testSSCdataOutput.getSERVICE_CD().contains(expectedServiceCd));
logger.info("Expecting SERVICE_TYPE_CD to be SLR");
assertTrue("I expected SLR", testSSCdataOutput.getSERVICE_TYP_CD().contains(expectedServiceTypCd));
logger.info("Expecting SERVICE_GROUP_ID to be BASE");
assertTrue("I expected BASE", testSSCdataOutput.getSERVICE_GROUP_ID().contains(expectedServiceGroupId));
logger.info("Expecting RATECLAS_CD to be OGS");
assertTrue("I expected OGS", testSSCdataOutput.getRATECLAS_CD().contains(expectedRateClasCd));
logger.info("Expecting LOCAL_FIELD1 to be INVCE ID");
assertTrue("I expected INVCE ID", testSSCdataOutput.getLOCAL_FIELD_1().contains(expectedLocalField1));
logger.info("Expecting LOCAL_FIELD3 to be ACCT ID");
assertTrue("I expected ACCCT ID", testSSCdataOutput.getLOCAL_FIELD_3().contains(expectedLocalField3));
And this is the error on Run As ... 'Maven test':
Running com.ibm.cio.cloud.cost.unit.DroolsDefaultValuesByChannelNameTest
[ERROR] TestContextManager - Caught exception while allowing TestExecutionListener [org.springframework.test.context.support.DependencyInjectionTestExecutionListener@1de6f05] to prepare test instance [com.ibm.cio.cloud.cost.unit.DroolsDefaultValuesByChannelNameTest@114e780] <java.lang.IllegalStateException: Failed to load ApplicationContext>java.lang.IllegalStateException: Failed to load ApplicationContext
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContext(DefaultCacheAwareContextLoaderDelegate.java:124)
at org.springframework.test.context.support.DefaultTestContext.getApplicationContext(DefaultTestContext.java:83)
at org.springframework.test.context.support.DependencyInjectionTestExecutionListener.injectDependencies(DependencyInjectionTestExecutionListener.java:117)
at org.springframework.test.context.support.DependencyInjectionTestExecutionListener.prepareTestInstance(DependencyInjectionTestExecutionListener.java:83)
at org.springframework.test.context.TestContextManager.prepareTestInstance(TestContextManager.java:230)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.createTest(SpringJUnit4ClassRunner.java:228)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner$1.runReflectiveCall(SpringJUnit4ClassRunner.java:287)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.methodBlock(SpringJUnit4ClassRunner.java:289)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.runChild(SpringJUnit4ClassRunner.java:247)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.runChild(SpringJUnit4ClassRunner.java:94)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.springframework.test.context.junit4.statements.RunBeforeTestClassCallbacks.evaluate(RunBeforeTestClassCallbacks.java:61)
at org.springframework.test.context.junit4.statements.RunAfterTestClassCallbacks.evaluate(RunAfterTestClassCallbacks.java:70)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.run(SpringJUnit4ClassRunner.java:191)
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'dtBase': Invocation of init method failed; nested exception is java.lang.ArrayIndexOutOfBoundsException: 38851
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1628)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:555)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:483)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:306)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:230)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:302)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:197)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:742)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:760)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:482)
at org.springframework.test.context.support.AbstractGenericContextLoader.loadContext(AbstractGenericContextLoader.java:128)
at org.springframework.test.context.support.AbstractGenericContextLoader.loadContext(AbstractGenericContextLoader.java:60)
at org.springframework.test.context.support.AbstractDelegatingSmartContextLoader.delegateLoading(AbstractDelegatingSmartContextLoader.java:108)
at org.springframework.test.context.support.AbstractDelegatingSmartContextLoader.loadContext(AbstractDelegatingSmartContextLoader.java:251)
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContextInternal(DefaultCacheAwareContextLoaderDelegate.java:98)
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContext(DefaultCacheAwareContextLoaderDelegate.java:116)
... 27 more
Caused by: java.lang.ArrayIndexOutOfBoundsException: 38851
at jxl.read.biff.Record.<init>(Record.java:79)
at jxl.read.biff.File.next(File.java:181)
at jxl.read.biff.WorkbookParser.parse(WorkbookParser.java:569)
at jxl.Workbook.getWorkbook(Workbook.java:271)
at org.drools.decisiontable.parser.xls.ExcelParser.parseFile(ExcelParser.java:77)
at org.drools.decisiontable.SpreadsheetCompiler.compile(SpreadsheetCompiler.java:89)
at org.drools.decisiontable.SpreadsheetCompiler.compile(SpreadsheetCompiler.java:68)
at org.drools.decisiontable.DecisionTableProviderImpl.compileStream(DecisionTableProviderImpl.java:37)
at org.drools.decisiontable.DecisionTableProviderImpl.loadFromInputStream(DecisionTableProviderImpl.java:20)
at org.drools.compiler.DecisionTableFactory.loadFromInputStream(DecisionTableFactory.java:15)
at org.drools.compiler.PackageBuilder.decisionTableToPackageDescr(PackageBuilder.java:454)
at org.drools.compiler.PackageBuilder.addPackageFromDecisionTable(PackageBuilder.java:448)
at org.drools.compiler.PackageBuilder.addKnowledgeResource(PackageBuilder.java:690)
at org.drools.builder.impl.KnowledgeBuilderImpl.add(KnowledgeBuilderImpl.java:45)
at org.drools.builder.impl.KnowledgeBuilderImpl.add(KnowledgeBuilderImpl.java:34)
at org.drools.container.spring.beans.KnowledgeBaseBeanFactory.afterPropertiesSet(KnowledgeBaseBeanFactory.java:110)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1687)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1624)
... 42 more
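A likely culprit, given that the JUnit run works in the IDE but the Maven run fails inside jxl while parsing the workbook: the pom enables Maven resource filtering on all of src/main/resources (<filtering>true</filtering>), and filtering is known to corrupt binary files such as .xls spreadsheets, which jxl then fails to parse. A sketch of a fix is to exclude the spreadsheet from filtering (assuming the .xls lives under src/main/resources):
<resources>
<resource>
<filtering>true</filtering>
<directory>src/main/resources</directory>
<excludes>
<exclude>**/*.xls</exclude>
</excludes>
</resource>
<resource>
<filtering>false</filtering>
<directory>src/main/resources</directory>
<includes>
<include>**/*.xls</include>
</includes>
</resource>
</resources>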

java.lang.ClassNotFoundException: java.time.temporal.TemporalField when running Spark code

This question is related to the previous thread. I am extracting sessions from the stream of users' click events. For validation purposes, I am always waiting on a timeout of 2 minutes, and if the user was inactive during these 2 minutes (no click events), then I assume that the session was finished. These finished sessions should be saved in finishedSessions.
The code below produces the error shown further down.
settings = ssc.sparkContext.broadcast(Map(
"metadataBrokerList_OutputQueue" -> metadataBrokerList_OutputQueue,
"topicOutput" -> topicOutput))
val spec = StateSpec.function(Utils.updateState _).timeout(Minutes(2))
val latestSessionInfo = membersSessions.map[(String, (Long, Long, Long, List[String]))](a => {
//transform to (member_id, (time, time, counter, events within session))
(a._1, (a._2._1, a._2._2, 1, a._2._3))
})
val finishedSessions = latestSessionInfo.mapWithState(spec).filter(_.isDefined)
finishedSessions.foreachRDD( rdd => {
rdd.foreachPartition{iter =>
val producer = Utils.createProducer(settings.value("metadataBrokerList_OutputQueue"))
iter.foreach { msg =>
val jsonString = msg.get._4.toString()
val streamEvent = new ProducerRecord[String, String](settings.value("topicOutput"), null, jsonString)
producer.send(streamEvent)
}
producer.close()
}
})
This is my serializable object Utils:
object Utils extends Serializable {
def updateState(key: String,
value: Option[(Long, Long, Long, List[String])],
state: State[(Long, Long, Long, List[String])]): Option[(Long, Long, Long, List[String])] = {
def reduce(first: (Long, Long, Long, List[String]), second: (Long, Long, Long, List[String])) = {
(Math.min(first._1, second._1), Math.max(first._2, second._2), first._3 + second._3, first._4 ++ second._4)
}
value match {
case Some(currentValue) =>
val result = state
.getOption()
.map(currentState => reduce(currentState, currentValue))
.getOrElse(currentValue)
state.update(result)
None
case _ if state.isTimingOut() => state.getOption()
}
}
def createProducer(metadataBrokerList: String): KafkaProducer[String, String] = {
val kafkaProps = new Properties()
println("metadataBrokerList: " + metadataBrokerList)
kafkaProps.put("bootstrap.servers", metadataBrokerList)
// This is mandatory, even though we don't send key
kafkaProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
kafkaProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
kafkaProps.put("acks", "1")
// how many times to retry when produce request fails?
kafkaProps.put("retries", "3")
// This is an upper limit of how many messages Kafka Producer will attempt to batch before sending (bytes)
kafkaProps.put("batch.size", "5")
// How long will the producer wait before sending in order to allow more messages to get accumulated in the same batch
kafkaProps.put("linger.ms", "5")
new KafkaProducer[String, String](kafkaProps)
}
}
EDIT:
After defining settings as a local broadcast variable (val settings = ... instead of settings), the error message disappeared. However, a new error appeared, related to Caused by: java.lang.ClassNotFoundException: java.time.temporal.TemporalField. What does it mean?
Driver stacktrace:
org.apache.spark.SparkException: Job aborted due to stage failure: Task 6 in stage 3.0 failed 4 times, most recent failure: Lost task 6.3 in stage 3.0 (TID 34, ip-172-20-233-19.eu-west-1.compute.internal): java.lang.NoClassDefFoundError: java/time/temporal/TemporalField
at org.testconsumer.kafka.KafkaEventsConsumer$$anonfun$run$2$$anonfun$apply$1.apply(KafkaEventsConsumer.scala:163)
at org.testconsumer.kafka.KafkaEventsConsumer$$anonfun$run$2$$anonfun$apply$1.apply(KafkaEventsConsumer.scala:160)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$33.apply(RDD.scala:920)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$33.apply(RDD.scala:920)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
at org.apache.spark.scheduler.Task.run(Task.scala:89)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ClassNotFoundException: java.time.temporal.TemporalField
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
... 12 more
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1431)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1419)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1418)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1418)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:799)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:799)
at scala.Option.foreach(Option.scala:236)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:799)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1640)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1599)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1588)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:620)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1832)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1845)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1858)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1929)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:920)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:918)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
at org.apache.spark.rdd.RDD.foreachPartition(RDD.scala:918)
at org.testconsumer.kafka.KafkaEventsConsumer$$anonfun$run$2.apply(KafkaEventsConsumer.scala:160)
at org.testconsumer.kafka.KafkaEventsConsumer$$anonfun$run$2.apply(KafkaEventsConsumer.scala:159)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:661)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:661)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:49)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:49)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:49)
at scala.util.Try$.apply(Try.scala:161)
at org.apache.spark.streaming.scheduler.Job.run(Job.scala:39)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply$mcV$sp(JobScheduler.scala:224)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:224)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:224)
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:223)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.NoClassDefFoundError: java/time/temporal/TemporalField
at org.testconsumer.kafka.KafkaEventsConsumer$$anonfun$run$2$$anonfun$apply$1.apply(KafkaEventsConsumer.scala:163)
at org.testconsumer.kafka.KafkaEventsConsumer$$anonfun$run$2$$anonfun$apply$1.apply(KafkaEventsConsumer.scala:160)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$33.apply(RDD.scala:920)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$$anonfun$apply$33.apply(RDD.scala:920)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
at org.apache.spark.scheduler.Task.run(Task.scala:89)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
... 3 more
Caused by: java.lang.ClassNotFoundException: java.time.temporal.TemporalField
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
... 12 more
Exception in thread "streaming-job-executor-0" java.lang.Error: java.lang.InterruptedException
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1151)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.InterruptedException
at java.lang.Object.wait(Native Method)
at java.lang.Object.wait(Object.java:503)
at org.apache.spark.scheduler.JobWaiter.awaitResult(JobWaiter.scala:73)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:612)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1832)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1845)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1858)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1929)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:920)
at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:918)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
at org.apache.spark.rdd.RDD.foreachPartition(RDD.scala:918)
at org.testconsumer.kafka.KafkaEventsConsumer$$anonfun$run$2.apply(KafkaEventsConsumer.scala:160)
at org.testconsumer.kafka.KafkaEventsConsumer$$anonfun$run$2.apply(KafkaEventsConsumer.scala:159)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:661)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:661)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:49)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:49)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:49)
at scala.util.Try$.apply(Try.scala:161)
at org.apache.spark.streaming.scheduler.Job.run(Job.scala:39)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply$mcV$sp(JobScheduler.scala:224)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:224)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:224)
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:223)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
... 2 more
16/11/28 18:42:08 ERROR LiveListenerBus: SparkListenerBus has already stopped! Dropping event SparkListenerStageCompleted(org.apache.spark.scheduler.StageInfo@764e333a)
16/11/28 18:42:08 ERROR LiveListenerBus: SparkListenerBus has already stopped! Dropping event SparkListenerJobEnd(1,1480354928309,JobFailed(org.apache.spark.SparkException: Job 1 cancelled because SparkContext was shut down))
It seems to be related to the version of Java. I have the following version on the cluster, but then how can I deploy my code, and how do I avoid the error with java.time.temporal.TemporalField?
java version "1.7.0_111"
EDIT #2 (POM.xml):
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.test</groupId>
<artifactId>streaming_test</artifactId>
<version>1.0-SNAPSHOT</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.7</java.version>
<scala.version>2.10.6</scala.version>
<spark.version>1.6.2</spark.version>
<jackson.version>2.8.3</jackson.version>
</properties>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka_2.10</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.10</artifactId>
<version>${spark.version}</version>
</dependency>
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-core_2.10</artifactId>-->
<!--<version>${spark.version}</version>-->
<!--</dependency>-->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.10</artifactId>
<version>${spark.version}</version>
</dependency>
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-mllib_2.10</artifactId>-->
<!--<version>${spark.version}</version>-->
<!--</dependency>-->
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-scala_2.10</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>net.debasishg</groupId>
<artifactId>redisclient_2.10</artifactId>
<version>3.3</version>
</dependency>
<dependency>
<groupId>org.sedis</groupId>
<artifactId>sedis_2.10</artifactId>
<version>1.2.2</version>
</dependency>
<dependency>
<groupId>com.lambdaworks</groupId>
<artifactId>jacks_2.10</artifactId>
<version>2.3.3</version>
</dependency>
<dependency>
<groupId>com.typesafe</groupId>
<artifactId>config</artifactId>
<version>1.3.1</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>2.6.0</version>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.11.53</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.2</version>
</dependency>
<dependency>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
<version>0.9.4</version>
</dependency>
<!--<dependency>-->
<!--<groupId>com.github.nscala-time</groupId>-->
<!--<artifactId>nscala-time_2.10</artifactId>-->
<!--<version>2.12.0</version>-->
<!--</dependency>-->
<dependency>
<groupId>com.databricks</groupId>
<artifactId>spark-csv_2.10</artifactId>
<version>1.5.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<id>build-a</id>
<configuration>
<archive>
<manifest>
<mainClass>org.test.consumer.SessionizerRunner</mainClass>
</manifest>
</archive>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<finalName>myTest</finalName>
</configuration>
<phase>package</phase>
<goals>
<goal>assembly</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Configure maven-compiler-plugin to use the desired Java version -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
</configuration>
</plugin>
<!-- Use build-helper-maven-plugin to add Scala source and test source directories -->
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>1.10</version>
<executions>
<execution>
<id>add-source</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>src/main/scala</source>
</sources>
</configuration>
</execution>
<execution>
<id>add-test-source</id>
<phase>generate-test-sources</phase>
<goals>
<goal>add-test-source</goal>
</goals>
<configuration>
<sources>
<source>src/test/scala</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<!-- Use scala-maven-plugin for Scala support -->
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>3.2.2</version>
<executions>
<execution>
<goals>
<!-- Need to specify this explicitly, otherwise plugin won't be called when doing e.g. mvn compile -->
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
It seems to be related to the Java version. I have the following version on the cluster, but then how can I deploy my code, and how do I avoid the error with java.time.temporal.TemporalField?
java.time was added in Java 8. Search for any uses of java.time. in your code. The current Spark version requires only Java 7, and so does Kafka, so they shouldn't be the problem (but check any other libraries you use).
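For illustration only, assuming the offending code parses timestamps with java.time (the object name and date pattern below are hypothetical), the Java 7-compatible replacement is java.text.SimpleDateFormat:
import java.text.SimpleDateFormat
import java.util.Date

object TimestampParser {
// Java 8+ only: java.time.LocalDateTime / java.time.format.DateTimeFormatter
// Java 7-compatible alternative: java.text.SimpleDateFormat
// (SimpleDateFormat is not thread-safe, so create one per call or per thread)
def parse(raw: String): Date =
new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(raw)
}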
Caused by: java.io.NotSerializableException:
org.testconsumer.kafka.KafkaEventsConsumer
Your error trace clearly shows what the problem is: KafkaProducer is not serializable, so you have to annotate the Kafka fields in your classes with @transient, or you can get help from this
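A minimal sketch of that fix, assuming the class holds a KafkaProducer field (the constructor argument and property values below are illustrative): a @transient lazy val is excluded from serialization and rebuilt lazily on each worker after deserialization.
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

class KafkaEventsConsumer(brokers: String) extends Serializable {
// KafkaProducer is not serializable: keep it out of the serialized
// closure and recreate it on first use after deserialization
@transient lazy val producer: KafkaProducer[String, String] = {
val props = new Properties()
props.put("bootstrap.servers", brokers)
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
new KafkaProducer[String, String](props)
}

def send(topic: String, value: String): Unit =
producer.send(new ProducerRecord[String, String](topic, value))
}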

Scala akka - not able to run program from jar-with-dependencies built by Maven

I'm new to Scala and Akka. I tried to set up a simple Akka remote project with a client and a server. I'm using Eclipse with the Maven Scala plugin. Everything works fine if I run this project from the IDE: the client is able to connect to the server. Unfortunately, when I build my project into a jar-with-dependencies using the maven-assembly-plugin, I'm not able to run it from the command line. I get an error like this:
F:\Projects\sag-project-scala\sag-scala\target>scala sag-scala-1.0-jar-with-dependencies.jar server
com.typesafe.config.ConfigException$Missing: No configuration setting found for key 'akka.loggers'
at com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:124)
at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:145)
at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:151)
at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:159)
at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:164)
at com.typesafe.config.impl.SimpleConfig.getList(SimpleConfig.java:212)
at com.typesafe.config.impl.SimpleConfig.getHomogeneousUnwrappedList(SimpleConfig.java:271)
at com.typesafe.config.impl.SimpleConfig.getStringList(SimpleConfig.java:329)
at akka.actor.ActorSystem$Settings.<init>(ActorSystem.scala:179)
at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:504)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:141)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:118)
at com.sag.remote.ServerObject$.run(Server.scala:28)
at com.sag.remote.ServerObject$.main(Server.scala:25)
at com.sag.remote.ServerObject.main(Server.scala)
at com.sag.main.ScalaRunner.main(ScalaRunner.java:23)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at scala.tools.nsc.util.ScalaClassLoader$$anonfun$run$1.apply(ScalaClassLoader.scala:71)
at scala.tools.nsc.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
at scala.tools.nsc.util.ScalaClassLoader$URLClassLoader.asContext(ScalaClassLoader.scala:139)
at scala.tools.nsc.util.ScalaClassLoader$class.run(ScalaClassLoader.scala:71)
at scala.tools.nsc.util.ScalaClassLoader$URLClassLoader.run(ScalaClassLoader.scala:139)
at scala.tools.nsc.CommonRunner$class.run(ObjectRunner.scala:28)
at scala.tools.nsc.JarRunner$.run(MainGenericRunner.scala:16)
at scala.tools.nsc.CommonRunner$class.runAndCatch(ObjectRunner.scala:35)
at scala.tools.nsc.JarRunner$.runJar(MainGenericRunner.scala:28)
at scala.tools.nsc.MainGenericRunner.runTarget$1(MainGenericRunner.scala:78)
at scala.tools.nsc.MainGenericRunner.process(MainGenericRunner.scala:96)
at scala.tools.nsc.MainGenericRunner$.main(MainGenericRunner.scala:105)
at scala.tools.nsc.MainGenericRunner.main(MainGenericRunner.scala)
If I understand the Akka documentation correctly, custom *.conf files override the default configuration, and Akka internally merges those files into a single effective configuration.
I'm pretty sure I'm missing something very small, but after two days of Googling I gave up. That's why I'm asking you for help.
It seems like it can't find the proper configuration from application.conf, but then why does everything work fine from the IDE?
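To show what I expect the merge to produce, here is a minimal probe (a sketch only, assuming the conf files listed below are on the classpath):
import com.typesafe.config.ConfigFactory

object ConfigProbe extends App {
// load("server") reads server.conf, which includes common.conf and application.conf,
// all layered on top of the reference.conf defaults bundled inside each Akka jar
val config = ConfigFactory.load("server")
println(config.getInt("akka.remote.netty.tcp.port")) // expect 6969
println(config.getStringList("akka.loggers")) // supplied by Akka's reference.conf defaults
}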
Here are files which were used to build my example:
ScalaRunner.java
package com.sag.main;
import com.sag.remote.ClientObject;
import com.sag.remote.ServerObject;
/**
* Runner class to simply run jar file from command line.
* @author ddr
*
*/
public class ScalaRunner {
private static String SERVER_MODE = "server";
private static String CLIENT_MODE = "client";
public static void main(String[] args) {
String mode = SERVER_MODE;
if(args != null && args.length > 0){
mode = args[0];
}
if(CLIENT_MODE.equals(mode)){
ClientObject.main(args);
}
else if (SERVER_MODE.equals(mode)){
ServerObject.main(args);
}
}
}
Client.scala
package com.sag.remote
import akka.actor._
import akka.actor.ActorDSL._
import com.typesafe.config.ConfigFactory
class Client extends Actor {
def receive = {
case msg: String => println("joe received " + msg + " from " + sender)
case _ => println("Received unknown msg ")
}
}
object ClientObject {
def main(args: Array[String]): Unit = run()
def run() = {
println("STARTING CLIENT")
implicit val client = ActorSystem("Client", ConfigFactory.load("client"))
val server = client.actorFor("akka.tcp://server@127.0.0.1:6969/user/server")
println("That 's remote server:" + server)
server ! "Hello"
}
}
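As a side note, actorFor is deprecated since Akka 2.2, so with the Akka 2.3.2 used here the lookup can be rewritten with actorSelection; a minimal sketch (not part of the original code):
import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory

object ClientWithSelection {
def run(): Unit = {
val client = ActorSystem("Client", ConfigFactory.load("client"))
// actorSelection resolves the remote path on each send instead of
// returning a (possibly dead) ActorRef like the deprecated actorFor
val server = client.actorSelection("akka.tcp://server@127.0.0.1:6969/user/server")
server ! "Hello"
}
}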
Server.scala
package com.sag.remote
import akka.actor.Actor
import akka.actor.ActorSystem
import akka.actor.Props
import com.typesafe.config.Config
import scala.concurrent.duration.Duration
import java.util.concurrent.TimeUnit
import akka.actor.Extension
import akka.actor.ExtensionIdProvider
import akka.actor.ExtensionId
import akka.actor.ExtendedActorSystem
import akka.actor.ActorSystem.Settings
import com.typesafe.config.ConfigFactory
class Server extends Actor {
def receive = {
case msg: String => println("joe received " + msg + " from " + sender)
case _ => println("Received unknown msg ")
}
}
object ServerObject {
def main(args: Array[String]): Unit = run()
def run() = {
val server = ActorSystem("server", ConfigFactory.load("server"))
val serverActor = server.actorOf(Props[Server], name = "server")
println(serverActor.path)
println()
println("Server ready")
}
}
common.conf
include "application.conf"
akka{
stdout-loglevel = "DEBUG"
loglevel = "DEBUG"
actor {
provider = "akka.remote.RemoteActorRefProvider"
}
remote {
enabled-transports = ["akka.remote.netty.tcp"]
netty.tcp {
hostname = "127.0.0.1"
}
}
}
client.conf
include "common"
akka {
remote.netty.tcp.port = 2552
}
server.conf
include "common"
akka {
remote.netty.tcp.port = 6969
}
pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>sag-scala</groupId>
<artifactId>sag-scala</artifactId>
<version>1.0</version>
<inceptionYear>2008</inceptionYear>
<properties>
<scala.version>2.10.1</scala.version>
</properties>
<repositories>
<repository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</repository>
<repository>
<id>akka-snapshots</id>
<snapshots>
<enabled>true</enabled>
</snapshots>
<url>http://repo.akka.io/snapshots/</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</pluginRepository>
</pluginRepositories>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_2.10</artifactId>
<version>2.3.2</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_2.10</artifactId>
<version>2.3.2</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.specs</groupId>
<artifactId>specs</artifactId>
<version>1.2.5</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<configuration>
<downloadSources>true</downloadSources>
<buildcommands>
<buildcommand>ch.epfl.lamp.sdt.core.scalabuilder</buildcommand>
</buildcommands>
<additionalProjectnatures>
<projectnature>ch.epfl.lamp.sdt.core.scalanature</projectnature>
</additionalProjectnatures>
<classpathContainers>
<classpathContainer>org.eclipse.jdt.launching.JRE_CONTAINER</classpathContainer>
<classpathContainer>ch.epfl.lamp.sdt.launching.SCALA_CONTAINER</classpathContainer>
</classpathContainers>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.2-beta-5</version>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<archive>
<manifest>
<mainClass>com.sag.main.ScalaRunner</mainClass>
</manifest>
</archive>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<mainClass>com.sag.main.ScalaRunner</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
</plugins>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
</plugin>
</plugins>
</reporting>
</project>
I will be grateful for any help.
Regards,
Dariusz
Here is the modified pom.xml which solved my problem. Maybe it will help someone. The key change is replacing the maven-assembly-plugin build with the maven-shade-plugin and its AppendingTransformer for reference.conf: every Akka jar ships its own reference.conf of defaults (including akka.loggers), jar-with-dependencies keeps only one of those files, and the transformer concatenates them all instead.
pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>sag-scala</groupId>
<artifactId>sag-scala</artifactId>
<version>1.0</version>
<inceptionYear>2008</inceptionYear>
<properties>
<scala.version>2.10.1</scala.version>
</properties>
<repositories>
<repository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</repository>
<repository>
<id>akka-snapshots</id>
<snapshots>
<enabled>true</enabled>
</snapshots>
<url>http://repo.akka.io/snapshots/</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</pluginRepository>
</pluginRepositories>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_2.10</artifactId>
<version>2.3.2</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_2.10</artifactId>
<version>2.3.2</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.specs</groupId>
<artifactId>specs</artifactId>
<version>1.2.5</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<plugins>
<!-- plugin> <groupId>org.scala-tools</groupId> <artifactId>maven-scala-plugin</artifactId>
<executions> <execution> <goals> <goal>compile</goal> <goal>testCompile</goal>
</goals> </execution> </executions> <configuration> <scalaVersion>${scala.version}</scalaVersion>
<args> <arg>-target:jvm-1.5</arg> </args> </configuration> </plugin -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<configuration>
<downloadSources>true</downloadSources>
<buildcommands>
<buildcommand>ch.epfl.lamp.sdt.core.scalabuilder</buildcommand>
</buildcommands>
<additionalProjectnatures>
<projectnature>ch.epfl.lamp.sdt.core.scalanature</projectnature>
</additionalProjectnatures>
<classpathContainers>
<classpathContainer>org.eclipse.jdt.launching.JRE_CONTAINER</classpathContainer>
<classpathContainer>ch.epfl.lamp.sdt.launching.SCALA_CONTAINER</classpathContainer>
</classpathContainers>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.3</version>
<configuration>
<transformers>
<transformer
implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>reference.conf</resource>
</transformer>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<manifestEntries>
<Main-Class>com.sag.main.ScalaRunner</Main-Class>
</manifestEntries>
</transformer>
</transformers>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<mainClass>com.sag.main.ScalaRunner</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
</plugins>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
</plugin>
</plugins>
</reporting>
</project>
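As a quick sanity check after the shade build, a small probe like this (hypothetical, not part of the project) should now find the key that was missing before:
import com.typesafe.config.ConfigFactory

object MergedConfigCheck extends App {
// With the shade AppendingTransformer in place, the fat jar's reference.conf
// is the concatenation of every Akka module's defaults, so this no longer throws
val config = ConfigFactory.load()
println(config.getStringList("akka.loggers")) // e.g. [akka.event.Logging$DefaultLogger]
}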