Connect MongoDB to a Minecraft plugin

I'm making a Minecraft plugin for my server, but I have an error that I can't find a solution to. The context: I want to store a player's data like level/XP/rank, etc. Can you help me with this part of the plugin? I'm a beginner in Java.
This is my code :
import com.mongodb.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import me.codexis.velocitylobbygeneral.commands.Lobby;
import me.codexis.velocitylobbygeneral.commands.MoveBot;
import me.codexis.velocitylobbygeneral.commands.Test;
import me.codexis.velocitylobbygeneral.event.*;
import org.bson.Document;
import org.bukkit.ChatColor;
import org.bukkit.plugin.java.JavaPlugin;
public final class VelocityLobbyGeneral extends JavaPlugin {

    private static VelocityLobbyGeneral instance;

    @Override
    public void onEnable() {
        setInstance(this);

        // Listeners
        getServer().getPluginManager().registerEvents(new OnJoinQuit(), this);
        getServer().getPluginManager().registerEvents(new FormatChat(), this);
        getServer().getPluginManager().registerEvents(new Scoreboard(), this);

        // Channels
        getServer().getMessenger().registerOutgoingPluginChannel(this, "BungeeCord");

        // Commands
        new Lobby();
        new MoveBot();
        new Test();

        // Connection to database
        MongoClient mongoClient = (MongoClient) MongoClients.create("mongodb+srv://myusername:#databasemc.ehssc.mongodb.net/VelocityMC?retryWrites=true&w=majority");
        MongoDatabase mongoDatabase = mongoClient.getDatabase("VelocityMC");
        MongoCollection<Document> mongoCollection = mongoDatabase.getCollection("Vide");
        getLogger().info(ChatColor.GREEN + "Connected to Database");

        getLogger().info("=============================================");
        getLogger().info(" >>>> Velocity Lobby General Loaded <<<< ");
        getLogger().info("=============================================");
    }

    @Override
    public void onDisable() {
        getLogger().info("===============================================");
        getLogger().info(" >>>> Velocity Lobby General disabled <<<< ");
        getLogger().info("===============================================");
    }

    public static VelocityLobbyGeneral getInstance(){
        return instance;
    }

    private static void setInstance(VelocityLobbyGeneral instance){
        VelocityLobbyGeneral.instance = instance;
    }
}
And this is my error :
[12:32:49 WARN]: java.lang.NoClassDefFoundError: com/mongodb/client/MongoClients
[12:32:49 WARN]: at VelocityLobbyGeneral.jar//me.codexis.velocitylobbygeneral.VelocityLobbyGeneral.onEnable(VelocityLobbyGeneral.java:38)
[12:32:49 WARN]: at org.bukkit.plugin.java.JavaPlugin.setEnabled(JavaPlugin.java:264)
[12:32:49 WARN]: at org.bukkit.plugin.java.JavaPluginLoader.enablePlugin(JavaPluginLoader.java:370)
[12:32:49 WARN]: at org.bukkit.plugin.SimplePluginManager.enablePlugin(SimplePluginManager.java:500)
[12:32:49 WARN]: at org.bukkit.craftbukkit.v1_17_R1.CraftServer.enablePlugin(CraftServer.java:535)
[12:32:49 WARN]: at org.bukkit.craftbukkit.v1_17_R1.CraftServer.enablePlugins(CraftServer.java:449)
[12:32:49 WARN]: at org.bukkit.craftbukkit.v1_17_R1.CraftServer.reload(CraftServer.java:970)
[12:32:49 WARN]: at org.bukkit.Bukkit.reload(Bukkit.java:769)
[12:32:49 WARN]: at org.bukkit.command.defaults.ReloadCommand.execute(ReloadCommand.java:54)
[12:32:49 WARN]: at org.bukkit.command.SimpleCommandMap.dispatch(SimpleCommandMap.java:159)
[12:32:49 WARN]: at org.bukkit.craftbukkit.v1_17_R1.CraftServer.dispatchCommand(CraftServer.java:838)
[12:32:49 WARN]: at org.bukkit.craftbukkit.v1_17_R1.CraftServer.dispatchServerCommand(CraftServer.java:801)
[12:32:49 WARN]: at net.minecraft.server.dedicated.DedicatedServer.handleCommandQueue(DedicatedServer.java:518)
[12:32:49 WARN]: at net.minecraft.server.dedicated.DedicatedServer.b(DedicatedServer.java:480)
[12:32:49 WARN]: at net.minecraft.server.MinecraftServer.a(MinecraftServer.java:1475)
[12:32:49 WARN]: at net.minecraft.server.MinecraftServer.x(MinecraftServer.java:1274)
[12:32:49 WARN]: at net.minecraft.server.MinecraftServer.lambda$spin$0(MinecraftServer.java:319)
[12:32:49 WARN]: at java.base/java.lang.Thread.run(Thread.java:831)
[12:32:49 WARN]: Caused by: java.lang.ClassNotFoundException: com.mongodb.client.MongoClients
[12:32:49 WARN]: at org.bukkit.plugin.java.PluginClassLoader.loadClass0(PluginClassLoader.java:146)
[12:32:49 WARN]: at org.bukkit.plugin.java.PluginClassLoader.loadClass(PluginClassLoader.java:103)
[12:32:49 WARN]: at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:519)
[12:32:49 WARN]: ... 18 more
Please can anyone help me.
Pom.xml :
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>me.codexisphantom</groupId>
<artifactId>VelocityLobby</artifactId>
<version>1.0</version>
<packaging>jar</packaging>
<name>VelocityLobby</name>
<description>Official VelocityMC Plugin</description>
<properties>
<java.version>1.8</java.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<url>www.velocity-net.com</url>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.4</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
</resource>
</resources>
</build>
<repositories>
<repository>
<id>papermc-repo</id>
<url>https://papermc.io/repo/repository/maven-public/</url>
</repository>
<repository>
<id>sonatype</id>
<url>https://oss.sonatype.org/content/groups/public/</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>io.papermc.paper</groupId>
<artifactId>paper-api</artifactId>
<version>1.17.1-R0.1-SNAPSHOT</version>
<scope>provided</scope>
</dependency>
</dependencies>
MongoDB dependency:
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<version>3.12.10</version>
<scope>compile</scope>
</dependency>
Plugin added:
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</execution>
</executions>
</plugin>

In your Maven configuration you import MongoDB, so the project compiles and you can use the driver while developing.
But when you build the plugin, the driver is not included in the final jar. And since MongoDB isn't bundled with Spigot, the server can't find the MongoDB classes at runtime, which is exactly your error.
To fix it, there are multiple tutorials 1, 2, 3, 4 ... and I'm sure we can find more.
I suggest you import MongoDB like this:
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<version>3.12.10</version> <!-- The version that you want -->
<scope>compile</scope> <!-- In my Maven project, this includes the dependency in the built jar -->
</dependency>
You can find all versions of the MongoDB driver here.
My full config, which works:
<properties>
<java.version>1.8</java.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.4</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<repositories>
<repository>
<id>sonatype</id>
<url>https://oss.sonatype.org/content/groups/public/</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<version>3.12.10</version>
<scope>compile</scope>
</dependency>
</dependencies>
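Once the driver is actually shaded into the jar, here is a minimal sketch of how the connection and the player data (level/XP/rank) storage could look. It is only an illustration, not your final plugin code: the class name PlayerDataStore, the "players" collection and the field names are made up, and the connection string is a placeholder. It uses the newer com.mongodb.client.MongoClient interface that MongoClients.create() actually returns, so there is no cast to the legacy com.mongodb.MongoClient:

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.ReplaceOptions;
import org.bson.Document;

import java.util.UUID;

public class PlayerDataStore {

    private final MongoCollection<Document> players;

    public PlayerDataStore(String connectionString) {
        // MongoClients.create() returns com.mongodb.client.MongoClient, not the legacy com.mongodb.MongoClient.
        MongoClient client = MongoClients.create(connectionString);
        MongoDatabase database = client.getDatabase("VelocityMC");
        this.players = database.getCollection("players"); // hypothetical collection name
    }

    // Insert or update one player's level/xp/rank, keyed by the player's UUID.
    public void savePlayer(UUID uuid, int level, int xp, String rank) {
        Document doc = new Document("uuid", uuid.toString())
                .append("level", level)
                .append("xp", xp)
                .append("rank", rank);
        players.replaceOne(Filters.eq("uuid", uuid.toString()), doc, new ReplaceOptions().upsert(true));
    }

    // Load one player's document, or null if the player has never been saved.
    public Document loadPlayer(UUID uuid) {
        return players.find(Filters.eq("uuid", uuid.toString())).first();
    }
}

On a Bukkit/Spigot server you would also want to run these calls off the main thread (for example with getServer().getScheduler().runTaskAsynchronously(plugin, ...)), because the driver performs blocking network I/O.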

Related

Spark + cassandra - Just one record getting inserted

I am trying to get data from Hive and insert it into Cassandra using Spark. Very surprisingly, I see only one record inserted into Cassandra despite the data frame having 4000+ records.
import org.apache.spark.sql.SparkSession
import com.datastax.spark.connector.cql.CassandraConnector
import com.typesafe.config.ConfigFactory
import org.apache.spark.sql.cassandra._
import com.datastax.spark.connector._
import java.math.BigDecimal
case class sales(wk_nbr: Int,
store_nbr: Int,
sales_amt: BigDecimal)
object HiveConnector extends App {
val cassandraConfig = ConfigFactory.load("cassandra.conf")
println("cassandraConfig loaded = " + cassandraConfig)
val spark = SparkSession.builder().appName("HiveConnector")
.config("spark.sql.warehouse.dir", "file:/data/raw/historical/tables")
.config("hive.exec.dynamic.partition.mode", "nonstrict")
.config("mapred.input.dir.recursive","true")
.config("mapreduce.input.fileinputformat.input.dir.recursive","true")
.config("spark.cassandra.connection.host", "***********")
.config("spark.cassandra.auth.username", "*****un****")
.config("spark.cassandra.auth.password", "******pw*****")
.enableHiveSupport()
.master("yarn").getOrCreate()
import spark.implicits._
val query = "select wk_nbr,store_nbr,sum(sales_amt) as sales_amt from scan where visit_dt between '2018-05-08' and '2018-05-11' group by wm_yr_wk_nbr,store_nbr"
val resDF = spark.sql(query)
resDF.persist()
println("RESDF size = " + resDF.count()) //prints the record count
println("RESDF sample rec = " + resDF.show(2)) //see 2 records in the log
CassandraConnector(spark.sparkContext).withSessionDo { spark =>
spark.execute("CREATE TABLE raaspoc.sales_data (wk_nbr INT PRIMARY KEY, store_nbr INT, sales_amt DOUBLE)")
}
/*
None of the following saveToCassandra work - meaning not inserting all records but only one record
*/
resDF.map { x => sales.apply(x.get(0).asInstanceOf[Int], x.get(1).asInstanceOf[Int],x.get(2).asInstanceOf[BigDecimal])
}.rdd.saveToCassandra("raaspoc","sales_data") // Not working
resDF.rdd.saveToCassandra("raaspoc","sales_data") // Not working
resDF.write.format("org.apache.spark.sql.cassandra").options(Map("table" -> "sales_data", "keyspace" -> "raaspoc")).save() // Not working
resDF.write.cassandraFormat("sales_data","raaspoc").save() // Not working
/*
When the data frame is written to HDFS, i see all 4000+ records in the sales.csv
*/
resDF.write.format("csv").save("hdfs:/dev/test/sales.csv")
println("RESDF size after write to cassandra = " + resDF.count()) //prints 4732 (record count)
spark.close()
}
I don't see any errors in the log and the spark-submit completes without any errors, but it inserts only one record. Following is my pom.xml:
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.test.raas</groupId>
<artifactId>RaasDataPipelines</artifactId>
<version>1.0-SNAPSHOT</version>
<inceptionYear>2008</inceptionYear>
<properties>
<scala.version>2.11.0</scala.version>
<spark.version>2.2.0</spark.version>
</properties>
<repositories>
<repository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</pluginRepository>
</pluginRepositories>
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>${spark.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>${spark.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.specs</groupId>
<artifactId>specs</artifactId>
<version>1.2.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.datastax.spark</groupId>
<artifactId>spark-cassandra-connector_2.11</artifactId>
<version>2.0.7</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.11</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_2.10</artifactId>
<version>1.0.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-core</artifactId>
<version>3.5.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-mapping</artifactId>
<version>3.5.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.datastax.cassandra</groupId>
<artifactId>cassandra-driver-extras</artifactId>
<version>3.5.0</version>
<scope>compile</scope>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
<args>
<arg>-target:jvm-1.5</arg>
</args>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.3</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>
shade
</goal>
</goals>
</execution>
</executions>
<configuration>
<minimizeJar>true</minimizeJar>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>fat</shadedClassifierName>
<relocations>
<relocation>
<pattern>com.google</pattern>
<shadedPattern>shaded.guava</shadedPattern>
<includes>
<include>com.google.**</include>
</includes>
<excludes>
<exclude>com.google.common.base.Optional</exclude>
<exclude>com.google.common.base.Absent</exclude>
<exclude>com.google.common.base.Present</exclude>
</excludes>
</relocation>
</relocations>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</plugin>
</plugins>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
</plugin>
</plugins>
</reporting>
</project>
Is there a possibility that your primary key (wk_nbr) is the same on all of the 4000+ rows? Cassandra treats writes with the same primary key as upserts, so each insert would overwrite the previous one and only a single row would remain (you can check with something like resDF.select("wk_nbr").distinct().count()).
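To see that upsert behaviour in isolation, here is a minimal sketch using the plain Cassandra Java driver (3.x, the same major version as the cassandra-driver-core dependency in the POM above); the contact point and the sample values are placeholders, and the keyspace/table are the ones created in the question:

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;

public class UpsertDemo {
    public static void main(String[] args) {
        try (Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
             Session session = cluster.connect()) {
            // Two INSERTs with the same primary key value: the second one overwrites the first,
            // so the table ends up with a single row for wk_nbr = 201819.
            session.execute("INSERT INTO raaspoc.sales_data (wk_nbr, store_nbr, sales_amt) VALUES (201819, 1, 10.0)");
            session.execute("INSERT INTO raaspoc.sales_data (wk_nbr, store_nbr, sales_amt) VALUES (201819, 2, 20.0)");
            Row row = session.execute("SELECT count(*) FROM raaspoc.sales_data WHERE wk_nbr = 201819").one();
            System.out.println("rows for wk_nbr 201819: " + row.getLong(0)); // prints 1
        }
    }
}

If the grouped query really does produce a distinct wk_nbr per row, the usual fix is to widen the primary key, e.g. PRIMARY KEY (wk_nbr, store_nbr), so that rows for different stores no longer overwrite each other.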

ArrayOutOfBoundIndex on xls Rules

ArrayOutOfBound Error
I have an xls rule [1 & 2]. When I run it as a JUnit test it works fine, but when I run it as a Maven test an ArrayOutOfBounds error [3] appears, and I couldn't find any explanation. Any hint?
========================================================
The pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.ibm.cio.cloud.cost</groupId>
<artifactId>bluecost</artifactId>
<packaging>jar</packaging>
<version>1.0-SNAPSHOT</version>
<name>bluecost</name>
<url>http://maven.apache.org</url>
<profiles>
<profile>
<id>dev</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<build.profile.id>dev</build.profile.id>
</properties>
</profile>
<profile>
<id>test</id>
<properties>
<build.profile.id>test</build.profile.id>
</properties>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<phase>install</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/lib</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>prod</id>
<properties>
<build.profile.id>prod</build.profile.id>
</properties>
</profile>
</profiles>
<properties>
<jdk.version>1.8</jdk.version>
<spring.version>4.3.9.RELEASE</spring.version>
<spring.batch.version>3.0.8.RELEASE</spring.batch.version>
<mysql.driver.version>5.1.44</mysql.driver.version>
<db2.driver.version>10.1.0</db2.driver.version>
</properties>
<dependencies>
<!-- Spring Core -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<!-- Spring Core -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
<!-- Spring Test -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
</dependency>
<!-- Spring Batch dependencies -->
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-core</artifactId>
<version>${spring.batch.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-infrastructure</artifactId>
<version>${spring.batch.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.batch</groupId>
<artifactId>spring-batch-test</artifactId>
<version>${spring.batch.version}</version>
<scope>test</scope>
</dependency>
<!-- MySQL database driver -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.driver.version}</version>
</dependency>
<dependency>
<groupId>com.ibm.db2.jcc</groupId>
<artifactId>db2jcc_license_cisuz</artifactId>
<version>${db2.driver.version}</version>
</dependency>
<dependency>
<groupId>com.ibm.db2.jcc</groupId>
<artifactId>db2jcc</artifactId>
<version>${db2.driver.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<version>1.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>2.4.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-pool</groupId>
<artifactId>commons-pool</artifactId>
<version>1.6</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.drools</groupId>
<artifactId>drools-spring</artifactId>
<version>5.4.0.Final</version>
</dependency>
<dependency>
<groupId>org.eclipse.jdt</groupId>
<artifactId>org.eclipse.jdt.core</artifactId>
<version>3.7.1</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.14</version>
</dependency>
</dependencies>
<build>
<finalName>bluecost-batch</finalName>
<filters>
<filter>profiles/${build.profile.id}/config.properties</filter>
</filters>
<resources>
<resource>
<filtering>true</filtering>
<directory>src/main/resources</directory>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<version>2.9</version>
<configuration>
<downloadSources>true</downloadSources>
<downloadJavadocs>false</downloadJavadocs>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>${jdk.version}</source>
<target>${jdk.version}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.20.1</version>
<configuration>
<excludes>
<exclude></exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<organization>
<name>IBM Corporation</name>
</organization>
</project>
========================
=====================
And the JUnit I created...
@Before
public void setUp() {
final Logger logger = Logger.getLogger("setup()");
logger.info("-----------------------------");
}
@Test
public void testDefaultValuesbyChannelName(){
BlueReport testBlueReport = new BlueReport();
// Arbitrary value, this should be "SOFTLYER"
testBlueReport.setAccount_id("29302"); // Must exist
testBlueReport.setOrg_co("SOFTLYER");
SSCData testSSCdataOutput = new SSCData();
testSSCdataOutput.setORIG_LOC_CD(null); // values will be filled by the rule
testSSCdataOutput.setSERVICE_CD(null); // this too will be filled by the rule
// values will be filled by the rules
String expectedOrigLocCd = "SLR";
//SSCDataOutput.setORIG_LOC_CD("SLR");
String expectedServiceTypCd = "SLR";
//SSCDataOutput.setSERVICE_TYP_CD("SLR");
String expectedServiceCd = "SLIC";
//SSCDataOutput.setSERVICE_CD("SLIC");
String expectedServiceGroupId = "BASE";
//SSCDataOutput.setSERVICE_GROUP_ID("BASE");
String expectedRateClasCd = "OGS";
//SSCDataOutput.setRATECLAS_CD("OGS");
String expectedLocalField1 = "INVCE ID";
//SSCDataOutput.setLOCAL_FIELD_1("INVCE ID");
//SSCDataOutput.setLOCAL_FIELD_2(null);//Not anymore at the Invoice level. can be Nulled.
String expectedLocalField3 = "ACCT ID";
//SSCDataOutput.setLOCAL_FIELD_3("ACCT ID");
//SSCDataOutput.setLOCAL_FIELD_5(null);
//SSCDataOutput.setLOCAL_FIELD_6(null);
final Logger logger = Logger.getLogger("testDefaultValuesbyChannelName");
dtSession.execute( Arrays.asList(new Object[] { testSSCdataOutput, testBlueReport }) );
logger.info("Rules that activated: " + testSSCdataOutput.getRuleAudit());
dtSession.setGlobal("logger", logger);
logger.info("Expecting ORIG_LOC_CD to be SLR");
assertTrue("I expected SLR", testSSCdataOutput.getORIG_LOC_CD().contains(expectedOrigLocCd));
logger.info("Expecting SERVICE_CD to be SLIC");
assertTrue("I expected SLIC", testSSCdataOutput.getSERVICE_CD().contains(expectedServiceCd));
logger.info("Expecting SERVICE_TYPE_CD to be SLR");
assertTrue("I expected SLR", testSSCdataOutput.getSERVICE_TYP_CD().contains(expectedServiceTypCd));
logger.info("Expecting SERVICE_GROUP_ID to be BASE");
assertTrue("I expected BASE", testSSCdataOutput.getSERVICE_GROUP_ID().contains(expectedServiceGroupId));
logger.info("Expecting RATECLAS_CD to be OGS");
assertTrue("I expected OGS", testSSCdataOutput.getRATECLAS_CD().contains(expectedRateClasCd));
logger.info("Expecting LOCAL_FIELD1 to be INVCE ID");
assertTrue("I expected INVCE ID", testSSCdataOutput.getLOCAL_FIELD_1().contains(expectedLocalField1));
logger.info("Expecting LOCAL_FIELD3 to be ACCT ID");
assertTrue("I expected ACCCT ID", testSSCdataOutput.getLOCAL_FIELD_3().contains(expectedLocalField3));
And this is the error on Run As ... 'Maven test'
======================================================
Running com.ibm.cio.cloud.cost.unit.DroolsDefaultValuesByChannelNameTest
[ERROR] TestContextManager - Caught exception while allowing TestExecutionListener [org.springframework.test.context.support.DependencyInjectionTestExecutionListener@1de6f05] to prepare test instance [com.ibm.cio.cloud.cost.unit.DroolsDefaultValuesByChannelNameTest@114e780] <java.lang.IllegalStateException: Failed to load ApplicationContext>java.lang.IllegalStateException: Failed to load ApplicationContext
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContext(DefaultCacheAwareContextLoaderDelegate.java:124)
at org.springframework.test.context.support.DefaultTestContext.getApplicationContext(DefaultTestContext.java:83)
at org.springframework.test.context.support.DependencyInjectionTestExecutionListener.injectDependencies(DependencyInjectionTestExecutionListener.java:117)
at org.springframework.test.context.support.DependencyInjectionTestExecutionListener.prepareTestInstance(DependencyInjectionTestExecutionListener.java:83)
at org.springframework.test.context.TestContextManager.prepareTestInstance(TestContextManager.java:230)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.createTest(SpringJUnit4ClassRunner.java:228)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner$1.runReflectiveCall(SpringJUnit4ClassRunner.java:287)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.methodBlock(SpringJUnit4ClassRunner.java:289)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.runChild(SpringJUnit4ClassRunner.java:247)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.runChild(SpringJUnit4ClassRunner.java:94)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.springframework.test.context.junit4.statements.RunBeforeTestClassCallbacks.evaluate(RunBeforeTestClassCallbacks.java:61)
at org.springframework.test.context.junit4.statements.RunAfterTestClassCallbacks.evaluate(RunAfterTestClassCallbacks.java:70)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.run(SpringJUnit4ClassRunner.java:191)
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'dtBase': Invocation of init method failed; nested exception is java.lang.ArrayIndexOutOfBoundsException: 38851
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1628)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:555)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:483)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:306)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:230)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:302)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:197)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:742)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:760)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:482)
at org.springframework.test.context.support.AbstractGenericContextLoader.loadContext(AbstractGenericContextLoader.java:128)
at org.springframework.test.context.support.AbstractGenericContextLoader.loadContext(AbstractGenericContextLoader.java:60)
at org.springframework.test.context.support.AbstractDelegatingSmartContextLoader.delegateLoading(AbstractDelegatingSmartContextLoader.java:108)
at org.springframework.test.context.support.AbstractDelegatingSmartContextLoader.loadContext(AbstractDelegatingSmartContextLoader.java:251)
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContextInternal(DefaultCacheAwareContextLoaderDelegate.java:98)
at org.springframework.test.context.cache.DefaultCacheAwareContextLoaderDelegate.loadContext(DefaultCacheAwareContextLoaderDelegate.java:116)
... 27 more
Caused by: java.lang.ArrayIndexOutOfBoundsException: 38851
at jxl.read.biff.Record.<init>(Record.java:79)
at jxl.read.biff.File.next(File.java:181)
at jxl.read.biff.WorkbookParser.parse(WorkbookParser.java:569)
at jxl.Workbook.getWorkbook(Workbook.java:271)
at org.drools.decisiontable.parser.xls.ExcelParser.parseFile(ExcelParser.java:77)
at org.drools.decisiontable.SpreadsheetCompiler.compile(SpreadsheetCompiler.java:89)
at org.drools.decisiontable.SpreadsheetCompiler.compile(SpreadsheetCompiler.java:68)
at org.drools.decisiontable.DecisionTableProviderImpl.compileStream(DecisionTableProviderImpl.java:37)
at org.drools.decisiontable.DecisionTableProviderImpl.loadFromInputStream(DecisionTableProviderImpl.java:20)
at org.drools.compiler.DecisionTableFactory.loadFromInputStream(DecisionTableFactory.java:15)
at org.drools.compiler.PackageBuilder.decisionTableToPackageDescr(PackageBuilder.java:454)
at org.drools.compiler.PackageBuilder.addPackageFromDecisionTable(PackageBuilder.java:448)
at org.drools.compiler.PackageBuilder.addKnowledgeResource(PackageBuilder.java:690)
at org.drools.builder.impl.KnowledgeBuilderImpl.add(KnowledgeBuilderImpl.java:45)
at org.drools.builder.impl.KnowledgeBuilderImpl.add(KnowledgeBuilderImpl.java:34)
at org.drools.container.spring.beans.KnowledgeBaseBeanFactory.afterPropertiesSet(KnowledgeBaseBeanFactory.java:110)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1687)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1624)
... 42 more

Flink with Kafka connection

Environment:
Ubuntu 16.04.1 LTS
Flink 1.1.3
Kafka 0.10.1.1
I'm trying to connect Flink with Kafka (Flink 1.1.3, Kafka 0.10.1.1).
I have already tried all the fixes I could find, but none of them work.
pom.xml :
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ux</groupId>
<artifactId>logs</artifactId>
<version>1.3-SNAPSHOT</version>
<packaging>jar</packaging>
<name>Flink Quickstart Job</name>
<url>http://www.myorganization.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<flink.version>1.3-SNAPSHOT</flink.version>
<slf4j.version>1.7.7</slf4j.version>
<log4j.version>1.2.17</log4j.version>
</properties>
<repositories>
<repository>
<id>apache.snapshots</id>
<name>Apache Development Snapshot Repository</name>
<url>https://repository.apache.org/content/repositories/snapshots/</url>
<releases>
<enabled>false</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.10_2.10</artifactId>
<version>1.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
</dependency>
</dependencies>
<profiles>
<profile>
<id>build-jar</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.11</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.11</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<artifactSet>
<excludes combine.self="override"></excludes>
</artifactSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<artifactSet>
<excludes>
<exclude>org.apache.flink:flink-annotations</exclude>
<exclude>org.apache.flink:flink-shaded-hadoop2</exclude>
<exclude>org.apache.flink:flink-shaded-curator-recipes</exclude>
<exclude>org.apache.flink:flink-core</exclude>
<exclude>org.apache.flink:flink-scala_2.11</exclude>
<exclude>org.apache.flink:flink-runtime_2.11</exclude>
<exclude>org.apache.flink:flink-optimizer_2.11</exclude>
<exclude>org.apache.flink:flink-clients_2.11</exclude>
<exclude>org.apache.flink:flink-avro_2.11</exclude>
<exclude>org.apache.flink:flink-examples-batch_2.11</exclude>
<exclude>org.apache.flink:flink-examples-streaming_2.11</exclude>
<exclude>org.apache.flink:flink-streaming-scala_2.11</exclude>
<exclude>org.apache.flink:flink-scala-shell_2.11</exclude>
<exclude>org.apache.flink:flink-python</exclude>
<exclude>org.apache.flink:flink-metrics-core</exclude>
<exclude>org.apache.flink:flink-metrics-jmx</exclude>
<exclude>org.apache.flink:flink-statebackend-rocksdb_2.11</exclude>
<exclude>log4j:log4j</exclude>
<exclude>org.scala-lang:scala-library</exclude>
<exclude>org.scala-lang:scala-compiler</exclude>
<exclude>org.scala-lang:scala-reflect</exclude>
<exclude>com.data-artisans:flakka-actor_*</exclude>
<exclude>com.data-artisans:flakka-remote_*</exclude>
<exclude>com.data-artisans:flakka-slf4j_*</exclude>
<exclude>io.netty:netty-all</exclude>
<exclude>io.netty:netty</exclude>
<exclude>commons-fileupload:commons-fileupload</exclude>
<exclude>org.apache.avro:avro</exclude>
<exclude>commons-collections:commons-collections</exclude>
<exclude>org.codehaus.jackson:jackson-core-asl</exclude>
<exclude>org.codehaus.jackson:jackson-mapper-asl</exclude>
<exclude>com.thoughtworks.paranamer:paranamer</exclude>
<exclude>org.xerial.snappy:snappy-java</exclude>
<exclude>org.apache.commons:commons-compress</exclude>
<exclude>org.tukaani:xz</exclude>
<exclude>com.esotericsoftware.kryo:kryo</exclude>
<exclude>com.esotericsoftware.minlog:minlog</exclude>
<exclude>org.objenesis:objenesis</exclude>
<exclude>com.twitter:chill_*</exclude>
<exclude>com.twitter:chill-java</exclude>
<exclude>commons-lang:commons-lang</exclude>
<exclude>junit:junit</exclude>
<exclude>org.apache.commons:commons-lang3</exclude>
<exclude>org.slf4j:slf4j-api</exclude>
<exclude>org.slf4j:slf4j-log4j12</exclude>
<exclude>log4j:log4j</exclude>
<exclude>org.apache.commons:commons-math</exclude>
<exclude>org.apache.sling:org.apache.sling.commons.json</exclude>
<exclude>commons-logging:commons-logging</exclude>
<exclude>commons-codec:commons-codec</exclude>
<exclude>com.fasterxml.jackson.core:jackson-core</exclude>
<exclude>com.fasterxml.jackson.core:jackson-databind</exclude>
<exclude>com.fasterxml.jackson.core:jackson-annotations</exclude>
<exclude>stax:stax-api</exclude>
<exclude>com.typesafe:config</exclude>
<exclude>org.uncommons.maths:uncommons-maths</exclude>
<exclude>com.github.scopt:scopt_*</exclude>
<exclude>commons-io:commons-io</exclude>
<exclude>commons-cli:commons-cli</exclude>
</excludes>
</artifactSet>
<filters>
<filter>
<artifact>org.apache.flink:*</artifact>
<excludes>
<exclude>org/apache/flink/shaded/com/**</exclude>
<exclude>web-docs/**</exclude>
</excludes>
</filter>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
<createDependencyReducedPom>false</createDependencyReducedPom>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
My Java code:
import java.util.Properties;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
public class App
{
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
Properties properties = new Properties();
properties.setProperty("bootstrap.servers", "localhost:9092");
properties.setProperty("zookeeper.connect", "localhost:2181");
properties.setProperty("group.id", "flink_consumer");
DataStream<String> messageStream = env.addSource(new FlinkKafkaConsumer010<>
("ux_logs", new SimpleStringSchema(), properties));
messageStream.rebalance().map(new MapFunction<String, String>() {
private static final long serialVersionUID = -6867736771747690202L;
public String map(String value) throws Exception {
return "Kafka and Flink says: " + value;
}
}).print();
env.execute();
}
}
But I get this error:
java.lang.NoClassDefFoundError: org/apache/flink/streaming/api/checkpoint/CheckpointedFunction
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at ux.App.main(App.java:27)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:509)
at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:403)
at org.apache.flink.client.program.ClusterClient.run(ClusterClient.java:320)
at org.apache.flink.client.CliFrontend.executeProgram(CliFrontend.java:777)
at org.apache.flink.client.CliFrontend.run(CliFrontend.java:253)
at org.apache.flink.client.CliFrontend.parseParameters(CliFrontend.java:1005)
at org.apache.flink.client.CliFrontend.main(CliFrontend.java:1048)
Caused by: java.lang.ClassNotFoundException: org.apache.flink.streaming.api.checkpoint.CheckpointedFunction
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
Do I need to remove my Kafka and run an older version?
Is my Flink Kafka connector wrong?
I tried to use this plugin but it didn't work. (https://ci.apache.org/projects/flink/flink-docs-release-1.3/dev/linking.html)
Thanks.
You have to downgrade your connector:
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.8_2.10</artifactId>
<version>1.1.2</version>
</dependency>
Here is a similar question: https://stackoverflow.com/a/40037895/1252056
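For reference, a minimal sketch of how the source in App changes with the 0.8 connector suggested above (assuming you stay on Flink 1.1.x): only the consumer class name differs, and the zookeeper.connect property you already set is actually required by the 0.8 consumer.

import java.util.Properties;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;

public class App {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "localhost:9092");
        // The 0.8 consumer tracks offsets through ZooKeeper, so this property is required here.
        properties.setProperty("zookeeper.connect", "localhost:2181");
        properties.setProperty("group.id", "flink_consumer");

        // Same topic and schema as in the question; only the consumer class changes.
        DataStream<String> messageStream =
                env.addSource(new FlinkKafkaConsumer08<>("ux_logs", new SimpleStringSchema(), properties));

        messageStream.print();
        env.execute();
    }
}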
If you want to connect to Kafka 0.10, you will have to move to Flink 1.2; otherwise, as @streetturte mentioned, you will have to downgrade your Kafka connector.
Have a look at the reference table here:
https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/connectors/kafka.html
Kafka 0.9 and newer versions don't need ZooKeeper.
You need to upgrade your Flink or downgrade your Kafka version, for example:
--topic myTopic --bootstrap.servers 10.123.34.56:9092 --group.id myGroup
FlinkKafkaConsumer09<CarDB> consumerBinden = new FlinkKafkaConsumer09<CarDB>(
kafkaTopic,
new ClassClassSchema(),
parameterTool.getProperties());
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.9_2.10</artifactId>
<version>${flink.version}</version>
</dependency>
Perhaps you are using the Flink Kafka consumer dependency for Scala in your POM file, whereas your code is in Java. Try the Flink Kafka consumer for Java in your POM. Hope it helps.

@RequestBody working with String but not with a POJO model (REST web service)

Why does the service below work perfectly if I use String as the parameter, but not with a model class? I don't get any error at all, but it always receives null if I use a model instead of a String. Note: I do have mvc:annotation-driven in mvc-dispatcher-servlet and all the Jackson libraries. My guess is that I have something wrong with the libraries, because it was running correctly before I moved the application from Spring 3 to Spring 4. Please, could someone check whether I need something else in the POM?
@Controller
@RequestMapping("/log")
public class Lo_Controller {

    @RequestMapping(value="display/last2", method=RequestMethod.POST, headers = "Accept=application/json")
    @ResponseStatus(HttpStatus.CREATED)
    @ResponseBody
    // after taking out @ModelAttribute I don't reach this method when debugging
    public String getTest(@RequestBody Mas60010b mas60010b) { // if I change Mas60010b mas60010b to String strTest ...
        Lo_Mas60010 lo_Mas60010 = new Lo_Mas60010();
        System.out.println(mas60010b.getSubCd());
        //System.out.println(strTest); //... it will work
        return "returnTestPost";
    }
Model:
public class Mas60010b {
private String subCd;
private String firstDT;
private String currDT;
public String getSubCd() {
return subCd;
}
public void setSubCd(String subCd) {
this.subCd = subCd;
}
public String getFirstDT() {
return firstDT;
}
public void setFirstDT(String firstDT) {
this.firstDT = firstDT;
}
public String getCurrDT() {
return currDT;
}
public void setCurrDT(String currDT) {
this.currDT = currDT;
}
The client tester
public class Main {
public static void main(String[] args) throws ClientProtocolException, IOException {
HttpClient client2 = new DefaultHttpClient();
HttpPost post2 = new HttpPost("http://localhost:8080/MHE2/log/display/last2");
Map<String, String> map2 = new HashMap<String, String>();
map2.put("subCd", "A");
map2.put("firstDT", "2014-09-29 00:00:00.0");
map2.put("currDT", "2014-09-30 16:45:33.379731");
ObjectMapper mapper2 = new ObjectMapper();
String strJson2 = mapper2.writeValueAsString(map2);
StringEntity input2 = new StringEntity(strJson2);
input2.setContentType("application/json");
post2.setEntity(input2);
//Without @ModelAttribute I get [WARNING ] SRVE8094W: WARNING: Cannot set header. Response already committed.
HttpResponse response2 = client2.execute(post2);
BufferedReader rd2 = new BufferedReader(new InputStreamReader(response2.getEntity().getContent()));
String line2 = "";
while ((line2 = rd2.readLine()) != null) {
System.out.println(line2);
}
}
}
Effective Pom:
<?xml version="1.0"?>
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>MHE2</groupId>
<artifactId>MHE2</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>war</packaging>
<name>MHE Maven Webapp</name>
<url>http://maven.apache.org</url>
<properties>
<org.aspectj-version>1.7.4</org.aspectj-version>
<spring.version>4.1.2.RELEASE</spring.version>
<java-version>1.6</java-version>
<jackson.databind-version>2.2.3</jackson.databind-version>
<org.slf4j-version>1.7.5</org.slf4j-version>
</properties>
<dependencies>
<dependency>
<groupId>aopalliance</groupId>
<artifactId>aopalliance</artifactId>
<version>1.0</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.3.3</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.3.3</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>2.5</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
<version>4.1.2.RELEASE</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>4.1.2.RELEASE</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
<version>4.1.2.RELEASE</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>4.1.2.RELEASE</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>4.1.2.RELEASE</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aop</artifactId>
<version>4.1.2.RELEASE</version>
<scope>compile</scope>
<exclusions>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.1.1</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>4.1.2.RELEASE</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-expression</artifactId>
<version>4.1.2.RELEASE</version>
<scope>compile</scope>
</dependency>
</dependencies>
<repositories>
<repository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>central</id>
<name>Central Repository</name>
<url>http://repo.maven.apache.org/maven2</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<releases>
<updatePolicy>never</updatePolicy>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>central</id>
<name>Central Repository</name>
<url>http://repo.maven.apache.org/maven2</url>
</pluginRepository>
</pluginRepositories>
<build>
<sourceDirectory>C:\STS\ws\MHE_original\src</sourceDirectory>
<scriptSourceDirectory>C:\STS\ws\MHE_original\src\main\scripts</scriptSourceDirectory>
<testSourceDirectory>C:\STS\ws\MHE_original\src\test\java</testSourceDirectory>
<outputDirectory>C:\STS\ws\MHE_original\WebContent\WEB-INF\classes</outputDirectory>
<testOutputDirectory>C:\STS\ws\MHE_original\target\test-classes</testOutputDirectory>
<resources>
<resource>
<directory>C:\STS\ws\MHE_original\src</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</resource>
</resources>
<testResources>
<testResource>
<directory>C:\STS\ws\MHE_original\src\test\resources</directory>
</testResource>
</testResources>
<directory>C:\STS\ws\MHE_original\target</directory>
<finalName>MHE2-0.0.1-SNAPSHOT</finalName>
<pluginManagement>
<plugins>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<version>1.3</version>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.2-beta-5</version>
</plugin>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<version>2.8</version>
</plugin>
<plugin>
<artifactId>maven-release-plugin</artifactId>
<version>2.3.2</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<executions>
<execution>
<id>default-testCompile</id>
<phase>test-compile</phase>
<goals>
<goal>testCompile</goal>
</goals>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</execution>
<execution>
<id>default-compile</id>
<phase>compile</phase>
<goals>
<goal>compile</goal>
</goals>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</execution>
</executions>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
<plugin>
<artifactId>maven-war-plugin</artifactId>
<version>2.4</version>
<executions>
<execution>
<id>default-war</id>
<phase>package</phase>
<goals>
<goal>war</goal>
</goals>
<configuration>
<warSourceDirectory>WebContent</warSourceDirectory>
</configuration>
</execution>
</executions>
<configuration>
<warSourceDirectory>WebContent</warSourceDirectory>
</configuration>
</plugin>
<plugin>
<artifactId>maven-clean-plugin</artifactId>
<version>2.5</version>
<executions>
<execution>
<id>default-clean</id>
<phase>clean</phase>
<goals>
<goal>clean</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-install-plugin</artifactId>
<version>2.4</version>
<executions>
<execution>
<id>default-install</id>
<phase>install</phase>
<goals>
<goal>install</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<version>2.6</version>
<executions>
<execution>
<id>default-resources</id>
<phase>process-resources</phase>
<goals>
<goal>resources</goal>
</goals>
</execution>
<execution>
<id>default-testResources</id>
<phase>process-test-resources</phase>
<goals>
<goal>testResources</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.12.4</version>
<executions>
<execution>
<id>default-test</id>
<phase>test</phase>
<goals>
<goal>test</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-deploy-plugin</artifactId>
<version>2.7</version>
<executions>
<execution>
<id>default-deploy</id>
<phase>deploy</phase>
<goals>
<goal>deploy</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-site-plugin</artifactId>
<version>3.3</version>
<executions>
<execution>
<id>default-site</id>
<phase>site</phase>
<goals>
<goal>site</goal>
</goals>
<configuration>
<outputDirectory>C:\STS\ws\MHE_original\target\site</outputDirectory>
<reportPlugins>
<reportPlugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
</reportPlugin>
</reportPlugins>
</configuration>
</execution>
<execution>
<id>default-deploy</id>
<phase>site-deploy</phase>
<goals>
<goal>deploy</goal>
</goals>
<configuration>
<outputDirectory>C:\STS\ws\MHE_original\target\site</outputDirectory>
<reportPlugins>
<reportPlugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
</reportPlugin>
</reportPlugins>
</configuration>
</execution>
</executions>
<configuration>
<outputDirectory>C:\STS\ws\MHE_original\target\site</outputDirectory>
<reportPlugins>
<reportPlugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
</reportPlugin>
</reportPlugins>
</configuration>
</plugin>
</plugins>
</build>
<reporting>
<outputDirectory>C:\STS\ws\MHE_original\target\site</outputDirectory>
</reporting>
</project>
Log from WebSphere 8.5 Liberty profile
[WARNING ] Cannot search for matching files underneath URL [bundleresource://96.fwk327208746/] because it does not correspond to a directory in the file system
URL [bundleresource://96.fwk327208746/] cannot be resolved to absolute file path because it does not reside in the file system: bundleresource://96.fwk327208746/
[WARNING ] Cannot search for matching files underneath URL [bundleresource://95.fwk327208746/] because it does not correspond to a directory in the file system
URL [bundleresource://95.fwk327208746/] cannot be resolved to absolute file path because it does not reside in the file system: bundleresource://95.fwk327208746/
[WARNING ] Skipping [C:\STS\ws\MHE_original\WebContent\WEB-INF\lib\db2java.jar] because it does not denote a directory
[WARNING ] Skipping [C:\STS\ws\MHE_original\WebContent\WEB-INF\lib\jackson-core-asl-1.9.12.jar] because it does not denote a directory
...
[WARNING ] Skipping [C:\Users\e049447.m2\repository\org\springframework\spring-expression\4.1.2.RELEASE\spring-expression-4.1.2.RELEASE.jar] because it does not denote a directory
[WARNING ] SRVE8094W: WARNING: Cannot set header. Response already committed.
[12/3/14 8:55:38:812 CST] 0000001a ramework.core.io.support.PathMatchingResourcePatternResolver W Cannot search for matching files underneath URL [bundleresource://96.fwk327208746/] because it does not correspond to a directory in the file system
java.io.FileNotFoundException: URL [bundleresource://96.fwk327208746/] cannot be resolved to absolute file path because it does not reside in the file system: bundleresource://96.fwk327208746/
at org.springframework.util.ResourceUtils.getFile(ResourceUtils.java:212)
at org.springframework.core.io.AbstractFileResolvingResource.getFile(AbstractFileResolvingResource.java:52)
at org.springframework.core.io.UrlResource.getFile(UrlResource.java:212)
...
[12/3/14 8:55:39:470 CST] 0000001a b.servlet.mvc.method.annotation.RequestMappingHandlerMapping I Mapped "{[/log/display/last2],methods=[POST],params=[],headers=[],consumes=[],produces=[application/json],custom=[]}" onto public java.lang.String com.mastercard.mhe.common.controller.Lo_Controller.getTest(com.mastercard.mhe.log.domain2.Mas60010b)
[12/3/14 8:55:39:782 CST] 0000001a b.servlet.mvc.method.annotation.RequestMappingHandlerAdapter I Looking for #ControllerAdvice: WebApplicationContext for namespace 'mvc-dispatcher-servlet': startup date [Wed Dec 03 08:55:37 CST 2014]; parent: Root WebApplicationContext
[12/3/14 8:55:40:063 CST] 0000001a b.servlet.mvc.method.annotation.RequestMappingHandlerAdapter I Looking for #ControllerAdvice: WebApplicationContext for namespace 'mvc-dispatcher-servlet': startup date [Wed Dec 03 08:55:37 CST 2014]; parent: Root WebApplicationContext
[12/3/14 8:55:40:267 CST] 0000001a .springframework.web.servlet.handler.SimpleUrlHandlerMapping I Mapped URL path [/**] onto handler 'org.springframework.web.servlet.resource.ResourceHttpRequestHandler#0'
[12/3/14 8:55:40:313 CST] 0000001a org.springframework.web.servlet.DispatcherServlet I FrameworkServlet 'mvc-dispatcher': initialization completed in 2548 ms
[12/3/14 8:55:40:313 CST] 0000001a com.ibm.ws.webcontainer.servlet I SRVE0242I: [MHE_original] [/MHE2] [mvc-dispatcher]: Initialization successful.
[12/3/14 8:55:40:470 CST] 00000022 SystemOut O null
[12/3/14 8:56:29:485 CST] 0000001a com.ibm.ws.webcontainer.srt W SRVE8094W: WARNING: Cannot set header. Response already committed.
As @Bohuslav said in their comment, @RequestBody and @ModelAttribute must not be used on the same method parameter. Remove @ModelAttribute and try again; after that your model parameter will be populated correctly.
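For reference, here is a minimal sketch of what the corrected handler could look like, using only @RequestBody on the parameter. The class and payload names below are placeholders, not the original controller; only the /log/display/last2 mapping is taken from the log above.
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;

@Controller
public class LogController {

    // Placeholder payload type standing in for the real domain object.
    public static class LogRequest {
        public String message;
    }

    // @RequestBody alone lets Spring deserialize the JSON request body into the
    // parameter. @RequestBody and @ModelAttribute select different, incompatible
    // binding mechanisms, which is why they must not be combined on one parameter.
    @RequestMapping(value = "/log/display/last2", method = RequestMethod.POST,
            produces = "application/json")
    @ResponseBody
    public String getTest(@RequestBody LogRequest request) {
        // ... work with the populated request object
        return "ok";
    }
}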

Scala Akka - not able to run program from jar-with-dependencies built by Maven

I'm new to Scala and Akka. I tried to set up a simple Akka remoting project with a client and a server. I'm using Eclipse with the Maven Scala plugin. Everything works fine when I run the project from the IDE: the client is able to connect to the server. Unfortunately, when I build the project into a jar-with-dependencies using the maven-assembly-plugin, I'm not able to run it from the command line. I get an error like this:
F:\Projects\sag-project-scala\sag-scala\target>scala sag-scala-1.0-jar-with-dependencies.jar server
com.typesafe.config.ConfigException$Missing: No configuration setting found for key 'akka.loggers'
at com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:124)
at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:145)
at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:151)
at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:159)
at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:164)
at com.typesafe.config.impl.SimpleConfig.getList(SimpleConfig.java:212)
at com.typesafe.config.impl.SimpleConfig.getHomogeneousUnwrappedList(SimpleConfig.java:271)
at com.typesafe.config.impl.SimpleConfig.getStringList(SimpleConfig.java:329)
at akka.actor.ActorSystem$Settings.<init>(ActorSystem.scala:179)
at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:504)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:141)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:118)
at com.sag.remote.ServerObject$.run(Server.scala:28)
at com.sag.remote.ServerObject$.main(Server.scala:25)
at com.sag.remote.ServerObject.main(Server.scala)
at com.sag.main.ScalaRunner.main(ScalaRunner.java:23)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at scala.tools.nsc.util.ScalaClassLoader$$anonfun$run$1.apply(ScalaClassLoader.scala:71)
at scala.tools.nsc.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
at scala.tools.nsc.util.ScalaClassLoader$URLClassLoader.asContext(ScalaClassLoader.scala:139)
at scala.tools.nsc.util.ScalaClassLoader$class.run(ScalaClassLoader.scala:71)
at scala.tools.nsc.util.ScalaClassLoader$URLClassLoader.run(ScalaClassLoader.scala:139)
at scala.tools.nsc.CommonRunner$class.run(ObjectRunner.scala:28)
at scala.tools.nsc.JarRunner$.run(MainGenericRunner.scala:16)
at scala.tools.nsc.CommonRunner$class.runAndCatch(ObjectRunner.scala:35)
at scala.tools.nsc.JarRunner$.runJar(MainGenericRunner.scala:28)
at scala.tools.nsc.MainGenericRunner.runTarget$1(MainGenericRunner.scala:78)
at scala.tools.nsc.MainGenericRunner.process(MainGenericRunner.scala:96)
at scala.tools.nsc.MainGenericRunner$.main(MainGenericRunner.scala:105)
at scala.tools.nsc.MainGenericRunner.main(MainGenericRunner.scala)
If I understand the Akka documentation correctly, custom *.conf files override the default configuration, and Akka internally merges those files into one configuration.
I'm pretty sure I'm missing something very small, but after two days of googling I gave up, which is why I'm asking you for help.
It seems that Akka can't find the proper configuration from application.conf, but why does everything work fine from the IDE?
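As a quick diagnostic (a sketch added here, not part of the original project), you can print whether the key is resolvable from inside the packaged jar. If the Akka defaults from reference.conf were lost while the fat jar was assembled, akka.loggers will be missing even though your own *.conf files load fine:
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

// Hypothetical diagnostic class; run it from the IDE and from the built jar to compare.
public class ConfigCheck {
    public static void main(String[] args) {
        // load("server") layers server.conf (and its includes) on top of the
        // reference.conf files bundled inside the Akka jars.
        Config config = ConfigFactory.load("server");
        System.out.println("has akka.loggers: " + config.hasPath("akka.loggers"));
        if (config.hasPath("akka.loggers")) {
            System.out.println("akka.loggers = " + config.getStringList("akka.loggers"));
        }
    }
}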
Here are the files used to build my example:
ScalaRunner.java
package com.sag.main;
import com.sag.remote.ClientObject;
import com.sag.remote.ServerObject;
/**
* Runner class to simply run jar file from command line.
* @author ddr
*
*/
public class ScalaRunner {
private static String SERVER_MODE = "server";
private static String CLIENT_MODE = "client";
public static void main(String[] args) {
String mode = SERVER_MODE;
if(args != null && args.length > 0){
mode = args[0];
}
if(CLIENT_MODE.equals(mode)){
ClientObject.main(args);
}
else if (SERVER_MODE.equals(mode)){
ServerObject.main(args);
}
}
}
Client.scala
package com.sag.remote
import akka.actor._
import akka.actor.ActorDSL._
import com.typesafe.config.ConfigFactory
class Client extends Actor {
def receive = {
case msg: String => println("joe received " + msg + " from " + sender)
case _ => println("Received unknown msg ")
}
}
object ClientObject {
def main(args: Array[String]): Unit = run()
def run() = {
println("STARTING CLIENT")
implicit val client = ActorSystem("Client", ConfigFactory.load("client"))
val server = client.actorFor("akka.tcp://server#127.0.0.1:6969/user/server")
println("That 's remote server:" + server)
server ! "Hello"
}
}
Server.scala
package com.sag.remote
import akka.actor.Actor
import akka.actor.ActorSystem
import akka.actor.Props
import com.typesafe.config.Config
import scala.concurrent.duration.Duration
import java.util.concurrent.TimeUnit
import akka.actor.Extension
import akka.actor.ExtensionIdProvider
import akka.actor.ExtensionId
import akka.actor.ExtendedActorSystem
import akka.actor.ActorSystem.Settings
import com.typesafe.config.ConfigFactory
class Server extends Actor {
def receive = {
case msg: String => println("joe received " + msg + " from " + sender)
case _ => println("Received unknown msg ")
}
}
object ServerObject {
def main(args: Array[String]): Unit = run()
def run() = {
val server = ActorSystem("server", ConfigFactory.load("server"))
val serverActor = server.actorOf(Props[Server], name = "server")
println(serverActor.path)
println()
println("Server ready")
}
}
common.conf
include "application.conf"
akka{
stdout-loglevel = "DEBUG"
loglevel = "DEBUG"
actor {
provider = "akka.remote.RemoteActorRefProvider"
}
remote {
enabled-transports = ["akka.remote.netty.tcp"]
netty.tcp {
hostname = "127.0.0.1"
}
}
}
client.conf
include "common"
akka {
remote.netty.tcp.port = 2552
}
server.conf
include "common"
akka {
remote.netty.tcp.port = 6969
}
pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>sag-scala</groupId>
<artifactId>sag-scala</artifactId>
<version>1.0</version>
<inceptionYear>2008</inceptionYear>
<properties>
<scala.version>2.10.1</scala.version>
</properties>
<repositories>
<repository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</repository>
<repository>
<id>akka-snapshots</id>
<snapshots>
<enabled>true</enabled>
</snapshots>
<url>http://repo.akka.io/snapshots/</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</pluginRepository>
</pluginRepositories>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_2.10</artifactId>
<version>2.3.2</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_2.10</artifactId>
<version>2.3.2</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.specs</groupId>
<artifactId>specs</artifactId>
<version>1.2.5</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<configuration>
<downloadSources>true</downloadSources>
<buildcommands>
<buildcommand>ch.epfl.lamp.sdt.core.scalabuilder</buildcommand>
</buildcommands>
<additionalProjectnatures>
<projectnature>ch.epfl.lamp.sdt.core.scalanature</projectnature>
</additionalProjectnatures>
<classpathContainers>
<classpathContainer>org.eclipse.jdt.launching.JRE_CONTAINER</classpathContainer>
<classpathContainer>ch.epfl.lamp.sdt.launching.SCALA_CONTAINER</classpathContainer>
</classpathContainers>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.2-beta-5</version>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<archive>
<manifest>
<mainClass>com.sag.main.ScalaRunner</mainClass>
</manifest>
</archive>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<mainClass>com.sag.main.ScalaRunner</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
</plugins>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
</plugin>
</plugins>
</reporting>
</project>
I will be grateful for any help.
Regards,
Dariusz
Here is the modified pom.xml that solved my problem; maybe it will help someone. The root cause was that every Akka module jar ships its own reference.conf with default settings (including akka.loggers), and the jar-with-dependencies assembly lets those files overwrite one another, so the defaults were lost in the fat jar. The maven-shade-plugin with an AppendingTransformer for reference.conf concatenates them instead, so the merged jar keeps all of Akka's defaults.
pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>sag-scala</groupId>
<artifactId>sag-scala</artifactId>
<version>1.0</version>
<inceptionYear>2008</inceptionYear>
<properties>
<scala.version>2.10.1</scala.version>
</properties>
<repositories>
<repository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</repository>
<repository>
<id>akka-snapshots</id>
<snapshots>
<enabled>true</enabled>
</snapshots>
<url>http://repo.akka.io/snapshots/</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
<url>http://scala-tools.org/repo-releases</url>
</pluginRepository>
</pluginRepositories>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_2.10</artifactId>
<version>2.3.2</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_2.10</artifactId>
<version>2.3.2</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.specs</groupId>
<artifactId>specs</artifactId>
<version>1.2.5</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<plugins>
<!-- plugin> <groupId>org.scala-tools</groupId> <artifactId>maven-scala-plugin</artifactId>
<executions> <execution> <goals> <goal>compile</goal> <goal>testCompile</goal>
</goals> </execution> </executions> <configuration> <scalaVersion>${scala.version}</scalaVersion>
<args> <arg>-target:jvm-1.5</arg> </args> </configuration> </plugin -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<configuration>
<downloadSources>true</downloadSources>
<buildcommands>
<buildcommand>ch.epfl.lamp.sdt.core.scalabuilder</buildcommand>
</buildcommands>
<additionalProjectnatures>
<projectnature>ch.epfl.lamp.sdt.core.scalanature</projectnature>
</additionalProjectnatures>
<classpathContainers>
<classpathContainer>org.eclipse.jdt.launching.JRE_CONTAINER</classpathContainer>
<classpathContainer>ch.epfl.lamp.sdt.launching.SCALA_CONTAINER</classpathContainer>
</classpathContainers>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.3</version>
<configuration>
<transformers>
<transformer
implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>reference.conf</resource>
</transformer>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<manifestEntries>
<Main-Class>com.sag.main.ScalaRunner</Main-Class>
</manifestEntries>
</transformer>
</transformers>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<mainClass>com.sag.main.ScalaRunner</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
</plugins>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<configuration>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
</plugin>
</plugins>
</reporting>
</project>
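With this setup, mvn package should produce a shaded jar whose manifest Main-Class is com.sag.main.ScalaRunner, so (assuming the default artifact name) it can be started directly with java -jar target/sag-scala-1.0.jar server instead of going through the scala launcher.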