Executable Jar created by Gradle bootRepackage missing jars - eclipse

What I'd like to do:
Creating an executable Jar using gradle, and eclipse as IDE.
Jar should include all libs required to run, and could be used inside a docker container.
Issues so far:
org.springframework.boot is not included in the Jar.
Code:
src/main/java/SampleController.java
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
#Controller
#EnableAutoConfiguration
public class SampleController {
#RequestMapping("/")
#ResponseBody
String home() {
return "Hello World!";
}
public static void main(String[] args) throws Exception {
SpringApplication.run(SampleController.class, args);
}
}
build.gradle
buildscript {
    repositories {
        jcenter()
        mavenCentral()
    }
    dependencies {
        // Spring Boot 1.5.x Gradle plugin (provides the bootRepackage task).
        classpath("org.springframework.boot:spring-boot-gradle-plugin:1.5.10.RELEASE")
    }
}

apply plugin: 'java-library'
apply plugin: 'eclipse'
apply plugin: 'org.springframework.boot'

// In this section you declare where to find the dependencies of your project
repositories {
    // Use jcenter for resolving your dependencies.
    // You can declare any Maven/Ivy/file repository here.
    jcenter()
    mavenCentral()
}

dependencies {
    // This dependency is exported to consumers, that is to say found on their compile classpath.
    api 'org.apache.commons:commons-math3:3.6.1'

    // This dependency is used internally, and not exposed to consumers on their own compile classpath.
    implementation 'com.google.guava:guava:21.0'

    api 'org.springframework.boot:spring-boot-starter-web'
    api 'org.springframework.boot:spring-boot-starter'

    // FIX: Spring Boot 1.5's bootRepackage builds the fat jar from the 'runtime'
    // configuration, which the java-library plugin's 'api'/'implementation'
    // configurations do not feed. Declaring the starters on 'runtime' as well
    // makes bootRepackage bundle them (and their transitive dependencies) into
    // the executable jar, fixing the NoClassDefFoundError seen at java -jar time.
    runtime 'org.springframework.boot:spring-boot-starter'
    runtime 'org.springframework.boot:spring-boot-starter-web'

    // Use JUnit test framework
    testImplementation 'junit:junit:4.12'
}

springBoot {
    // Fully-qualified main class stamped into the jar manifest (default package here).
    mainClass = "SampleController"
}

sourceCompatibility = 1.8
targetCompatibility = 1.8
Command used:
In workspace/project/build/libs
java -jar project.jar
Error:
java : Exception in thread "main" java.lang.reflect.InvocationTargetException
At line:1 char:1
+ java -jar RSSFeedAggregator.jar
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ CategoryInfo : NotSpecified: (Exception in th...TargetException:String) [], RemoteException
+ FullyQualifiedErrorId : NativeCommandError
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.boot.loader.MainMethodRunner.run(MainMethodRunner.java:48)
at org.springframework.boot.loader.Launcher.launch(Launcher.java:87)
at org.springframework.boot.loader.Launcher.launch(Launcher.java:50)
at org.springframework.boot.loader.JarLauncher.main(JarLauncher.java:51)
Caused by: java.lang.NoClassDefFoundError: org/springframework/boot/SpringApplication
at SampleController.main(SampleController.java:18)
... 8 more
Caused by: java.lang.ClassNotFoundException: org.springframework.boot.SpringApplication
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at org.springframework.boot.loader.LaunchedURLClassLoader.loadClass(LaunchedURLClassLoader.java:94)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 9 more
I'm kind of a newbie when it comes to this. Does anyone have any ideas on how to resolve my problem?

I guess I found the solution.
From my understanding, when running directly in eclipse, gradle api / implementation automatically add the jars at runtime.
But when creating the fat jars with :bootRepackage, runtime libs need to be explicitly added. (The program ran well when launched directly from Eclipse.)
runtime 'org.springframework.boot:spring-boot-starter'
runtime 'org.springframework.boot:spring-boot-starter-web'
This resolved my problem.
I won't close the topic yet if anyone has something to add on this issue.

Related

Getting class not found error with Gradle jar

I'm new to Gradle and built a Jar build using Gradle. When I tried executing that jar using spark-submit, I'm getting a Class Not Found error. I'm able to execute the same application in IntelliJ IDEA, but getting error when executing as Independent JAR.
Below are the contents of build.gradle:
/*
 * This file was generated by the Gradle 'init' task.
 */
apply plugin: 'scala'
apply plugin: 'idea'
apply plugin: 'org.sonarqube'
// Applying Sonar Qube details for gradle
apply from: "${rootProject.rootDir}/gradle/sonar.gradle"
// Repositories consulted when resolving the dependencies declared below.
repositories {
mavenLocal()
mavenCentral()
maven {
url = uri('https://repo1.maven.org/maven2')
}
}
dependencies {
implementation 'com.google.code.gson:gson:2.8.2'
implementation 'io.netty:netty-all:4.1.42.Final'
// NOTE(review): this is the Shadow Gradle *plugin* declared as an ordinary
// project dependency; it normally belongs on the buildscript classpath or in
// a plugins {} block — verify it is intentional here.
implementation 'com.github.jengelman.gradle.plugins:shadow:4.0.1'
testImplementation "org.scalatest:scalatest_2.11:$scalaTestVersion"
runtime "com.apple.jvm.commons:commons-metrics:0.13.1"
}
// Build-script classpath (Gradle evaluates buildscript {} blocks before the
// rest of the script) — supplies the SonarQube plugin applied above.
buildscript {
dependencies {
classpath 'org.sonarsource.scanner.gradle:sonarqube-gradle-plugin:2.6'
}
}
// Scala source layout for main and test source sets.
sourceSets {
main {
scala {
srcDirs = ['src/main/scala']
}
}
test {
scala.srcDirs = ['src/test/scala']
}
}
// SonarQube analysis properties for this project.
sonarqube {
properties {
property "sonar.projectName", "App-name"
property "sonar.projectKey", "App-name"
property "sonar.sourceEncoding", "UTF-8"
property "sonar.tests", "src/test"
property "sonar.sources", "src/main"
property "sonar.scala.coverage.reportPaths", "$buildDir/reports/scoverage/scoverage.xml"
property "sonar.coverage.jacoco.xmlReportPaths", "$buildDir/jacoco/jacoco.xml"
property "sonar.test.exclusions", "src/test/**"
}
}
// Custom (apparently unused here) configuration named 'jars'.
configurations {
jars
}
// Force Jacksom-module version to handle below error.
//com.fasterxml.jackson.databind.JsonMappingException: Scala module 2.8.8 requires Jackson Databind version >= 2.8.0 and < 2.9.0
configurations.all {
resolutionStrategy {
force "com.fasterxml.jackson.module:jackson-module-scala_2.11:2.9.5"
}
}
// Builds a self-contained ("fat") jar: the project's compiled classes plus
// every runtime dependency, with a Main-Class entry so it can be launched by
// java -jar / spark-submit.
jar {
    manifest {
        attributes(
            'Class-Path': configurations.compile.files.collect {"$it.name"}.join(' '),
            'Main-Class': 'com.github.MainClass'
        )
    }
    // FIX: the original wrapped everything in one `from { ... }` closure. A
    // Groovy closure yields only its LAST expression, so the compiled classes
    // (sourceSets.main.output) and the 'compile' collection were evaluated and
    // then discarded — the jar was built without the project's own classes,
    // which is exactly the ClassNotFoundException: com.github.MainClass seen at
    // spark-submit time. Declare each content source separately instead.
    from sourceSets.main.output
    from {
        configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
    }
    // Strip dependency signature files: stale signatures from merged jars make
    // the combined jar fail signature verification at load time.
    exclude "META-INF/*.SF"
    exclude "META-INF/*.DSA"
    exclude "META-INF/*.RSA"
    zip64 true
    archiveClassifier = 'jar-with-dependencies'
}
/*task fatJar(type: Jar) {
//manifest.from jar.manifest
manifest {
attributes 'Main-Class': 'com.github.MainClass'
}
zip64 true
archiveClassifier = 'jar-with-dependencies'
from files(sourceSets.main.output.classesDirs)
from {
configurations.runtime.asFileTree.files.collect {zipTree(it) }
configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
} {
exclude "META-INF/*.SF"
exclude "META-INF/*.DSA"
exclude "META-INF/*.RSA"
}
with jar
}
artifacts {
archives fatJar
}*/
I tried building the Jar with both ./gradlew clean build and ./gradlew clean fatJar
and executing below command:
spark-submit --master local --deploy-mode client --class com.github.MainClass /local_path/.out/libs/App-name-1.0-SNAPSHOT-jar-with-dependencies.jar
Error:
java.lang.ClassNotFoundException: com.github.MainClass
at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.spark.util.Utils$.classForName(Utils.scala:238)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:810)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
2020-12-02 13:02:49 INFO ShutdownHookManager:54 - Shutdown hook called
2020-12-02 13:02:49 INFO ShutdownHookManager:54 - Deleting directory /private/var/folders/sq/npjk1mkn7lgfm57mf9g_3rrh0000gn/T/spark-8ab912b5-23ef-4182-9c70-947f2cd2831a

Product Flavor, Duplicate class, Error while merging dex archives

I only get this error after I added the Gradle-AspectJ library (i.e. apply plugin: 'com.archinamon.aspectj' ).
Without flavors, in a new project there is no such issue, everything's just fine.
If you have encountered this problem and have the answer, please let me know. I have spent way too much time on this, tried everything I could think of and found.
Root level build.gradle:
// Root-level build script: plugin classpath and repositories shared by all
// modules. Lines shown as "..." were elided in the original post.
buildscript {
ext.kotlin_version = "1.3.72"
ext.detekt_version = "1.9.1"
repositories {
google()
jcenter()
}
dependencies {
classpath "com.android.tools.build:gradle:4.0.1"
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
...
// AspectJ weaving plugin — the post reports the dex-merge error only appears
// after this plugin is applied.
classpath "com.archinamon:android-gradle-aspectj:4.2.1"
}
}
...
allprojects {
repositories {
google()
jcenter()
...
// Local .aar/.jar artifacts looked up from the 'lib' directory.
flatDir {
dirs 'lib'
}
}
}
...
app/build.gradle:
// Application module build script. Lines shown as "..." were elided in the
// original post.
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-android-extensions'
apply plugin: 'kotlin-kapt'
...
// NOTE(review): applying this AspectJ plugin is what triggers the duplicate
// android.support.v4.* classes in project_dex_archive vs
// mixed_scope_dex_archive (per the error below) — suspected interaction with
// the weaved jars; confirm against the plugin's issue tracker.
apply plugin: 'com.archinamon.aspectj'
...
android {
compileSdkVersion 29
buildToolsVersion "29.0.3"
defaultConfig {
applicationId ""
minSdkVersion 24
targetSdkVersion 29
...
}
// AspectJ weaving configuration: weave aspects from SecLib only, skip the
// listed jars, and emit weave info.
aspectj {
includeAspectsFromJar "SecLib3.3.0"
includeAllJars false
ajcArgs << "-Xlint:ignore"
excludeJar "mpulse", "aspectj"
weaveInfo true
compileTests = false
}
...
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
// Two flavor dimensions: which backend ("server") and whether the network
// layer is mocked ("datasource").
flavorDimensions "server", "datasource"
productFlavors {
mock {
// Mocked network layer
dimension "datasource"
}
live {
// Real network layer, different URLs based on "server" flavor dimension
dimension "datasource"
}
fat {
dimension "server"
...
}
...
}
/* Removing unused flavor dimension combinations:
- demo and prod mockDebug
*/
variantFilter { variant ->
if (variant.getFlavors().get(0).name != 'fat'
&& variant.getFlavors().get(1).name == 'mock') {
variant.setIgnore(true)
}
}
}
dependencies {
implementation fileTree(include: ['*.jar', '*.aar'], dir: 'libs')
...
// Kotlin
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
// Kotlin KTX
implementation "androidx.core:core-ktx:1.3.0"
// androidx
implementation "androidx.appcompat:appcompat:1.1.0"
implementation "androidx.constraintlayout:constraintlayout:2.0.0-beta7"
implementation "androidx.cardview:cardview:1.0.0"
// Material Components
implementation "com.google.android.material:material:1.2.0-beta01"
...
}
The error I'm getting:
AGPBI: {"kind":"error","text":"Type android.support.v4.app.INotificationSideChannel is defined multiple times: /home/...../app/build/intermediates/project_dex_archive/fatMockDebug/out/android/support/v4/app/INotificationSideChannel.dex, ...../app/build/intermediates/mixed_scope_dex_archive/fatMockDebug/out/android/support/v4/app/INotificationSideChannel.dex","sources":[{"file":"...../app/build/intermediates/project_dex_archive/fatMockDebug/out/android/support/v4/app/INotificationSideChannel.dex"}],"tool":"D8"}
com.android.builder.dexing.DexArchiveMergerException: Error while merging dex archives:
Learn how to resolve the issue at https://developer.android.com/studio/build/dependencies#duplicate_classes.
Type android.support.v4.app.INotificationSideChannel is defined multiple times: ...../app/build/intermediates/project_dex_archive/fatMockDebug/out/android/support/v4/app/INotificationSideChannel.dex, ...../app/build/intermediates/mixed_scope_dex_archive/fatMockDebug/out/android/support/v4/app/INotificationSideChannel.dex
at com.android.builder.dexing.D8DexArchiveMerger.getExceptionToRethrow(D8DexArchiveMerger.java:132)
at com.android.builder.dexing.D8DexArchiveMerger.mergeDexArchives(D8DexArchiveMerger.java:119)
at com.android.build.gradle.internal.transforms.DexMergerTransformCallable.call(DexMergerTransformCallable.java:102)
at com.android.build.gradle.internal.tasks.DexMergingTaskRunnable.run(DexMergingTask.kt:441)
at com.android.build.gradle.internal.tasks.Workers$ActionFacade.run(Workers.kt:242)
at org.gradle.workers.internal.AdapterWorkAction.execute(AdapterWorkAction.java:50)
at org.gradle.workers.internal.DefaultWorkerServer.execute(DefaultWorkerServer.java:50)
at org.gradle.workers.internal.NoIsolationWorkerFactory$1$1.create(NoIsolationWorkerFactory.java:63)
at org.gradle.workers.internal.NoIsolationWorkerFactory$1$1.create(NoIsolationWorkerFactory.java:59)
at org.gradle.internal.classloader.ClassLoaderUtils.executeInClassloader(ClassLoaderUtils.java:98)
at org.gradle.workers.internal.NoIsolationWorkerFactory$1.lambda$execute$0(NoIsolationWorkerFactory.java:59)
at org.gradle.workers.internal.AbstractWorker$1.call(AbstractWorker.java:44)
at org.gradle.workers.internal.AbstractWorker$1.call(AbstractWorker.java:41)
at org.gradle.internal.operations.DefaultBuildOperationExecutor$CallableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:416)
at org.gradle.internal.operations.DefaultBuildOperationExecutor$CallableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:406)
at org.gradle.internal.operations.DefaultBuildOperationExecutor$1.execute(DefaultBuildOperationExecutor.java:165)
at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:250)
at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:158)
at org.gradle.internal.operations.DefaultBuildOperationExecutor.call(DefaultBuildOperationExecutor.java:102)
at org.gradle.internal.operations.DelegatingBuildOperationExecutor.call(DelegatingBuildOperationExecutor.java:36)
at org.gradle.workers.internal.AbstractWorker.executeWrappedInBuildOperation(AbstractWorker.java:41)
at org.gradle.workers.internal.NoIsolationWorkerFactory$1.execute(NoIsolationWorkerFactory.java:53)
at org.gradle.workers.internal.DefaultWorkerExecutor.lambda$submitWork$2(DefaultWorkerExecutor.java:200)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at org.gradle.internal.work.DefaultConditionalExecutionQueue$ExecutionRunner.runExecution(DefaultConditionalExecutionQueue.java:215)
at org.gradle.internal.work.DefaultConditionalExecutionQueue$ExecutionRunner.runBatch(DefaultConditionalExecutionQueue.java:164)
at org.gradle.internal.work.DefaultConditionalExecutionQueue$ExecutionRunner.run(DefaultConditionalExecutionQueue.java:131)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:64)
at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:48)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:56)
at java.lang.Thread.run(Thread.java:748)
Caused by: com.android.tools.r8.CompilationFailedException: Compilation failed to complete
at com.android.tools.r8.utils.W.a(:87)
at com.android.tools.r8.D8.run(:11)
at com.android.builder.dexing.D8DexArchiveMerger.mergeDexArchives(D8DexArchiveMerger.java:117)
Caused by: com.android.tools.r8.CompilationFailedException: Compilation failed to complete
I managed to resolve the issue.
The main source of the problem was duplication in dependencies (firebase-core, firebase-crashlytics, firebase-perf). Maybe aspectj has something to do with them?
After removing these three Firebase dependencies the problem seemed to go away.
/app/build/intermediates/mixed_scope_dex_archive/fatMockDebug/out/android/support/v4/app/INotificationSideChannel.dex
Just delete the folder(app) containing the file INotificationSideChannel.dex
The app folder is highlighted with bold letters in the above path
And run the project in the android studio. Now this error does not come.

Why the error is displayed as java.lang.ClassNotFoundException: for the following groovy code?

class First {

    /** Explicit no-arg constructor; delegates straight to Object's constructor. */
    public First() {
        super()
    }

    /**
     * JVM entry point. FIX: the parameter must be String[] — the standard
     * entry-point signature is main(String[]); a main(String) method is not
     * what launchers that look up main(String[]) will find.
     */
    static void main(String[] args)
    {
        print('Hii');
    }
}
After running the code in eclipse using Groovy Console option the following exception is being shown.
java.lang.ClassNotFoundException: groovy.ui.Console
at org.codehaus.groovy.tools.RootLoader.findClass(RootLoader.java:179)
at java.lang.ClassLoader.loadClass(Unknown Source)
at org.codehaus.groovy.tools.RootLoader.loadClass(RootLoader.java:151)
at java.lang.ClassLoader.loadClass(Unknown Source)
Nothing wrong with the code. Groovy Console dependencies are not included by default with Groovy 2.5+. You can use Groovy 2.4, which bundles groovy-all, or run the class as a Java Application, since it has a main method.

Unable to Send Spark Data Frame to Kafka (java.lang.ClassNotFoundException: Failed to find data source: kafka.)

I am facing issue while pushing data to Kafka with Spark data frame.
Let me explain my scenario in detail with sample example. I want to load the data to spark and send the spark output to kafka. I am using Gradle 3.5 and Spark 2.3.1 & Kafka 1.0.1
Here is build.gradle
buildscript {
ext {
springBootVersion = '1.5.15.RELEASE'
}
repositories {
mavenCentral()
}
dependencies {
classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}")
}
}
apply plugin: 'scala'
apply plugin: 'java'
apply plugin: 'eclipse'
apply plugin: 'org.springframework.boot'
group = 'com.sample'
version = '0.0.1-SNAPSHOT'
sourceCompatibility = 1.8
repositories {
mavenCentral()
}
dependencies {
compile('org.springframework.boot:spring-boot-starter')
compile ('org.apache.spark:spark-core_2.11:2.3.1')
compile ('org.apache.spark:spark-sql_2.11:2.3.1')
compile ('org.apache.spark:spark-streaming-kafka-0-10_2.11:2.3.1')
// NOTE(review): this artifact provides the "kafka" data source, but a plain
// `jar` build does not bundle compile-scope dependencies — so at spark-submit
// time the class kafka.DefaultSource is missing (the posted error). Either
// pass it via `--packages` or build a shadow/fat jar, as the answer below does.
compile ('org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.1')
testCompile('org.springframework.boot:spring-boot-starter-test')
}
And here is my code:
package com.sample
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions._
object SparkConnection {

  /** Row layout of the input CSV file: empid,empname,empsal. */
  case class emp(empid: Integer, empname: String, empsal: Float)

  def main(args: Array[String]) {
    // FIX: in the original post the app-name string literal was broken across
    // two physical lines ("Spark / Connection"), a paste artifact that cannot
    // compile; rejoined into a single literal here.
    val sparkConf = new SparkConf().setAppName("Spark Connection").setMaster("local[*]")
    val sc = new SparkContext(sparkConf)

    // Read the CSV file and map each comma-separated line to a typed record.
    val dataRdd = sc.textFile("/home/sample/data/sample.txt")
    val mapRdd = dataRdd.map(row => row.split(","))
    val empRdd = mapRdd.map(row => emp(row(0).toInt, row(1), row(2).toFloat))

    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._
    val empDF = empRdd.toDF()

    // Serialise every row to a JSON "value" column and publish it to Kafka.
    empDF
      .select(to_json(struct(empDF.columns.map(column): _*)).alias("value"))
      .write.format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092")
      .option("topic", "my-kafka-topic")
      .save()
  }
}
Please ignore spring boot framework API in build.gradle.
After building my package using Gradle, I am able to see all the dependent classes mentioned in the .gradle file.
But when I run the code with spark-submit like
spark-submit --class com.sample.SparkConnection spark_kafka_integration.jar
I am getting the following error
Exception in thread "main" java.lang.ClassNotFoundException: Failed to find data source: kafka. Please find packages at http://spark.apache.org/third-party-projects.html
at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:635)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:241)
at com.iniste.SparkConnection$.main(SparkConnection.scala:29)
at com.iniste.SparkConnection.main(SparkConnection.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:894)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.ClassNotFoundException: kafka.DefaultSource
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$23$$anonfun$apply$15.apply(DataSource.scala:618)
at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$23$$anonfun$apply$15.apply(DataSource.scala:618)
at scala.util.Try$.apply(Try.scala:192)
at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$23.apply(DataSource.scala:618)
at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$23.apply(DataSource.scala:618)
at scala.util.Try.orElse(Try.scala:84)
at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:618)
... 13 more
2018-09-05 17:41:17 INFO SparkContext:54 - Invoking stop() from shutdown hook
2018-09-05 17:41:17 INFO AbstractConnector:318 - Stopped Spark#51684e4a{HTTP/1.1,[http/1.1]}{0.0.0.0:4040}
2018-09-05 17:41:17 INFO MapOutputTrackerMasterEndpoint:54 - MapOutputTrackerMasterEndpoint stopped!
2018-09-05 17:41:17 INFO MemoryStore:54 - MemoryStore cleared
2018-09-05 17:41:17 INFO BlockManager:54 - BlockManager stopped
2018-09-05 17:41:17 INFO BlockManagerMaster:54 - BlockManagerMaster stopped
2018-09-05 17:41:17 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint:54 - OutputCommitCoordinator stopped!
2018-09-05 17:41:17 INFO SparkContext:54 - Successfully stopped SparkContext
2018-09-05 17:41:17 INFO ShutdownHookManager:54 - Shutdown hook called
2018-09-05 17:41:17 INFO ShutdownHookManager:54 - Deleting directory /tmp/spark-bd4cb4ef-3883-4c26-a93f-f355b13ef306
2018-09-05 17:41:17 INFO ShutdownHookManager:54 - Deleting directory /tmp/spark-156dfdbd-cff4-4c70-943f-35ef403a01ed
Please help me to get out of this error. Some of the blogs suggested using the --packages option with spark-submit, but a proxy limitation on my side prevents downloading the mentioned packages. What I cannot understand is why spark-submit is unable to find the jars that are already available. Please correct me where I am going wrong.
As with any Spark applications, spark-submit is used to launch your application. spark-sql-kafka-0-10_2.11 and its dependencies can be directly added to spark-submit using --packages, such as below in your case
spark-submit --packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.1 com.sample.SparkConnection spark_kafka_integration.jar
This can be found here
However, as per cricket_007's suggestion, I have added the Shadow jar plugin to your build.gradle.
So the new one may look similar to this.
// Answer's build script: same project, with the Shadow plugin added so that
// `:shadowJar` produces a fat jar containing the kafka data source classes.
buildscript {
ext {
springBootVersion = '1.5.15.RELEASE'
}
repositories {
mavenCentral()
}
dependencies {
classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}")
}
}
plugins {
id "com.github.johnrengelman.shadow" version "2.0.4"
}
apply plugin: 'scala'
apply plugin: 'java'
apply plugin: 'eclipse'
apply plugin: 'org.springframework.boot'
apply plugin: "com.github.johnrengelman.shadow"
group = 'com.sample'
version = '0.0.1-SNAPSHOT'
sourceCompatibility = 1.8
repositories {
mavenCentral()
}
dependencies {
compile('org.springframework.boot:spring-boot-starter')
compile ('org.apache.spark:spark-core_2.11:2.3.1')
compile ('org.apache.spark:spark-sql_2.11:2.3.1')
compile ('org.apache.spark:spark-streaming-kafka-0-10_2.11:2.3.1')
compile ('org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.1')
// Explicit Scala runtime so it ends up inside the fat jar too.
compile 'org.scala-lang:scala-library:2.11.12'
// https://mvnrepository.com/artifact/org.apache.kafka/kafka
//compile group: 'org.apache.kafka', name: 'kafka_2.10', version: '0.8.0'
testCompile('org.springframework.boot:spring-boot-starter-test')
}
// Fat-jar naming: drop classifier/version so the output is
// spark_kafka_integration.jar, matching the spark-submit command above.
shadowJar{
baseName = "spark_kafka_integration"
zip64 true
classifier = null
version = null
}
So to create your jar the command would be just :shadowJar from your gradle.

Scala shadow jar not able to find Main class

Does anyone know what the problem is? It seems to be something with the Scala version. I am trying to create a shadow jar using Gradle and Scala, and I keep getting this error. My main class is exactly at this location: com.pro.TempMain. I have tried different versions, but nothing seems to work. However, if I use a Java class it works fine.
Error: A JNI error has occurred, please check your installation and try again
Exception in thread "main" java.lang.NoClassDefFoundError: scala/Function0
at java.lang.Class.getDeclaredMethods0(Native Method)
at java.lang.Class.privateGetDeclaredMethods(Class.java:2701)
at java.lang.Class.privateGetMethodRecursive(Class.java:3048)
at java.lang.Class.getMethod0(Class.java:3018)
at java.lang.Class.getMethod(Class.java:1784)
at sun.launcher.LauncherHelper.validateMainClass(LauncherHelper.java:544)
at sun.launcher.LauncherHelper.checkAndLoadMain(LauncherHelper.java:526)
Caused by: java.lang.ClassNotFoundException: scala.Function0
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 7 more
This is my gradle file configuation:
buildscript {
repositories {
mavenCentral()
maven {
url "https://plugins.gradle.org/m2/"
}
}
dependencies {
classpath "org.sonarsource.scanner.gradle:sonarqube-gradle-plugin:2.0.1"
}
}
plugins {
id 'java'
// NOTE(review): Shadow 1.2.3 is very old; verify compatibility with the
// Gradle version in use before relying on its output.
id 'com.github.johnrengelman.shadow' version '1.2.3'
id 'com.github.maiflai.scalatest' version '0.12'
}
group 'com.pro'
version '1.0-SNAPSHOT'
apply plugin: 'java'
apply plugin: 'scala'
apply plugin: 'com.github.johnrengelman.shadow'
apply plugin: 'idea'
sourceCompatibility = 1.8
targetCompatibility = 1.8
repositories {
mavenCentral()
}
// The plain `jar` task gets the Main-Class entry (shadowJar inherits it), but
// the thin jar itself does NOT contain scala-library. NOTE(review): the
// NoClassDefFoundError: scala/Function0 above is what running such a jar
// (one without the Scala runtime merged in) produces — confirm which jar was
// actually launched.
jar {
manifest {
attributes 'Main-Class': 'com.pro.TempMain'
}
}
sourceSets {
main {
scala {
srcDirs = ['src/main/scala']
}
}
test {
scala {
srcDirs = ['src/test/scala']
}
}
}
shadowJar {
zip64 true
}
dependencies {
compile 'org.scala-lang:scala-library:2.12.0'
}