
I have a simple Spark program in Scala with the following code, but I get the exception below. All I'm trying to do is run the main method. I have also included the Gradle config. Any help will be appreciated.

Error:-

    Exception in thread "main" java.lang.NoClassDefFoundError: com/fasterxml/jackson/module/scala/DefaultScalaModule$
    at org.apache.spark.SparkContext.withScope(SparkContext.scala:714)
    at org.apache.spark.SparkContext.parallelize(SparkContext.scala:728)...
    Caused by: java.lang.ClassNotFoundException: com.fasterxml.jackson.module.scala.DefaultScalaModule$
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
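
The missing class, com.fasterxml.jackson.module.scala.DefaultScalaModule, lives in the jackson-module-scala artifact that Spark pulls in transitively, and this error typically means the copy on the classpath was built for a different Scala version than the rest of the application. As a quick diagnostic you could pin it explicitly; the artifact suffix and version below are assumptions and should match your Scala version and the Jackson version your Spark artifacts actually resolve (around 2.4.x for Spark 1.6):

    // Hypothetical diagnostic: declare jackson-module-scala for Scala 2.11 explicitly.
    // Spark 1.6.x builds against Jackson 2.4.x, so 2.4.4 is a plausible version here;
    // adjust to whatever your dependency tree actually resolves.
    compile group: 'com.fasterxml.jackson.module', name: 'jackson-module-scala_2.11', version: '2.4.4'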

Main:-

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.graphx.{Edge, Graph}

def main(args: Array[String]) {

    val conf = new SparkConf()
      .setAppName("TempratureRDD")
      .setMaster("local[2]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)
    print("___________________________________________________________________________________________")

    val vertexArray = Array(
      (1L, ("Sensor1", 28)),
      (2L, ("Sensor2", 27)),
      (3L, ("Sensor3", 65)),
      (4L, ("Sensor4", 42)),
      (5L, ("Sensor5", 55)),
      (6L, ("Sensor6", 50))
    )
    val edgeArray = Array(
      Edge(2L, 1L, 7),
      Edge(2L, 4L, 2),
      Edge(3L, 2L, 4),
      Edge(3L, 6L, 3),
      Edge(4L, 1L, 1),
      Edge(5L, 2L, 2),
      Edge(5L, 3L, 8),
      Edge(5L, 6L, 3)
    )

    val vertexRDD: RDD[(Long, (String, Int))] = sc.parallelize(vertexArray)
    val edgeRDD: RDD[Edge[Int]] = sc.parallelize(edgeArray)

    val graph: Graph[(String, Int), Int] = Graph(vertexRDD, edgeRDD)

    for ((id,(name,age)) <- graph.vertices.filter { case (id,(name,age)) => age > 30 }.collect) {
      println(s"$name is $age")
    }

  }

build.gradle:-

dependencies {

    compile fileTree(dir: 'lib', include: ['*.jar'])
    // The production code uses the SLF4J logging API at compile time
    compile 'org.slf4j:slf4j-api:1.7.12'
    compile 'org.scala-lang:scala-library:2.11.8'
    testCompile 'junit:junit:4.12'
    compile 'com.sparkjava:spark-core:2.5'
    // https://mvnrepository.com/artifact/org.apache.spark/spark-streaming_2.11
    compile group: 'org.apache.spark', name: 'spark-streaming_2.11', version: '1.6.0'
    // https://mvnrepository.com/artifact/org.apache.spark/spark-streaming-mqtt_2.10
    compile group: 'org.apache.spark', name: 'spark-streaming-mqtt_2.10', version: '1.6.2'
    // https://mvnrepository.com/artifact/org.eclipse.paho/org.eclipse.paho.client.mqttv3
    compile group: 'org.eclipse.paho', name: 'org.eclipse.paho.client.mqttv3', version: '1.1.0'
    // https://mvnrepository.com/artifact/com.google.code.gson/gson
    compile group: 'com.google.code.gson', name: 'gson', version: '2.7'
    // https://mvnrepository.com/artifact/org.apache.spark/spark-graphx_2.10
    compile group: 'org.apache.spark', name: 'spark-graphx_2.10', version: '2.0.0'
}

There are no other dependencies.

First of all, you are mixing Scala versions in your dependencies: you are compiling with 2.11.8, but spark-graphx_2.10 and spark-streaming-mqtt_2.10 were compiled against Scala 2.10.x. Fix that first... and worry about everything else later on. Commented Sep 19, 2016 at 8:55
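
For illustration, a dependencies block aligned the way this comment suggests might look like the sketch below; the versions are assumptions (the 1.6.x line that most of the build already uses) and should be adjusted to whichever Spark release is actually targeted:

    dependencies {
        compile 'org.scala-lang:scala-library:2.11.8'
        // Every Spark artifact uses the same Scala suffix (_2.11) and the same release line.
        compile group: 'org.apache.spark', name: 'spark-streaming_2.11', version: '1.6.2'
        compile group: 'org.apache.spark', name: 'spark-streaming-mqtt_2.11', version: '1.6.2'
        compile group: 'org.apache.spark', name: 'spark-graphx_2.11', version: '1.6.2'
    }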

1 Answer


I was able to fix this issue by building Spark with Scala 2.11.8 and then including the resulting jars. Thanks @Sarvesh Kumar Singh for pointing it out! See the links below for how-tos.

See Building Spark and Building Apache Spark on your Local Machine
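
If you follow the build-from-source route above, one way to consume the resulting jars is through the lib/ fileTree dependency the build already declares; a minimal sketch, assuming the jars produced by the local Spark build are copied into lib/:

    dependencies {
        // Picks up any jars dropped into lib/, including a Spark assembly
        // built locally against Scala 2.11 (the file name is illustrative).
        compile fileTree(dir: 'lib', include: ['*.jar'])
    }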


1 Comment

Your second link appears to be broken
