
Exception in thread "main" java.lang.NoSuchMethodError: scala.collection.immutable.HashSet$

I imported Spark code to run on Eclipse and I am getting build errors. It works fine on the terminal.


/* SampleApp.scala: This application simply counts the number of lines that contain "bash" */

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf

object SimpleApp {
    def main(args: Array[String]) {

        val txtFile = "file:///home/edureka/Desktop/readme.txt"
        val conf = new SparkConf().setMaster("local[2]").setAppName("Sample Application")
        val sc = new SparkContext(conf)
        val txtFileLines = sc.textFile(txtFile, 2).cache()
        val numAs = txtFileLines.filter(line => line.contains("bash")).count()
        println("Lines with bash: %s".format(numAs))


    }
}

Header 3 "

SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/edureka/.ivy2/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/edureka/spark-1.1.1/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop2.2.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
15/08/16 17:00:16 WARN util.Utils: Your hostname, localhost.localdomain resolves to a loopback address: 127.0.0.1; using 192.168.211.130 instead (on interface eth2)
15/08/16 17:00:16 WARN util.Utils: Set SPARK_LOCAL_IP if you need to bind to another address
15/08/16 17:00:16 INFO spark.SecurityManager: Changing view acls to: edureka
15/08/16 17:00:16 INFO spark.SecurityManager: Changing modify acls to: edureka
15/08/16 17:00:16 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(edureka); users with modify permissions: Set(edureka)
Exception in thread "main" java.lang.NoSuchMethodError: scala.collection.immutable.HashSet$.empty()Lscala/collection/immutable/HashSet;
    at akka.actor.ActorCell$.<init>(ActorCell.scala:305)
    at akka.actor.ActorCell$.<clinit>(ActorCell.scala)
    at akka.actor.RootActorPath.$div(ActorPath.scala:152)
    at akka.actor.LocalActorRefProvider.<init>(ActorRefProvider.scala:465)
    at akka.remote.RemoteActorRefProvider.<init>(RemoteActorRefProvider.scala:124)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
    at scala.util.Try$.apply(Try.scala:191)
    at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
    at scala.util.Success.flatMap(Try.scala:230)
    at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
    at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:550)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
    at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
    at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1504)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:166)
    at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1495)
    at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:153)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:204)
    at SimpleApp$.main(SampleApp.scala:14)
    at SimpleApp.main(SampleApp.scala)

Be careful, this kind of problem happens quite often with Spark. If you don't want other surprises, you can build Spark yourself against the right versions of the dependencies you may be using (Guava, log4j, Scala, Jackson). Also, consider using the spark.driver.userClassPathFirst and spark.executor.userClassPathFirst properties in order to make your classpath take priority over Spark's bundled dependencies. Personally, it only worked for me when passing them as parameters to spark-submit; it does not work when setting them in SparkConf (which makes sense). See the sketch below.
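A minimal sketch of passing those properties on the command line; the jar path and main class are placeholders for this question's project, and these properties may not be available on every Spark version:

# hypothetical invocation: adjust the jar path and class name to your project
spark-submit \
  --class SimpleApp \
  --conf spark.driver.userClassPathFirst=true \
  --conf spark.executor.userClassPathFirst=true \
  target/scala-2.10/sample-app_2.10-1.0.jar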

Even with these properties set to true, you may still have problems because Spark uses a separate classloader, which can lead to issues even if your dependencies have the same version number. In that case, only building Spark manually will let you fix it (to my knowledge).
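As a rough sketch of what such a manual build could look like for a Hadoop 2.2 setup (based on the Spark 1.x Maven build instructions; the exact profiles and versions depend on your environment):

# build Spark from source against a specific Hadoop version (adjust to your setup)
mvn -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 -DskipTests clean package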

I actually did try installing Spark with all its dependencies and running the code, and it did work. The main point was to set up the directory structure correctly: create a project, create the src/main/scala structure inside it, and put the actual program file (code.scala) there. The .sbt build file should sit in the project's root folder (see the sketch below). Thanks @Dici
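A minimal build.sbt for that layout might look like the following. The key point is that scalaVersion has to match the Scala version your Spark assembly was built against (scala-2.10 here, per the assembly jar in the log), otherwise you get exactly the NoSuchMethodError above. The name and version values are placeholders:

// build.sbt, placed in the project root next to src/
name := "sample-app"   // placeholder project name

version := "1.0"

// must match the Scala version of the Spark build (the log shows a scala-2.10 assembly)
scalaVersion := "2.10.4"

// Spark core matching the installed Spark 1.1.1; "provided" keeps it out of the packaged jar
libraryDependencies += "org.apache.spark" %% "spark-core" % "1.1.1" % "provided"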

