
Exception in thread "main" java.lang.NoSuchMethodError: scala.collection.immutable.HashSet$

I imported Spark code to run from Eclipse and I get a build error, although the same code works fine from the terminal.

The code:

/* SampleApp.scala: this application simply counts the number of lines that contain "bash" */

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf

object SimpleApp {
    def main(args: Array[String]) {

        val txtFile = "file:///home/edureka/Desktop/readme.txt"
        val conf = new SparkConf().setMaster("local[2]").setAppName("Sample Application")
        val sc = new SparkContext(conf)
        val txtFileLines = sc.textFile(txtFile, 2).cache()
        val numAs = txtFileLines.filter(line => line.contains("bash")).count()
        println("Lines with bash: %s".format(numAs))
    }
}

The error:

SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/edureka/.ivy2/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/edureka/spark-1.1.1/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop2.2.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
15/08/16 17:00:16 WARN util.Utils: Your hostname, localhost.localdomain resolves to a loopback address: 127.0.0.1; using 192.168.211.130 instead (on interface eth2)
15/08/16 17:00:16 WARN util.Utils: Set SPARK_LOCAL_IP if you need to bind to another address
15/08/16 17:00:16 INFO spark.SecurityManager: Changing view acls to: edureka
15/08/16 17:00:16 INFO spark.SecurityManager: Changing modify acls to: edureka
15/08/16 17:00:16 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(edureka); users with modify permissions: Set(edureka)
Exception in thread "main" java.lang.NoSuchMethodError: scala.collection.immutable.HashSet$.empty()Lscala/collection/immutable/HashSet;
    at akka.actor.ActorCell$.<init>(ActorCell.scala:305)
    at akka.actor.ActorCell$.<clinit>(ActorCell.scala)
    at akka.actor.RootActorPath.$div(ActorPath.scala:152)
    at akka.actor.LocalActorRefProvider.<init>(ActorRefProvider.scala:465)
    at akka.remote.RemoteActorRefProvider.<init>(RemoteActorRefProvider.scala:124)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
    at scala.util.Try$.apply(Try.scala:191)
    at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
    at scala.util.Success.flatMap(Try.scala:230)
    at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
    at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:550)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
    at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
    at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1504)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:166)
    at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1495)
    at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:153)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:204)
    at SimpleApp$.main(SampleApp.scala:14)
    at SimpleApp.main(SampleApp.scala)

Note that this kind of problem happens quite often with Spark. If you want to avoid further surprises, you can build Spark yourself against the exact versions of the dependencies you plan to use (Guava, log4j, Scala, Jackson), and also consider the spark.driver.userClassPathFirst and spark.executor.userClassPathFirst properties, which give your own classpath priority over the dependencies bundled with Spark. Personally, they only worked for me when passed as arguments to spark-submit, never when set in the SparkConf (which makes sense: by the time that code runs, the driver JVM has already started with its classpath).
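For illustration, here is a minimal sketch of the two ways of setting those properties. The two configuration keys are the real Spark ones named above; the jar path, object name and app name are placeholders:

// Variant 1 - pass the properties on the spark-submit command line
// (the variant that worked above):
//
//   spark-submit \
//     --conf spark.driver.userClassPathFirst=true \
//     --conf spark.executor.userClassPathFirst=true \
//     --class SimpleApp target/scala-2.10/sample-app_2.10-1.0.jar
//
// Variant 2 - the same keys set programmatically in SparkConf (reported
// above NOT to work for the driver: its JVM and classpath already exist
// by the time this code runs):
import org.apache.spark.{SparkConf, SparkContext}

object UserClassPathFirstDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("Sample Application")
      .set("spark.driver.userClassPathFirst", "true")   // too late for the already-running driver
      .set("spark.executor.userClassPathFirst", "true") // executors start later, so this may still apply
    val sc = new SparkContext(conf)
    // ... job code ...
    sc.stop()
  }
}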

Even with these properties set to true you can still run into problems, because Spark uses a separate classloader, and that can cause issues even when your dependencies carry the same version numbers. In that case, as far as I know, only building Spark manually will fix it.

I actually did install Spark with all of its dependencies and got the code to run. The point is to set up the directory structure properly: create a project, create the src/main/scala file structure inside it, and put the actual program file (code.scala) there. The .sbt dependency file should sit in the project root folder. Thanks @Dici.
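For reference, a minimal sketch of the layout and build definition described above. The project and file names are placeholders; the Scala and Spark versions are taken from the assembly jar visible in the error log (spark-assembly-1.1.1 built for Scala 2.10), since a mismatch between the application's Scala version and the one Spark was built with is the classic cause of exactly this HashSet$.empty() NoSuchMethodError:

// Layout:
//
//   sample-project/
//   |-- build.sbt               <- the .sbt dependency file, in the project root
//   `-- src/
//       `-- main/
//           `-- scala/
//               `-- code.scala  <- the actual program
//
// build.sbt:
name := "Sample Application"

version := "1.0"

// Must match the Scala version the Spark assembly was built with
// (scala-2.10 in the log above); compiling against Scala 2.11 and
// running on a 2.10 assembly throws NoSuchMethodError on HashSet$.empty().
scalaVersion := "2.10.4"

libraryDependencies += "org.apache.spark" %% "spark-core" % "1.1.1"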
