Exception in thread "main" java.lang.NoSuchMethodError: scala.collection.immutable.HashSet$

Asked: 2015-08-16 18:41:58

Tags: eclipse scala apache-spark

Imported Spark code into Eclipse to run it there; it produces a build error, although it works fine from the terminal.


/* SampleApp.scala: this application simply counts the number of lines containing "bash" */

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf

object SimpleApp {
    def main(args: Array[String]) {

        val txtFile = "file:///home/edureka/Desktop/readme.txt"
        val conf = new SparkConf().setMaster("local[2]").setAppName("Sample Application")
        val sc = new SparkContext(conf)
        val txtFileLines = sc.textFile(txtFile, 2).cache()
        val numAs = txtFileLines.filter(line => line.contains("bash")).count()
        println("Lines with bash: %s".format(numAs))


    }
}

The error:

SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/edureka/.ivy2/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/edureka/spark-1.1.1/assembly/target/scala-2.10/spark-assembly-1.1.1-hadoop2.2.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
15/08/16 17:00:16 WARN util.Utils: Your hostname, localhost.localdomain resolves to a loopback address: 127.0.0.1; using 192.168.211.130 instead (on interface eth2)
15/08/16 17:00:16 WARN util.Utils: Set SPARK_LOCAL_IP if you need to bind to another address
15/08/16 17:00:16 INFO spark.SecurityManager: Changing view acls to: edureka
15/08/16 17:00:16 INFO spark.SecurityManager: Changing modify acls to: edureka
15/08/16 17:00:16 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(edureka); users with modify permissions: Set(edureka)
Exception in thread "main" java.lang.NoSuchMethodError: scala.collection.immutable.HashSet$.empty()Lscala/collection/immutable/HashSet;
    at akka.actor.ActorCell$.<init>(ActorCell.scala:305)
    at akka.actor.ActorCell$.<clinit>(ActorCell.scala)
    at akka.actor.RootActorPath.$div(ActorPath.scala:152)
    at akka.actor.LocalActorRefProvider.<init>(ActorRefProvider.scala:465)
    at akka.remote.RemoteActorRefProvider.<init>(RemoteActorRefProvider.scala:124)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78)
    at scala.util.Try$.apply(Try.scala:191)
    at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
    at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84)
    at scala.util.Success.flatMap(Try.scala:230)
    at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84)
    at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:550)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
    at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:54)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
    at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1504)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:166)
    at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1495)
    at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:56)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:153)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:204)
    at SimpleApp$.main(SampleApp.scala:14)
    at SimpleApp.main(SampleApp.scala)

2 Answers:

Answer 0 (score: 0)

Be careful, this kind of problem happens a lot with Spark. If you want to avoid further surprises, you can build Spark yourself against the exact versions of the dependencies you are using (Guava, log4j, Scala, Jackson). Also consider using the spark.driver.userClassPathFirst and spark.executor.userClassPathFirst properties to make your classpath take precedence over the dependencies bundled with Spark. Personally, it only worked for me when passed as an argument to spark-submit; it did not work when set in SparkConf (which makes sense). A sketch of such an invocation is shown below.
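For illustration, a minimal sketch of that spark-submit invocation (the JAR path is hypothetical, the class name is taken from the code above, and note that these two properties only exist in Spark releases newer than the 1.1.1 shown in the log):

    spark-submit \
        --class SimpleApp \
        --master local[2] \
        --conf spark.driver.userClassPathFirst=true \
        --conf spark.executor.userClassPathFirst=true \
        target/scala-2.10/sample-application_2.10-1.0.jar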

Even with these properties set to true you may still run into problems, because Spark uses separate classloaders, and that can cause issues even when your dependencies carry the same version numbers. In that case, only building Spark manually fixes it (as far as I know).
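If you do end up building Spark yourself, a sketch of the standard Maven build for the 1.1.x line follows (the Hadoop version here matches the assembly JAR in the log above; the exact profiles to activate vary by Spark version, so treat this as an assumption to check against the build documentation):

    mvn -Phadoop-2.2 -Dhadoop.version=2.2.0 -DskipTests clean package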

Answer 1 (score: 0)

I actually tried installing Spark with all of its dependencies and then running the code, and that did work. The key point is to set up the directory structure correctly: create a project, create the src/main/scala file structure inside it, and put the actual program file (code.scala) there. The .sbt file with the dependencies should be in the main project folder. Thanks @Dici.
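A sketch of the layout this answer describes, with a minimal build.sbt (the project folder name is arbitrary, and the Scala and Spark versions are assumptions read off the assembly JAR name in the log, spark-assembly-1.1.1 / scala-2.10; adjust them to your installation):

    sample-project/
        build.sbt
        src/main/scala/SampleApp.scala

build.sbt:

    name := "Sample Application"

    version := "1.0"

    scalaVersion := "2.10.4"

    // The Scala version must match the one Spark was compiled against;
    // a mismatch produces exactly the NoSuchMethodError on
    // scala.collection.immutable.HashSet$ shown above.
    libraryDependencies += "org.apache.spark" %% "spark-core" % "1.1.1"

With this in place, sbt package builds a JAR that can be run with spark-submit, or the project can be imported into Eclipse with consistent dependencies.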