java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.deploy.SparkHadoopUtil$

Asked: 2015-12-30 13:36:44

Tags: scala hadoop apache-spark yarn

I was trying to initialize a SparkContext in the Spark shell when I ran into the error below. I am using Cloudera CDH 5.3.0 (VM), Spark 1.2.0, and Scala 2.11.7.
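
For context, this is roughly how the context is being created; the question does not show how `hConf` is built, so the sketch below is an assumption that it is a SparkConf configured for yarn-client mode, with values mirroring the configuration dump in the log (names and settings are illustrative only):

    import org.apache.spark.{SparkConf, SparkContext}

    // Assumed reconstruction of hConf -- not shown in the question.
    // Settings mirror the "Spark configuration" dump in the log below
    // (yarn-client master, 2g executor memory); adjust for your environment.
    val hConf = new SparkConf()
      .setMaster("yarn-client")
      .setAppName("spark-shell-test")
      .set("spark.executor.memory", "2g")

    // Inside spark-shell a SparkContext normally already exists, which is why
    // the log below warns that "Another SparkContext is being constructed".
    val sc = new SparkContext(hConf)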

Attempting to submit in yarn-client mode produces the following log:

scala> val sc = new SparkContext(hConf)
    15/12/30 06:03:58 WARN SparkContext: Another SparkContext is being constructed (or threw an exception in its constructor).  This may indicate an error, since only one SparkContext may be running in this JVM (see SPARK-2243). The other SparkContext was created at:
    org.apache.spark.SparkContext.<init>(SparkContext.scala:70)
    org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:986)
    $line3.$read$$iwC$$iwC.<init>(<console>:9)
    $line3.$read$$iwC.<init>(<console>:18)
    $line3.$read.<init>(<console>:20)
    $line3.$read$.<init>(<console>:24)
    $line3.$read$.<clinit>(<console>)
    $line3.$eval$.<init>(<console>:7)
    $line3.$eval$.<clinit>(<console>)
    $line3.$eval.$print(<console>)
    sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    java.lang.reflect.Method.invoke(Method.java:606)
    org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)
    org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)
    org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)
    org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)
    org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)
    org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:828)
    15/12/30 06:03:58 INFO SparkContext: Spark configuration:
    fs.file.impl=org.apache.hadoop.fs.LocalFileSystem
    spark.app.name=org.apache.spark.repl.Main
    spark.driver.extraLibraryPath=/opt/cloudera/parcels/CDH-5.3.0-1.cdh5.3.0.p0.30/lib/hadoop/lib/native
    spark.eventLog.dir=hdfs://localhost.localdomain:8020/user/spark/applicationHistory
    spark.eventLog.enabled=true
    spark.executor.extraLibraryPath=/opt/cloudera/parcels/CDH-5.3.0-1.cdh5.3.0.p0.30/lib/hadoop/lib/native
    spark.executor.memory=2g
    spark.jars=
    spark.logConf=true
    spark.master=yarn-client
    spark.yarn.historyServer.address=http://localhost.localdomain:18088
    15/12/30 06:03:58 INFO SecurityManager: Changing view acls to: cloudera
    15/12/30 06:03:58 INFO SecurityManager: Changing modify acls to: cloudera
    15/12/30 06:03:58 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(cloudera); users with modify permissions: Set(cloudera)
    15/12/30 06:03:59 INFO Slf4jLogger: Slf4jLogger started
    15/12/30 06:03:59 INFO Remoting: Starting remoting
    15/12/30 06:03:59 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@10.113.234.150:49139]
    15/12/30 06:03:59 INFO Remoting: Remoting now listens on addresses: [akka.tcp://sparkDriver@10.113.234.150:49139]
    15/12/30 06:03:59 INFO Utils: Successfully started service 'sparkDriver' on port 49139.
    15/12/30 06:03:59 INFO SparkEnv: Registering MapOutputTracker
    15/12/30 06:03:59 INFO SparkEnv: Registering BlockManagerMaster
    15/12/30 06:03:59 INFO DiskBlockManager: Created local directory at /tmp/spark-local-20151230060359-4a55
    15/12/30 06:03:59 INFO MemoryStore: MemoryStore started with capacity 267.3 MB
    java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.deploy.SparkHadoopUtil$
        at org.apache.spark.util.Utils$.getSparkOrYarnConfig(Utils.scala:1792)
        at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:105)
        at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:180)
        at org.apache.spark.SparkEnv$.create(SparkEnv.scala:292)
        at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:159)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:232)
        at $iwC$$iwC$$iwC$$iwC.<init>(<console>:14)
        at $iwC$$iwC$$iwC.<init>(<console>:19)
        at $iwC$$iwC.<init>(<console>:21)
        at $iwC.<init>(<console>:23)
        at <init>(<console>:25)
        at .<init>(<console>:29)
        at .<clinit>(<console>)
        at .<init>(<console>:7)
        at .<clinit>(<console>)
        at $print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:828)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:873)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:785)
        at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:628)
        at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:636)
        at org.apache.spark.repl.SparkILoop.loop(SparkILoop.scala:641)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:968)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
        at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:916)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1011)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:358)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

0 Answers:

There are no answers yet.