Security exception when connecting to Spark master from Java

Date: 2015-07-20 11:55:17

Tags: java mongodb scala apache-spark

I am new to Spark. In my project, I set the master URL and app name on a SparkConf object.

Here is my code:

import org.apache.hadoop.conf.Configuration;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.bson.BSONObject;
import com.mongodb.hadoop.MongoInputFormat;
import com.mongodb.hadoop.MongoOutputFormat;

public class SparkExample {

@SuppressWarnings("resource")
public static void main(String[] args) {

    // Input configuration for the MongoDB Hadoop connector
    Configuration conf = new Configuration();
    conf.set("mongo.job.input.format", "com.mongodb.hadoop.MongoInputFormat");
    conf.set("mongo.input.uri", "mongodb://192.168.1.149:27017/test.sns");

    JavaSparkContext sc = new JavaSparkContext(
            new SparkConf().setMaster("spark://localhost:7077").setAppName("vedat"));

    // Read the collection as an RDD of (key, BSONObject) pairs
    JavaPairRDD<Object, BSONObject> documents = sc.newAPIHadoopRDD(conf, MongoInputFormat.class, Object.class,
            BSONObject.class);

    // Output configuration for writing the documents back to MongoDB
    Configuration outConfig = new Configuration();
    outConfig.set("mongo.job.output.format", "com.mongodb.hadoop.MongoOutputFormat");
    outConfig.set("mongo.output.uri", "mongodb://192.168.1.149:27017/test.ou");

    // Save back to MongoDB via MongoOutputFormat
    documents.saveAsNewAPIHadoopFile("file://44", Object.class, BSONObject.class, MongoOutputFormat.class,
            outConfig);

    Configuration bsonConf = new Configuration();
    bsonConf.set("mongo.job.output.format", "com.mongodb.hadoop.BSONFileOutputFormat");

    // documents.saveAsNewAPIHadoopFile("hdfs://localhost:8020/user/spark/demo",
    //         Object.class, BSONObject.class,
    //         BSONFileOutputFormat.class, bsonConf);

}

}

When I run the code, I get this stack trace:

ERROR spark.SparkContext: Error initializing SparkContext.
java.lang.SecurityException: class "javax.servlet.FilterRegistration"'s signer information does not match signer information of other classes in the same package
at java.lang.ClassLoader.checkCerts(ClassLoader.java:952)
at java.lang.ClassLoader.preDefineClass(ClassLoader.java:666)
at java.lang.ClassLoader.defineClass(ClassLoader.java:794)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:136)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:129)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:98)
at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:108)
at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:99)
at org.apache.spark.ui.WebUI.attachPage(WebUI.scala:78)
at org.apache.spark.ui.WebUI$$anonfun$attachTab$1.apply(WebUI.scala:62)
at org.apache.spark.ui.WebUI$$anonfun$attachTab$1.apply(WebUI.scala:62)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at org.apache.spark.ui.WebUI.attachTab(WebUI.scala:62)
at org.apache.spark.ui.SparkUI.initialize(SparkUI.scala:61)
at org.apache.spark.ui.SparkUI.<init>(SparkUI.scala:74)
at org.apache.spark.ui.SparkUI$.create(SparkUI.scala:190)
at org.apache.spark.ui.SparkUI$.createLiveUI(SparkUI.scala:141)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:440)
at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
at tr.vedat.spark.SparkExample.main(SparkExample.java:24)
15/07/20 14:44:42 INFO spark.MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
15/07/20 14:44:42 ERROR spark.SparkContext: Error stopping SparkContext after init error.
java.lang.NullPointerException
at org.apache.spark.network.netty.NettyBlockTransferService.close(NettyBlockTransferService.scala:152)
at org.apache.spark.storage.BlockManager.stop(BlockManager.scala:1214)
at org.apache.spark.SparkEnv.stop(SparkEnv.scala:96)
at org.apache.spark.SparkContext.stop(SparkContext.scala:1657)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:565)
at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
at tr.vedat.spark.SparkExample.main(SparkExample.java:24)
Exception in thread "main" java.lang.SecurityException: class "javax.servlet.FilterRegistration"'s signer information does not match signer information of other classes in the same package
at java.lang.ClassLoader.checkCerts(ClassLoader.java:952)
at java.lang.ClassLoader.preDefineClass(ClassLoader.java:666)
at java.lang.ClassLoader.defineClass(ClassLoader.java:794)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:136)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:129)
at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:98)
at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:108)
at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:99)
at org.apache.spark.ui.WebUI.attachPage(WebUI.scala:78)
at org.apache.spark.ui.WebUI$$anonfun$attachTab$1.apply(WebUI.scala:62)
at org.apache.spark.ui.WebUI$$anonfun$attachTab$1.apply(WebUI.scala:62)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at org.apache.spark.ui.WebUI.attachTab(WebUI.scala:62)
at org.apache.spark.ui.SparkUI.initialize(SparkUI.scala:61)
at org.apache.spark.ui.SparkUI.<init>(SparkUI.scala:74)
at org.apache.spark.ui.SparkUI$.create(SparkUI.scala:190)
at org.apache.spark.ui.SparkUI$.createLiveUI(SparkUI.scala:141)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:440)
at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
at tr.vedat.spark.SparkExample.main(SparkExample.java:24)

Here is my pom.xml:

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<groupId>tr.com.vedat</groupId>
<artifactId>aa</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>

<name>aa</name>
<url>http://maven.apache.org</url>

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>

<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>3.8.1</version>
        <scope>test</scope>
    </dependency>


    <dependency>
        <groupId>ant</groupId>
        <artifactId>ant</artifactId>
        <version>1.6.5</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.6.0</version>
        <exclusions>
            <exclusion>
                <groupId>org.eclipse.jetty</groupId>
                <artifactId>*</artifactId>
            </exclusion>
            <exclusion>
                <groupId>javax.servlet</groupId>
                <artifactId>*</artifactId>
            </exclusion>
        </exclusions>

    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.6.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.10</artifactId>
        <version>1.4.0</version>
        <exclusions>
            <exclusion>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-client</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-app</artifactId>
        <version>2.6.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-mllib_2.10</artifactId>
        <version>1.4.0</version>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-core</artifactId>
        <version>1.2.1</version>
    </dependency>
    <dependency>
        <groupId>commons-cli</groupId>
        <artifactId>commons-cli</artifactId>
        <version>1.2</version>
    </dependency>
    <dependency>
        <groupId>commons-configuration</groupId>
        <artifactId>commons-configuration</artifactId>
        <version>1.6</version>
    </dependency>
    <dependency>
        <groupId>commons-httpclient</groupId>
        <artifactId>commons-httpclient</artifactId>
        <version>3.0.1</version>
    </dependency>
    <dependency>
        <groupId>commons-lang</groupId>
        <artifactId>commons-lang</artifactId>
        <version>2.4</version>
    </dependency>
    <dependency>
        <groupId>commons-logging</groupId>
        <artifactId>commons-logging</artifactId>
        <version>1.1.1</version>
    </dependency>
    <dependency>
        <groupId>commons-logging</groupId>
        <artifactId>commons-logging-api</artifactId>
        <version>1.0.4</version>
    </dependency>
    <dependency>
        <groupId>org.codehaus.jackson</groupId>
        <artifactId>jackson-core-asl</artifactId>
        <version>1.8.8</version>
    </dependency>
    <dependency>
        <groupId>org.codehaus.jackson</groupId>
        <artifactId>jackson-mapper-asl</artifactId>
        <version>1.8.8</version>
    </dependency>

    <dependency>
        <groupId>org.mongodb</groupId>
        <artifactId>mongo-hadoop-core_1.0.0</artifactId>
        <version>1.0.0-rc0</version>
    </dependency>

    <dependency>
        <groupId>org.mongodb</groupId>
        <artifactId>mongo-java-driver</artifactId>
        <version>2.10.1</version>
    </dependency>

</dependencies>
</project>

So, can anyone help me by showing me what I am doing wrong? Why am I getting this error?

1 Answer:

Answer 0 (score: 2):

spark-core depends on org.eclipse.jetty.orbit:javax.servlet:3.0.0.v201112011016.

You should exclude it. I am not sure whether this is the only problem, but it is at least part of it if you run the application inside another container.
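As a minimal sketch, this is what that exclusion could look like on the spark-core dependency in the pom.xml above; the coordinates of the excluded artifact are the ones named in this answer, and everything else is unchanged from the question's pom:

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.10</artifactId>
    <version>1.4.0</version>
    <exclusions>
        <!-- existing exclusion from the question's pom -->
        <exclusion>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
        </exclusion>
        <!-- exclude the signed servlet jar that conflicts with the
             javax.servlet classes pulled in by the Hadoop artifacts -->
        <exclusion>
            <groupId>org.eclipse.jetty.orbit</groupId>
            <artifactId>javax.servlet</artifactId>
        </exclusion>
    </exclusions>
</dependency>

If something else on the classpath still needs the servlet API after the exclusion, a common follow-up (an assumption on my part, not part of this answer) is to declare a single unsigned javax.servlet:javax.servlet-api dependency explicitly, so that only one copy of the javax.servlet package remains on the classpath.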