ClassNotFoundException in

Posted: 2017-12-07 17:51:01

Tags: apache-spark apache-spark-sql

I have the following code, which I packaged into a jar:

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.SQLContext


case class Student(StudentID: Long,
                   creationdate: java.sql.Timestamp,
                   mark1: String,
                   mark2: String)

object Main {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("EMRCustomerApplication")
    conf.set("spark.driver.allowMultipleContexts","true")
    val spark = SparkSession
      .builder()
      .appName("Spark In Action")
      .master("local")
      .enableHiveSupport()
      .getOrCreate()
    System.setProperty("hive.metastore.uris", "thrift://internal-shared-hive-metastore-elb-550897717.us-west-2.elb.amazonaws.com:9083")
    System.setProperty("hive.metastore.execute.setugi", "false")
    System.setProperty("spark.driver.allowMultipleContexts","true")
    val sc = new SparkContext(conf)
    conf.set("spark.driver.allowMultipleContexts","true")
    import spark.implicits._

    val allRecordsDF = spark.sql(""" select * from student """)


  }
}

I get the following exception. I thought the --class option was supposed to name the main class of the jar passed to spark-submit. This is the command I executed:

spark-submit --class "Main" s3://Users/test/test_2.10-1.0.jar 

That is what I did. Can anyone see where the problem is?

java.lang.ClassNotFoundException: Main
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:348)
    at org.apache.spark.util.Utils$.classForName(Utils.scala:229)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:695)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Command exiting with ret '101'

1 answer:

Answer 0 (score: 0)

Put the class under a package, rebuild the jar, and try running it again with the package added as a prefix to the class name in --class.
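
A minimal sketch of what that looks like, assuming a hypothetical package name com.example (any package works, as long as the --class argument matches it):

package com.example  // hypothetical package name; use your own

import org.apache.spark.sql.SparkSession

object Main {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("Spark In Action")
      .enableHiveSupport()
      .getOrCreate()

    // Use the session's own context rather than creating a second SparkContext
    val allRecordsDF = spark.sql("select * from student")
    allRecordsDF.show()
  }
}

After rebuilding, submit with the fully qualified class name:

spark-submit --class "com.example.Main" s3://Users/test/test_2.10-1.0.jar

It can also help to confirm the class actually ended up in the jar, for example with jar tf test_2.10-1.0.jar | grep Main.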