Exception in thread "main" scala.ScalaReflectionException

Date: 2016-05-09 14:07:53

Tags: eclipse scala apache-spark-sql spark-dataframe

Here is my code for joining two data frames:

package org.test.rddjoins
import org.apache.spark.SparkConf
import org.apache.spark._
import org.apache.spark.rdd.RDD


object rdd {

  case class Score(name: String, score: Int)
  case class Age(name: String, age: Int)

  def main(args: Array[String]) {

    val sparkConf = new SparkConf()
      .setAppName("rdd")
      .setMaster("local[2]")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)

    import sqlContext._

    val scores = sc.textFile("scores.txt").map(_.split(",")).map(s => Score(s(0), s(1).trim.toInt))
    val ages = sc.textFile("ages.txt").map(_.split(",")).map(s => Age(s(0), s(1).trim.toInt))

    scores.registerAsTable("scores")
    ages.registerAsTable("ages")

    val joined = sqlContext.sql("""
      SELECT a.name, a.age, s.score
      FROM ages a JOIN scores s
      ON a.name = s.name""")
    joined.collect().foreach(println)
  }
}

Running it produces the following error:

Exception in thread "main" scala.ScalaReflectionException: class org.apache.spark.sql.catalyst.ScalaReflection in JavaMirror with primordial classloader with boot classpath [C:\Users\Owner\Downloads\Compressed\eclipse\plugins\org.scala-lang.scala-library_2.11.8.v20160304-115712-1706a37eb8.jar;C:\Users\Owner\Downloads\Compressed\eclipse\plugins\org.scala-lang.scala-reflect_2.11.8.v20160304-115712-1706a37eb8.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\resources.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\rt.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\sunrsasign.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\jce.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.8.0_77\jre\classes] not found.
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:123)
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:22)
    at org.apache.spark.sql.catalyst.ScalaReflection$$typecreator1$1.apply(ScalaReflection.scala:115)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:232)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:232)
    at scala.reflect.api.TypeTags$class.typeOf(TypeTags.scala:341)
    at scala.reflect.api.Universe.typeOf(Universe.scala:61)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:115)
    at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:33)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:100)
    at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:33)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.attributesFor(ScalaReflection.scala:94)
    at org.apache.spark.sql.catalyst.ScalaReflection$.attributesFor(ScalaReflection.scala:33)
    at org.apache.spark.sql.SQLContext.createSchemaRDD(SQLContext.scala:111)
    at org.test.rddjoins.rdd$.main(rdd.scala:27)

Help!!!

1 Answer:

Answer 0 (score: 0):

The Apache Spark libraries are missing from the classpath.

The exception means that one of the Spark classes (here org.apache.spark.sql.catalyst.ScalaReflection) cannot be found on the classpath: notice that the boot classpath listed in the error contains only the Scala runtime and JDK jars, and no Spark jars at all. You should modify the classpath to add the Spark jars.
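
As a minimal sketch of one way to do this, assuming an sbt build (the sparkVersion value below is an assumption; pick a Spark release that matches the API your code compiles against and a Scala version that Spark release was built for):

// build.sbt — hypothetical minimal build definition
name := "rddjoins"

scalaVersion := "2.11.8"

// Assumed version; adjust to your installation.
val sparkVersion = "1.6.1"

// spark-core provides SparkContext; spark-sql provides SQLContext and pulls in
// catalyst, the module containing the missing ScalaReflection class.
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % sparkVersion,
  "org.apache.spark" %% "spark-sql"  % sparkVersion
)

If you are running the project directly from Eclipse rather than through sbt, the equivalent fix is to add the Spark jars from your Spark distribution (for Spark 1.x, typically the spark-assembly jar shipped in its lib directory) to the project's Java Build Path.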