我正在尝试编写一个简单的程序,在 Spark SQL 中把 HBase 的数据加载到 DataFrame。运行时似乎混入了针对旧版本 Spark 编译的依赖,导致了如下错误,请指教:
Caused by: java.lang.NoClassDefFoundError: org/apache/spark/Logging
主程序
object LoadHBase {
  /**
   * Demo entry point: reads an HBase table through the hbase-spark
   * connector into a DataFrame and prints its row count via Spark SQL.
   */
  def main(args: Array[String]): Unit = {
    // Local SparkSession for the demo run.
    val spark = SparkSession
      .builder()
      .appName("HBase-Demo")
      .master("local[*]")
      .getOrCreate()

    // Connector options: row key plus two columns from family `c`,
    // mapped onto string-typed DataFrame fields.
    val hbaseOptions = Map(
      "hbase.columns.mapping" -> "KEY_FIELD STRING :key, A_FIELD STRING c:a, B_FIELD STRING c:b",
      "hbase.table" -> "default:employee"
    )

    val employees = spark.sqlContext.read
      .format("org.apache.hadoop.hbase.spark")
      .options(hbaseOptions)
      .load()

    // Register a temp view so the count can be expressed in SQL.
    employees.createOrReplaceTempView("temp_emp")
    spark.sqlContext.sql("SELECT count(*) from temp_emp").show()
  }
}
build.sbt
name := "scalaworks"

version := "1.0"

scalaVersion := "2.11.0"

resolvers += "Cloudera Repo" at "https://repository.cloudera.com/artifactory/cloudera-repos"

// Spark modules: use %% so sbt appends the Scala binary suffix (_2.11)
// automatically and keeps it in sync with scalaVersion.
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core"      % "2.2.0",
  "org.apache.spark" %% "spark-streaming" % "2.2.0",
  "org.apache.spark" %% "spark-mllib"     % "2.2.0",
  "org.apache.spark" %% "spark-sql"       % "2.2.0"
)

// HBase modules (Java artifacts, no Scala suffix, so plain %).
libraryDependencies ++= Seq(
  "org.apache.hbase" % "hbase-server" % "1.2.0-cdh5.10.0",
  "org.apache.hbase" % "hbase-client" % "1.2.0-cdh5.10.0",
  "org.apache.hbase" % "hbase-common" % "1.2.0-cdh5.10.0",
  // BUG FIX: hbase-spark 1.2.0-cdh5.10.0 is compiled against Spark 1.x and
  // references org.apache.spark.Logging, which was made private/removed in
  // Spark 2.0 — that is the cause of the NoClassDefFoundError in the trace.
  // Use an hbase-spark build targeting Spark 2.x instead.
  "org.apache.hbase" % "hbase-spark" % "2.0.0-alpha4"
)
错误堆栈跟踪
Caused by: java.lang.NoClassDefFoundError: org/apache/spark/Logging
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$21$$anonfun$apply$12.apply(DataSource.scala:533)
... 7 more
Caused by: java.lang.ClassNotFoundException: org.apache.spark.Logging
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 26 more
**更新**
原因确认:CDH 自带的 hbase-spark(1.2.0-cdh5.10.0)是针对 Spark 1.x 编译的,它依赖的 org.apache.spark.Logging 在 Spark 2.0 中已被移除,因此与 Spark 2.x 不兼容,需改用支持 Spark 2.x 的 hbase-spark 版本(或 SHC 连接器)。
{{3}}