I still get the following trace error even after adding the dependency at runtime
Exception in thread "main" java.lang.NoClassDefFoundError: com/cloudera/spark/hbase/HBaseContext
at Spark_HBase.SparkHBaseExample$.main(SparkHBaseExample.scala:36)
at Spark_HBase.SparkHBaseExample.main(SparkHBaseExample.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:358)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.ClassNotFoundException: com.cloudera.spark.hbase.HBaseContext
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
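For reference, the class com.cloudera.spark.hbase.HBaseContext comes from the Cloudera Labs SparkOnHBase project, and the dependency is declared roughly along these lines in the sbt build. The coordinates, version, and repository shown here are only a best-guess sketch and may differ for your CDH release:

resolvers += "Cloudera repository" at "https://repository.cloudera.com/artifactory/cloudera-repos/"

// Artifact coordinates/version below are assumed, not verified; check the Cloudera repository for your release
libraryDependencies += "com.cloudera" % "spark-hbase" % "0.0.2-clabs"

Having the jar on the compile classpath is not enough by itself: the same jar also has to be visible to the driver and executors at runtime (for example via spark-submit's --jars option or an assembly jar), otherwise the class fails to load as in the trace above.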
Code:
import com.cloudera.spark.hbase.HBaseContext
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{Get, HTable, Result, Scan}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkConf, SparkContext}

object SparkHBaseExample {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("HBaseSpark").setMaster("local[2]")
    val sc = new SparkContext(sparkConf)

    // HBase configuration pointing at the "tmp" table
    val conf = HBaseConfiguration.create()
    conf.set(TableInputFormat.INPUT_TABLE, "tmp")
    val table = new HTable(conf, "tmp")

    // Read the table through the standard Hadoop InputFormat API
    val hBaseRDD = sc.newAPIHadoopRDD(
      conf,
      classOf[TableInputFormat],
      classOf[ImmutableBytesWritable],
      classOf[Result])

    // Single Get to verify connectivity: row "abc", column cf:val
    val g = new Get(Bytes.toBytes("abc"))
    val result = table.get(g)
    val value = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("val"))
    val name = Bytes.toString(value)
    println("############################################################" + name)

    // The missing class from the trace above (com.cloudera.spark.hbase.HBaseContext)
    // is first referenced here
    val hbaseContext = new HBaseContext(sc, conf)
    val scan = new Scan()
    scan.setCaching(100)
    val getRdd = hbaseContext.hbaseRDD("tmp", scan)
    println("############################GETRDD################################")
    getRdd.foreach(println(_))
  }
}