I am launching a Spark application using SparkLauncher. Inside the Spark application I insert data into a Hive table and use some HBase-backed (HBase-Hive linked) tables in a join query. I have already added hive-hbase-handler-1.1.0-cdh5.13.0.jar through the SparkLauncher, but I still get: Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat. This is the SparkLauncher code in which I add the jar:
def launch(hdfsFilePath: String): Unit = {
  println("Inside ApplicationLauncher")
  val command = new SparkLauncher()
    .setAppResource("/home/cloudera/Desktop/Avi/LiveProjects/MusicDataAnalysis/target/scala-2.11/musicdataanalysis_2.11-0.1.jar")
    .setMainClass("ParseInputFile")
    .setVerbose(false)
    .addAppArgs(hdfsFilePath)
    .setMaster("local")
    .addJar("/home/cloudera/Desktop/Avi/jars/hive-hbase-handler-1.1.0-cdh5.13.0.jar")
    .addJar("/home/cloudera/Desktop/Avi/jars/spark-xml_2.11-0.5.0.jar")
  println("Done with Spark Launcher")
  val appHandle = command.startApplication()
  appHandle.addListener(new SparkAppHandle.Listener {
    def infoChanged(sparkAppHandle: SparkAppHandle): Unit = {
      // println(sparkAppHandle.getState + " Custom Print")
    }
    def stateChanged(sparkAppHandle: SparkAppHandle): Unit = {
      println(sparkAppHandle.getState)
      if ("FINISHED".equals(sparkAppHandle.getState.toString)) {
        sparkAppHandle.stop
      }
    }
  })
}
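For reference, a minimal sketch (plain JDK JarFile API; the jar path and class name are simply the ones from the error above) for checking whether a given jar actually contains the class named in the stack trace:

import java.util.jar.JarFile
import scala.collection.JavaConverters._

object JarClassCheck {
  // Returns true if the jar contains the .class entry for the fully qualified class name.
  def containsClass(jarPath: String, className: String): Boolean = {
    val entryName = className.replace('.', '/') + ".class"
    val jar = new JarFile(jarPath)
    try jar.entries().asScala.exists(_.getName == entryName)
    finally jar.close()
  }

  def main(args: Array[String]): Unit = {
    println(containsClass(
      "/home/cloudera/Desktop/Avi/jars/hive-hbase-handler-1.1.0-cdh5.13.0.jar",
      "org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat"))
  }
}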
Answer 0 (score: 0)
My problem got resolved when I added hbase-0.92.1.jar along with hive-hbase-handler-1.1.0-cdh5.13.0.jar. Please find the working code below:
import org.apache.spark.launcher.{SparkAppHandle, SparkLauncher}

def launch(hdfsFilePath: String): Unit = {
  println("Inside ApplicationLauncher")
  val command = new SparkLauncher()
    .setAppResource("/home/cloudera/Desktop/Avi/LiveProjects/MusicDataAnalysis/target/scala-2.11/musicdataanalysis_2.11-0.1.jar")
    .setMainClass("ParseInputFile")
    .setVerbose(false)
    .addAppArgs(hdfsFilePath)
    .setMaster("local")
    // Both the HBase jar and the hive-hbase-handler jar are needed for the
    // HBase-backed Hive tables; the file:// URIs point at local copies of the jars.
    .addJar("file:///home/cloudera/Desktop/Avi/jars/hbase-0.92.1.jar")
    .addJar("file:///home/cloudera/Desktop/Avi/jars/hive-hbase-handler-3.1.1.jar")
    .addJar("file:///home/cloudera/Desktop/Avi/jars/spark-xml_2.11-0.5.0.jar")
  println("Done with Spark Launcher")
  val appHandle = command.startApplication()
  appHandle.addListener(new SparkAppHandle.Listener {
    def infoChanged(sparkAppHandle: SparkAppHandle): Unit = {
      // println(sparkAppHandle.getState + " Custom Print")
    }
    def stateChanged(sparkAppHandle: SparkAppHandle): Unit = {
      println(sparkAppHandle.getState)
      if ("FINISHED".equals(sparkAppHandle.getState.toString)) {
        sparkAppHandle.stop
      }
    }
  })
}
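For completeness, a rough sketch of how launch might be invoked (the object name LauncherMain and the HDFS path are placeholders, assuming launch is in scope; not part of the original answer):

object LauncherMain {
  def main(args: Array[String]): Unit = {
    // Kick off the Spark application; startApplication() returns immediately,
    // so keep this JVM alive long enough for the listener to print state changes.
    launch("hdfs:///user/cloudera/music/sample_input.xml")
    Thread.sleep(60000)
  }
}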