我试图在Apache Spark(Scala)中编译SBT包但收到以下错误:
[error] /home/bigdata/jgarcia2/test/src/main/scala/test.scala:37: package org.apache.spark.sql is not a value
[error] val dbschema = sql("SELECT * From table")
这是我的代码:
import org.apache.spark._
import org.apache.log4j.Logger
import org.apache.log4j.Level
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql._
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
// Question's code, reproduced as-is: it fails to compile at the `sql(...)` call below.
object test {
def main(args: Array[String]) {
// Silence Spark/Akka logging for cleaner console output.
Logger.getLogger("org").setLevel(Level.OFF)
Logger.getLogger("akka").setLevel(Level.OFF)
if (args.length < 1) {
System.err.println("Usage: test <file>")
System.exit(1)
}
val sparkConf = new SparkConf().setAppName("test")
val sc = new SparkContext(sparkConf)
val sqlContext = new SQLContext(sc)
// Loads the XML file given on the command line via the spark-xml data source.
val df = sqlContext.read.format("com.databricks.spark.xml").option("rowTag", "row").load(args(0))
df.registerTempTable("table")
// BUG: the unqualified `sql` here resolves to the *package* org.apache.spark.sql
// (brought into scope by `import org.apache.spark._` / `import org.apache.spark.sql._`),
// not to a method — hence the "package org.apache.spark.sql is not a value" error.
// The fix is to call the method on the context: `sqlContext.sql(...)`.
val dbschema = sql("SELECT * From table")
dbschema.printSchema()
sc.stop()
} }
任何人都可以帮我解决问题吗?
答案 0（得分：1）：
由于您导入了 org.apache.spark._（其中包含 sql 包），编译器认为您正在尝试把该包当作函数来调用，因此报告了这个错误。只需改写为 sqlContext.sql，源代码即可编译通过。
import org.apache.log4j.Logger
import org.apache.log4j.Level
import org.apache.spark.sql.SQLContext
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
object Test {

  /**
   * Entry point: loads an XML file (path given as the first argument) with the
   * spark-xml data source, registers it as a temporary table, and prints the
   * inferred schema of `SELECT * From table`.
   *
   * Fix relative to the question: the query is issued via `sqlContext.sql(...)`
   * so the call resolves to the SQLContext method instead of the
   * `org.apache.spark.sql` package brought in by the wildcard imports.
   */
  def main(args: Array[String]): Unit = { // explicit `: Unit =` — procedure syntax is deprecated
    // Silence Spark/Akka logging for cleaner console output.
    Logger.getLogger("org").setLevel(Level.OFF)
    Logger.getLogger("akka").setLevel(Level.OFF)

    if (args.length < 1) {
      System.err.println("Usage: test <file>")
      System.exit(1)
    }

    val sparkConf = new SparkConf().setAppName("test")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sc)

    // Read the XML input; each <row> element becomes one DataFrame row.
    val df = sqlContext.read.format("com.databricks.spark.xml").option("rowTag", "row").load(args(0))
    df.registerTempTable("table")

    // Qualified call: SQLContext.sql, not the org.apache.spark.sql package.
    val dbschema = sqlContext.sql("SELECT * From table")
    dbschema.printSchema()

    sc.stop()
  }
}