我想在spark sql中创建表我正在使用scala ide我已经添加了maven的依赖项 我试过这个,它给了我下面的错误
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql._
object HiveFromSpark {
def main(args: Array[String]) {
val sparkConf = new SparkConf().setAppName("HiveFromSpark").setMaster("local")
val sc = new SparkContext(sparkConf)
val sqlContext = new SQLContext(sc)
sqlContext.sql("SET hive.metastore.warehouse.dir=hdfs://localhost:9000/user/hive/warehouse")
sqlContext.sql("CREATE TABLE Test (code string,description string,code string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TextFile")
sqlContext.sql("LOAD DATA INPATH 'C:\\Users\\mhattabi\\Desktop\\cars.csv' OVERWRITE INTO TABLE Test")
val df = sqlContext.sql("SELECT * from Test")
df.show()
}
}
我收到了这个错误:
线程中的异常" main" java.lang.RuntimeException:[1.1]失败:``insert''期望但找到标识符CREATE
CREATE TABLE Test(代码字符串,描述字符串,代码字符串)ROW FORMAT DELIMITED FIELDS终止于','存储为TextFile