I cannot import sqlContext.implicits._ when using spark-shell. The import fails with "value implicits is not a member of org.apache.spark.sql.SQLContext", and toDF() then fails as well. Here is my session:
scala>
scala> case class Category(item_id: Int, category: String)
defined class Category
scala>
scala> val sqlContext = new org.apache.spark.sql.SQLContext(sc)
sqlContext: org.apache.spark.sql.SQLContext = org.apache.spark.sql.SQLContext@55b1b232
scala> val p = sc.textFile("hdfs://localhost:8020/user/cloudera/Input.txt")
15/12/28 06:13:15 INFO storage.MemoryStore: ensureFreeSpace(158080) called with curMem=0, maxMem=311387750
15/12/28 06:13:15 INFO storage.MemoryStore: Block broadcast_0 stored as values to memory (estimated size 154.4 KB, free 296.8 MB)
p: org.apache.spark.rdd.RDD[String] = MappedRDD[1] at textFile at <console>:35
scala> val pmap = p.map(p => p.split("\t"))
pmap: org.apache.spark.rdd.RDD[Array[String]] = MappedRDD[2] at map at <console>:36
scala> val PersonRDD = pmap.map(p => Category(p(0).toInt, p(1)))
PersonRDD: org.apache.spark.rdd.RDD[Category] = MappedRDD[3] at map at <console>:40
scala> import sqlContext.implicits._
<console>:37: error: value implicits is not a member of org.apache.spark.sql.SQLContext
import sqlContext.implicits._
^
scala> val PersonDF = PersonRDD.toDF()
<console>:43: error: value toDF is not a member of org.apache.spark.rdd.RDD[Category]
val PersonDF=PersonRDD.toDF()
^