This demo runs fine on its own. But when I move the same code into a method of another object (from an earlier project of mine) and call that method, it fails to compile.
import org.apache.spark.SparkContext
import org.bson.BasicBSONObject

object DFMain {
  case class Person(name: String, age: Double, t: String)

  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local", "Scala Word Count")
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
    import sqlContext.implicits._

    // Build an RDD of (null, BSON document) pairs; the input tuples are
    // ignored and every document gets the same hard-coded fields.
    val bsonRDD = sc.parallelize(("foo", 1, "female") ::
                                 ("bar", 2, "male") ::
                                 ("baz", -1, "female") :: Nil)
      .map { tuple =>
        val bson = new BasicBSONObject()
        bson.put("name", "bfoo")
        bson.put("value", 0.1)
        bson.put("t", "female")
        (null, bson)
      }

    // Convert each BSON document to a Person, then the RDD to a DataFrame.
    val tDf = bsonRDD.map(_._2)
      .map(f => Person(f.get("name").toString,
                       f.get("value").toString.toDouble,
                       f.get("t").toString))
      .toDF()

    tDf.limit(1).show()
  }
}
But 'MySQLDao.insertIntoMySQL()' fails to compile:
import org.apache.spark.SparkContext
import org.bson.BasicBSONObject

object MySQLDao {
  private val sc = new SparkContext("local", "Scala Word Count")
  val sqlContext = new org.apache.spark.sql.SQLContext(sc)
  import sqlContext.implicits._

  // Note: here the case class is defined inside the object.
  case class Person(name: String, age: Double, t: String)

  def insertIntoMySQL(): Unit = {
    val bsonRDD = sc.parallelize(("foo", 1, "female") ::
                                 ("bar", 2, "male") ::
                                 ("baz", -1, "female") :: Nil)
      .map { tuple =>
        val bson = new BasicBSONObject()
        bson.put("name", "bfoo")
        bson.put("value", 0.1)
        bson.put("t", "female")
        (null, bson)
      }

    val tDf = bsonRDD.map(_._2)
      .map(f => Person(f.get("name").toString,
                       f.get("value").toString.toDouble,
                       f.get("t").toString))
      .toDF()

    tDf.limit(1).show()
  }
}
When I call 'MySQLDao.insertIntoMySQL()', I get this error:

value typedProductIterator is not a member of object scala.runtime.ScalaRunTime
case class Person(name: String, age: Double, t: String)
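For reference, a minimal driver that reproduces the error looks like this (the object name Main is hypothetical; the error is reported against the case class Person line when the project is compiled):

object Main {
  def main(args: Array[String]): Unit = {
    MySQLDao.insertIntoMySQL()
  }
}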
Answer 0 (score 0):
I think the case class is not visible from inside the closure passed to the map function. Move it to the package level:
case class Person(name: String, age: Double, t: String)

object MySQLDao {
  ...
}
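For completeness, here is a minimal sketch of the rearranged file, assuming the same Spark 1.x SQLContext and mongo-java-driver BasicBSONObject setup as in the question; only the position of Person changes:

import org.apache.spark.SparkContext
import org.bson.BasicBSONObject

// Top-level case class: Spark SQL's implicits can derive the
// DataFrame schema for it when toDF() is called.
case class Person(name: String, age: Double, t: String)

object MySQLDao {
  private val sc = new SparkContext("local", "Scala Word Count")
  val sqlContext = new org.apache.spark.sql.SQLContext(sc)
  import sqlContext.implicits._

  def insertIntoMySQL(): Unit = {
    val bsonRDD = sc.parallelize(("foo", 1, "female") ::
                                 ("bar", 2, "male") ::
                                 ("baz", -1, "female") :: Nil)
      .map { _ =>
        // Every input tuple maps to the same hard-coded document,
        // exactly as in the question's code.
        val bson = new BasicBSONObject()
        bson.put("name", "bfoo")
        bson.put("value", 0.1)
        bson.put("t", "female")
        (null, bson)
      }

    bsonRDD.map(_._2)
      .map(f => Person(f.get("name").toString,
                       f.get("value").toString.toDouble,
                       f.get("t").toString))
      .toDF()
      .limit(1)
      .show()
  }
}

With Person defined at the top level, the closure no longer captures a class that is local to MySQLDao, and MySQLDao.insertIntoMySQL() should compile and run.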