Cannot get Println to work in Apache Spark

Time: 2017-11-06 09:15:23

Tags: apache-spark rdd

scala> import org.apache.spark.SparkContext
import org.apache.spark.SparkContext

scala> import org.apache.spark.SparkConf
import org.apache.spark.SparkConf

scala> import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.SparkSession

scala> object  rddTest{
 | def main(args: Array[String]) = {
 | val spark =  SparkSession.builder.appName("mapExample").master("local").getOrCreate()
 | val rdd1 = spark.sparkContext.parallelize(Seq((1,"jan",2016),(3,"nov",2014),(16,"feb",2014)))
 | val rdd2 = spark.sparkContext.parallelize(Seq((5,"dec",2014),(17,"sep",2015)))
 | val rdd3 = spark.sparkContext.parallelize(Seq((6,"dec",2011),(16,"may",2015)))
 | val rddUnion = rdd1.union(rdd2).union(rdd3)
 | rddUnion.foreach(Println)
 | }
 | }

I get this error and I don't know why it happens:

<console>:81: error: not found: value Println
       rddUnion.foreach(Println)

1 Answer:

Answer 0: (score: 0)

You have an extra capital letter. Try this:

rddUnion.foreach(println)
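
For reference, a minimal sketch of the corrected program from the question, with Println replaced by Scala's built-in println (the appName "mapExample" and the local master are taken from the question's code):

import org.apache.spark.sql.SparkSession

object rddTest {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("mapExample").master("local").getOrCreate()
    val rdd1 = spark.sparkContext.parallelize(Seq((1, "jan", 2016), (3, "nov", 2014), (16, "feb", 2014)))
    val rdd2 = spark.sparkContext.parallelize(Seq((5, "dec", 2014), (17, "sep", 2015)))
    val rdd3 = spark.sparkContext.parallelize(Seq((6, "dec", 2011), (16, "may", 2015)))
    val rddUnion = rdd1.union(rdd2).union(rdd3)
    // println (lowercase) resolves to Predef.println; "Println" does not exist,
    // which is why the compiler reported "not found: value Println"
    rddUnion.foreach(println)
    spark.stop()
  }
}

Note that Scala identifiers are case-sensitive, so Println and println are different names; only the lowercase form is defined in Predef.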