在 Scala Spark 中运行 Seq[Column] 时出现以下错误

时间:2019-03-12 19:04:05

标签: scala apache-spark-sql

错误是:

<console>:195: error: not found: type Column
       val aggExprs: Seq[Column] = output.columns.filterNot(_=="id")

代码如下:

// Sample data: two rows sharing id = 1, with double / boolean / string columns.
val rows = Seq(
  (1, 1.0, true, "a"),
  (1, 2.0, false, "b")
)
// Requires spark.implicits._ in scope for the tuple-Seq -> DataFrame conversion.
val df = rows.toDF("id", "d", "b", "s")

// Column name -> Spark SQL type, taken from the DataFrame's schema.
val dataTypes: Map[String, DataType] =
  df.schema.fields.map(field => field.name -> field.dataType).toMap

/** Picks a type-appropriate aggregation expression for column `c`.
  *
  * - DoubleType  -> sum of the values
  * - StringType  -> comma-joined list of the values ("append")
  * - BooleanType -> max (i.e. logical OR of the group)
  * - anything else -> first value seen (fallback; the original match was
  *   non-exhaustive and threw MatchError for e.g. IntegerType)
  *
  * @param c column name; must be a key of `dataTypes`
  * @return an aggregate Column usable inside `agg(...)`
  */
def genericAgg(c: String) = {
  dataTypes(c) match {
    case DoubleType  => sum(col(c))
    case StringType  => concat_ws(",", collect_list(col(c))) // "append"
    case BooleanType => max(col(c))
    case _           => first(col(c)) // safe default for unhandled types
  }
}

// Fix for "<console>:195: error: not found: type Column":
// the Column type must be imported before it can be used in an annotation.
import org.apache.spark.sql.Column

// One aggregate expression per non-key column.
// (Also removed the stray markdown ** markers that broke compilation.)
val aggExprs: Seq[Column] = df.columns
  .filterNot(_ == "id")
  .map(genericAgg)

// Group by the key column and apply every generated aggregate at once;
// agg(...) takes a head expression plus varargs, hence head/tail splitting.
val aggregated = df
  .groupBy("id")
  .agg(aggExprs.head, aggExprs.tail: _*)
aggregated.show()

0 个答案:

没有答案