How to get the names of all array columns in a DataFrame?
The problem is that I am trying to explode all the arrays.
import org.apache.spark.sql.{Column, DataFrame, SparkSession}
import org.apache.spark.sql.functions.{col, explode}
import org.apache.spark.sql.types.{ArrayType, StructField, StructType}
val providersDF = SIWINSDF.select(explode(col("providers")).as("collection")).select(col("collection.*"))
def flattenSchema(schema: StructType, prefix: String = null): Array[Column] = {
  schema.fields.flatMap(f => {
    // Build the fully qualified column name, e.g. "parent.child"
    val colName = if (prefix == null) f.name else prefix + "." + f.name
    f.dataType match {
      // Recurse into nested structs
      case st: StructType => flattenSchema(st, colName)
      // Leaf field: select it and keep the dotted name as the alias
      case _ => Array(col(colName).alias(colName))
    }
  })
}
val newDF = providersDF.select(flattenSchema(providersDF.schema): _*)
newDF.toDF(newDF.columns.map(_.replace(".", "_")): _*).printSchema
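For illustration, here is a minimal sketch of what flattenSchema produces on a small, hypothetical struct-only DataFrame (the Provider/Address schema below is made up and is not the real providers schema; it assumes a SparkSession named spark, as in spark-shell):
import spark.implicits._
// Hypothetical schema used only to demonstrate flattenSchema.
case class Address(city: String, zip: String)
case class Provider(id: Int, address: Address)
val demoDF = Seq(Provider(1, Address("Paris", "75001"))).toDF()
// Nested struct fields become dotted column names: id, address.city, address.zip
demoDF.select(flattenSchema(demoDF.schema): _*).columns
// Array(id, address.city, address.zip)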
To get the names of all the array columns, this is what I am doing:
df.schema.filter(st => st.dataType.isInstanceOf[ArrayType]).flatMap(_.dataType.asInstanceOf[StructType].fields).map(_.name)
Thanks for your help.
Answer 0 (score: 1)
Here is a recursive method that extracts all nested ArrayType columns from a DataFrame:
import org.apache.spark.sql.types._
def extractArrayCols(schema: StructType, prefix: String): Seq[String] =
  schema.fields.flatMap {
    // Recurse into nested structs, extending the dotted prefix
    case StructField(name, struct: StructType, _, _) => extractArrayCols(struct, prefix + name + ".")
    // Collect array columns under their fully qualified name
    case StructField(name, ArrayType(_, _), _, _) => Seq(s"$prefix$name")
    case _ => Seq.empty[String]
  }
Testing the method:
import org.apache.spark.sql.functions._
import spark.implicits._ // assumes a SparkSession named `spark`, as in spark-shell

case class W(u: Int, v: Seq[String])

val df = Seq(
  (10, Seq(1, 2), W(1, Seq("a", "b"))),
  (20, Seq(3), W(2, Seq("c", "d")))
).toDF("c1", "c2", "c3")

val arrayCols = extractArrayCols(df.schema, "")
// arrayCols: Seq[String] = ArraySeq(c2, c3.v)
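As a follow-up sketch (an assumption, not part of the original answer), the detected array columns could then be exploded one by one, for example with a foldLeft over the df and arrayCols from the test above; explode_outer keeps rows whose array is null or empty, and dots in the source names are replaced with underscores so the new column names stay easy to select:
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{col, explode_outer}

// Explode every detected array column in turn; note that each explode multiplies rows.
val exploded: DataFrame = arrayCols.foldLeft(df) { (acc, arrayCol) =>
  acc.withColumn(arrayCol.replace(".", "_") + "_exploded", explode_outer(col(arrayCol)))
}
exploded.printSchema()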