spark使用hashMap将数据帧写入postgres作为json

时间:2021-05-19 10:40:50

标签: postgresql scala apache-spark

我正在使用 <spark.version>2.2.1</spark.version>。 我想将一个具有 map 字段的数据框写入 postgres 作为 json 字段。

示例代码:

import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}

import scala.collection.immutable.HashMap

/** Example row carrying a map column. Spark has no JDBC type mapping for
  * `map<string,bigint>` (see the IllegalArgumentException below), so this type
  * must be serialized — e.g. to a JSON string — before a `.jdbc` write succeeds.
  */
final case class ExampleJson(map: HashMap[String, Long])

/** Demonstrates writing a DataFrame that has a `map<string,bigint>` column to
  * Postgres over JDBC.
  *
  * NOTE: the plain `.jdbc` write below fails at runtime with
  * "Can't get JDBC type for map<string,bigint>" — the map column needs to be
  * serialized (e.g. via `to_json`) before a JDBC write can succeed.
  */
object JdbcLoaderJson {

  // Target Postgres connection settings.
  val finalUrl = "jdbc:postgresql://localhost:54321/development"
  val user = "user"
  val password = "123456"

  val sparkConf = new SparkConf()
  sparkConf.setMaster("local[2]")
  val spark = SparkSession.builder().config(sparkConf).getOrCreate()

  /** Builds two example rows with a map column and writes them to `tableName`.
    *
    * @param tableName destination table (written with SaveMode.Overwrite)
    */
  def writeWithJson(tableName: String): Unit = {

    // JDBC credentials for the write.
    // (The original version also built a second, completely unused Properties.)
    def getProperties: Properties = {
      val prop = new Properties()
      prop.setProperty("user", user)
      prop.setProperty("password", password)
      prop
    }

    val asList = List(
      ExampleJson(HashMap("x" -> 1L, "y" -> 2L)),
      ExampleJson(HashMap("y" -> 3L, "z" -> 4L))
    )

    val asDf = spark.createDataFrame(asList)
    asDf.show(false)
    // Throws IllegalArgumentException: no JDBC type exists for map<string,bigint>.
    asDf.write.mode(SaveMode.Overwrite).jdbc(finalUrl, tableName, getProperties)
  }

  // Explicit entry point instead of `extends App` (avoids DelayedInit pitfalls).
  def main(args: Array[String]): Unit =
    writeWithJson("with_json")
}

输出:

+-------------------+
|map                |
+-------------------+
|Map(x -> 1, y -> 2)|
|Map(y -> 3, z -> 4)|
+-------------------+

Exception in thread "main" java.lang.IllegalArgumentException: Can't get JDBC type for map<string,bigint>
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$org$apache$spark$sql$execution$datasources$jdbc$JdbcUtils$$getJdbcType$2.apply(JdbcUtils.scala:172)
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$org$apache$spark$sql$execution$datasources$jdbc$JdbcUtils$$getJdbcType$2.apply(JdbcUtils.scala:172)
    at scala.Option.getOrElse(Option.scala:121)
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.org$apache$spark$sql$execution$datasources$jdbc$JdbcUtils$$getJdbcType(JdbcUtils.scala:171)
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$schemaString$1$$anonfun$23.apply(JdbcUtils.scala:707)
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$schemaString$1$$anonfun$23.apply(JdbcUtils.scala:707)
    at scala.collection.MapLike$class.getOrElse(MapLike.scala:128)
    at scala.collection.AbstractMap.getOrElse(Map.scala:59)
    at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$schemaString$1.apply(JdbcUtils.scala:707)
    
Process finished with exit code 1

实际上我也可以使用字符串而不是 Map,关键在于如何将 json 列从 spark 写入 postgres

1 个答案:

答案 0 :(得分:1)

将 HashMap 数据转换为如下所示的 json 字符串。

// Serialize the map column to a JSON string before the JDBC write:
// to_json(struct($"*")) packs every column of the row into a single JSON object
// column named "map", for which a JDBC (text) type does exist.
// NOTE(review): this fragment assumes `import org.apache.spark.sql.functions.{to_json, struct}`
// and `import spark.implicits._` (for the $"col" syntax) are in scope — confirm in the caller.
asDf
.select(
    to_json(struct($"*"))
    .as("map")
)
.write
.mode(SaveMode.Overwrite)
.jdbc(finalUrl, tableName, getProperties)
相关问题