从Spark向hive插入数据时截断的纳秒数

时间:2018-03-23 07:21:50

标签: scala apache-spark hive timestamp

从spark向Hive TimestampType插入数据时,会截断纳秒。有人有任何解决方案吗?我曾尝试在hive上写入orc和csv格式。

CSV:它显示为2018-03-20T13:04:20.123Z

ORC:2018-03-20 13:04:20.123456

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types._
import org.apache.spark.sql.types.StructField
import java.util.Date
import org.apache.spark.sql.Row
import java.math.{BigDecimal,MathContext,RoundingMode}

/**
 * Minimal reproduction: writes a single row containing a nanosecond-precision
 * timestamp and a high-precision decimal to ORC, to demonstrate that Spark
 * truncates the timestamp to microsecond precision on write.
 * @author Shefali.Nema
 * @since 1.0.0
 */
object testDateAndDecimal {

  /** Entry point: delegates to [[execute]]. */
  def main(args: Array[String]): Unit = {
    execute()
  }

  /**
   * Builds a one-row DataFrame (id, timestamp, decimal price) and appends it
   * to an ORC folder named "testDecimal".
   *
   * NOTE on the observed "truncation": Spark SQL's `TimestampType` stores
   * timestamps with microsecond precision, so the nanosecond part of the
   * `java.sql.Timestamp` below ("...123456789") is necessarily cut to
   * "...123456" when written through Spark. This is a Spark limitation, not
   * a bug in this code; preserving nanoseconds would require storing the
   * value as a string or as separate seconds/nanos columns.
   */
  private def execute(): Unit = {

    val sparkConf = new SparkConf().setAppName("Test")
    val sc = new SparkContext(sparkConf)
    try {
      val sqlContext = new org.apache.spark.sql.SQLContext(sc)

      // Nanosecond-precision input; Spark keeps only microseconds (see above).
      val datetimestring: String = "2018-03-20 13:04:20.123456789"
      val dt = java.sql.Timestamp.valueOf(datetimestring)

      val id = 1
      // DecimalType(18, 8) below admits 8 fraction digits; the extra digits in
      // this literal are rounded during conversion to the Spark decimal.
      val price = new BigDecimal("1234567890.1234567899")

      System.out.println("\n###################################################price@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@" + price + "\n")
      System.out.println("\n###################################################dt@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@" + dt + "\n")

      val schema = StructType(
        StructField("id", IntegerType, true) ::
        StructField("name", TimestampType, true) ::
        StructField("price", DecimalType(18, 8), true) :: Nil)

      val row = Row.fromSeq(List(id, dt, price))

      // Single-row RDD wrapped into a DataFrame with the explicit schema.
      val rdd = sc.makeRDD(List(row))
      val orcFolderName = "testDecimal"
      val hiveRowsDF = sqlContext.createDataFrame(rdd, schema)

      hiveRowsDF.write.mode(org.apache.spark.sql.SaveMode.Append).orc(orcFolderName)
    } finally {
      // Fix: the SparkContext was never stopped in the original; release it
      // even when the ORC write throws.
      sc.stop()
    }
  }
}

0 个答案:

没有答案