Spark Structured Streaming: console sink not working as expected

Time: 2018-05-23 05:52:11

Tags: scala apache-spark spark-structured-streaming

I have the following code that reads and processes Kafka data using Structured Streaming:

import org.apache.spark.sql.{ForeachWriter, SparkSession}

object ETLTest {

  case class record(value: String, topic: String)

  def main(args: Array[String]): Unit = {
    run()
  }

  def run(): Unit = {

    val spark = SparkSession
      .builder
      .appName("Test JOB")
      .master("local[*]")
      .getOrCreate()

    val kafkaStreamingDF = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "...")
      .option("subscribe", "...")
      .option("failOnDataLoss", "false")
      .option("startingOffsets","earliest")
      .load()
      .selectExpr("CAST(value as STRING)", "CAST(timestamp as STRING)", "CAST(topic as STRING)")

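    // Custom sink: print each consumed record to stdout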
    val sdvWriter = new ForeachWriter[record] {
      def open(partitionId: Long, version: Long): Boolean = {
        true
      }
      def process(record: record) = {
        println("record:: " + record)
      }
      def close(errorOrNull: Throwable): Unit = {}
    }

    val sdvDF = kafkaStreamingDF
      .as[record]
      .filter($"value".isNotNull)

    // DOES NOT WORK
    /*val query = sdvDF
        .writeStream
        .format("console")
        .start()
        .awaitTermination()*/

    // WORKS
    /*val query = sdvDF
      .writeStream
      .foreach(sdvWriter)
      .start()
      .awaitTermination()
      */

  }

}

I am running this code from the IntelliJ IDEA IDE. When I use foreach(sdvWriter) I can see the records being consumed from Kafka, but when I use .writeStream.format("console") I do not see any records. I am assuming the console write stream maintains some kind of checkpoint and assumes it has already processed all the records. Is that the case? Am I missing something obvious?
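
(For reference, one way to test that checkpoint theory is to give the console query an explicit, fresh checkpoint directory via the standard checkpointLocation option; the path below is just a placeholder, not from the original post:)

    // Sketch: rule out replayed checkpoint state by pointing the query
    // at a fresh checkpoint directory (placeholder path)
    sdvDF
      .writeStream
      .format("console")
      .option("checkpointLocation", "/tmp/etl-test-fresh-checkpoint")
      .start()
      .awaitTermination()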

1 Answer:

Answer 0 (score: 0):

I copied your code here and both options work. Actually, without importing spark.implicits._ both options fail to compile, so I am not sure what you are missing. It may be that some dependency is not configured correctly. Could you add your pom.xml?
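
For what it's worth, the Kafka source requires the spark-sql-kafka-0-10 artifact on the classpath in addition to spark-sql. A minimal build.sbt sketch, assuming Spark 2.3.0 on Scala 2.11 (versions are assumptions based on the date of the post; match them to your environment):

    // Assumed versions; align with your Spark installation
    scalaVersion := "2.11.12"

    libraryDependencies ++= Seq(
      "org.apache.spark" %% "spark-sql"            % "2.3.0",
      "org.apache.spark" %% "spark-sql-kafka-0-10" % "2.3.0"
    )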

import org.apache.spark.sql.SparkSession

object Check {

  case class record(value: String, topic: String)


  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder().master("local[2]")
      .getOrCreate


    import spark.implicits._

    val kafkaStreamingDF = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092")
      .option("subscribe", "test")
      .option("startingOffsets","earliest")
      .option("failOnDataLoss", "false")
      .load()
      .selectExpr("CAST(value as STRING)", "CAST(timestamp as STRING)", "CAST(topic as STRING)")


    val sdvDF = kafkaStreamingDF
      .as[record]
      .filter($"value".isNotNull)

    val query = sdvDF.writeStream
          .format("console")
          .option("truncate","false")
          .start()
          .awaitTermination()

  }


}
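
When the console sink is working, each micro-batch prints a numbered batch header followed by a table of the selected columns, roughly like this (row contents depend on your topic):

    -------------------------------------------
    Batch: 0
    -------------------------------------------
    +-----+---------+-----+
    |value|timestamp|topic|
    +-----+---------+-----+
    |...  |...      |...  |
    +-----+---------+-----+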