Getting the topic from a Kafka message

Date: 2015-05-20 09:31:50

Tags: java cassandra apache-spark apache-kafka

How can I identify the topic name from a message consumed from Kafka?

I can receive messages from the Kafka producer, but since the consumer is now consuming three topics, I need to determine which topic each message came from.

2 answers:

Answer 0 (score: 1)

As of Spark 1.5.0, the official documentation encourages using the receiver-less/direct approach, which graduated from experimental status in the 1.5.0 release. This new Direct API lets you easily obtain each message together with its metadata, among other good things.
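A minimal sketch of that approach (assuming Spark 1.5.x with the spark-streaming-kafka artifact and an existing StreamingContext ssc; the broker address and topic names below are placeholders): every RDD produced by the direct stream implements HasOffsetRanges, and partition i of the RDD corresponds to offsetRanges(i), whose topic field names the topic the records came from.

import kafka.serializer.StringDecoder
import org.apache.spark.streaming.kafka.{HasOffsetRanges, KafkaUtils}

// Placeholders: broker address and the three topics being consumed.
val kafkaParams = Map("metadata.broker.list" -> "broker1:9092")
val topics = Set("topicA", "topicB", "topicC")

val directStream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
  ssc, kafkaParams, topics)

// Grab the offset ranges before any shuffle: partition i of the batch RDD
// maps one-to-one to offsetRanges(i), so the topic name can be attached
// to every (key, value) record.
val withTopic = directStream.transform { rdd =>
  val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
  rdd.mapPartitionsWithIndex { (i, iter) =>
    val topic = offsetRanges(i).topic
    iter.map { case (key, value) => (topic, key, value) }
  }
}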

Answer 1 (score: 0)

Unfortunately, this is not straightforward, because the KafkaReceiver and ReliableKafkaReceiver in Spark's source code store only MessageAndMetadata.key and the message itself.

There are two open tickets related to this issue in Spark's JIRA; they have been open for a while.

A quick-and-dirty copy/paste/modify of Spark's source code to solve your problem:

package org.apache.spark.streaming.kafka

import java.lang.{Integer => JInt}
import java.util.{Map => JMap, Properties}

import kafka.consumer.{KafkaStream, Consumer, ConsumerConfig, ConsumerConnector}
import kafka.serializer.{Decoder, StringDecoder}
import kafka.utils.VerifiableProperties
import org.apache.spark.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.api.java.{JavaReceiverInputDStream, JavaStreamingContext}
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.streaming.util.WriteAheadLogUtils
import org.apache.spark.util.ThreadUtils
import scala.collection.JavaConverters._
import scala.collection.Map
import scala.reflect._

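// Same as KafkaUtils.createStream, but the returned stream carries
// (key, message, topic) triples instead of plain (key, message) pairs.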
object MoreKafkaUtils {

  def createStream(
    jssc: JavaStreamingContext,
    zkQuorum: String,
    groupId: String,
    topics: JMap[String, JInt],
    storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK_SER_2
  ): JavaReceiverInputDStream[(String, String, String)] = {
    val kafkaParams = Map[String, String](
      "zookeeper.connect" -> zkQuorum, "group.id" -> groupId,
      "zookeeper.connection.timeout.ms" -> "10000")
    val walEnabled = WriteAheadLogUtils.enableReceiverLog(jssc.ssc.conf)
    new KafkaInputDStreamWithTopic[String, String, StringDecoder, StringDecoder](
      jssc.ssc, kafkaParams, topics.asScala.mapValues(_.intValue()), walEnabled, storageLevel)
  }

}

private[streaming]
class KafkaInputDStreamWithTopic[
  K: ClassTag,
  V: ClassTag,
  U <: Decoder[_] : ClassTag,
  T <: Decoder[_] : ClassTag](
    @transient ssc_ : StreamingContext,
    kafkaParams: Map[String, String],
    topics: Map[String, Int],
    useReliableReceiver: Boolean,
    storageLevel: StorageLevel
  ) extends ReceiverInputDStream[(K, V, String)](ssc_) with Logging {

  def getReceiver(): Receiver[(K, V, String)] = {
    if (!useReliableReceiver) {
      new KafkaReceiverWithTopic[K, V, U, T](kafkaParams, topics, storageLevel)
    } else {
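      // ReliableKafkaReceiverWithTopic is the analogous modification of
      // Spark's ReliableKafkaReceiver; it is not included in this snippet.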
      new ReliableKafkaReceiverWithTopic[K, V, U, T](kafkaParams, topics, storageLevel)
    }
  }
}

private[streaming]
class KafkaReceiverWithTopic[
  K: ClassTag,
  V: ClassTag,
  U <: Decoder[_] : ClassTag,
  T <: Decoder[_] : ClassTag](
    kafkaParams: Map[String, String],
    topics: Map[String, Int],
    storageLevel: StorageLevel
  ) extends Receiver[(K, V, String)](storageLevel) with Logging {

  // Connection to Kafka
  var consumerConnector: ConsumerConnector = null

  def onStop() {
    if (consumerConnector != null) {
      consumerConnector.shutdown()
      consumerConnector = null
    }
  }

  def onStart() {

    logInfo("Starting Kafka Consumer Stream with group: " + kafkaParams("group.id"))

    // Kafka connection properties
    val props = new Properties()
    kafkaParams.foreach(param => props.put(param._1, param._2))

    val zkConnect = kafkaParams("zookeeper.connect")
    // Create the connection to the cluster
    logInfo("Connecting to Zookeeper: " + zkConnect)
    val consumerConfig = new ConsumerConfig(props)
    consumerConnector = Consumer.create(consumerConfig)
    logInfo("Connected to " + zkConnect)

    val keyDecoder = classTag[U].runtimeClass.getConstructor(classOf[VerifiableProperties])
      .newInstance(consumerConfig.props)
      .asInstanceOf[Decoder[K]]
    val valueDecoder = classTag[T].runtimeClass.getConstructor(classOf[VerifiableProperties])
      .newInstance(consumerConfig.props)
      .asInstanceOf[Decoder[V]]

    // Create threads for each topic/message Stream we are listening
    val topicMessageStreams = consumerConnector.createMessageStreams(
      topics, keyDecoder, valueDecoder)

    val executorPool =
      ThreadUtils.newDaemonFixedThreadPool(topics.values.sum, "KafkaMessageHandler")
    try {
      // Start the messages handler for each partition
      topicMessageStreams.values.foreach { streams =>
        streams.foreach { stream => executorPool.submit(new MessageHandler(stream)) }
      }
    } finally {
      executorPool.shutdown() // Just causes threads to terminate after work is done
    }
  }

  // Handles Kafka messages
  private class MessageHandler(stream: KafkaStream[K, V])
    extends Runnable {
    def run() {
      logInfo("Starting MessageHandler.")
      try {
        val streamIterator = stream.iterator()
        while (streamIterator.hasNext()) {
          val msgAndMetadata = streamIterator.next()
          store((msgAndMetadata.key, msgAndMetadata.message, msgAndMetadata.topic))
        }
      } catch {
        case e: Throwable => reportError("Error handling message; exiting", e)
      }
    }
  }

}
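
For completeness, a hypothetical usage of the modified receiver above (the file must be compiled into your job under the org.apache.spark.streaming.kafka package, since it touches private[streaming] classes; host names, group id, and topic names are placeholders):

import org.apache.spark.SparkConf
import org.apache.spark.streaming.Durations
import org.apache.spark.streaming.api.java.JavaStreamingContext
import org.apache.spark.streaming.kafka.MoreKafkaUtils

val conf = new SparkConf().setAppName("topic-aware-consumer")
val jssc = new JavaStreamingContext(conf, Durations.seconds(5))

// One consumer thread per topic; values must be java.lang.Integer.
val topics = new java.util.HashMap[String, java.lang.Integer]()
topics.put("topicA", 1)
topics.put("topicB", 1)
topics.put("topicC", 1)

// Each record in the stream is a (key, message, topic) triple.
MoreKafkaUtils.createStream(jssc, "zkhost:2181", "my-group", topics)
  .dstream
  .foreachRDD(rdd => rdd.foreach { case (key, message, topic) =>
    println(s"topic=$topic key=$key message=$message")
  })

jssc.start()
jssc.awaitTermination()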