I am trying to index Kafka input into Elasticsearch using Spark Streaming.
The messages in Kafka look like this:
"Tom 34 happy Paris"
I would like to define a structure in Spark Streaming so that this message is indexed in Elasticsearch as:
{"name": "Tom", "age": 34, "status": "happy", "city": "Paris"}
I have read about RDD transformations, but I cannot figure out how to define keys for the values.
I need your help.
Below is my code, which only does a word count of the messages received from Kafka:
package com.examples

import org.apache.log4j.Logger
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}
import org.apache.spark.streaming.kafka._

object MainExample {

  def main(arg: Array[String]) {
    val logger = Logger.getLogger(this.getClass())

    val jobName = "MainExample"
    val conf = new SparkConf().setAppName(jobName)
    val ssc = new StreamingContext(conf, Seconds(2))
    // reduceByKeyAndWindow with an inverse function requires checkpointing.
    ssc.checkpoint("checkpoint")

    // ZooKeeper-based Kafka receiver configuration.
    val zkQuorum = "localhost:2181"
    val group = ""
    val topics = "test"
    val numThreads = 1
    val topicMap = topics.split(",").map((_, numThreads.toInt)).toMap

    // Each Kafka record is a (key, message) pair; keep only the message.
    val lineMap = KafkaUtils.createStream(ssc, zkQuorum, group, topicMap)
    val lines = lineMap.map(_._2)

    // Word count over a 10-minute sliding window, updated every 2 seconds.
    val words = lines.flatMap(_.split(" "))
    val pair = words.map(x => (x, 1))
    val wordCounts = pair.reduceByKeyAndWindow(_ + _, _ - _, Minutes(10), Seconds(2), 2)
    wordCounts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
Answer (score: 1):

Split each message on spaces and build a map of key/value pairs, for example:
// "input" is the JavaRDD<String> of raw messages.
JavaRDD<Map<String, String>> result = input.map(new Function<String, Map<String, String>>() {
    @Override
    public Map<String, String> call(String v1) throws Exception {
        Map<String, String> ret = new HashMap<>();
        int i = 0;
        // Assign a positional key ("key0", "key1", ...) to each token.
        for (String val : v1.split(" ")) {
            ret.put("key" + i++, val);
        }
        return ret;
    }
});
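
To get the named fields from the question instead of positional "key0"/"key1" keys, and to actually index the result, something like the following could work. This is a minimal, untested sketch in Scala that plugs into the `lines` DStream from the question's code; it assumes the elasticsearch-hadoop connector (the `elasticsearch-spark` artifact) is on the classpath and that every message has exactly four space-separated tokens. The index/type name "spark/person" is just an example.

import org.elasticsearch.spark._

// Split "Tom 34 happy Paris" into the four named fields.
// Note: this pattern match throws a MatchError on malformed lines,
// so real input should be validated first.
val docs = lines.map { line =>
  val Array(name, age, status, city) = line.split(" ")
  Map("name" -> name, "age" -> age.toInt, "status" -> status, "city" -> city)
}

// Index each micro-batch into Elasticsearch. es.nodes defaults to localhost
// and can be overridden on the SparkConf, e.g. conf.set("es.nodes", "es-host").
docs.foreachRDD(rdd => rdd.saveToEs("spark/person"))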