I am trying to consume the data from partition 0 and write the received records to a file on HDFS. It throws an exception, and I do not see any data being written to the HDFS file.
import java.util
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import java.util.Properties
import org.apache.kafka.clients.consumer.KafkaConsumer
import scala.collection.JavaConverters._

object WeatherCons {
  def main(args: Array[String]): Unit = {
    val TOPIC = "again"
    val props = new Properties()
    props.put("consumer.timeout.ms", "1500")
    props.put("bootstrap.servers", "104.197.102.208:9092")
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("group.id", "something")

    val consumer = new KafkaConsumer[String, String](props)
    consumer.subscribe(util.Collections.singletonList(TOPIC))
    /* val topicPartition = new TopicPartition(TOPIC, 0)
       consumer.seekToBeginning(topicPartition) */

    val conf = new Configuration()
    conf.set("fs.defaultFS", "hdfs://104.197.102.208:8020")
    val fs = FileSystem.get(conf)

    import org.apache.hadoop.fs.FSDataOutputStream
    val fin: FSDataOutputStream = fs.create(new Path("/prash/mySample3.txt"))

    while (true) {
      val records = consumer.poll(100)
      for (record <- records.asScala) {
        val co = record.value().toString
        fin.writeUTF(co)
        fin.writeUTF("\n")
        println(co)
      }
      fin.close()
      fin.flush()
    }
  }
}
It throws the following exception, and no data is written to HDFS:
Exception in thread "main" java.nio.channels.ClosedChannelException
    at org.apache.hadoop.hdfs.DFSOutputStream.checkClosed(DFSOutputStream.java:1940)
    at org.apache.hadoop.fs.FSOutputSummer.write(FSOutputSummer.java:105)
    at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.write(FSDataOutputStream.java:58)
    at java.io.DataOutputStream.write(DataOutputStream.java:107)
    at java.io.DataOutputStream.writeUTF(DataOutputStream.java:401)
    at java.io.DataOutputStream.writeUTF(DataOutputStream.java:323)
Answer 0 (score: 0)
Try flushing instead of closing fin inside the while loop, and move fin.close() to after the loop. As written, the stream is closed at the end of the first loop iteration (followed by a flush() on the already-closed stream), so the writeUTF in the next iteration throws ClosedChannelException. A minimal sketch of the corrected loop is below.
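This sketch reuses consumer, fs, and the records loop from the question; the hflush() call and the try/finally wrapper are additions of this answer, not part of the original code. hflush() makes the buffered bytes visible to HDFS readers without closing the stream:

    val fin: FSDataOutputStream = fs.create(new Path("/prash/mySample3.txt"))
    try {
      while (true) {
        val records = consumer.poll(100)
        for (record <- records.asScala) {
          val co = record.value().toString
          fin.writeUTF(co)
          fin.writeUTF("\n")
          println(co)
        }
        // hflush() pushes the buffered bytes to the DataNodes but keeps the
        // stream open, so the next poll() can keep writing to the same file.
        fin.hflush()
      }
    } finally {
      // Close exactly once, when the loop exits (e.g. on shutdown), never inside it.
      fin.close()
      consumer.close()
    }

Separately, note that writeUTF prefixes each string with a two-byte length (modified UTF-8), so the file will not be plain text; if you want readable lines, fin.write((co + "\n").getBytes("UTF-8")) may be closer to what you intend.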