在进行案例类提取时,问题出在map函数中。案例类不可序列化。我隐含地定义了格式onsubmit="functionhere();"
。
DefaultFormats
错误:
INFO [main](TypeExtractor.java:1804) - 没有为类检测到字段 org.json4s.JsonAST $ JValue。不能用作PojoType。将会 作为GenericType处理 线程" main"中的例外情况org.apache.flink.api.common.InvalidProgramException:任务没有 序列化 在org.apache.flink.api.scala.ClosureCleaner $ .ensureSerializable(ClosureCleaner.scala:172) 在org.apache.flink.api.scala.ClosureCleaner $ .clean(ClosureCleaner.scala:164) 在org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.scalaClean(StreamExecutionEnvironment.scala:666) 在org.apache.flink.streaming.api.scala.DataStream.clean(DataStream.scala:994) 在org.apache.flink.streaming.api.scala.DataStream.map(DataStream.scala:519) 在org.apache.flink.quickstart.WordCount $ .main(WordCount.scala:38) 在org.apache.flink.quickstart.WordCount.main(WordCount.scala) 引起:java.io.NotSerializableException:org.json4s.DefaultFormats $$ anon $ 4 at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1184) at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548) at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509) at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432) at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178) at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1548) at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1509) at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1432) at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1178) at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:348) 在org.apache.flink.util.InstantiationUtil.serializeObject(InstantiationUtil.java:317) 在org.apache.flink.api.scala.ClosureCleaner $ .ensureSerializable(ClosureCleaner.scala:170) ......还有6个
package org.apache.flink.quickstart import java.util.Properties import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper} import com.fasterxml.jackson.module.scala.DefaultScalaModule import org.apache.flink.api.scala._ import org.apache.flink.runtime.state.filesystem.FsStateBackend import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment} import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer09 import org.apache.flink.streaming.util.serialization.SimpleStringSchema import org.json4s.DefaultFormats import org.json4s._ import org.json4s.native.JsonMethods import scala.util.Try case class CC(key:String) object WordCount{ def main(args: Array[String]) { implicit val formats = org.json4s.DefaultFormats // kafka properties val properties = new Properties() properties.setProperty("bootstrap.servers", "***.**.*.***:9093") properties.setProperty("zookeeper.connect", "***.**.*.***:2181") properties.setProperty("group.id", "afs") properties.setProperty("auto.offset.reset", "earliest") val env = StreamExecutionEnvironment.getExecutionEnvironment val st = env .addSource(new FlinkKafkaConsumer09("new", new SimpleStringSchema() , properties)) .flatMap(raw => JsonMethods.parse(raw).toOption) // .map(_.extract[CC]) val l = st.map(_.extract[CC]) st.print() env.execute() } }
答案 0 :(得分:0)
要解决的问题
implicit val formats = org.json4s.DefaultFormats
之外的主要功能
object WordCount{
implicit val formats = org.json4s.DefaultFormats
def main(args: Array[String])
{
或者懒洋洋地初始化像
这样的格式implicit lazy val formats = org.json4s.DefaultFormats
中的主要功能
def main(args: Array[String]) {
implicit val formats = org.json4s.DefaultFormats