org.bson.codecs.configuration.CodecConfigurationException:找不到类org.bson.BsonDecimal128的编解码器

时间:2018-09-20 11:31:41

标签: mongodb apache-spark

org.apache.spark.SparkException:由于阶段失败而导致作业中止:阶段0.0中的任务0失败1次,最近一次失败:阶段0.0中的任务0.0丢失(TID 0,本地主机,执行程序驱动程序):org.bson.codecs.configuration.CodecConfigurationException:找不到类org.bson.BsonDecimal128的编解码器。     在org.bson.codecs.configuration.CodecCache.getOrThrow(CodecCache.java:46)     在org.bson.codecs.configuration.ProvidersCodecRegistry.get(ProvidersCodecRegistry.java:63)     在org.bson.codecs.configuration.ChildCodecRegistry.get(ChildCodecRegistry.java:51)     在org.bson.codecs.BsonDocumentCodec.writeValue(BsonDocumentCodec.java:132)     在org.bson.codecs.BsonDocumentCodec.encode(BsonDocumentCodec.java:112)     在org.bson.codecs.BsonDocumentCodec.encode(BsonDocumentCodec.java:40)     在com.mongodb.connection.InsertCommandMessage.writeTheWrites(InsertCommandMessage.java:101)     在com.mongodb.connection.InsertCommandMessage.writeTheWrites(InsertCommandMessage.java:43)     在com.mongodb.connection.BaseWriteCommandMessage.encodeMessageBodyWithMetadata(BaseWriteCommandMessage.java:129)     在com.mongodb.connection.RequestMessage.encodeWithMetadata(RequestMessage.java:160)     在com.mongodb.connection.WriteCommandProtocol.sendMessage(WriteCommandProtocol.java:212)     在com.mongodb.connection.WriteCommandProtocol.execute(WriteCommandProtocol.java:101)     在com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:67)     在com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:37)     在com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)     在com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)     在com.mongodb.connection.DefaultServerConnection.insertCommand(DefaultServerConnection.java:115)     在com.mongodb.operation.MixedBulkWriteOperation$Run$2.executeWriteCommandProtocol(MixedBulkWriteOperation.java:455)     在com.mongodb.operation.MixedBulkWriteOperation$Run$RunExecutor.execute(MixedBulkWriteOperation.java:646)     
在com.mongodb.operation.MixedBulkWriteOperation$Run.execute(MixedBulkWriteOperation.java:401)     在com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:179)     在com.mongodb.operation.MixedBulkWriteOperation$1.call(MixedBulkWriteOperation.java:168)     在com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)     在com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)     在com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:168)     在com.mongodb.operation.MixedBulkWriteOperation.execute(MixedBulkWriteOperation.java:74)     在com.mongodb.Mongo.execute(Mongo.java:781)     在com.mongodb.Mongo$2.execute(Mongo.java:764)     在com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:323)     在com.mongodb.MongoCollectionImpl.insertMany(MongoCollectionImpl.java:311)     在com.mongodb.spark.MongoSpark$$anonfun$save$1$anonfun$apply$1$anonfun$apply$2.apply(MongoSpark.scala:119)     在com.mongodb.spark.MongoSpark$$anonfun$save$1$anonfun$apply$1$anonfun$apply$2.apply(MongoSpark.scala:119)     在scala.collection.Iterator$class.foreach(Iterator.scala:727)     在scala.collection.AbstractIterator.foreach(Iterator.scala:1157)     在com.mongodb.spark.MongoSpark$$anonfun$save$1$anonfun$apply$1.apply(MongoSpark.scala:119)     在com.mongodb.spark.MongoSpark$$anonfun$save$1$anonfun$apply$1.apply(MongoSpark.scala:118)     在com.mongodb.spark.MongoConnector$$anonfun$withCollectionDo$1.apply(MongoConnector.scala:186)     在com.mongodb.spark.MongoConnector$$anonfun$withCollectionDo$1.apply(MongoConnector.scala:184)     在com.mongodb.spark.MongoConnector$$anonfun$withDatabaseDo$1.apply(MongoConnector.scala:171)     在com.mongodb.spark.MongoConnector$$anonfun$withDatabaseDo$1.apply(MongoConnector.scala:171)     在com.mongodb.spark.MongoConnector.withMongoClientDo(MongoConnector.scala:154)     
在com.mongodb.spark.MongoConnector.withDatabaseDo(MongoConnector.scala:171)     在com.mongodb.spark.MongoConnector.withCollectionDo(MongoConnector.scala:184)     在com.mongodb.spark.MongoSpark$$anonfun$save$1.apply(MongoSpark.scala:118)     在com.mongodb.spark.MongoSpark$$anonfun$save$1.apply(MongoSpark.scala:117)     在org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$anonfun$apply$29.apply(RDD.scala:926)     在org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1$anonfun$apply$29.apply(RDD.scala:926)     在org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2062)     在org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2062)     在org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)     在org.apache.spark.scheduler.Task.run(Task.scala:108)     在org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:335)     在java.util.concurrent.ThreadPoolExecutor.runWorker(未知来源)     在java.util.concurrent.ThreadPoolExecutor$Worker.run(未知来源)     在java.lang.Thread.run(未知来源)

0 个答案:

没有答案