Spark throws scala.reflect.internal.MissingRequirementError when saving a Naive Bayes model

Asked: 2016-06-30 11:00:03

Tags: multithreading scala apache-spark future apache-spark-ml

When I run the sample code below with a Scala Future in spark-shell (1.6.0), I get an exception.

Sample code

import org.apache.spark.ml.classification.NaiveBayesModel
import java.util.concurrent.Executors
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.{ Failure, Success }

// One thread pool for the load/save work, another for the completion callbacks
implicit lazy val ec1 = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(10))
implicit lazy val ec2 = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(10))

(300 until 400).foreach { x =>
  Future {
    // Each future loads the same model and saves it under a new path
    sc.setJobGroup(x.toString, "description")
    val model = NaiveBayesModel.load("/tmp/nb_0")
    model.save(s"/tmp/nb$x")
  }(ec1).onComplete {
    case Success(value) => println(s"output $value")
    case Failure(e)     => e.printStackTrace()
  }(ec2)
}

Here I am reading a Naive Bayes model from the local disk and writing it back multiple times, asynchronously, under different file names. The failure only shows up in this concurrent setup; a synchronous version of the same sequence is sketched below for comparison.
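For reference, this is a minimal sketch of the same load/save loop run sequentially on the driver thread, with no Futures. It assumes the same spark-shell session and the same model at /tmp/nb_0; it is only an illustration of the non-concurrent variant, not a confirmed workaround.

import org.apache.spark.ml.classification.NaiveBayesModel

// Synchronous variant: each iteration loads the model and saves it under a new path,
// one after another on the driver thread.
(300 until 400).foreach { x =>
  sc.setJobGroup(x.toString, "description")
  val model = NaiveBayesModel.load("/tmp/nb_0")
  model.save(s"/tmp/nb$x")
}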

Exception

scala.reflect.internal.MissingRequirementError: class org.apache.spark.ml.classification.NaiveBayesModel.NaiveBayesModelWriter in JavaMirror with org.apache.spark.repl.SparkIMain$TranslatingClassLoader@16d48c9 of type class org.apache.spark.repl.SparkIMain$TranslatingClassLoader with classpath [/tmp/spark-bce3a131-f6d5-4b5a-9853-3aa0d70b928f] and parent being org.apache.spark.repl.SparkIMain$$anon$2@2dc584da of type class org.apache.spark.repl.SparkIMain$$anon$2 with classpath [file:/usr/lib/jvm/java-7-oracle/jre/lib/resources.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/rt.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/jsse.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/jce.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/charsets.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/jfr.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunec.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/localedata.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/dnsns.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunjce_provider.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/zipfs.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunpkcs11.jar,file:/usr/local/spark-1.6.0/conf/,file:/usr/local/spark-1.6.0/assembly/target/scala-2.10/spark-assembly-1.6.0-hadoop2.4.1.jar,file:/usr/local/spark-1.6.0/lib_managed/jars/datanucleus-core-3.2.10.jar,file:/usr/local/spark-1.6.0/lib_managed/jars/datanucleus-api-jdo-3.2.6.jar,file:/usr/local/spark-1.6.0/lib_managed/jars/datanucleus-rdbms-3.2.9.jar,file:/home/kaushal/kaushal-drive/spark/Simba_Spark_JDBC_Desktop/SimbaRTU_SparkJDBC41_Client_1.0.0.1001/,file:/usr/local/spark-1.6.0/lib_managed/jars/mysql-connector-java-5.1.36.jar] and parent being sun.misc.Launcher$AppClassLoader@3ad6a0e0 of type class sun.misc.Launcher$AppClassLoader with classpath [file:/usr/local/spark-1.6.0/conf/,file:/usr/local/spark-1.6.0/assembly/target/scala-2.10/spark-assembly-1.6.0-hadoop2.4.1.jar,file:/usr/local/spark-1.6.0/lib_managed/jars/datanucleus-core-3.2.10.jar,file:/usr/local/spark-1.6.0/lib_managed/jars/datanucleus-api-jdo-3.2.6.jar,file:/usr/local/spark-1.6.0/lib_managed/jars/datanucleus-rdbms-3.2.9.jar] and parent being sun.misc.Launcher$ExtClassLoader@60dbf04d of type class sun.misc.Launcher$ExtClassLoader with classpath [file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunec.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/localedata.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/dnsns.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunjce_provider.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/zipfs.jar,file:/usr/lib/jvm/java-7-oracle/jre/lib/ext/sunpkcs11.jar] and parent being primordial classloader with boot classpath [/usr/lib/jvm/java-7-oracle/jre/lib/resources.jar:/usr/lib/jvm/java-7-oracle/jre/lib/rt.jar:/usr/lib/jvm/java-7-oracle/jre/lib/sunrsasign.jar:/usr/lib/jvm/java-7-oracle/jre/lib/jsse.jar:/usr/lib/jvm/java-7-oracle/jre/lib/jce.jar:/usr/lib/jvm/java-7-oracle/jre/lib/charsets.jar:/usr/lib/jvm/java-7-oracle/jre/lib/jfr.jar:/usr/lib/jvm/java-7-oracle/jre/classes] not found.
    at scala.reflect.internal.MissingRequirementError$.signal(MissingRequirementError.scala:16)
    at scala.reflect.internal.MissingRequirementError$.notFound(MissingRequirementError.scala:17)
    at scala.reflect.internal.Mirrors$RootsBase.getModuleOrClass(Mirrors.scala:48)
    at scala.reflect.internal.Mirrors$RootsBase.getModuleOrClass(Mirrors.scala:61)
    at scala.reflect.internal.Mirrors$RootsBase.staticModuleOrClass(Mirrors.scala:72)
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:119)
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:21)
    at org.apache.spark.ml.classification.NaiveBayesModel$NaiveBayesModelWriter$$typecreator1$1.apply(NaiveBayes.scala:264)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:231)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:231)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.localTypeOf(ScalaReflection.scala:642)
    at org.apache.spark.sql.catalyst.ScalaReflection$.localTypeOf(ScalaReflection.scala:30)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:630)
    at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:30)
    at org.apache.spark.sql.SQLContext.createDataFrame(SQLContext.scala:430)
    at org.apache.spark.ml.classification.NaiveBayesModel$NaiveBayesModelWriter.saveImpl(NaiveBayes.scala:264)
    at org.apache.spark.ml.util.MLWriter.save(ReadWrite.scala:90)
    at org.apache.spark.ml.util.MLWritable$class.save(ReadWrite.scala:130)
    at org.apache.spark.ml.classification.NaiveBayesModel.save(NaiveBayes.scala:130)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply$mcV$sp(<console>:65)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply(<console>:63)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply(<console>:63)
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)
(the same MissingRequirementError and stack trace is printed two more times for subsequent futures)
java.lang.AssertionError: assertion failed: class MLWriter
    at scala.reflect.internal.pickling.UnPickler$Scan.readSymbolRef(UnPickler.scala:784)
    at scala.reflect.internal.pickling.UnPickler$Scan.readType(UnPickler.scala:346)
    at scala.reflect.internal.pickling.UnPickler$Scan$$anonfun$readTypeRef$1.apply(UnPickler.scala:792)
    at scala.reflect.internal.pickling.UnPickler$Scan$$anonfun$readTypeRef$1.apply(UnPickler.scala:792)
    at scala.reflect.internal.pickling.UnPickler$Scan.at(UnPickler.scala:171)
    at scala.reflect.internal.pickling.UnPickler$Scan.readTypeRef(UnPickler.scala:792)
    at scala.reflect.internal.pickling.UnPickler$Scan.readType(UnPickler.scala:366)
    at scala.reflect.internal.pickling.UnPickler$Scan$LazyTypeRef$$anonfun$34.apply(UnPickler.scala:855)
    at scala.reflect.internal.pickling.UnPickler$Scan$LazyTypeRef$$anonfun$34.apply(UnPickler.scala:855)
    at scala.reflect.internal.pickling.UnPickler$Scan.at(UnPickler.scala:171)
    at scala.reflect.internal.pickling.UnPickler$Scan$LazyTypeRef.complete(UnPickler.scala:855)
    at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1231)
    at scala.reflect.internal.Symbols$Symbol.initialize(Symbols.scala:1374)
    at scala.reflect.internal.Symbols$Symbol.annotations(Symbols.scala:1581)
    at scala.reflect.internal.Symbols$Symbol.addAnnotation(Symbols.scala:1600)
    at scala.reflect.internal.pickling.UnPickler$Scan.readSymbolAnnotation(UnPickler.scala:491)
    at scala.reflect.internal.pickling.UnPickler$Scan.run(UnPickler.scala:88)
    at scala.reflect.internal.pickling.UnPickler.unpickle(UnPickler.scala:37)
    at scala.reflect.runtime.JavaMirrors$JavaMirror.unpickleClass(JavaMirrors.scala:565)
    at scala.reflect.runtime.SymbolLoaders$TopClassCompleter.complete(SymbolLoaders.scala:32)
    at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1231)
    at scala.reflect.internal.Mirrors$RootsBase.getModuleOrClass(Mirrors.scala:43)
    at scala.reflect.internal.Mirrors$RootsBase.getModuleOrClass(Mirrors.scala:61)
    at scala.reflect.internal.Mirrors$RootsBase.staticModuleOrClass(Mirrors.scala:72)
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:119)
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:21)
    at org.apache.spark.ml.classification.NaiveBayesModel$NaiveBayesModelWriter$$typecreator1$1.apply(NaiveBayes.scala:264)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:231)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:231)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.localTypeOf(ScalaReflection.scala:642)
    at org.apache.spark.sql.catalyst.ScalaReflection$.localTypeOf(ScalaReflection.scala:30)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:630)
    at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:30)
    at org.apache.spark.sql.SQLContext.createDataFrame(SQLContext.scala:430)
    at org.apache.spark.ml.classification.NaiveBayesModel$NaiveBayesModelWriter.saveImpl(NaiveBayes.scala:264)
    at org.apache.spark.ml.util.MLWriter.save(ReadWrite.scala:90)
    at org.apache.spark.ml.util.MLWritable$class.save(ReadWrite.scala:130)
    at org.apache.spark.ml.classification.NaiveBayesModel.save(NaiveBayes.scala:130)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply$mcV$sp(<console>:65)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply(<console>:63)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply(<console>:63)
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)
scala.reflect.internal.Symbols$CyclicReference: illegal cyclic reference involving class NaiveBayesModelWriter
    at scala.reflect.internal.Symbols$Symbol$$anonfun$info$3.apply(Symbols.scala:1220)
    at scala.reflect.internal.Symbols$Symbol$$anonfun$info$3.apply(Symbols.scala:1218)
    at scala.Function0$class.apply$mcV$sp(Function0.scala:40)
    at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
    at scala.reflect.internal.Symbols$Symbol.lock(Symbols.scala:482)
    at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1218)
    at scala.reflect.internal.BuildUtils$BuildImpl.select(BuildUtils.scala:20)
    at scala.reflect.internal.BuildUtils$BuildImpl.selectType(BuildUtils.scala:11)
    at scala.reflect.internal.BuildUtils$BuildImpl.selectType(BuildUtils.scala:8)
    at org.apache.spark.ml.classification.NaiveBayesModel$NaiveBayesModelWriter$$typecreator1$1.apply(NaiveBayes.scala:264)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:231)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:231)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.localTypeOf(ScalaReflection.scala:642)
    at org.apache.spark.sql.catalyst.ScalaReflection$.localTypeOf(ScalaReflection.scala:30)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:630)
    at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:30)
    at org.apache.spark.sql.SQLContext.createDataFrame(SQLContext.scala:430)
    at org.apache.spark.ml.classification.NaiveBayesModel$NaiveBayesModelWriter.saveImpl(NaiveBayes.scala:264)
    at org.apache.spark.ml.util.MLWriter.save(ReadWrite.scala:90)
    at org.apache.spark.ml.util.MLWritable$class.save(ReadWrite.scala:130)
    at org.apache.spark.ml.classification.NaiveBayesModel.save(NaiveBayes.scala:130)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply$mcV$sp(<console>:65)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply(<console>:63)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply(<console>:63)
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)
java.lang.RuntimeException: error reading Scala signature of org.apache.spark.ml.classification.NaiveBayesModel: assertion failed: class MLWriter
    at scala.reflect.internal.pickling.UnPickler.unpickle(UnPickler.scala:45)
    at scala.reflect.runtime.JavaMirrors$JavaMirror.unpickleClass(JavaMirrors.scala:565)
    at scala.reflect.runtime.SymbolLoaders$TopClassCompleter.complete(SymbolLoaders.scala:32)
    at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1231)
    at scala.reflect.internal.Mirrors$RootsBase.getModuleOrClass(Mirrors.scala:43)
    at scala.reflect.internal.Mirrors$RootsBase.getModuleOrClass(Mirrors.scala:61)
    at scala.reflect.internal.Mirrors$RootsBase.staticModuleOrClass(Mirrors.scala:72)
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:119)
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:21)
    at org.apache.spark.ml.classification.NaiveBayesModel$NaiveBayesModelWriter$$typecreator1$1.apply(NaiveBayes.scala:264)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:231)
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:231)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.localTypeOf(ScalaReflection.scala:642)
    at org.apache.spark.sql.catalyst.ScalaReflection$.localTypeOf(ScalaReflection.scala:30)
    at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:630)
    at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:30)
    at org.apache.spark.sql.SQLContext.createDataFrame(SQLContext.scala:430)
    at org.apache.spark.ml.classification.NaiveBayesModel$NaiveBayesModelWriter.saveImpl(NaiveBayes.scala:264)
    at org.apache.spark.ml.util.MLWriter.save(ReadWrite.scala:90)
    at org.apache.spark.ml.util.MLWritable$class.save(ReadWrite.scala:130)
    at org.apache.spark.ml.classification.NaiveBayesModel.save(NaiveBayes.scala:130)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply$mcV$sp(<console>:65)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply(<console>:63)
    at $line61.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1$$anonfun$apply$mcVI$sp$1.apply(<console>:63)
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
    at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)

0 Answers:

No answers yet.