org.apache.spark.SparkException任务不可序列化原因:ForeachPartitionFunction中的java.io.NotSerializableException

时间:2019-03-27 05:52:42

标签: java apache-spark apache-spark-sql apache-spark-mllib

test.foreachPartition(new ForeachPartitionFunction<Row>() {


    public void call(Iterator<Row> t) throws Exception {
    while (t.hasNext()){
      Row row = t.next();
       /*Vector testdata= new Vector();
         testdata.add(row);
            Logisticmodel.addRow(testdata);*/
            System.out.print(row.getString(0) +"\t" + row.getString(1)+"\t" + row.getString(2)+"\t" + row.getString(3)+"\t" + row.getString(4)+"\t" + row.getString(5)+"\t" + row.getString(6)+"\t" + row.getString(7)+"\t" + row.getString(8)+"\t" + row.getString(9)+"\t" + row.getDouble(10)+"\t" +(org.apache.spark.ml.linalg.DenseVector) row.get(11));
                                     //System.out.println();
        }
       }
    });

    test.foreachPartition(new ForeachPartitionFunction<Row>() {


                            public void call(Iterator<Row> t) throws Exception {



                                 while (t.hasNext()){

                                     Row row = t.next();
                                     /*Vector testdata= new Vector();
                                     testdata.add(row);

                                     Logisticmodel.addRow(testdata);*/
                                     System.out.print(row.getString(0) +"\t" + row.getString(1)+"\t" + row.getString(2)+"\t" + row.getString(3)+"\t" + row.getString(4)+"\t" + row.getString(5)+"\t" + row.getString(6)+"\t" + row.getString(7)+"\t" + row.getString(8)+"\t" + row.getString(9)+"\t" + row.getDouble(10)+"\t" +(org.apache.spark.ml.linalg.DenseVector) row.get(11));
                                     //System.out.println();
                                 }
                             }
                         });

我有上面的代码,并给我以下错误

org.apache.spark.SparkException: Task not serializable
    at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:403)
    at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:393)
    at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:162)
    at org.apache.spark.SparkContext.clean(SparkContext.scala:2326)
    at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:934)
    at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:933)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.foreachPartition(RDD.scala:933)
    at org.apache.spark.sql.Dataset$$anonfun$foreachPartition$1.apply$mcV$sp(Dataset.scala:2736)
    at org.apache.spark.sql.Dataset$$anonfun$foreachPartition$1.apply(Dataset.scala:2736)
    at org.apache.spark.sql.Dataset$$anonfun$foreachPartition$1.apply(Dataset.scala:2736)
    at org.apache.spark.sql.Dataset$$anonfun$withNewRDDExecutionId$1.apply(Dataset.scala:3350)
    at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
    at org.apache.spark.sql.Dataset.withNewRDDExecutionId(Dataset.scala:3346)
    at org.apache.spark.sql.Dataset.foreachPartition(Dataset.scala:2735)
    at org.apache.spark.sql.Dataset.foreachPartition(Dataset.scala:2747)
    at LogisticRegression$3.actionPerformed(LogisticRegression.java:336)
    at javax.swing.AbstractButton.fireActionPerformed(Unknown Source)
    at javax.swing.AbstractButton$Handler.actionPerformed(Unknown Source)
    at javax.swing.DefaultButtonModel.fireActionPerformed(Unknown Source)
    at javax.swing.DefaultButtonModel.setPressed(Unknown Source)
    at javax.swing.plaf.basic.BasicButtonListener.mouseReleased(Unknown Source)
    at java.awt.Component.processMouseEvent(Unknown Source)
    at javax.swing.JComponent.processMouseEvent(Unknown Source)
    at java.awt.Component.processEvent(Unknown Source)
    at java.awt.Container.processEvent(Unknown Source)
    at java.awt.Component.dispatchEventImpl(Unknown Source)
    at java.awt.Container.dispatchEventImpl(Unknown Source)
    at java.awt.Component.dispatchEvent(Unknown Source)
    at java.awt.LightweightDispatcher.retargetMouseEvent(Unknown Source)
    at java.awt.LightweightDispatcher.processMouseEvent(Unknown Source)
    at java.awt.LightweightDispatcher.dispatchEvent(Unknown Source)
    at java.awt.Container.dispatchEventImpl(Unknown Source)
    at java.awt.Window.dispatchEventImpl(Unknown Source)
    at java.awt.Component.dispatchEvent(Unknown Source)
    at java.awt.EventQueue.dispatchEventImpl(Unknown Source)
    at java.awt.EventQueue.access$500(Unknown Source)
    at java.awt.EventQueue$3.run(Unknown Source)
    at java.awt.EventQueue$3.run(Unknown Source)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.security.ProtectionDomain$JavaSecurityAccessImpl.doIntersectionPrivilege(Unknown Source)
    at java.security.ProtectionDomain$JavaSecurityAccessImpl.doIntersectionPrivilege(Unknown Source)
    at java.awt.EventQueue$4.run(Unknown Source)
    at java.awt.EventQueue$4.run(Unknown Source)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.security.ProtectionDomain$JavaSecurityAccessImpl.doIntersectionPrivilege(Unknown Source)
    at java.awt.EventQueue.dispatchEvent(Unknown Source)
    at java.awt.EventDispatchThread.pumpOneEventForFilters(Unknown Source)
    at java.awt.EventDispatchThread.pumpEventsForFilter(Unknown Source)
    at java.awt.EventDispatchThread.pumpEventsForHierarchy(Unknown Source)
    at java.awt.EventDispatchThread.pumpEvents(Unknown Source)
    at java.awt.EventDispatchThread.pumpEvents(Unknown Source)
    at java.awt.EventDispatchThread.run(Unknown Source)
Caused by: java.io.NotSerializableException: LogisticRegression$3
Serialization stack:
    - object not serializable (class: LogisticRegression$3, value: LogisticRegression$3@66cd4e38)
    - field (class: LogisticRegression$3$1, name: this$1, type: class LogisticRegression$3)
    - object (class LogisticRegression$3$1, LogisticRegression$3$1@752a453c)
    - field (class: org.apache.spark.sql.Dataset$$anonfun$foreachPartition$2, name: func$4, type: interface org.apache.spark.api.java.function.ForeachPartitionFunction)
    - object (class org.apache.spark.sql.Dataset$$anonfun$foreachPartition$2, <function1>)
    at org.apache.spark.serializer.SerializationDebugger$.improveException(SerializationDebugger.scala:40)
    at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:46)
    at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:100)
    at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:400)

... 56 more

0 个答案:

没有答案