org.apache.spark.SparkException:任务不可序列化(scala)

时间:2015-12-30 10:44:00

标签: scala serialization apache-spark

我是scala以及FOR spark的新手,请帮我解决这个问题。 在我单独加载到函数下面时,它们运行时没有任何异常,当我在scala对象中复制此函数,并在spark shell中加载相同的文件时,它们会抛出 任务而不是序列化 尝试并行化时“processbatch”函数中的异常。 PFB代码相同:

import org.apache.spark.sql.Row
import org.apache.log4j.Logger
import org.apache.spark.sql.hive.HiveContext

object Process {
    val hc = new HiveContext(sc)

    def processsingle(wait: Int, patient: org.apache.spark.sql.Row, visits: Array[org.apache.spark.sql.Row]) : String = {
        var out = new StringBuilder()
        val processStart = getTimeInMillis()
        for( x <- visits ) { 
            out.append(", " + x.getAs("patientid") + ":" + x.getAs("visitid")) 
        }
    }

    def processbatch(batch: Int, wait: Int, patients: Array[org.apache.spark.sql.Row], visits: Array[org.apache.spark.sql.Row]) = {
        val out = sc.parallelize(patients, batch).map( r=> processsingle(wait, r, visits.filter(f=> f.getAs("patientid") == r.getAs("patientid")))).collect()
        for(x <- out) println(x)
    }

    def processmeasures(fetch: Int, batch: Int, wait: Int) = {

        val patients = hc.sql("SELECT patientid FROM tableName1 order by p_id").collect()
        val visit = hc.sql("SELECT patientid, visitid FROM tableName2")
        val count = patients.length
        val fetches = if(count % fetch > 0) (count / fetch + 1) else (count / fetch)


        for(i <- 0 to fetches.toInt-1){ 
            val startFetch = i*fetch
            val endFetch = math.min((i+1)*fetch, count.toInt)-1
            val fetchSize = endFetch - startFetch + 1
            val fetchClause = "patientid >= " + patients(startFetch).get(0) + " and patientid <= " + patients(endFetch).get(0)
            val fetchVisit = visit.filter( fetchClause ).collect()

            val batches = if(fetchSize % batch > 0) (fetchSize / batch + 1) else (fetchSize / batch)
            for(j <- 0 to batches.toInt-1){ 
                val startBatch = j*batch
                val endBatch = math.min((j+1)*batch, fetch.toInt)-1

                println(s"Batch from $startBatch to $endBatch");
                val batchVisits = fetchVisit.filter(g => g.getAs[Long]("patientid") >= patients(i*fetch + startBatch).getLong(0) && g.getAs[Long]("patientid") <= patients(math.min(i*fetch + endBatch + 1, endFetch)).getLong(0))
                processbatch(batch, wait, patients.slice(i*fetch + startBatch, i*fetch + endBatch + 1), batchVisits)
            }
        }
        println("Processing took " + getExecutionTime(processStart) + " millis")
    }

}

1 个答案:

答案 0 :(得分:1)

您应该Process对象Serializable

object Process extends Serializable {
  ...
}