Scala: Main method not found in class

Asked: 2017-12-04 07:07:23

Tags: java scala hadoop

I am new to Scala and Java. I am writing the following code for K-Means clustering, but I get this error:

Error: Main method not found in class kmeans, please define the main method as:
   public static void main(String[] args)
or a JavaFX application class must extend javafx.application.Application

Please help me resolve this problem.

My .scala code:

import scala.math.pow
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf

object kmeans {

  // Squared distance between two (lat, long) points; the square root is
  // skipped because only the relative ordering of distances matters.
  def distanceSquared(p1: (Double, Double), p2: (Double, Double)) = {
    pow(p1._1 - p2._1, 2) + pow(p1._2 - p2._2, 2)
  }

  // Element-wise sum of two points, used to accumulate cluster totals.
  def addPoints(p1: (Double, Double), p2: (Double, Double)) = {
    (p1._1 + p2._1, p1._2 + p2._2)
  }

  // Returns the index of the center in `points` that is closest to `p`.
  def closestPoint(p: (Double, Double), points: Array[(Double, Double)]): Int = {
    var bestIndex = 0
    var closest = Double.PositiveInfinity

    for (i <- points.indices) {
      val dist = distanceSquared(p, points(i))
      if (dist < closest) {
        closest = dist
        bestIndex = i
      }
    }
    bestIndex
  }


  def main(args: Array[String]) {

    val conf = new SparkConf().setAppName("First Scala app").setMaster("local[*]")
    val sc = new SparkContext(conf)

    val filename = "loudacre/*"

    val K = 5

    val convergeDist = .1

    // Parse each CSV line, keep fields 3 and 4 as (lat, long),
    // and drop records whose coordinates are (0, 0).
    val fileRdd = sc.textFile(filename)
    val pairLatLongRdd = fileRdd
      .map(line => line.split(','))
      .map(pair => (pair(3).toDouble, pair(4).toDouble))
      .filter(point => !((point._1 == 0) && (point._2 == 0)))
      .persist()

    println(pairLatLongRdd.count())

    for ((a, b) <- pairLatLongRdd.take(2)) {
      println("Lat: " + a + " Long : " + b)
    }

    val kPoints = pairLatLongRdd.takeSample(false, K, 42)

    println("K Center points initialized :")

    for ((a, b) <- kPoints) {
      println("Lat: " + a + " Long : " + b)
    }

    var tempDist = Double.PositiveInfinity

    // Iterate until the total movement of the centers drops below convergeDist.
    while (tempDist > convergeDist) {

      // Pair each point with the index of its closest center, plus a count of 1.
      val closestToKpointRdd = pairLatLongRdd.map(point => (closestPoint(point, kPoints), (point, 1)))

      // Per center: the sum of its assigned points and how many there are.
      val pointCalculatedRdd = closestToKpointRdd.reduceByKey {
        case ((point1, n1), (point2, n2)) => (addPoints(point1, point2), n1 + n2)
      }

      // Each new center is the mean of the points assigned to it.
      val newPoints = pointCalculatedRdd.map {
        case (i, (point, n)) => (i, (point._1 / n, point._2 / n))
      }.collectAsMap()

      // Total squared distance moved by the centers this iteration.
      // Note: newPoints(i) assumes every center kept at least one point.
      tempDist = 0.0
      for (i <- 0 until K) {
        tempDist += distanceSquared(kPoints(i), newPoints(i))
      }

      println("Distance between iterations: " + tempDist)

      for (i <- 0 until K) {
        kPoints(i) = newPoints(i)
      }
    }

    println("Final center points :");

    for (point <- kPoints) {
      println(point);
    }

    // Re-read the file, this time keeping the device id (field 1) with its (lat, long).
    val deviceRdd = fileRdd
      .map(line => line.split(','))
      .map(pair => (pair(1), (pair(3).toDouble, pair(4).toDouble)))
      .filter(device => !((device._2._1 == 0) && (device._2._2 == 0)))
      .persist()

    // Report the closest final center for a sample of devices.
    val points = deviceRdd.takeSample(false, 10, 42)

    for ((device, point) <- points) {
      val k = closestPoint(point, kPoints)
      println("device: " + device + " to K: " + k)
    }

    sc.stop()
  }

}

1 Answer:

Answer 0 (score: 0):

You may be missing an = after your main declaration.
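
A minimal sketch of the suggested fix (names kept from the question; the body is elided):

object kmeans {

  def main(args: Array[String]): Unit = {
    // ... body unchanged from the question ...
  }
}

Note that in Scala 2 the form without = (procedure syntax) still compiles, so if adding it does not help, also confirm that the file compiles cleanly and that main is defined on an object (as it is here) rather than on a class.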
