scala

Date: 2015-04-30 12:50:07

Tags: scala

I have the following Scala script:

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.hadoop.io.{LongWritable, NullWritable}
import org.apache.spark.SparkConf

object SimpleApp {

    def main(args: Array[String]) {
        val conf = new SparkConf().setAppName("SimpleApp")
        val sc = new SparkContext(conf)
        val file = sc.textFile("hdfs://master-full:54310/tera-output")
        val splits = file.map(word => word.toLong)
        splits.map(row => (NullWritable.get(), new LongWritable(row))).saveAsNewAPIHadoopFile(“hdfs://master-full:54310/output/",classOf[NullWritable],classOf[LongWritable],classOf[TextOutputFormat[NullWritable,LongWritable]])
    }
}

When I try to compile it, I get the following errors:

[error] /home/cloud-user/SparkWordCount/src/main/scala/WordCount.scala:13: illegal character '\u201c'
[error]         splits.map(row => (NullWritable.get(), new LongWritable(row))).saveAsNewAPIHadoopFile(“hdfs://master-full:54310/output/",classOf[NullWritable],classOf[LongWritable],classOf[TextOutputFormat[NullWritable,LongWritable]])
[error]                                                                                               ^
[error] /home/cloud-user/SparkWordCount/src/main/scala/WordCount.scala:14: ')' expected but '}' found.
[error]     }
[error]     ^
[error] two errors found
[error] (compile:compileIncremental) Compilation failed

I have a file, simple.sbt, which lists the dependencies:

name := "Simple Project"

version := "1.0"

scalaVersion := "2.10.4"

libraryDependencies += "org.apache.spark" %% "spark-core" % "1.1.0"

Does anyone know how to fix this? Thanks in advance!

After fixing the quote issue, as correctly pointed out, I get this error:

[info] Set current project to Simple Project (in build file:/home/cloud-user/SparkWordCount/)
[info] Compiling 1 Scala source to /home/cloud-user/SparkWordCount/target/scala-2.10/classes...
[error] /home/cloud-user/SparkWordCount/src/main/scala/WordCount.scala:15: not found: type TextOutputFormat
[error]         splits.map(row => (NullWritable.get(), new LongWritable(row))).saveAsNewAPIHadoopFile("hdfs://master-full:54310/output/",classOf[NullWritable],classOf[LongWritable],classOf[TextOutputFormat[NullWritable,LongWritable]])
[error]                                                                                                                                                                                      ^
[error] one error found
[error] (compile:compileIncremental) Compilation failed

1 answer:

Answer 0: (score: 3)

Try:

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.hadoop.io.{LongWritable, NullWritable}
// TextOutputFormat for the new Hadoop API lives in the mapreduce package
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat

object SimpleApp {

    def main(args: Array[String]) {
        val conf = new SparkConf().setAppName("SimpleApp")
        val sc = new SparkContext(conf)
        val file = sc.textFile("hdfs://master-full:54310/tera-output")
        val splits = file.map(word => word.toLong)
        // Note the straight quotes around the output path
        splits.map(row => (NullWritable.get(), new LongWritable(row))).saveAsNewAPIHadoopFile("hdfs://master-full:54310/output/", classOf[NullWritable], classOf[LongWritable], classOf[TextOutputFormat[NullWritable, LongWritable]])
    }
}

You were probably using the wrong quote character (“ instead of ") on the offending line.
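
The follow-up error (not found: type TextOutputFormat) is a separate issue: none of the imports in the original script bring that type into scope. Assuming the new Hadoop API output format is intended (saveAsNewAPIHadoopFile expects a subclass of org.apache.hadoop.mapreduce.OutputFormat), the import already included in the snippet above is the one that resolves it:

import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat

The old-API class of the same name in org.apache.hadoop.mapred would not compile against saveAsNewAPIHadoopFile.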