Hi, I'm new to Spark and Scala. I've been trying to run an sbt-compiled uber jar with spark-submit.
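For context, an sbt uber jar is typically produced with the sbt-assembly plugin; a minimal sketch of such a build looks roughly like this (the plugin and library versions below are assumptions, not necessarily the ones I use):

// project/plugins.sbt
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.6")

// build.sbt
name := "AwsSample"
version := "1.0"
scalaVersion := "2.11.12"
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "2.2.0" % "provided",
  "org.apache.spark" %% "spark-sql"  % "2.2.0" % "provided",
  "org.apache.hadoop" % "hadoop-aws" % "2.7.3"  // pulls in the S3A connector classes
)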
Once packaging is done, I execute the sbt jar with a spark-submit command.
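Roughly, the invocation looks like this (the jar path below is a placeholder, not the real one):

spark-submit \
--class com.spark.scala.AwsSample \
--master spark://**.**.**.**:7077 \
target/scala-2.11/awssample-assembly-1.0.jar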
$(".general_form2").each(function(){
$(this).validate(
{
submitHandler: function (form) {
var id = $(this).attr('id');
var form1 = document.getElementById(id);
$.ajax(
{
url: form.action,
type: form.method,
data: new FormData(form1),
cache: false,
contentType: false,
processData: false,
dataType: 'html',
beforeSend: function () {
},
complete: function () {
},
success: function (response) {
},
error: function (error) {
alert("Error" + JSON.stringify(error));
}
});
}
}
);
});
Running it locally is easy and it works fine. The error only shows up when I try to run it on the cluster. Here is the code:
package com.spark.scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.hadoop.fs.s3a.S3AFileSystem
import org.apache.hadoop.fs.s3native.NativeS3FileSystem
import com.amazonaws.auth.AWSCredentialsProvider
/** Reads a Parquet file from S3 and prints its row count. */
object AwsSample {
/** Our main function where the action happens */
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("AwsSample") //.setMaster("spark://**.**.**.**:7077")
val sc = new SparkContext(conf)
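// Enable AWS Signature Version 4 for S3 (required by newer regions); this sets the property on the driver JVM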
System.setProperty("com.amazonaws.services.s3.enableV4", "true")
val keyId = "**********"
val accessKey = "****************"
val endPoint= "s3.*some-region*.amazonaws.com"
val hadoopConf = sc.hadoopConfiguration
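// Register the credentials under all three Hadoop S3 connector prefixes (s3, s3n, s3a)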
hadoopConf.set("fs.s3.awsAccessKeyId", keyId)
hadoopConf.set("fs.s3n.awsAccessKeyId", keyId)
hadoopConf.set("fs.s3a.access.key", keyId)
hadoopConf.set("fs.s3.awsSecretAccessKey", accessKey)
hadoopConf.set("fs.s3n.awsSecretAccessKey", accessKey)
hadoopConf.set("fs.s3a.secret.key", accessKey)
hadoopConf.set("fs.s3a.impl", "org.apache.hadoop.fs.s3a.S3AFileSystem")
hadoopConf.set("fs.s3n.impl", "org.apache.hadoop.fs.s3a.S3AFileSystem")
hadoopConf.set("fs.s3a.endpoint", endPoint)
val sqlContext = new SQLContext(sc)
val poll = sqlContext.read.parquet("s3a://**/***/**/**.snappy")
println(poll.count())
}
}
Can someone please help me out?