运行Spark Scala程序时出现以下错误:
用户类抛出异常:java.lang.NoSuchMethodError: com.fasterxml.jackson.databind.JsonMappingException.&lt;init&gt;(Ljava/io/Closeable;Ljava/lang/String;)V
引发错误的具体代码行是 val configString = UtilsKPI.downloadFromS3(configPath)。
以下是 UtilsKPI 的代码:
/** Helper object for S3 access and JSON (de)serialization used by the KPI jobs.
  *
  * Mutable state (bucket names, client) is configured once at startup via the
  * setters below before any of the S3 methods are called.
  */
object UtilsKPI {
  var bucketNameCode: String = ""
  var bucketNameData: String = ""
  var credentials: BasicAWSCredentials = null
  var client: AmazonS3Client = null

  /** Parses a flat JSON object into a String -> String map. */
  def parseJSON(line: String): Map[String, String] = {
    JacksMapper.readValue[Map[String, String]](line)
  }

  def setBucketNameCode(bnc: String): Unit = {
    bucketNameCode = bnc
  }

  def setBucketNameData(bnd: String): Unit = {
    bucketNameData = bnd
  }

  /** Initializes the S3 client using the SDK's default credential provider chain. */
  def setS3(): Unit = {
    client = new AmazonS3Client()
  }

  /** Serializes the given map to a JSON string. */
  def convertMapToJSON(map: Map[String, Any]): String = {
    JacksMapper.writeValueAsString[scala.collection.immutable.Map[String, Any]](map)
  }

  /** Uploads `config` as an application/json object to s3://bucketNameCode/filePath,
    * deleting any previous object at that key first.
    */
  def uploadConfigToS3(config: String, filePath: String): Unit = {
    val CONTENT_TYPE = "application/json"
    val fileContentBytes = config.getBytes(StandardCharsets.UTF_8)
    val fileInputStream = new ByteArrayInputStream(fileContentBytes)
    try {
      val metadata = new ObjectMetadata()
      metadata.setContentType(CONTENT_TYPE)
      metadata.setContentLength(fileContentBytes.length)
      val putObjectRequest = new PutObjectRequest(bucketNameCode, filePath, fileInputStream, metadata)
      // putObject overwrites existing keys; the explicit delete is kept for parity with prior behavior.
      client.deleteObject(new DeleteObjectRequest(bucketNameCode, filePath))
      client.putObject(putObjectRequest)
    } finally {
      fileInputStream.close()
    }
  }

  /** Downloads s3://bucketNameCode/downloadFile and returns its full contents as a string. */
  def downloadFromS3(downloadFile: String): String = {
    val s3Object = client.getObject(new GetObjectRequest(bucketNameCode, downloadFile))
    val myData = Source.fromInputStream(s3Object.getObjectContent())
    try {
      myData.getLines().mkString("\n")
    } finally {
      // Close the Source AND the S3Object: an unclosed S3 object stream leaks the
      // underlying HTTP connection and eventually exhausts the SDK connection pool.
      myData.close()
      s3Object.close()
    }
  }

  /** Downloads s3://bucketNameData/key and returns its full contents as a string. */
  def getByKeyFromS3(key: String): String = {
    val s3Object = client.getObject(bucketNameData, key)
    val myData = Source.fromInputStream(s3Object.getObjectContent())
    try {
      myData.getLines().mkString("\n")
    } finally {
      // Same connection-leak fix as downloadFromS3.
      myData.close()
      s3Object.close()
    }
  }
}
这是我的pom.xml文件:
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
<scala.version>2.11</scala.version>
<scala.subversion>2.11.8</scala.subversion>
<spark.version>2.2.0</spark.version>
<!-- Spark 2.2.0 bundles Jackson 2.6.x on the driver/executor classpath, and Spark's
     copy wins at runtime. Declaring a newer Jackson (e.g. 2.9.4) compiles against
     APIs that are missing at runtime, causing:
       java.lang.NoSuchMethodError:
       com.fasterxml.jackson.databind.JsonMappingException.<init>(Ljava/io/Closeable;Ljava/lang/String;)V
     Keep Jackson aligned with Spark's version (or shade/relocate Jackson in the fat jar). -->
<jackson.version>2.6.7</jackson.version>
</properties>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.subversion}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.version}</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-scala_${scala.version}</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.github.nscala-time</groupId>
<artifactId>nscala-time_${scala.version}</artifactId>
<version>2.18.0</version>
</dependency>
<dependency>
<groupId>com.github.scopt</groupId>
<artifactId>scopt_${scala.version}</artifactId>
<version>3.3.0</version>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.11.297</version>
</dependency>
<dependency>
<groupId>com.typesafe.scala-logging</groupId>
<artifactId>scala-logging_${scala.version}</artifactId>
<version>3.8.0</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka-0-8_${scala.version}</artifactId>
<version>${spark.version}</version>
</dependency>
</dependencies>