When I export my program to a jar file and run it, I get a java.lang.ClassNotFoundException.
The code I am using is shown below.
Thanks in advance for any help.
SBT
name := "testFunctions"
version := "1.0"
scalaVersion := "2.11.8"
libraryDependencies += "com.github.pathikrit" %% "better-files" % "2.17.1"
libraryDependencies ++= Seq(
  "org.apache.spark" % "spark-core_2.11" % "2.1.0",
  "org.apache.spark" % "spark-sql_2.11" % "2.1.0",
  "com.github.pathikrit" %% "better-files" % "2.17.1"
)

initialize := {
  val _ = initialize.value
  if (sys.props("java.specification.version") != "1.8")
    sys.error("Java 8 is required for this project.")
}
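As an aside, a jar for spark-submit can also be built straight from sbt instead of being exported from the IDE. The sketch below is only an illustration of that route, not the build actually used here: it assumes the sbt-assembly plugin is added and marks the Spark artifacts as "provided", since spark-submit already supplies them on the runtime classpath.

// project/plugins.sbt (assumed plugin version, adjust as needed)
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.5")

// build.sbt: Spark is provided by spark-submit, so keep it out of the fat jar
libraryDependencies ++= Seq(
  "org.apache.spark" % "spark-core_2.11" % "2.1.0" % "provided",
  "org.apache.spark" % "spark-sql_2.11" % "2.1.0" % "provided",
  "com.github.pathikrit" %% "better-files" % "2.17.1"
)

// `sbt assembly` then writes a single jar under target/scala-2.11/
// that can be passed to spark-submit directly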
Scala code
/**
 * Created by cloudera on 7/23/17.
 */
import better.files.File._
import org.apache.spark.sql.SparkSession

object func {

  val forDelete = (root / "/home/cloudera/Documents/fabo")
    .createIfNotExists()

  if (forDelete.exists)
    forDelete.delete()

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder
      .master("local")
      .appName("Get ForEx Data")
      .getOrCreate()
  }
}
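Note that the better-files calls sit directly in the body of object func, so they run during object initialization, before main starts. For comparison only, here is a minimal sketch with the same logic moved inside main; the intent (remove the fabo directory if it already exists) is an assumption based on the original code.

import better.files.File._
import org.apache.spark.sql.SparkSession

object func {
  def main(args: Array[String]): Unit = {
    // assumed intent: delete /home/cloudera/Documents/fabo if it is already there
    val forDelete = root / "/home/cloudera/Documents/fabo"
    if (forDelete.exists)
      forDelete.delete()

    val spark = SparkSession.builder
      .master("local")
      .appName("Get ForEx Data")
      .getOrCreate()
  }
}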
Command line used to run the jar file
spark-submit --class func --master local[*] /home/cloudera/testFunctions/out/artifacts/testFunctions_jar
Error
java.lang.ClassNotFoundException: func
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.spark.util.Utils$.classForName(Utils.scala:229)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:695)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
** The project tree structure is shown below ** - so the class named func does exist in the scala directory
\main
    \java
    \resources
    \scala
        -func (class)
    \scala-2.11
Output of jar tf name_of_jar.jar
META-INF/MANIFEST.MF
func.class
func$.class