我已将 assembly.sbt 文件添加到 project 文件夹中，并且没有在任何模块目录里保留其他 sbt 文件。我没有看到任何能帮助定位问题的报错。另外，这是用 Java 编写的 Spark 代码。运行 sbt assembly 时得到以下错误：-
[error] Not a valid command: assembly
[error] Not a valid project ID: assembly
[error] Expected ':' (if selecting a configuration)
[error] Not a valid key: assembly
[error] assembly
当我运行 sbt package 时，它只生成一个仅包含 META-INF 的空 jar。
这是我的build.sbt文件: -
// Settings shared by every sub-project.
lazy val commonSettings = Seq(
  version := "1.0",
  scalaVersion := "2.10.5",
  // Force the declared Scala version onto transitive dependencies
  // instead of letting Ivy pick a different binary version.
  ivyScala := ivyScala.value map { _.copy(overrideScalaVersion = true) },
  // BUG (removed): `compile in Compile := inc.Analysis.Empty` disabled
  // compilation for every project, which is why `sbt package` produced
  // jars containing only META-INF. Dropped so sources actually compile.
  organization := "com.service.main" // trailing comma removed: not valid syntax in sbt 0.13's Scala 2.10 build definition
)
// Project dependencies. Scala-versioned artifacts use `%%` so the binary
// suffix (_2.10) is derived from `scalaVersion` instead of being hard-coded;
// with scalaVersion 2.10.5 this resolves the exact same artifacts as before.
libraryDependencies ++= Seq(
  "com.google.code.gson" % "gson" % "2.3.1",                 // plain Java artifact: single %
  "org.apache.spark" %% "spark-core" % "1.5.2",
  "org.apache.spark" %% "spark-sql" % "1.5.2",
  "org.apache.spark" %% "spark-streaming" % "1.5.2",
  "org.apache.spark" %% "spark-streaming-kafka" % "1.5.2",
  "com.databricks" %% "spark-csv" % "1.3.0",
  "org.codehaus.janino" % "janino" % "2.7.8",
  "it.unimi.dsi" % "fastutil" % "7.0.6",
  "org.reflections" % "reflections" % "0.9.10",
  "org.elasticsearch" %% "elasticsearch-spark" % "2.1.1",
  "junit" % "junit" % "4.8.1"
  // "org.elasticsearch" % "elasticsearch-hadoop" % "2.1.1",
  // "org.apache.kafka" % "kafka" % "0.8.2.0" exclude("log4j", "log4j") exclude("org.slf4j","slf4j-log4j12")
)
// Extra artifact repositories, searched in declaration order.
// NOTE(review): the "Apache HBase" entry was an exact duplicate of the
// "Apache repo" URL and has been removed; resolution order is unchanged.
// The remaining http:// (non-TLS) repos are kept as-is — newer sbt
// versions reject insecure repos, so consider switching them to https.
resolvers ++= Seq(
  "Apache repo" at "https://repository.apache.org/content/repositories/releases",
  "Cloudera Repository" at "https://repository.cloudera.com/artifactory/cloudera-repos/",
  "Akka Repository" at "http://repo.akka.io/releases/",
  "scala-tools" at "https://oss.sonatype.org/content/groups/scala-tools",
  "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
  "Second Typesafe repo" at "http://repo.typesafe.com/typesafe/maven-releases/",
  "Mesosphere Public Repository" at "http://downloads.mesosphere.io/maven",
  Resolver.sonatypeRepo("public")
)
// Aggregating root project: `aggregate` makes tasks run in every module,
// `dependsOn` puts all module classes on the root classpath.
lazy val root = project
  .in(file("."))
  .aggregate(core, common, provider, ooxp, model)
  .settings(commonSettings: _*)
  .settings(name := "compute-engine")
  .dependsOn(core, common, model, provider, ooxp)
// Core engine module; builds on the providers and common utilities.
lazy val core = project
  .in(file("core"))
  .settings(commonSettings: _*)
  .settings(name := "core")
  .dependsOn(provider, common)
// Diff module. NOTE(review): defined but not aggregated by `root`,
// so root-level tasks will not build it — confirm this is intended.
lazy val diff = project
  .in(file("diff"))
  .settings(commonSettings: _*)
  .settings(name := "diff")
  .dependsOn(provider, common)
// Provider implementations; note the artifact name differs from the
// project id ("providers" vs `provider`).
lazy val provider = project
  .in(file("provider"))
  .settings(commonSettings: _*)
  .settings(name := "providers")
  .dependsOn(ooxp, model, common)
// OOXP module, layered on the data model and common utilities.
lazy val ooxp = project
  .in(file("ooxp"))
  .settings(commonSettings: _*)
  .settings(name := "ooxp")
  .dependsOn(model, common)
// Data-model module; only needs the common utilities.
lazy val model = (project in file("model"))
  .settings(commonSettings: _*)
  .settings(name := "model")
  .dependsOn(common)
// Shared utilities; the leaf of the dependency graph.
lazy val common = (project in file("common"))
  .settings(commonSettings: _*)
  .settings(name := "common")
答案 0（得分：0）
您需要添加sbt-assembly插件。
点击此处http://www.scala-sbt.org/0.13/tutorial/Using-Plugins.html
或者您可以在此处查看可能重复的内容:"./sbt/sbt assembly" errors "Not a valid command: assembly" for Apache Spark project