我正在尝试在Scala / Spark / H2O中编写示例程序。该程序编译,但在运行时失败:

// Minimal Spark + Sparkling Water example: reads a CSV, extracts the first
// column as an RDD[Int], and converts it to an H2OFrame.
object App1 extends App {
  val conf = new SparkConf()
  conf.setAppName("AppTest")
  conf.setMaster("local[1]")
  conf.set("spark.executor.memory", "1g")
  val sc = new SparkContext(conf)

  // NOTE(review): SparkSession.builder.getOrCreate() reuses the SparkContext
  // created above when the configs are compatible; creating both is redundant
  // but harmless here.
  val spark = SparkSession.builder
    .master("local")
    .appName("ApplicationController")
    .getOrCreate()
  import spark.implicits._

  // FIX: the original passed an undefined identifier `sess` — the
  // SparkSession in scope is named `spark`.
  val h2oContext = H2OContext.getOrCreate(spark)
  import h2oContext.implicits._

  val rawData = sc.textFile("c:\\spark\\data.csv")
  // Parse each CSV line into a Double array — assumes every column is numeric
  // (a non-numeric field would throw NumberFormatException at action time).
  val data = rawData.map(line => line.split(',').map(_.toDouble))
  // First column used as the response variable.
  val response: RDD[Int] = data.map(row => row(0).toInt)
  val str = "count: " + response.count()
  // Implicit conversion (from h2oContext.implicits) turns the DataFrame
  // produced by toDF into an H2OFrame.
  val h2oResponse: H2OFrame = response.toDF
  sc.stop
  spark.stop
}
:
import React from 'react';
import ReactDOM from 'react-dom';
// Create a new component . This component should produce HTML
const app = function() {
return <div>Hi!</div>;
}
// Take this component generated HTML and put it in the DOM
ReactDOM.render(<app />, document.querySelector('.container'));
这是异常日志:
线程 “main” 中的异常 java.lang.RuntimeException: 当通过 --packages 选项将 Sparkling Water 作为 Spark 包使用时，由于 Spark 依赖解析中的一个 bug，必须显式指定依赖项 'no.priv.garshol.duke:duke:1.2'。位于 org.apache.spark.h2o.H2OContext.init(H2OContext.scala:117)