Could not find a 'KafkaClient' entry in the JAAS configuration. System property 'java.security.auth.login.config' is not set

Asked: 2018-08-25 15:19:26

Tags: apache-spark apache-kafka jaas spark-structured-streaming

I'm trying to connect to Kafka from Spark Structured Streaming.

This works:

spark-shell --master local[1] \
       --files /mypath/jaas_mh.conf \
       --packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.0 \
       --conf "spark.driver.extraJavaOptions=-Djava.security.auth.login.config=jaas_mh.conf" \
       --conf "spark.executor.extraJavaOptions=-Djava.security.auth.login.config=jaas_mh.conf" \
       --num-executors 1  --executor-cores 1 
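
For context, the jaas_mh.conf shipped with --files has to contain a KafkaClient login section, which is exactly what the error below complains about when it cannot be found. A minimal sketch of such a file for SASL/PLAIN (the mechanism typically used with Message Hub) might look like this, with placeholder credentials:

KafkaClient {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="<api-username>"
    password="<api-password>";
};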

However, when I try to do the same thing programmatically...

object SparkHelper {
  def getAndConfigureSparkSession() = {
    val conf = new SparkConf()
      .setAppName("Structured Streaming from Message Hub to Cassandra")
      .setMaster("local[1]")
      .set("spark.driver.extraJavaOptions", "-Djava.security.auth.login.config=jaas_mh.conf")
      .set("spark.executor.extraJavaOptions", "-Djava.security.auth.login.config=jaas_mh.conf")

    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    getSparkSession()
  }

  def getSparkSession() : SparkSession = {
    val spark = SparkSession
      .builder()
      .getOrCreate()

    spark.sparkContext.addFile("/mypath/jaas_mh.conf")

    return spark
  }
}

I get the error:

 Could not find a 'KafkaClient' entry in the JAAS configuration. 
    System property 'java.security.auth.login.config' is not set

Any pointers?

1 Answer:

Answer 0 (score: 2)

You should supply the full (or a valid relative) path to the .conf file in the programmatic configuration as well, not just the bare file name. Also, you create a SparkConf but never apply it to the SparkSession, so the extraJavaOptions you set there never reach the session that is actually used.

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object Driver extends App {

  // full path to the JAAS file; adjust to your environment
  val confPath: String = "/Users/arcizon/IdeaProjects/spark/src/main/resources/jaas_mh.conf"

  def getAndConfigureSparkSession(): SparkSession = {
    val conf = new SparkConf()
      .setAppName("Structured Streaming from Message Hub to Cassandra")
      .setMaster("local[1]")
      // point both the driver and the executors at the JAAS file
      .set("spark.driver.extraJavaOptions", s"-Djava.security.auth.login.config=$confPath")
      .set("spark.executor.extraJavaOptions", s"-Djava.security.auth.login.config=$confPath")

    getSparkSession(conf)
  }

  def getSparkSession(conf: SparkConf): SparkSession = {
    val spark = SparkSession
      .builder()
      .config(conf) // apply the SparkConf so the extraJavaOptions actually take effect
      .getOrCreate()

    // ship the JAAS file to the executors' working directories
    spark.sparkContext.addFile(confPath)

    spark.sparkContext.setLogLevel("WARN")

    spark
  }

  val sparkSession: SparkSession = getAndConfigureSparkSession()

  println(sparkSession.conf.get("spark.driver.extraJavaOptions"))
  println(sparkSession.conf.get("spark.executor.extraJavaOptions"))

  sparkSession.stop()
}
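
For completeness, once a session is configured this way, a minimal sketch of consuming a topic with Structured Streaming could look like the following, assuming a configured SparkSession named spark; the broker list, topic name, and SASL options are placeholders that depend on your cluster:

// read from Kafka over SASL_SSL; the "kafka."-prefixed options are passed through to the Kafka consumer
val df = spark.readStream
  .format("kafka")
  .option("kafka.bootstrap.servers", "broker-1:9093,broker-2:9093") // placeholder brokers
  .option("kafka.security.protocol", "SASL_SSL")
  .option("kafka.sasl.mechanism", "PLAIN")                          // matches the PlainLoginModule in jaas_mh.conf
  .option("subscribe", "my-topic")                                  // placeholder topic
  .load()

// print the decoded records to the console as a quick smoke test
df.selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
  .writeStream
  .format("console")
  .start()
  .awaitTermination()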