Cassandra: Authentication error ... no authenticator found in Cluster configuration

Asked: 2018-08-07 04:37:44

Tags: apache-spark cassandra

I am able to access Cassandra from the Spark shell and run this code there without any errors, but when I deploy the JAR and run it with spark-submit, the connection fails.

I am getting the error:

no authenticator on host /192.168.1.219:9042: Host /192.168.1.219:9042 requires authentication, but no authenticator found in Cluster configuration

Exception:

Exception in thread "main" java.io.IOException: Failed to open native connection to Cassandra at {10.135.83.28}:9042
        at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:168)
        at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$8.apply(CassandraConnector.scala:154)
        at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$8.apply(CassandraConnector.scala:154)
        at com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:32)
        at com.datastax.spark.connector.cql.RefCountedCache.syncAcquire(RefCountedCache.scala:69)
        at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:57)
        at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:79)
        at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:111)
        at com.datastax.spark.connector.rdd.partitioner.dht.TokenFactory$.forSystemLocalPartitioner(TokenFactory.scala:98)
        at org.apache.spark.sql.cassandra.CassandraSourceRelation$.apply(CassandraSourceRelation.scala:272)
        at org.apache.spark.sql.cassandra.DefaultSource.createRelation(DefaultSource.scala:56)
        at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:340)
        at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:239)
        at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:227)
        at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:164)
        at com.dev.Engine.JsonConfigRead.CassandraSpark$.main(CassandraSpark.scala:124)
        at com.dev.Engine.JsonConfigRead.CassandraSpark.main(CassandraSpark.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:894)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: com.datastax.driver.core.exceptions.AuthenticationException: Authentication error on host /10.135.83.28:9042: Host /10.135.83.28:9042 requires authentication, but no authenticator found in Cluster configuration
        at com.datastax.driver.core.AuthProvider$1.newAuthenticator(AuthProvider.java:40)
        at com.datastax.driver.core.Connection$5.apply(Connection.java:261)
        at com.datastax.driver.core.Connection$5.apply(Connection.java:243)
        at shade.com.datastax.spark.connector.google.common.util.concurrent.Futures$ChainingListenableFuture.run(Futures.java:906)
        at shade.com.datastax.spark.connector.google.common.util.concurrent.Futures$1$1.run(Futures.java:635)
        at shade.com.datastax.spark.connector.google.common.util.concurrent.MoreExecutors$DirectExecutorService.execute(MoreExecutors.java:299)
        at shade.com.datastax.spark.connector.google.common.util.concurrent.Futures$1.run(Futures.java:632)
        at shade.com.datastax.spark.connector.google.common.util.concurrent.MoreExecutors$DirectExecutor.execute(MoreExecutors.java:457)
        at shade.com.datastax.spark.connector.google.common.util.concurrent.ExecutionList.executeListener(ExecutionList.java:156)
        at shade.com.datastax.spark.connector.google.common.util.concurrent.ExecutionList.execute(ExecutionList.java:145)
        at shade.com.datastax.spark.connector.google.common.util.concurrent.AbstractFuture.set(AbstractFuture.java:185)
        at com.datastax.driver.core.Connection$Future.onSet(Connection.java:1288)
        at com.datastax.driver.core.Connection$Dispatcher.channelRead0(Connection.java:1070)
        at com.datastax.driver.core.Connection$Dispatcher.channelRead0(Connection.java:993)
        at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
        at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
        at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
        at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
        at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
        at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
        at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310)
        at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:284)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
        at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
        at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1359)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
        at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:935)
        at io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:801)
        at io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:404)
        at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:304)
        at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
        at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138)
        at java.lang.Thread.run(Thread.java:748)

My code:

package com.dev.Engine.JsonConfigRead

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.expr
import java.security.Timestamp
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.sql.types.DoubleType
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.types.StringType
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.cassandra._
import com.datastax.spark._
import com.datastax.spark.connector._
import com.datastax.spark.connector.cql._
import com.datastax.spark.connector.cql.CassandraConnector._
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.streaming.Trigger
import scala.io.Source
import com.dev.Engine.JsonConfigRead.JSONUtils._
import com.dev.Engine.JsonConfigRead.Configuration
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.types.DataType
import scala.collection.mutable.ListBuffer
import org.apache.spark.SparkEnv
import spray.json._
import DefaultJsonProtocol._
import org.json4s.JsonDSL._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.apache.spark.sql.expressions.UserDefinedAggregateFunction

object CassandraSpark {

  def main(args: Array[String]) {

    val conf = new SparkConf(true).setAppName("cassandraspool").setMaster("local[*]")
    conf.set("spark.driver.allowMultipleContexts", "true")
    conf.set("spark.cassandra.connection.host", "10.135.83.28")
    conf.set("spark.cassandra.auth.username", "risp_rw")
    conf.set("spark.cassandra.auth.password", "Ri$pdev4w")

    val sc = new SparkContext(conf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)

    val csc = new CassandraSQLContext(sc)

    val df = sqlContext
      .read
      .format("org.apache.spark.sql.cassandra")
      .options(Map("table" -> "stocktransferorder", "keyspace" -> "risp1"))
      .load()

    df.show
    df.write.mode("append").json("/app/UDMF/cassandra/")
  }

}

Using spark-cassandra-connector version 2.3.1...

1 Answer:

Answer 0 (score: 0)

If the SparkSession is not started with authentication configured, you should specify the username and password when launching via spark-submit:

--conf spark.cassandra.auth.username=<username> \  
--conf spark.cassandra.auth.password=<password>
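Putting it together, a full launch might look like the sketch below. The main class and connection host are taken from the question; the JAR name is a placeholder for your actual artifact:

spark-submit \
  --class com.dev.Engine.JsonConfigRead.CassandraSpark \
  --conf spark.cassandra.connection.host=10.135.83.28 \
  --conf spark.cassandra.auth.username=<username> \
  --conf spark.cassandra.auth.password=<password> \
  <your-application>.jar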
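Alternatively, if the credentials are to stay in code, the answer's mention of SparkSession presumably refers to setting them when the session is built. A minimal sketch under that assumption, reusing the connection details from the question (with the credentials left as placeholders):

import org.apache.spark.sql.SparkSession

// Credentials are supplied to the Cassandra connector through the
// session configuration instead of spark-submit flags.
val spark = SparkSession.builder()
  .appName("cassandraspool")
  .config("spark.cassandra.connection.host", "10.135.83.28")
  .config("spark.cassandra.auth.username", "<username>")
  .config("spark.cassandra.auth.password", "<password>")
  .getOrCreate()

val df = spark.read
  .format("org.apache.spark.sql.cassandra")
  .options(Map("table" -> "stocktransferorder", "keyspace" -> "risp1"))
  .load()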