I need some help getting a test application to work against Spark using the Spark Cassandra connector.
First, the details:
I created a test application in IntelliJ, in Scala, with the following code:
import com.datastax.spark.connector._
import org.apache.spark.{SparkContext, SparkConf}
object test {
  def main(args: Array[String]) {
    val conf = new SparkConf(true).set("spark.cassandra.connection.host", "xx.xxx.xxx.59")
      .set("spark.cassandra.username", "ubuntu")
      .set("spark.cassandra.password", "")
    val sc = new SparkContext("spark://xx.xxx.xxx.xxx:7077", "Season", conf)
    println("Got past the setup.")
    val ks = "test" // analytics_loadset"
    val incf = "kv" // site_detail"
    println("Got past setting the variables.")
    val rdd = sc.cassandraTable(ks, incf)
    println(rdd.count)
    println("Got past reading the RDD.")
    println("done with test")
    sc.stop()
  }
}
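Separately, to help tell a Spark-master connection problem apart from a Cassandra connection problem, I plan to try the same read against a local master. This is only a minimal sketch I have not run yet; it uses the same placeholder host, keyspace, and table as above, and the testLocal/SeasonLocal names are just placeholders:

import com.datastax.spark.connector._
import org.apache.spark.{SparkContext, SparkConf}

// Same read as the test above, but with a local master ("local[*]"),
// so the Cassandra connection is exercised without the remote Spark master.
object testLocal {
  def main(args: Array[String]) {
    val conf = new SparkConf(true)
      .set("spark.cassandra.connection.host", "xx.xxx.xxx.59")
      .set("spark.cassandra.username", "ubuntu")
      .set("spark.cassandra.password", "")
    val sc = new SparkContext("local[*]", "SeasonLocal", conf)
    val rdd = sc.cassandraTable("test", "kv")
    println(rdd.count)
    sc.stop()
  }
}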
Here is what I get when I run the original test above:
"C:\Program Files\Java\jdk1.7.0_60\bin\java" -Didea.launcher.port=7538 "-Didea.launcher.bin.path=C:\Program Files (x86)\JetBrains\IntelliJ IDEA Community Edition 13.1.4\bin" -Dfile.encoding=UTF-8 -classpath "C:\Program Files\Java\jdk1.7.0_60\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\deploy.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\javaws.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\jce.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\jfxrt.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\management-agent.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\plugin.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\resources.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\rt.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\ext\access-bridge-64.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\ext\dnsns.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\ext\jaccess.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\ext\localedata.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\ext\sunec.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\ext\sunjce_provider.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\ext\sunmscapi.jar;C:\Program Files\Java\jdk1.7.0_60\jre\lib\ext\zipfs.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\target\scala-2.10\classes;C:\Users\JEUser\.sbt\boot\scala-2.10.4\lib\scala-library.jar;C:\Users\JEUser\.ivy2\cache\colt\colt\jars\colt-1.2.0.jar;C:\Users\JEUser\.ivy2\cache\com.clearspring.analytics\stream\jars\stream-2.5.1.jar;C:\Users\JEUser\.ivy2\cache\com.codahale.metrics\metrics-core\bundles\metrics-core-3.0.2.jar;C:\Users\JEUser\.ivy2\cache\com.codahale.metrics\metrics-graphite\bundles\metrics-graphite-3.0.0.jar;C:\Users\JEUser\.ivy2\cache\com.codahale.metrics\metrics-json\bundles\metrics-json-3.0.0.jar;C:\Users\JEUser\.ivy2\cache\com.codahale.metrics\metrics-jvm\bundles\metrics-jvm-3.0.0.jar;C:\Users\JEUser\.ivy2\cache\com.datastax.cassandra\cassandra-driver-core\jars\cassandra-driver-core-2.0.4-sources.jar;C:\Users\JEUser\.ivy2\cache\com.datastax.cassandra\cassandra-driver-core\jars\cassandra-driver-core-2.0.4.jar;C:\Users\JEUser\.ivy2\cache\com.datastax.spark\spark-cassandra-connector_2.10\jars\spark-cassandra-connector_2.10-1.0.0-rc4.jar;C:\Users\JEUser\.ivy2\cache\com.esotericsoftware.kryo\kryo\bundles\kryo-2.21.jar;C:\Users\JEUser\.ivy2\cache\com.esotericsoftware.minlog\minlog\jars\minlog-1.2.jar;C:\Users\JEUser\.ivy2\cache\com.esotericsoftware.reflectasm\reflectasm\jars\reflectasm-1.07-shaded.jar;C:\Users\JEUser\.ivy2\cache\com.fasterxml.jackson.core\jackson-annotations\bundles\jackson-annotations-2.3.0.jar;C:\Users\JEUser\.ivy2\cache\com.fasterxml.jackson.core\jackson-core\bundles\jackson-core-2.3.0.jar;C:\Users\JEUser\.ivy2\cache\com.fasterxml.jackson.core\jackson-databind\bundles\jackson-databind-2.3.0.jar;C:\Users\JEUser\.ivy2\cache\com.google.code.findbugs\jsr305\jars\jsr305-1.3.9.jar;C:\Users\JEUser\.ivy2\cache\com.google.guava\guava\bundles\guava-15.0.jar;C:\Users\JEUser\.ivy2\cache\com.ning\compress-lzf\bundles\compress-lzf-1.0.0.jar;C:\Users\JEUser\.ivy2\cache\com.thoughtworks.paranamer\paranamer\jars\paranamer-2.6.jar;C:\Users\JEUser\.ivy2\cache\com.twitter\chill-java\jars\chill-java-0.3.6.jar;C:\Users\JEUser\.ivy2\cache\com.twitter\chill_2.10\jars\chill_2.10-0.3.6.jar;C:\Users\JEUser\.ivy2\cache\com.typesafe\config\bundles\config-1.0.2.jar;C:\Users\JEUser\.ivy2\cache\commons-beanutils\commons-beanutils\jars\commons-beanutils-1.7.0.jar;C:\Users\JEUser\.ivy2\cache\commo
ns-beanutils\commons-beanutils-core\jars\commons-beanutils-core-1.8.0.jar;C:\Users\JEUser\.ivy2\cache\commons-codec\commons-codec\jars\commons-codec-1.5.jar;C:\Users\JEUser\.ivy2\cache\commons-collections\commons-collections\jars\commons-collections-3.2.1.jar;C:\Users\JEUser\.ivy2\cache\commons-configuration\commons-configuration\jars\commons-configuration-1.6.jar;C:\Users\JEUser\.ivy2\cache\commons-digester\commons-digester\jars\commons-digester-1.8.jar;C:\Users\JEUser\.ivy2\cache\commons-el\commons-el\jars\commons-el-1.0.jar;C:\Users\JEUser\.ivy2\cache\commons-httpclient\commons-httpclient\jars\commons-httpclient-3.1.jar;C:\Users\JEUser\.ivy2\cache\commons-io\commons-io\jars\commons-io-2.4.jar;C:\Users\JEUser\.ivy2\cache\commons-lang\commons-lang\jars\commons-lang-2.4.jar;C:\Users\JEUser\.ivy2\cache\commons-logging\commons-logging\jars\commons-logging-1.1.1.jar;C:\Users\JEUser\.ivy2\cache\commons-net\commons-net\jars\commons-net-2.2.jar;C:\Users\JEUser\.ivy2\cache\concurrent\concurrent\jars\concurrent-1.3.4.jar;C:\Users\JEUser\.ivy2\cache\hsqldb\hsqldb\jars\hsqldb-1.8.0.10.jar;C:\Users\JEUser\.ivy2\cache\io.netty\netty\bundles\netty-3.9.0.Final.jar;C:\Users\JEUser\.ivy2\cache\io.netty\netty-all\jars\netty-all-4.0.17.Final.jar;C:\Users\JEUser\.ivy2\cache\jline\jline\jars\jline-0.9.94.jar;C:\Users\JEUser\.ivy2\cache\joda-time\joda-time\jars\joda-time-2.3.jar;C:\Users\JEUser\.ivy2\cache\log4j\log4j\bundles\log4j-1.2.17.jar;C:\Users\JEUser\.ivy2\cache\net.java.dev.jets3t\jets3t\jars\jets3t-0.7.1.jar;C:\Users\JEUser\.ivy2\cache\net.sf.py4j\py4j\jars\py4j-0.8.1.jar;C:\Users\JEUser\.ivy2\cache\org.apache.ant\ant\jars\ant-1.9.0.jar;C:\Users\JEUser\.ivy2\cache\org.apache.ant\ant-launcher\jars\ant-launcher-1.9.0.jar;C:\Users\JEUser\.ivy2\cache\org.apache.cassandra\cassandra-clientutil\jars\cassandra-clientutil-2.0.9.jar;C:\Users\JEUser\.ivy2\cache\org.apache.cassandra\cassandra-thrift\jars\cassandra-thrift-2.0.9.jar;C:\Users\JEUser\.ivy2\cache\org.apache.commons\commons-lang3\jars\commons-lang3-3.3.2.jar;C:\Users\JEUser\.ivy2\cache\org.apache.commons\commons-math\jars\commons-math-2.1.jar;C:\Users\JEUser\.ivy2\cache\org.apache.curator\curator-client\bundles\curator-client-2.4.0.jar;C:\Users\JEUser\.ivy2\cache\org.apache.curator\curator-framework\bundles\curator-framework-2.4.0.jar;C:\Users\JEUser\.ivy2\cache\org.apache.curator\curator-recipes\bundles\curator-recipes-2.4.0.jar;C:\Users\JEUser\.ivy2\cache\org.apache.hadoop\hadoop-client\jars\hadoop-client-1.0.4.jar;C:\Users\JEUser\.ivy2\cache\org.apache.hadoop\hadoop-core\jars\hadoop-core-1.0.4.jar;C:\Users\JEUser\.ivy2\cache\org.apache.httpcomponents\httpclient\jars\httpclient-4.2.5.jar;C:\Users\JEUser\.ivy2\cache\org.apache.httpcomponents\httpcore\jars\httpcore-4.2.4.jar;C:\Users\JEUser\.ivy2\cache\org.apache.mesos\mesos\jars\mesos-0.18.1-shaded-protobuf.jar;C:\Users\JEUser\.ivy2\cache\org.apache.spark\spark-core_2.10\jars\spark-core_2.10-1.0.2.jar;C:\Users\JEUser\.ivy2\cache\org.apache.thrift\libthrift\jars\libthrift-0.9.1.jar;C:\Users\JEUser\.ivy2\cache\org.apache.zookeeper\zookeeper\jars\zookeeper-3.4.5.jar;C:\Users\JEUser\.ivy2\cache\org.codehaus.jackson\jackson-core-asl\jars\jackson-core-asl-1.8.8.jar;C:\Users\JEUser\.ivy2\cache\org.codehaus.jackson\jackson-mapper-asl\jars\jackson-mapper-asl-1.8.8.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty\jetty-continuation\jars\jetty-continuation-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty\jetty-http\jars\jetty-http-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.je
tty\jetty-io\jars\jetty-io-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty\jetty-jndi\jars\jetty-jndi-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty\jetty-plus\jars\jetty-plus-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty\jetty-security\jars\jetty-security-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty\jetty-server\jars\jetty-server-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty\jetty-servlet\jars\jetty-servlet-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty\jetty-util\jars\jetty-util-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty\jetty-webapp\jars\jetty-webapp-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty\jetty-xml\jars\jetty-xml-8.1.14.v20131031.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty.orbit\javax.activation\orbits\javax.activation-1.1.0.v201105071233.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty.orbit\javax.mail.glassfish\orbits\javax.mail.glassfish-1.4.1.v201005082020.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty.orbit\javax.servlet\orbits\javax.servlet-3.0.0.v201112011016.jar;C:\Users\JEUser\.ivy2\cache\org.eclipse.jetty.orbit\javax.transaction\orbits\javax.transaction-1.1.1.v201105210645.jar;C:\Users\JEUser\.ivy2\cache\org.joda\joda-convert\jars\joda-convert-1.2.jar;C:\Users\JEUser\.ivy2\cache\org.json4s\json4s-ast_2.10\jars\json4s-ast_2.10-3.2.6.jar;C:\Users\JEUser\.ivy2\cache\org.json4s\json4s-core_2.10\jars\json4s-core_2.10-3.2.6.jar;C:\Users\JEUser\.ivy2\cache\org.json4s\json4s-jackson_2.10\jars\json4s-jackson_2.10-3.2.6.jar;C:\Users\JEUser\.ivy2\cache\org.objenesis\objenesis\jars\objenesis-1.2.jar;C:\Users\JEUser\.sbt\boot\scala-2.10.4\lib\scala-compiler.jar;C:\Users\JEUser\.sbt\boot\scala-2.10.4\lib\scala-reflect.jar;C:\Users\JEUser\.ivy2\cache\org.scala-lang\scalap\jars\scalap-2.10.4.jar;C:\Users\JEUser\.ivy2\cache\org.slf4j\jcl-over-slf4j\jars\jcl-over-slf4j-1.7.5.jar;C:\Users\JEUser\.ivy2\cache\org.slf4j\jul-to-slf4j\jars\jul-to-slf4j-1.7.5.jar;C:\Users\JEUser\.ivy2\cache\org.slf4j\slf4j-api\jars\slf4j-api-1.7.5.jar;C:\Users\JEUser\.ivy2\cache\org.slf4j\slf4j-log4j12\jars\slf4j-log4j12-1.7.5.jar;C:\Users\JEUser\.ivy2\cache\org.spark-project\pyrolite\jars\pyrolite-2.0.1.jar;C:\Users\JEUser\.ivy2\cache\org.spark-project.akka\akka-actor_2.10\jars\akka-actor_2.10-2.2.3-shaded-protobuf.jar;C:\Users\JEUser\.ivy2\cache\org.spark-project.akka\akka-remote_2.10\bundles\akka-remote_2.10-2.2.3-shaded-protobuf.jar;C:\Users\JEUser\.ivy2\cache\org.spark-project.akka\akka-slf4j_2.10\bundles\akka-slf4j_2.10-2.2.3-shaded-protobuf.jar;C:\Users\JEUser\.ivy2\cache\org.spark-project.protobuf\protobuf-java\jars\protobuf-java-2.4.1-shaded.jar;C:\Users\JEUser\.ivy2\cache\org.tachyonproject\tachyon\jars\tachyon-0.4.1-thrift.jar;C:\Users\JEUser\.ivy2\cache\org.uncommons.maths\uncommons-maths\jars\uncommons-maths-1.2.2a.jar;C:\Users\JEUser\.ivy2\cache\org.xerial.snappy\snappy-java\bundles\snappy-java-1.0.5.jar;C:\Users\JEUser\.ivy2\cache\oro\oro\jars\oro-2.0.8.jar;C:\Users\JEUser\.ivy2\cache\xmlenc\xmlenc\jars\xmlenc-0.52.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\spark-cassandra-connector-java_2.10-1.0.0-SNAPSHOT.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\netty-3.9.0.Final.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\spark-cassandra-connector_2.10-1.0.0-SNAPSHOT.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\sbt-assembly-0.11.2.jar;C:\repository\trunk\Scala_210_wspace
\Seasonality_v6\lib\cassandra-driver-dse-2.0.2.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\spark-core_2.10-0.9.1.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\metrics-core-3.0.2.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\fastutil-6.5.15.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\snappy-java-1.0.5.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\guava-15.0.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\slf4j-api-1.7.5.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\slf4j-log4j12-1.7.6.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\libthrift-0.9.1.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\joda-time-2.4.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\log4j-1.2.17.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\cassandra-driver-core-2.0.2.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\apache-cassandra-thrift-2.0.8.jar;C:\repository\trunk\Scala_210_wspace\Seasonality_v6\lib\apache-cassandra-clientutil-2.0.8.jar;C:\Program Files (x86)\JetBrains\IntelliJ IDEA Community Edition 13.1.4\lib\idea_rt.jar" com.intellij.rt.execution.application.AppMain test
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/C:/Users/JEUser/.ivy2/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/C:/repository/trunk/Scala_210_wspace/Seasonality_v6/lib/slf4j-log4j12-1.7.6.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
14/08/27 13:23:10 INFO spark.SecurityManager: Changing view acls to: JEUser
14/08/27 13:23:10 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(JEUser)
14/08/27 13:23:10 INFO slf4j.Slf4jLogger: Slf4jLogger started
14/08/27 13:23:10 INFO Remoting: Starting remoting
14/08/27 13:23:10 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://spark@JEMAC17:49565]
14/08/27 13:23:10 INFO Remoting: Remoting now listens on addresses: [akka.tcp://spark@JEMAC17:49565]
14/08/27 13:23:10 INFO spark.SparkEnv: Registering MapOutputTracker
14/08/27 13:23:10 INFO spark.SparkEnv: Registering BlockManagerMaster
14/08/27 13:23:10 INFO storage.DiskBlockManager: Created local directory at C:\Users\JEUser\AppData\Local\Temp\spark-local-20140827132310-b588
14/08/27 13:23:10 INFO storage.MemoryStore: MemoryStore started with capacity 1080.6 MB.
14/08/27 13:23:10 INFO network.ConnectionManager: Bound socket to port 49568 with id = ConnectionManagerId(JEMAC17,49568)
14/08/27 13:23:10 INFO storage.BlockManagerMaster: Trying to register BlockManager
14/08/27 13:23:10 INFO storage.BlockManagerInfo: Registering block manager JEMAC17:49568 with 1080.6 MB RAM
14/08/27 13:23:10 INFO storage.BlockManagerMaster: Registered BlockManager
14/08/27 13:23:10 INFO spark.HttpServer: Starting HTTP Server
14/08/27 13:23:10 INFO server.Server: jetty-8.1.14.v20131031
14/08/27 13:23:10 INFO server.AbstractConnector: Started SocketConnector@0.0.0.0:49569
14/08/27 13:23:10 INFO broadcast.HttpBroadcast: Broadcast server started at http://169.254.179.207:49569
14/08/27 13:23:10 INFO spark.HttpFileServer: HTTP File server directory is C:\Users\JEUser\AppData\Local\Temp\spark-d0f66cca-8480-4932-85a6-691ee93c4fbd
14/08/27 13:23:10 INFO spark.HttpServer: Starting HTTP Server
14/08/27 13:23:10 INFO server.Server: jetty-8.1.14.v20131031
14/08/27 13:23:10 INFO server.AbstractConnector: Started SocketConnector@0.0.0.0:49570
14/08/27 13:23:11 INFO server.Server: jetty-8.1.14.v20131031
14/08/27 13:23:11 INFO server.AbstractConnector: Started SelectChannelConnector@0.0.0.0:4040
14/08/27 13:23:11 INFO ui.SparkUI: Started SparkUI at http://JEMAC17:4040
14/08/27 13:23:20 INFO client.AppClient$ClientActor: Connecting to master spark://54.183.137.105:7077...
Got past the setup.
Got past setting the variables.
14/08/27 13:23:20 WARN core.FrameCompressor: Cannot find LZ4 class, you should make sure the LZ4 library is in the classpath if you intend to use it. LZ4 compression will not be available for the protocol.
14/08/27 13:23:21 WARN core.ControlConnection: Found host with 0.0.0.0 as rpc_address, using listen_address (/172.31.7.123) to contact it instead. If this is incorrect you should avoid the use of 0.0.0.0 server side.
14/08/27 13:23:21 WARN core.ControlConnection: Found host with 0.0.0.0 as rpc_address, using listen_address (/172.31.7.119) to contact it instead. If this is incorrect you should avoid the use of 0.0.0.0 server side.
14/08/27 13:23:21 WARN core.ControlConnection: Found host with 0.0.0.0 as rpc_address, using listen_address (/172.31.7.120) to contact it instead. If this is incorrect you should avoid the use of 0.0.0.0 server side.
14/08/27 13:23:21 WARN core.ControlConnection: Found host with 0.0.0.0 as rpc_address, using listen_address (/172.31.7.121) to contact it instead. If this is incorrect you should avoid the use of 0.0.0.0 server side.
14/08/27 13:23:21 WARN core.ControlConnection: Found host with 0.0.0.0 as rpc_address, using listen_address (/172.31.7.122) to contact it instead. If this is incorrect you should avoid the use of 0.0.0.0 server side.
14/08/27 13:23:21 INFO core.Cluster: New Cassandra host /172.31.7.122:9042 added
14/08/27 13:23:21 INFO core.Cluster: New Cassandra host /172.31.7.123:9042 added
14/08/27 13:23:21 INFO core.Cluster: New Cassandra host /54.183.197.59:9042 added
14/08/27 13:23:21 INFO core.Cluster: New Cassandra host /172.31.7.120:9042 added
14/08/27 13:23:21 INFO core.Cluster: New Cassandra host /172.31.7.121:9042 added
14/08/27 13:23:21 INFO core.Cluster: New Cassandra host /172.31.7.119:9042 added
14/08/27 13:23:21 INFO cql.CassandraConnector: Connected to Cassandra cluster: jeDev
14/08/27 13:23:26 ERROR core.Session: Error creating pool to /172.31.7.123:9042
com.datastax.driver.core.TransportException: [/172.31.7.123:9042] Cannot connect
at com.datastax.driver.core.Connection.<init>(Connection.java:104)
at com.datastax.driver.core.PooledConnection.<init>(PooledConnection.java:28)
at com.datastax.driver.core.Connection$Factory.open(Connection.java:458)
at com.datastax.driver.core.HostConnectionPool.<init>(HostConnectionPool.java:85)
14/08/27 13:23:26 ERROR core.Session: Error creating pool to /172.31.7.122:9042
com.datastax.driver.core.TransportException: [/172.31.7.122:9042] Cannot connect
at com.datastax.driver.core.Connection.<init>(Connection.java:104)
at com.datastax.driver.core.PooledConnection.<init>(PooledConnection.java:28)
at com.datastax.driver.core.Connection$Factory.open(Connection.java:458)
at com.datastax.driver.core.HostConnectionPool.<init>(HostConnectionPool.java:85)
at com.datastax.driver.core.SessionManager.replacePool(SessionManager.java:241)
at com.datastax.driver.core.SessionManager.access$400(SessionManager.java:42)
at com.datastax.driver.core.SessionManager$3.call(SessionManager.java:273)
at com.datastax.driver.core.SessionManager$3.call(SessionManager.java:265)
14/08/27 13:23:27 ERROR core.Session: Error creating pool to /172.31.7.120:9042
com.datastax.driver.core.TransportException: [/172.31.7.120:9042] Cannot connect
at com.datastax.driver.core.Connection.<init>(Connection.java:104)
at com.datastax.driver.core.PooledConnection.<init>(PooledConnection.java:28)
at com.datastax.driver.core.Connection$Factory.open(Connection.java:458)
at com.datastax.driver.core.HostConnectionPool.<init>(HostConnectionPool.java:85)
at com.datastax.driver.core.SessionManager.replacePool(SessionManager.java:241)
at com.datastax.driver.core.SessionManager.access$400(SessionManager.java:42)
at com.datastax.driver.core.SessionManager$3.call(SessionManager.java:273)
14/08/27 13:23:36 ERROR core.Session: Error creating pool to /172.31.7.120:9042
com.datastax.driver.core.TransportException: [/172.31.7.120:9042] Cannot connect
at com.datastax.driver.core.Connection.<init>(Connection.java:104)
at com.datastax.driver.core.PooledConnection.<init>(PooledConnection.java:28)
at com.datastax.driver.core.Connection$Factory.open(Connection.java:458)
at com.datastax.driver.core.HostConnectionPool.<init>(HostConnectionPool.java:85)
at com.datastax.driver.core.SessionManager.replacePool(SessionManager.java:241)
at com.datastax.driver.core.SessionManager.access$400(SessionManager.java:42)
at com.datastax.driver.core.SessionManager$3.call(SessionManager.java:273)
at com.datastax.driver.core.SessionManager$3.call(SessionManager.java:265)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
14/08/27 13:23:40 INFO client.AppClient$ClientActor: Connecting to master spark://54.183.137.105:7077...
14/08/27 13:23:41 ERROR core.Session: Error creating pool to /172.31.7.121:9042
com.datastax.driver.core.TransportException: [/172.31.7.121:9042] Cannot connect
at com.datastax.driver.core.Connection.<init>(Connection.java:104)
at com.datastax.driver.core.PooledConnection.<init>(PooledConnection.java:28)
at com.datastax.driver.core.Connection$Factory.open(Connection.java:458)
at com.datastax.driver.core.HostConnectionPool.<init>(HostConnectionPool.java:85)
at com.datastax.driver.core.SessionManager.replacePool(SessionManager.java:241)
at com.datastax.driver.core.SessionManager.access$400(SessionManager.java:42)
at com.datastax.driver.core.SessionManager$3.call(SessionManager.java:273)
at com.datastax.driver.core.SessionManager$3.call(SessionManager.java:265)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
14/08/27 13:23:47 ERROR core.Session: Error creating pool to /172.31.7.119:9042
com.datastax.driver.core.TransportException: [/172.31.7.119:9042] Cannot connect
at com.datastax.driver.core.Connection.<init>(Connection.java:104)
at com.datastax.driver.core.PooledConnection.<init>(PooledConnection.java:28)
at com.datastax.driver.core.Connection$Factory.open(Connection.java:458)
at com.datastax.driver.core.HostConnectionPool.<init>(HostConnectionPool.java:85)
at com.datastax.driver.core.SessionManager.replacePool(SessionManager.java:241)
at com.datastax.driver.core.SessionManager.access$400(SessionManager.java:42)
at com.datastax.driver.core.SessionManager$3.call(SessionManager.java:273)
at com.datastax.driver.core.SessionManager$3.call(SessionManager.java:265)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
Exception in thread "main" com.datastax.driver.core.exceptions.NoHostAvailableException: All host(s) tried for query failed (no host was tried)
at com.datastax.driver.core.exceptions.NoHostAvailableException.copy(NoHostAvailableException.java:65)
at com.datastax.driver.core.DefaultResultSetFuture.extractCauseFromExecutionException(DefaultResultSetFuture.java:256)
at com.datastax.driver.core.DefaultResultSetFuture.getUninterruptibly(DefaultResultSetFuture.java:172)
at com.datastax.driver.core.AbstractSession.execute(AbstractSession.java:52)
at com.datastax.driver.core.AbstractSession.execute(AbstractSession.java:36)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.datastax.spark.connector.cql.SessionProxy.invoke(SessionProxy.scala:33)
at com.sun.proxy.$Proxy6.execute(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.datastax.spark.connector.cql.SessionProxy.invoke(SessionProxy.scala:33)
at com.sun.proxy.$Proxy6.execute(Unknown Source)
at com.datastax.spark.connector.rdd.CassandraRDD$$anonfun$cassandraPartitionerClassName$1.apply(CassandraRDD.scala:265)
at com.datastax.spark.connector.rdd.CassandraRDD$$anonfun$cassandraPartitionerClassName$1.apply(CassandraRDD.scala:264)
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:98)
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:97)
at com.datastax.spark.connector.util.IOUtils$.closeAfterUse(IOUtils.scala:16)
at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:97)
at com.datastax.spark.connector.rdd.CassandraRDD.cassandraPartitionerClassName$lzycompute(CassandraRDD.scala:263)
at com.datastax.spark.connector.rdd.CassandraRDD.cassandraPartitionerClassName(CassandraRDD.scala:262)
at com.datastax.spark.connector.rdd.CassandraRDD.getPartitions(CassandraRDD.scala:272)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:204)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:202)
at scala.Option.getOrElse(Option.scala:120)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:202)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1097)
at org.apache.spark.rdd.RDD.count(RDD.scala:861)
at test$.main(test.scala:26)
at test.main(test.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:134)
Caused by: com.datastax.driver.core.exceptions.NoHostAvailableException: All host(s) tried for query failed (no host was tried)
at com.datastax.driver.core.RequestHandler.sendRequest(RequestHandler.java:103)
at com.datastax.driver.core.SessionManager.execute(SessionManager.java:446)
at com.datastax.driver.core.SessionManager.executeQuery(SessionManager.java:482)
at com.datastax.driver.core.SessionManager.executeAsync(SessionManager.java:90)
... 36 more
14/08/27 13:23:48 INFO cql.CassandraConnector: Disconnected from Cassandra cluster: jeDev
14/08/27 13:24:00 INFO client.AppClient$ClientActor: Connecting to master spark://54.183.137.105:7077...
Process finished with exit code 1
What I need is a direction to investigate. I have three hypotheses:
1. I am running this from my laptop; do I need to run it directly on the Spark server? (See the sketch below for a check I intend to try.)
2. Do I have a jar conflict?
3. Something more nefarious, such as an unknown bug I might be hitting?
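To start ruling out hypothesis 1, my plan is a bare DataStax Java driver connection check from the laptop, with no Spark involved, to see whether the nodes (in particular the private 172.31.x.x addresses the cluster reports) are reachable from here at all. This is just a sketch using the same placeholder contact point and credentials as above; the connCheck object and its query are illustrative, not something I have already run:

import com.datastax.driver.core.Cluster

// Bare driver connection test (no Spark): connect to the same contact point
// and read one row from system.local to confirm basic reachability.
object connCheck {
  def main(args: Array[String]) {
    val cluster = Cluster.builder()
      .addContactPoint("xx.xxx.xxx.59")
      .withCredentials("ubuntu", "")
      .build()
    val session = cluster.connect()
    val row = session.execute("SELECT release_version FROM system.local").one()
    println("Connected; Cassandra release_version = " + row.getString("release_version"))
    session.close()
    cluster.close()
  }
}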
Any help would be greatly appreciated.
Thanks,
Eric