MemSQL cannot find the database

Date: 2016-09-05 16:43:55

Tags: apache-spark apache-spark-sql memsql

Hi, I am writing some code that connects to MemSQL and runs a simple query.

    // Relevant imports (not shown in this snippet):
    //   import org.apache.spark.{SparkConf, SparkContext}
    //   import org.apache.spark.sql.SQLContext
    //   plus the MemSQLContext import from the memsql-connector package

    // This block runs inside a for-comprehension over Task:
    _ <- Task.delay {
      // Spark configuration for the MemSQL connector: host, port, credentials,
      // and the default database to query
      val conf = new SparkConf()
        .setAppName("MemSQL Connector Test")
        .set("memsql.host", "192.168.99.100")
        .set("spark.app.id", "memsql")
        .set("memsql.port", "3307")
        .set("memsql.user", "root")
        .set("memsql.password", "password")
        .set("memsql.defaultDatabase", "connect_db")
        .setMaster("local")

      val sc = new SparkContext(conf)
      val sqlContext = new SQLContext(sc) // created but not used below
      val msc = new MemSQLContext(sc)

      try {
        // Query the impressions table in the default database (connect_db)
        val rdd = msc.sql("SELECT * FROM impressions")
        log.info(s"rdd = $rdd")
        log.info(s"rdd.collect() = ${rdd.collect()}")
      } catch {
        case e: Throwable =>
          println(e.getStackTraceString)
      }
    }

I get the following exception/error and cannot work out the root cause:

17:31:20.862 [Executor task launch worker-0] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 0.0 (TID 0)
java.sql.SQLException: Cannot create PoolableConnectionFactory (Unknown database 'connect_db_0')
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2294) ~[commons-dbcp2-2.1.1.jar:2.1.1]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:2039) ~[commons-dbcp2-2.1.1.jar:2.1.1]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1533) ~[commons-dbcp2-2.1.1.jar:2.1.1]
    at com.memsql.spark.connector.MemSQLConnectionPool$.connect(MemSQLConnectionPool.scala:34) ~[memsql-connector_2.10-1.3.3.jar:1.3.3]
    at com.memsql.spark.connector.rdd.MemSQLRDD$$anon$1.<init>(MemSQLRDD.scala:241) ~[memsql-connector_2.10-1.3.3.jar:1.3.3]
    at com.memsql.spark.connector.rdd.MemSQLRDD.compute(MemSQLRDD.scala:231) ~[memsql-connector_2.10-1.3.3.jar:1.3.3]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.scheduler.Task.run(Task.scala:88) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_92]
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_92]
    at java.lang.Thread.run(Thread.java:745) [na:1.8.0_92]


<.... more stack trace ....>

17:31:20.885 [task-result-getter-0] ERROR o.a.spark.scheduler.TaskSetManager - Task 0 in stage 0.0 failed 1 times; aborting job
17:31:20.892 [Executor task launch worker-0] ERROR org.apache.spark.executor.Executor - Exception in task 1.0 in stage 0.0 (TID 1)
java.sql.SQLException: Cannot create PoolableConnectionFactory (Unknown database 'connect_db_1')
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2294) ~[commons-dbcp2-2.1.1.jar:2.1.1]
    at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:2039) ~[commons-dbcp2-2.1.1.jar:2.1.1]
    at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1533) ~[commons-dbcp2-2.1.1.jar:2.1.1]
    at com.memsql.spark.connector.MemSQLConnectionPool$.connect(MemSQLConnectionPool.scala:34) ~[memsql-connector_2.10-1.3.3.jar:1.3.3]
    at com.memsql.spark.connector.rdd.MemSQLRDD$$anon$1.<init>(MemSQLRDD.scala:241) ~[memsql-connector_2.10-1.3.3.jar:1.3.3]
    at com.memsql.spark.connector.rdd.MemSQLRDD.compute(MemSQLRDD.scala:231) ~[memsql-connector_2.10-1.3.3.jar:1.3.3]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.scheduler.Task.run(Task.scala:88) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214) ~[spark-core_2.10-1.5.2.jar:1.5.2]
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_92]
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_92]
    at java.lang.Thread.run(Thread.java:745) [na:1.8.0_92]
Caused by: com.mysql.jdbc.exceptions.jdbc4.MySQLSyntaxErrorException: Unknown database 'connect_db_1'
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_92]
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_92]
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_92]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[na:1.8.0_92]
    at com.mysql.jdbc.Util.handleNewInstance(Util.java:377) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.Util.getInstance(Util.java:360) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:978) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3887) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3823) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:870) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.MysqlIO.proceedHandshakeWithPluggableAuthentication(MysqlIO.java:1659) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.MysqlIO.doHandshake(MysqlIO.java:1206) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2234) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2265) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2064) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:790) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.JDBC4Connection.<init>(JDBC4Connection.java:44) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_92]
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_92]
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_92]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[na:1.8.0_92]
    at com.mysql.jdbc.Util.handleNewInstance(Util.java:377) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:395) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:325) ~[mysql-connector-java-5.1.34.jar:5.1.34]
    at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39) ~[commons-dbcp2-2.1.1.jar:2.1.1]
    at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:256) ~[commons-dbcp2-2.1.1.jar:2.1.1]
    at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2304) ~[commons-dbcp2-2.1.1.jar:2.1.1]
    at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2290) ~[commons-dbcp2-2.1.1.jar:2.1.1]
    ... 22 common frames omitted

The database connect_db and the impressions table already exist in MemSQL. Why is it trying to connect to connect_db_0 and connect_db_1? Maybe that is a hint at something?

I am using the MemSQL connector 1.3.3 and Apache Spark 1.5.2.
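
For what it's worth, this is the kind of plain-JDBC check I can run against the same host and port to see which databases that node actually exposes (a minimal sketch only; the object name DbCheck is mine for illustration, and it assumes the MySQL JDBC driver shown in the stack trace is on the classpath):

    import java.sql.DriverManager

    // Minimal sketch: list the databases visible on the node the connector points at,
    // using the same host, port, and credentials as the SparkConf above.
    object DbCheck extends App {
      val url = "jdbc:mysql://192.168.99.100:3307"
      val conn = DriverManager.getConnection(url, "root", "password")
      try {
        val rs = conn.createStatement().executeQuery("SHOW DATABASES")
        while (rs.next()) {
          println(rs.getString(1))
        }
      } finally {
        conn.close()
      }
    }

Listing the databases this way would at least confirm whether connect_db is visible on the node the connector is configured against.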

0 Answers