Hive ClassNotFoundException even though all the jars have been added to the Maven repository

Posted: 2016-12-30 06:26:01

Tags: java mysql hadoop hive

I have added all the jars this project requires, but I still cannot resolve this exception. Can anyone suggest what might be wrong? Could you also tell me how to grant access rights to the Hive database? Thanks in advance.

java.lang.ClassNotFoundException: org.apache.hadoop.hive.jdbc.HiveDriver
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:190)
    at org.ezytruk.com.CreateHiveExternalTable.createHiveExternalTable(CreateHiveExternalTable.java:20)
    at org.ezytruk.com.CreateHiveExternalTable.main(CreateHiveExternalTable.java:53)
Exception in thread "main" java.sql.SQLException: No suitable driver found for jdbc:hive://localhost/EZYTRUK
    at java.sql.DriverManager.getConnection(DriverManager.java:596)
    at java.sql.DriverManager.getConnection(DriverManager.java:215)
    at org.ezytruk.com.CreateHiveExternalTable.createHiveExternalTable(CreateHiveExternalTable.java:39)
    at org.ezytruk.com.CreateHiveExternalTable.main(CreateHiveExternalTable.java:53)

pom.xml:

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>BigData</groupId>
  <artifactId>BigData</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <properties>
    <slf4j.version>1.6.1</slf4j.version>
    <hadoop-version>2.6.0</hadoop-version>
    <mysql-connector-version>5.1.40</mysql-connector-version>
    <sqoop-core-version>1.99.3</sqoop-core-version>
    <zookeeper-version>3.4.9</zookeeper-version>
    <hive-jdbc-version>1.2.1</hive-jdbc-version>
    <commons-io-version>2.2</commons-io-version>
    <commons-logging.version>1.2</commons-logging.version>
  </properties>
  <dependencies>
    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
      <version>${commons-io-version}</version>
    </dependency>
    <dependency>
      <groupId>commons-logging</groupId>
      <artifactId>commons-logging</artifactId>
      <version>${commons-logging.version}</version>
    </dependency>
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>${mysql-connector-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>${hadoop-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-common</artifactId>
      <version>${hadoop-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-core</artifactId>
      <version>1.2.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.sqoop</groupId>
      <artifactId>sqoop-core</artifactId>
      <version>${sqoop-core-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.sqoop</groupId>
      <artifactId>sqoop-client</artifactId>
      <version>${sqoop-core-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.sqoop</groupId>
      <artifactId>sqoop-common</artifactId>
      <version>${sqoop-core-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.sqoop.connector</groupId>
      <artifactId>sqoop-connector-generic-jdbc</artifactId>
      <version>${sqoop-core-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.sqoop</groupId>
      <artifactId>sqoop</artifactId>
      <version>1.4.1-incubating</version>
    </dependency>
    <dependency>
      <groupId>org.apache.zookeeper</groupId>
      <artifactId>zookeeper</artifactId>
      <version>${zookeeper-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>${hive-jdbc-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-exec</artifactId>
      <version>${hive-jdbc-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-metastore</artifactId>
      <version>${hive-jdbc-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-common</artifactId>
      <version>${hive-jdbc-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-service</artifactId>
      <version>${hive-jdbc-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-shims</artifactId>
      <version>${hive-jdbc-version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-serde</artifactId>
      <version>${hive-jdbc-version}</version>
    </dependency>
  </dependencies>
  <packaging>war</packaging>
  <build>
    <sourceDirectory>src</sourceDirectory>
    <plugins>
      <plugin>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.3</version>
        <configuration>
          <source>1.7</source>
          <target>1.7</target>
        </configuration>
      </plugin>
      <plugin>
        <artifactId>maven-war-plugin</artifactId>
        <version>2.6</version>
        <configuration>
          <warSourceDirectory>WebContent</warSourceDirectory>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>

Program:

    package org.hive.com;

    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    import com.mysql.jdbc.Statement;

    public class CreateHiveExternalTable {

        public static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

        public static void createHiveExternalTable() throws FileNotFoundException, IOException, SQLException {
            try {
                Class.forName(driverName);
            } catch (ClassNotFoundException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }

            Configuration config = new Configuration();
            config.addResource(new Path("/usr/local/hadoop/etc/hadoop/conf/core-site.xml"));
            config.addResource(new Path("/usr/local/hadoop/etc/hadoop/conf/hdfs-site.xml"));

            Connection connect = DriverManager.getConnection("jdbc:hive://localhost/hivedb", "hive", "");
            Statement stmt = (Statement) connect.createStatement();
            //String tableName = properties.getProperty("hive_table_name");
            stmt.executeQuery("CREATE EXTERNAL TABLE IF NOT EXISTS"
                + "SHIPPER(S_ID INT,S_NAME VARCHAR(100),S_ADDR VARCHAR(100),S_CITY VARCHAR(100)"
                + "ROW FORMAT DELIMITED FIELDS TERMINATED BY ','"
                + "LOCATION 'hdfs://localhost://hive'");

            System.out.println("Table created.");
            connect.close();
        }

        public static void main(String[] args) throws FileNotFoundException, IOException, SQLException {
            CreateHiveExternalTable hiveTable = new CreateHiveExternalTable();
            hiveTable.createHiveExternalTable();
        }
    }

2 Answers:

Answer 0 (score: 1)

From this post: Connect from Java to Hive using JDBC

Try

private static String driverName = "org.apache.hive.jdbc.HiveDriver";

instead of

private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

I hope you have also included the Class.forName(driverName) statement in your own code.

Also, use:

    Connection connect = DriverManager.getConnection("jdbc:hive2://localhost:HIVEPORT/hivedb","hive","");

instead of

    Connection connect = DriverManager.getConnection("jdbc:hive://localhost/hivedb","hive","");

I'm not sure which port your Hive instance is running on, but remember to change this part:

localhost:HIVEPORT
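
Putting both changes together, here is a minimal sketch of a standalone connection test (the class name HiveConnectionTest is made up for illustration, and the port 10000, database hivedb, and user hive are assumptions based on a default HiveServer2 setup; adjust them to your environment):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class HiveConnectionTest {
        public static void main(String[] args) throws Exception {
            // HiveServer2 driver class lives under org.apache.hive, not org.apache.hadoop.hive
            Class.forName("org.apache.hive.jdbc.HiveDriver");

            // jdbc:hive2:// scheme; 10000 is the default HiveServer2 thrift port
            try (Connection conn = DriverManager.getConnection(
                    "jdbc:hive2://localhost:10000/hivedb", "hive", "");
                 Statement stmt = conn.createStatement()) {
                // note: java.sql.Statement, not com.mysql.jdbc.Statement as in the question
                stmt.execute("SHOW TABLES");
                System.out.println("Connected to HiveServer2.");
            }
        }
    }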

Answer 1 (score: 1)

hive.server2.thrift.port is the property you can check to find the port.

Issue the command "set hive.server2.thrift.port;" in the Hive shell; it will print Hive's port number.

By default the Hive port is set to 10000, but you can verify it with the command above in the Hive shell.
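
For example, a quick check in the Hive shell (the output line shown assumes the default configuration):

    hive> set hive.server2.thrift.port;
    hive.server2.thrift.port=10000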