NodeJS通过JDBC连接到配置单元会引发错误

时间:2019-06-05 14:04:24

标签: node.js hadoop jdbc hive

我正在尝试从nodeJS应用程序连接到配置单元实例。我找到了一个使用jdbc的示例,我正在尝试进行配置。

// Node index.js
var JDBC = require('jdbc');
var jinst = require('jdbc/lib/jinst');
var asyncjs = require('async');
var util = require('util');

//create a jvm and specify the jars required in the classpath and other jvm parameters
if (!jinst.isJvmCreated()) {
    jinst.addOption("-Xrs");
    jinst.setupClasspath(['./hive-jdbc-1.2.2-standalone.jar',
        './hadoop-common-2.7.4.jar']);
}

//read the input arguments
var server = 'lvshdc2en00.myserver.com';
var port = 10025;
var schema = 'default';
var principal = 'hive/_HOST@HDP.LOCAL';

//specify the hive connection parameters
var conf = {
    url: 'jdbc:hive2://' + server + ':' + port + '/' + schema + ';principal='+principal,
    drivername: 'org.apache.hive.jdbc.HiveDriver',
    properties: {
    }
};

var hive = new JDBC(conf);

//initialize the connection

hive.initialize(function (err) {

    if (err) {
        console.log(err);
    }
});

运行此测试应用程序时,出现以下错误:

node index5.js
Jun 05, 2019 6:56:57 AM org.apache.hive.jdbc.Utils parseURL
INFO: Supplied authorities: lvshdc2en00.myserver.com:10025
Jun 05, 2019 6:56:57 AM org.apache.hive.jdbc.Utils parseURL
INFO: Resolved authority: lvshdc2en00.myserver.com:10025
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
{ [Error: Error running static method
java.lang.NoClassDefFoundError: org/apache/commons/configuration/Configuration
    at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.<init>(DefaultMetricsSystem.java:38)
    at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.<clinit>(DefaultMetricsSystem.java:36)
    at org.apache.hadoop.security.UserGroupInformation$UgiMetrics.create(UserGroupInformation.java:122)
    at org.apache.hadoop.security.UserGroupInformation.<clinit>(UserGroupInformation.java:238)
    at org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.createClientWithConf(HadoopThriftAuthBridge.java:85)
    at org.apache.hive.service.auth.KerberosSaslHelper.getKerberosTransport(KerberosSaslHelper.java:55)
    at org.apache.hive.jdbc.HiveConnection.createBinaryTransport(HiveConnection.java:436)
    at org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:203)
    at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:178)
    at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:105)
    at java.sql.DriverManager.getConnection(DriverManager.java:664)
    at java.sql.DriverManager.getConnection(DriverManager.java:208)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
Caused by: java.lang.ClassNotFoundException: org.apache.commons.configuration.Configuration
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    ... 16 more
] cause: nodeJava_java_lang_NoClassDefFoundError {} }

我不清楚该问题指的是什么。 我是否真的需要在正在建立连接的客户端计算机上安装hadoop / hive,还是独立的jar应该做的事情?

0 个答案:

没有答案