Cannot connect to HBase using Java (in Eclipse) in the Cloudera VM

Date: 2015-02-05 17:11:12

Tags: java eclipse hadoop hbase

I am trying to connect to HBase using Java (in Eclipse) in the Cloudera VM, but I am getting an error. The same program runs fine from the command line (after packaging it into a jar).

My Java program:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.*;
    import org.apache.hadoop.hbase.util.Bytes;

    import java.io.IOException;

    public class HbaseConnection {

      public static void main(String[] args) throws IOException {
        // Load the default HBase configuration and layer the quickstart
        // VM's hbase-site.xml on top of it.
        Configuration config = HBaseConfiguration.create();
        config.addResource("/usr/lib/hbase/conf/hbase-site.xml");

        // Write one test row into test_table.
        HTable table = new HTable(config, "test_table");
        byte[] columnFamily = Bytes.toBytes("colf");
        byte[] idColumnName = Bytes.toBytes("id");
        byte[] groupIdColumnName = Bytes.toBytes("g_id");

        Put put = new Put(Bytes.toBytes("testkey"));
        put.add(columnFamily, idColumnName, Bytes.toBytes("test id"));
        put.add(columnFamily, groupIdColumnName, Bytes.toBytes("test group id"));
        table.put(put);
        table.close();
      }
    }
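A note on the addResource call above: Configuration.addResource(String) treats its argument as a classpath resource name rather than a filesystem path, which is why the file also has to be visible on the Eclipse classpath (see below). A minimal sketch of loading the file by explicit local path instead, assuming the same quickstart VM layout (the class name is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class ConfigByPath {
      public static void main(String[] args) {
        Configuration config = HBaseConfiguration.create();
        // addResource(Path) reads the file from the local filesystem
        // directly, whereas addResource(String) searches the classpath
        // for a resource with that name.
        config.addResource(new Path("/usr/lib/hbase/conf/hbase-site.xml"));
        System.out.println(config.get("hbase.rootdir"));
      }
    }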

I have hbase-site.xml in the source folder of my Eclipse project. hbase-site.xml:

  <property>
    <name>hbase.rest.port</name>
    <value>8070</value>
    <description>The port for the HBase REST server.</description>
  </property>

  <property>
    <name>hbase.cluster.distributed</name>
    <value>true</value>
  </property>

  <property>
    <name>hbase.rootdir</name>
    <value>hdfs://quickstart.cloudera:8020/hbase</value>
  </property>

  <property>
    <name>hbase.regionserver.ipc.address</name>
    <value>0.0.0.0</value>
  </property>

  <property>
    <name>hbase.master.ipc.address</name>
    <value>0.0.0.0</value>
  </property>

  <property>
    <name>hbase.thrift.info.bindAddress</name>
    <value>0.0.0.0</value>
  </property>

The error I get when running the program in Eclipse:

log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Exception in thread "main" java.io.IOException: java.lang.reflect.InvocationTargetException
    at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:389)
    at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:366)
    at org.apache.hadoop.hbase.client.HConnectionManager.getConnection(HConnectionManager.java:247)
    at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:188)
    at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:150)
    at com.aig.gds.hadoop.platform.idgen.hbase.HBaseTest.main(HBaseTest.java:34)
Caused by: java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:387)
    ... 5 more
Caused by: java.util.ServiceConfigurationError: org.apache.hadoop.fs.FileSystem: Provider org.apache.hadoop.hdfs.DistributedFileSystem could not be instantiated
    at java.util.ServiceLoader.fail(ServiceLoader.java:224)
    at java.util.ServiceLoader.access$100(ServiceLoader.java:181)
    at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:377)
    at java.util.ServiceLoader$1.next(ServiceLoader.java:445)
    at org.apache.hadoop.fs.FileSystem.loadFileSystems(FileSystem.java:2400)
    at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2411)
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
    at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
    at org.apache.hadoop.fs.Path.getFileSystem(Path.java:287)
    at org.apache.hadoop.hbase.util.DynamicClassLoader.<init>(DynamicClassLoader.java:104)
    at org.apache.hadoop.hbase.protobuf.ProtobufUtil.<clinit>(ProtobufUtil.java:197)
    at org.apache.hadoop.hbase.ClusterId.parseFrom(ClusterId.java:64)
    at org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:69)
    at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:83)
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.retrieveClusterId(HConnectionManager.java:801)
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.<init>(HConnectionManager.java:633)
    ... 10 more
Caused by: java.lang.NoSuchMethodError: org.apache.hadoop.conf.Configuration.addDeprecations([Lorg/apache/hadoop/conf/Configuration$DeprecationDelta;)V
    at org.apache.hadoop.hdfs.HdfsConfiguration.addDeprecatedKeys(HdfsConfiguration.java:66)
    at org.apache.hadoop.hdfs.HdfsConfiguration.<clinit>(HdfsConfiguration.java:31)
    at org.apache.hadoop.hdfs.DistributedFileSystem.<clinit>(DistributedFileSystem.java:114)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at java.lang.Class.newInstance(Class.java:374)
    at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:373)
    ... 26 more

Thanks in advance.

1 Answer:

Answer 0 (score: 2)

The root cause of the problem is in the stack trace:

NoSuchMethodError: org.apache.hadoop.conf.Configuration.addDeprecations

This means that the version of your hadoop-common-* jar is out of sync with the version of your hadoop-hdfs-* jar, or your classpath contains jars from two different Hadoop versions.
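To see which jars the two halves are actually coming from, you can print the location each class was loaded from; a quick diagnostic sketch (the class name is illustrative, not from the original answer):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DistributedFileSystem;

    public class ClasspathCheck {
      public static void main(String[] args) {
        // Each line prints the jar the class was loaded from; both
        // URLs should point into the same Hadoop release.
        System.out.println(Configuration.class
            .getProtectionDomain().getCodeSource().getLocation());
        System.out.println(DistributedFileSystem.class
            .getProtectionDomain().getCodeSource().getLocation());
      }
    }

If the two URLs show different version numbers (for example hadoop-common-2.3.0.jar next to hadoop-hdfs-2.2.0.jar), that mismatch is what produces the NoSuchMethodError.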

Note that addDeprecations exists in Hadoop 2.3.0 and later: https://hadoop.apache.org/docs/r2.3.0/api/org/apache/hadoop/conf/Configuration.html

but is missing in 2.2.0 and earlier: https://hadoop.apache.org/docs/r2.2.0/api/org/apache/hadoop/conf/Configuration.html
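You can also confirm which hadoop-common version actually wins on the Eclipse classpath at runtime; a small sketch using VersionInfo (again, the class name is illustrative):

    import org.apache.hadoop.util.VersionInfo;

    public class HadoopVersionCheck {
      public static void main(String[] args) {
        // VersionInfo reports the build version of the hadoop-common jar
        // on the classpath; 2.2.0 or older lacks addDeprecations.
        System.out.println(VersionInfo.getVersion());
      }
    }

The fix is then to point the Eclipse build path at the same, consistent set of Hadoop jars that the command-line run used, rather than a mix of releases.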