如何将数据加载到hbase中

时间:2014-11-06 16:33:11

标签: hadoop hbase

我想将数据加载到 HBase 中，所以我尝试了《HBase: The Definitive Guide》一书中的一个简单例子。HBaseHelper.java 已加载。

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
// ^^ PutExample
import util.HBaseHelper; 
// vv PutExample

import java.io.IOException;

/**
 * Minimal HBase client example (from "HBase: The Definitive Guide"):
 * recreates table "testtable1" with column family "colfam1", then stores
 * one row ("row1") carrying two columns, colfam1:qual1 and colfam1:qual2.
 *
 * Requires a reachable HBase cluster; connection settings are read from
 * hbase-site.xml / hbase-default.xml found on the classpath.
 */
public class PutExample {

  /**
   * Entry point: sets up the table and writes a single row.
   *
   * @param args unused
   * @throws IOException if the table cannot be (re)created or the put fails
   */
  public static void main(String[] args) throws IOException {
    // Create the required configuration (loads hbase-*.xml from the classpath).
    Configuration conf = HBaseConfiguration.create();

    // Helper from the book's sample code: drop-and-recreate gives a clean table.
    HBaseHelper helper = HBaseHelper.getHelper(conf);
    helper.dropTable("testtable1");
    helper.createTable("testtable1", "colfam1");

    // Instantiate a new client for the freshly created table.
    HTable table = new HTable(conf, "testtable1");
    try {
      // Create a put keyed by the specific row "row1".
      Put put = new Put(Bytes.toBytes("row1"));

      // Add column "colfam1:qual1" with value "val1" to the put.
      put.add(Bytes.toBytes("colfam1"), Bytes.toBytes("qual1"),
        Bytes.toBytes("val1"));
      // Add column "colfam1:qual2" with value "val2" to the put.
      put.add(Bytes.toBytes("colfam1"), Bytes.toBytes("qual2"),
        Bytes.toBytes("val2"));

      // Store the row with both columns into the HBase table.
      table.put(put);
    } finally {
      // FIX: the original leaked the HTable (ZooKeeper/RPC resources);
      // always release the client connection when done.
      table.close();
    }
  }
}

我做了javac -classpath hbase-version.jar:hadoop.jar:zookeeper.jar:log4j.jar:commons-logging.jar:commons-lang.jar PutData.java(它们在同一目录下)但是它无法顺利完成。 据说:

Note: Some input files use or override a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
Note: Some input files use unchecked or unsafe operations.
Note: Recompile with -Xlint:unchecked for details.

我尝试了一些其他类似的方法来放置数据。错误是一样的。我怎么能以任何可能的方式做到这一点?

1 个答案:

答案 0 :(得分:0)

尝试此代码,首先将hbase / conf添加到eclipse中 - 在java内置路径 - > source - >添加链接 - >你的hbase conf目录

import java.io.IOException;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
/**
 * Demonstrates the basic HBase client operations against table "car":
 * Put (insert one row), Get (read it back), and Scan (iterate matching rows).
 *
 * Assumes table "car" with column family "vi" already exists on the cluster;
 * connection settings come from hbase-site.xml on the classpath.
 *
 * NOTE(review): the class name breaks the UpperCamelCase convention
 * (should be InsertDataManually), but it is kept so the file name and any
 * existing references still match.
 */
public class insertDataManually
{
 /**
  * Inserts row "row1" (vi:make=bmw, vi:model=2012), reads it back with a
  * Get, then scans the table printing every row that has those columns.
  *
  * @param args unused
  * @throws IOException on any HBase communication failure
  */
 public static void main(String[] args) throws IOException
 {
  // Loads hbase-default.xml / hbase-site.xml from the classpath.
  org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();

  HTable table = new HTable(config, "car");
  try
  {
   // --- Put: one row with two columns in family "vi" ---
   Put p = new Put(Bytes.toBytes("row1"));
   p.add(Bytes.toBytes("vi"), Bytes.toBytes("make"),Bytes.toBytes("bmw"));
   p.add(Bytes.toBytes("vi"),Bytes.toBytes("model"),Bytes.toBytes("2012"));
   table.put(p);

   // --- Get: read the same row back and print both cell values ---
   Get g = new Get(Bytes.toBytes("row1"));
   Result r = table.get(g);
   byte [] value = r.getValue(Bytes.toBytes("vi"),Bytes.toBytes("make"));
   byte [] value1 = r.getValue(Bytes.toBytes("vi"),Bytes.toBytes("model"));
   String valueStr = Bytes.toString(value);
   String valueStr1 = Bytes.toString(value1);
   System.out.println("GET: " +"make: "+ valueStr+"   model: "+valueStr1);

   // --- Scan: restrict to the two columns and print every row found ---
   Scan s = new Scan();
   s.addColumn(Bytes.toBytes("vi"), Bytes.toBytes("make"));
   s.addColumn(Bytes.toBytes("vi"), Bytes.toBytes("model"));
   ResultScanner scanner = table.getScanner(s);
   try
   {
    for (Result rr = scanner.next(); rr != null; rr = scanner.next())
    {
     System.out.println("Found row : " + rr);
    }
   } finally
   {
    // Make sure you close your scanners when you are done!
    scanner.close();
   }
  } finally
  {
   // FIX: the original closed the scanner but leaked the HTable itself;
   // release the client connection as well.
   table.close();
  }
 }
}