Spark writing data to HBase

Asked: 2015-10-28 03:45:29

Tags: apache-spark hbase

I wrote a demo that writes data to HBase, but nothing happens: no response, no errors, no logs. My HBase is 0.98, Hadoop is 2.3, and Spark is 1.4, running in yarn-client mode. Any ideas? Thanks.

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{HTable, Put}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkConf, SparkContext}

object SparkConnectHbase2 extends Serializable {

  def main(args: Array[String]) {
    new SparkConnectHbase2().toHbase()
  }

}

class SparkConnectHbase2 extends Serializable {

  def toHbase() {
    val conf = new SparkConf().setAppName("ljh_ml3")
    val sc = new SparkContext(conf)

    sc.parallelize(Array(601, 701, 801, 901)).foreachPartition { partition =>
      // The HBase connection is not serializable, so it must be created
      // inside the closure, once per partition.
      val configuration = HBaseConfiguration.create()
      configuration.set("hbase.zookeeper.property.clientPort", "2181")
      configuration.set("hbase.zookeeper.quorum", "192.168.1.66")
      configuration.set("hbase.master", "192.168.1.66:60000")
      val table = new HTable(configuration, "ljh_test4")
      // foreachPartition hands the closure an Iterator, so iterate over it
      // rather than using the iterator itself as a row key.
      partition.foreach { a =>
        val put = new Put(Bytes.toBytes(a + ""))
        put.add(Bytes.toBytes("f"), Bytes.toBytes("c"), Bytes.toBytes(a + "value"))
        table.put(put)
      }
      table.flushCommits()
      table.close()
    }
  }

}
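
For reference, the demo is submitted in yarn-client mode roughly like this (the application jar name and the HBase client jar paths are placeholders):

spark-submit --master yarn-client \
  --class SparkConnectHbase2 \
  --jars /path/to/hbase-client.jar,/path/to/hbase-common.jar,/path/to/hbase-protocol.jar \
  my-demo.jar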

Thanks.

1 Answer:

Answer 0 (score: 0)

Write to the HBase table:

import org.apache.hadoop.hbase.client.{HBaseAdmin, HTable, Put}
import org.apache.hadoop.hbase.{HBaseConfiguration, HColumnDescriptor, HTableDescriptor}
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.spark._

// Assumes a live SparkContext `sc` (e.g. in spark-shell).
// Table name taken from the question; adjust as needed.
val tablename = "ljh_test4"

val hconf = HBaseConfiguration.create()
hconf.set(TableInputFormat.INPUT_TABLE, tablename)

// Create the table with one column family if it does not exist yet.
val admin = new HBaseAdmin(hconf)
if (!admin.isTableAvailable(tablename)) {
  val tabledesc = new HTableDescriptor(tablename)
  tabledesc.addFamily(new HColumnDescriptor("cf1".getBytes()))
  admin.createTable(tabledesc)
}
admin.close()

// Write a single cell, then flush so it is visible to readers.
val newtable = new HTable(hconf, tablename)
val put = new Put("row".getBytes())
put.add("cf1".getBytes(), "col1".getBytes(), "data".getBytes())
newtable.put(put)
newtable.flushCommits()
newtable.close()

// Read the table back as an RDD of (row key, Result) pairs.
val hbaserdd = sc.newAPIHadoopRDD(hconf, classOf[TableInputFormat],
  classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
  classOf[org.apache.hadoop.hbase.client.Result])
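
As an alternative to opening an HTable per partition, Spark can also write an entire RDD in one shot through TableOutputFormat and saveAsNewAPIHadoopDataset. A minimal sketch, reusing hconf, tablename, and sc from above; the column family cf1 and the sample values are only illustrations:

import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.mapreduce.Job

// Point the output format at the target table.
hconf.set(TableOutputFormat.OUTPUT_TABLE, tablename)
val job = Job.getInstance(hconf)
job.setOutputFormatClass(classOf[TableOutputFormat[ImmutableBytesWritable]])
job.setOutputKeyClass(classOf[ImmutableBytesWritable])
job.setOutputValueClass(classOf[Put])

// Map each element to a (key, Put) pair and write the whole RDD at once;
// Spark handles the per-partition connections internally.
sc.parallelize(Array(601, 701, 801, 901))
  .map { a =>
    val put = new Put((a + "").getBytes())
    put.add("cf1".getBytes(), "col1".getBytes(), (a + "value").getBytes())
    (new ImmutableBytesWritable, put)
  }
  .saveAsNewAPIHadoopDataset(job.getConfiguration)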