您的位置:首页 > 数据库

Hadoop之Hbase数据库的应用

2015-09-10 17:59 417 查看


用Java代码实现hadoop平台上Hbase数据库中数据的增、删、改、查功能。

代码分析:

import java.io.IOException;

import java.util.List;

import org.apache.hadoop.conf.Configuration;

import java.util.ArrayList;

<span style="font-family:KaiTi_GB2312;">import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes; //Hbase中用到的字节流的转换</span>
public class HbaseTest {
// Shared HBase client configuration, built once when the class loads.
public static Configuration configuration;
static {
configuration = HBaseConfiguration.create(); // start from hbase-site.xml defaults
// Cluster coordinates (hard-coded for this demo environment).
configuration.set("hbase.zookeeper.quorum", "192.168.1.100");
configuration.set("hbase.zookeeper.property.clientPort", "2181");
configuration.set("hbase.master", "192.168.1.100:600000");
}
// Demo driver: uncomment the operation you want to exercise.
public static void main(String[] args) {
// createTable("wujintao");
// insertData("wujintao");
// QueryAll("wujintao");
// QueryByCondition1("wujintao");
// QueryByCondition2("wujintao");
// QueryByCondition3("wujintao");
// deleteRow("wujintao", "abcdef");
deleteByCondition("wujintao", "abcdef");
}
/**
 * Creates a table with column families column1/column2/column3.
 * If the table already exists it is disabled and dropped first,
 * so the call always ends with a fresh, empty table.
 */
public static void createTable(String tableName) {
System.out.println("start create table ......");
HBaseAdmin hBaseAdmin = null;
try {
hBaseAdmin = new HBaseAdmin(configuration);
if (hBaseAdmin.tableExists(tableName)) { // drop an existing table: disable first, then delete
hBaseAdmin.disableTable(tableName);
hBaseAdmin.deleteTable(tableName);
System.out.println(tableName + " is exist,delete....");
}
HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
tableDescriptor.addFamily(new HColumnDescriptor("column1")); // column families must exist before any Put
tableDescriptor.addFamily(new HColumnDescriptor("column2"));
tableDescriptor.addFamily(new HColumnDescriptor("column3"));
hBaseAdmin.createTable(tableDescriptor);
} catch (IOException e) {
// MasterNotRunningException and ZooKeeperConnectionException both
// extend IOException, so one catch covers all three original cases.
e.printStackTrace();
} finally {
if (hBaseAdmin != null) {
try {
hBaseAdmin.close(); // was leaked in the original
} catch (IOException e) {
e.printStackTrace();
}
}
}
System.out.println("end create table ......");
}
//Hbase中建表用到的包:
//import org.apache.hadoop.hbase.HBaseConfiguration;
//import org.apache.hadoop.hbase.client.HBaseAdmin;
//import org.apache.hadoop.hbase.HColumnDescriptor;
//import org.apache.hadoop.hbase.HTableDescriptor;
//import org.apache.hadoop.hbase.client.Put;
//import org.apache.hadoop.hbase.MasterNotRunningException;
//import org.apache.hadoop.hbase.ZooKeeperConnectionException;
//import java.io.IOException;

public static void insertData(String tableName) {

System.out.println("start insert data ......");

HTablePool pool = new HTablePool(configuration, 1000);//用到了HBase的table地址池;

HTable table = (HTable) pool.getTable(tableName);//从table池中get到一个table实例;

Put put = new Put("112233bbbcccc".getBytes());// 一个PUT代表一行数据,再NEW一个PUT表示第二行数据,每行一个唯一的ROWKEY,

<span style="font-family:KaiTi_GB2312;"><span style="color:#33CC00;">此处rowkey为put构造方法中传入的值 利用put方法插入一个已知行键的table中的一行</span>
put.add("column1".getBytes(), null, "aaa".getBytes());// 本行数据的第一列
put.add("column2".getBytes(), null, "bbb".getBytes());// 本行数据的第三列
put.add("column3".getBytes(), null, "ccc".getBytes());// 本行数据的第三列
try {
table.put(put);
} catch (IOException e) {
e.printStackTrace();
}
System.out.println("end insert data ......");
}
//向table中插入数据时用到的包:
//import org.apache.hadoop.hbase.client.HTable;
//import org.apache.hadoop.hbase.client.HTablePool;
//import org.apache.hadoop.hbase.client.Put;
//import org.apache.hadoop.hbase.util.Bytes;
//import java.io.IOException;
/**
 * Drops a table: HBase requires disabling a table before it can be deleted.
 */
public static void dropTable(String tableName) {
HBaseAdmin admin = null;
try {
admin = new HBaseAdmin(configuration);
admin.disableTable(tableName); // mandatory before deleteTable
admin.deleteTable(tableName);
} catch (IOException e) {
// MasterNotRunningException / ZooKeeperConnectionException are IOExceptions too
e.printStackTrace();
} finally {
if (admin != null) {
try {
admin.close(); // was leaked in the original
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
//删除table用到的包:
//import org.apache.hadoop.hbase.HBaseConfiguration;
//import org.apache.hadoop.hbase.client.HBaseAdmin;
//import org.apache.hadoop.hbase.MasterNotRunningException;
//import org.apache.hadoop.hbase.ZooKeeperConnectionException;
//import java.io.IOException;

/**
 * Deletes a single row, identified by its rowkey, from the given table.
 */
public static void deleteRow(String tablename, String rowkey) {
HTable table = null;
try {
table = new HTable(configuration, tablename);
List<Delete> list = new ArrayList<Delete>(); // typed list; original used a raw List
list.add(new Delete(rowkey.getBytes()));
table.delete(list);
System.out.println("删除行成功!");
} catch (IOException e) {
e.printStackTrace();
} finally {
if (table != null) {
try {
table.close(); // was leaked in the original
} catch (IOException e) {
e.printStackTrace();
}
}
}
}

<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.HBaseConfiguration;</span>
<span style="font-family:KaiTi_GB2312;"> // import org.apache.hadoop.hbase.client.HTable;</span>
<span style="font-family:KaiTi_GB2312;"> //import java.util.ArrayList;
//import java.util.List; </span>
<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.util.Bytes;</span>
<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.client.Delete;
</span>
<span style="font-family:KaiTi_GB2312;"> public static void deleteByCondition(String tablename, String rowkey)  {
//目前还没有发现有效的API能够实现根据非rowkey的条件删除这个功能能,还有清空表全部数据的API操作
}
/**
 * Full-table scan: prints every row's rowkey and, for each cell, its
 * column family and value.
 */
public static void QueryAll(String tableName) {
HTablePool pool = new HTablePool(configuration, 1000);
HTable table = (HTable) pool.getTable(tableName);
try {
ResultScanner rs = table.getScanner(new Scan());
try {
for (Result r : rs) {
System.out.println("获得到rowkey:" + new String(r.getRow()));
for (KeyValue keyValue : r.raw()) {
System.out.println("列:" + new String(keyValue.getFamily())
+ "====值:" + new String(keyValue.getValue()));
}
}
} finally {
rs.close(); // scanners hold server-side resources; original never closed it
}
} catch (IOException e) {
e.printStackTrace();
}
}
//查询HBase中所有数据用到的包:
//import org.apache.hadoop.hbase.client.HTable;
//import org.apache.hadoop.hbase.client.HTablePool;
//import org.apache.hadoop.hbase.client.Result;
//import org.apache.hadoop.hbase.client.ResultScanner;
//import org.apache.hadoop.hbase.client.Scan;
//import org.apache.hadoop.hbase.util.Bytes;
//import org.apache.hadoop.hbase.KeyValue;
//import java.io.IOException;

public static void QueryByCondition1(String tableName) {

HTablePool pool = new HTablePool(configuration, 1000);

HTable table = (HTable) pool.getTable(tableName); t

ry { Get scan = new Get("abcdef".getBytes());// 根据rowkey查询

Result r = table.get(scan);

System.out.println("获得到rowkey:" + new String(r.getRow()));

for (KeyValue keyValue : r.raw()) {

System.out.println("列:" + new String(keyValue.getFamily()) + "====值:" + new String(keyValue.getValue()));

}

} catch (IOException e) { e.printStackTrace(); } } //按行键查询Hbase的表

<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.client.HTable;ode"
//import org.apache.hadoop.hbase.client.HTablePool; </span>
//import org.apache.hadoop.hbase.client.Get;
<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.util.Bytes;
 //import org.apache.hadoop.hbase.client.Result; </span>
<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.KeyValue; </span>
<span style="font-family:KaiTi_GB2312;">//import java.io.IOException;</span>

/**
 * Filtered scan: returns rows whose column1 value equals "aaa"
 * (null qualifier == any qualifier in the family).
 * Fix: in the original, `Scan s = new Scan();` had been swallowed into
 * the trailing // comment on the filter line, so `s` was never declared
 * and the method did not compile.
 */
public static void QueryByCondition2(String tableName) {
try {
HTablePool pool = new HTablePool(configuration, 1000);
HTable table = (HTable) pool.getTable(tableName);
// Match rows where column1 == "aaa"
Filter filter = new SingleColumnValueFilter(Bytes.toBytes("column1"), null, CompareOp.EQUAL, Bytes.toBytes("aaa"));
Scan s = new Scan();
s.setFilter(filter);
ResultScanner rs = table.getScanner(s);
try {
for (Result r : rs) {
System.out.println("获得到rowkey:" + new String(r.getRow()));
for (KeyValue keyValue : r.raw()) {
System.out.println("列:" + new String(keyValue.getFamily()) + "====值:" + new String(keyValue.getValue()));
}
}
} finally {
rs.close(); // release server-side scanner resources
}
} catch (Exception e) {
e.printStackTrace();
}
}

<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.client.HTable;
//import org.apache.hadoop.hbase.client.HTablePool; </span>
//import org.apache.hadoop.hbase.filter.Filter;
<span style="font-family:KaiTi_GB2312;"> //import org.apache.hadoop.hbase.util.Bytes;</span>
<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
//import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; </span>
//import org.apache.hadoop.hbase.client.Result;
<span style="font-family:KaiTi_GB2312;"> //import org.apache.hadoop.hbase.client.ResultScanner;
//import org.apache.hadoop.hbase.client.Scan; </span>
<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.KeyValue; </span>
<span style="font-family:KaiTi_GB2312;">//import java.io.IOException;</span>

/**
 * Scan with a conjunction of three single-column filters: returns rows
 * where column1=="aaa" AND column2=="bbb" AND column3=="ccc"
 * (FilterList defaults to MUST_PASS_ALL).
 * Fix: rs.close() was only reached on the success path; it now runs in
 * a finally block.
 */
public static void QueryByCondition3(String tableName) {
try {
HTablePool pool = new HTablePool(configuration, 1000);
HTable table = (HTable) pool.getTable(tableName);
List<Filter> filters = new ArrayList<Filter>();
filters.add(new SingleColumnValueFilter(Bytes.toBytes("column1"), null, CompareOp.EQUAL, Bytes.toBytes("aaa")));
filters.add(new SingleColumnValueFilter(Bytes.toBytes("column2"), null, CompareOp.EQUAL, Bytes.toBytes("bbb")));
filters.add(new SingleColumnValueFilter(Bytes.toBytes("column3"), null, CompareOp.EQUAL, Bytes.toBytes("ccc")));
FilterList filterList1 = new FilterList(filters); // default operator: MUST_PASS_ALL (logical AND)
Scan scan = new Scan();
scan.setFilter(filterList1);
ResultScanner rs = table.getScanner(scan);
try {
for (Result r : rs) {
System.out.println("获得到rowkey:" + new String(r.getRow()));
for (KeyValue keyValue : r.raw()) {
System.out.println("列:" + new String(keyValue.getFamily()) + "====值:" + new String(keyValue.getValue()));
}
}
} finally {
rs.close(); // always release the scanner, even if iteration throws
}
} catch (Exception e) {
e.printStackTrace();
}
}

<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.client.HTable;
//import org.apache.hadoop.hbase.client.HTablePool; </span>
<span style="font-family:KaiTi_GB2312;"> //import org.apache.hadoop.hbase.filter.Filter; </span>
<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.filter.FilterList;
</span>
<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.util.Bytes;</span>

//import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;

//import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;

//import org.apache.hadoop.hbase.client.Result;
<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.client.ResultScanner;
//import org.apache.hadoop.hbase.client.Scan; </span>
<span style="font-family:KaiTi_GB2312;">//import org.apache.hadoop.hbase.KeyValue; </span>
<span style="font-family:KaiTi_GB2312;"> //import java.io.IOException;</span>
}
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: