DDL
- Create conf -> create admin
Configuration conf = HBaseConfiguration.create();
HBaseAdmin admin = new HBaseAdmin(conf);
- Create tableName -> create tableDescriptor
TableName tableName = TableName.valueOf("test");
HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
- Create columnDescriptor (the column family) -> add it to tableDescriptor
HColumnDescriptor columnDescriptor = new HColumnDescriptor("data");
tableDescriptor.addFamily(columnDescriptor);
- tableDescriptor -> admin creates the table
admin.createTable(tableDescriptor);
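Note that createTable fails if the table already exists; a hedged sketch of guarding against that with HBaseAdmin.tableExists (reusing the admin, tableName and tableDescriptor from above; this guard is not part of the original notes):
if (!admin.tableExists(tableName)) {    // sketch: skip creation when "test" is already there
    admin.createTable(tableDescriptor);
}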
DML
- conf & tableName -> create table, used for both DML and DQL
HTable table = new HTable(conf, tableName);
- Create row -> create put. All values passed into and out of HBase are byte[], so take care converting to and from String (see the conversion sketch after this DML block)
byte[] row = Bytes.toBytes("row1");
Put put = new Put(row);
- Create the val to insert, plus the column family and column it belongs to -> add them to put
byte[] colfam = Bytes.toBytes("data");
byte[] col = Bytes.toBytes(String.valueOf(1));
byte[] val = Bytes.toBytes("value1");
put.add(colfam, col, val);
- put -> table.put
table.put(put);
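A minimal sketch of the byte[] round trips mentioned above, using only org.apache.hadoop.hbase.util.Bytes; the variable names are illustrative, not part of the notes:
byte[] asBytes = Bytes.toBytes("value1");    // String -> byte[]
String asString = Bytes.toString(asBytes);   // byte[] -> String, yields "value1" again
byte[] intBytes = Bytes.toBytes(42);         // int -> byte[] (4 bytes; not the same as Bytes.toBytes("42"))
int asInt = Bytes.toInt(intBytes);           // byte[] -> int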
DQL
get
- Create row -> create get
byte[] row = Bytes.toBytes("row1");
Get get = new Get(row);
- get -> table.get returns a result
Result result = table.get(get);
- Create the column family and column to look up -> result.getValue
byte[] colfam = Bytes.toBytes("data");
byte[] col = Bytes.toBytes(String.valueOf(1));
System.out.println("get value is " + Bytes.toString(result.getValue(colfam, col)));
scan
- Create scan -> table.getScanner returns a ResultScanner
Scan scan = new Scan();
ResultScanner scanner = table.getScanner(scan);
- A ResultScanner can be thought of as an array of Result objects; iterate over it, building the column family and column to look up for each row -> result.getValue (colfam here is the byte[] created in the get example above)
int i = 0;
for (Result scanresult : scanner) {
byte[] scancol = Bytes.toBytes(String.valueOf(++i));
System.out.println("scan value is " + Bytes.toString(scanresult.getValue(colfam, scancol)));
}
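A plain new Scan() reads every column of every row; a hedged sketch of narrowing it with Scan.addFamily / Scan.addColumn from the same client API (table is the HTable created above):
Scan scan = new Scan();
scan.addFamily(Bytes.toBytes("data"));                           // only cells from the "data" column family
// scan.addColumn(Bytes.toBytes("data"), Bytes.toBytes("1"));    // or restrict to a single column
ResultScanner scanner = table.getScanner(scan);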
Note: HBaseAdmin, HTable, and ResultScanner objects must all be closed with close() when you are done with them; the Example below does this with nested try/finally blocks.
Example
package ExampleClient;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
public class ExampleClient {
    public static void main(String[] args) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
        // DDL: create the "test" table with a single column family "data"
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
            TableName tableName = TableName.valueOf("test");
            HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
            HColumnDescriptor columnDescriptor = new HColumnDescriptor("data");
            tableDescriptor.addFamily(columnDescriptor);
            admin.createTable(tableDescriptor);

            HTable table = new HTable(conf, tableName);
            try {
                // DML: put three rows, each with one cell in column family "data"
                for (int i = 1; i <= 3; ++i) {
                    byte[] row = Bytes.toBytes("row" + i);
                    Put put = new Put(row);
                    byte[] colfam = Bytes.toBytes("data");
                    byte[] col = Bytes.toBytes(String.valueOf(i));
                    byte[] val = Bytes.toBytes("value" + i);
                    put.add(colfam, col, val);
                    table.put(put);
                }

                // DQL: get a single cell back from "row1"
                byte[] row = Bytes.toBytes("row1");
                Get get = new Get(row);
                Result result = table.get(get);
                byte[] colfam = Bytes.toBytes("data");
                byte[] col = Bytes.toBytes(String.valueOf(1));
                System.out.println("get result is " + Bytes.toString(result.getValue(colfam, col)));

                // DQL: scan the whole table, reading column "1", "2", "3" from the rows in order
                Scan scan = new Scan();
                ResultScanner scanner = table.getScanner(scan);
                try {
                    int i = 0;
                    for (Result scanresult : scanner) {
                        byte[] scancol = Bytes.toBytes(String.valueOf(++i));
                        System.out.println("scan result is " + Bytes.toString(scanresult.getValue(colfam, scancol)));
                    }
                } finally {
                    scanner.close();
                }
            } finally {
                table.close();
            }
        } finally {
            admin.close();
        }
    }
}