1. 如果本机代码没问题、jar 包没问题、检查集群也没有问题,但代码运行时显示“找不到对应的映射主机”,则需要在 C:\Windows\System32\drivers\etc 中修改 hosts 文件,增加主机名到 IP 的映射。
2. 代码方面,只需要更改配置项中 ZooKeeper(zk)的服务器列表即可。
1 package cloudy.hbase.dao.imp; 2 3 import java.io.IOException; 4 5 import org.apache.hadoop.conf.Configuration; 6 import org.apache.hadoop.hbase.HBaseConfiguration; 7 import org.apache.hadoop.hbase.HColumnDescriptor; 8 import org.apache.hadoop.hbase.HTableDescriptor; 9 import org.apache.hadoop.hbase.KeyValue; 10 import org.apache.hadoop.hbase.client.Delete; 11 import org.apache.hadoop.hbase.client.Get; 12 import org.apache.hadoop.hbase.client.HBaseAdmin; 13 import org.apache.hadoop.hbase.client.HTable; 14 import org.apache.hadoop.hbase.client.HTablePool; 15 import org.apache.hadoop.hbase.client.Put; 16 import org.apache.hadoop.hbase.client.Result; 17 import org.apache.hadoop.hbase.client.ResultScanner; 18 import org.apache.hadoop.hbase.client.Scan; 19 import org.apache.hadoop.hbase.util.Bytes; 20 21 public class hbase_demo { 22 // 声明静态配置 23 static Configuration conf = null; 24 25 static { 26 conf = HBaseConfiguration.create(); 27 conf.set("hbase.zookeeper.quorum", "min1:2181,min2:2181,min3:2181"); //声明 zk列表 28 } 29 30 /* 31 * 创建表 32 * 33 * @tableName 表名 34 * 35 * @family 列族列表 36 */ 37 public static void creatTable(String tableName, String[] family) throws Exception { 38 HBaseAdmin admin = new HBaseAdmin(conf); 39 HTableDescriptor desc = new HTableDescriptor(tableName); 40 for (int i = 0; i < family.length; i++) { //hbase创建表的时候必须带所依赖的列蔟 41 desc.addFamily(new HColumnDescriptor(family[i])); 42 } 43 if (admin.tableExists(tableName)) { 44 System.out.println("table Exists!"); 45 System.exit(0); 46 } else { 47 admin.createTable(desc); 48 System.out.println("create table Success!"); 49 } 50 System.out.println("========================================================================================================="); 51 } 52 53 /* 54 * 为表添加数据(适合知道有多少列族的固定表) 55 * 56 * @rowKey rowKey 57 * 58 * @tableName 表名 59 * 60 * @column1 第一个列族列表 61 * 62 * @value1 第一个列的值的列表 63 * 64 * @column2 第二个列族列表 65 * 66 * @value2 第二个列的值的列表 67 */ 68 public static void addData(String rowKey, String 
tableName, String[] column1, String[] value1, String[] column2, String[] value2) throws IOException { 69 Put put = new Put(Bytes.toBytes(rowKey));// 设置rowkey 70 HTable table = new HTable(conf, Bytes.toBytes(tableName));// HTabel负责跟记录相关的操作如增删改查等// 71 // 获取表 72 HColumnDescriptor[] columnFamilies = table.getTableDescriptor() // 获取所有的列族 73 .getColumnFamilies(); 74 75 for (int i = 0; i < columnFamilies.length; i++) { // 通过遍历列族添加每个列族所需要的信息 76 String familyName = columnFamilies[i].getNameAsString(); // 获取列族名 77 if (familyName.equals("article")) { // article列族put数据 78 for (int j = 0; j < column1.length; j++) { 79 put.add(Bytes.toBytes(familyName), Bytes.toBytes(column1[j]), Bytes.toBytes(value1[j])); //put是包含rowkey的实例化对象,rowkey1,rowkey2,rowkey3 80 } 81 } 82 if (familyName.equals("author")) { // author列族put数据 83 for (int j = 0; j < column2.length; j++) { 84 put.add(Bytes.toBytes(familyName), Bytes.toBytes(column2[j]), Bytes.toBytes(value2[j])); 85 } 86 } 87 } 88 table.put(put); //table接受每个rowkey所在的put对象 89 System.out.println("add data Success!"); 90 System.out.println("==========================================================================================================="); 91 } 92 93 /* 94 * 根据rwokey查询rowkey下面的信息 95 * 96 * @rowKey rowKey 97 * 98 * @tableName 表名 99 */ 100 public static Result getResult(String tableName, String rowKey) throws IOException { 101 Get get = new Get(Bytes.toBytes(rowKey)); 102 HTable table = new HTable(conf, Bytes.toBytes(tableName));// 获取表 103 Result result = table.get(get); 104 for (KeyValue kv : result.list()) { 105 System.out.println("family:" + Bytes.toString(kv.getFamily())); 106 System.out.println("qualifier:" + Bytes.toString(kv.getQualifier())); 107 System.out.println("value:" + Bytes.toString(kv.getValue())); 108 System.out.println("Timestamp:" + kv.getTimestamp()); 109 System.out.println("----------Base on rowkey to search data--------"); 110 } 111 return result; 112 } 113 114 /* 115 * 遍历查询hbase表 116 * 117 * @tableName 表名 118 */ 
119 public static void getResultScann(String tableName) throws IOException { 120 Scan scan = new Scan(); 121 ResultScanner rs = null; 122 HTable table = new HTable(conf, Bytes.toBytes(tableName)); 123 try { 124 rs = table.getScanner(scan); 125 for (Result r : rs) { 126 for (KeyValue kv : r.list()) { 127 System.out.println("row:" + Bytes.toString(kv.getRow())); 128 System.out.println("family:" + Bytes.toString(kv.getFamily())); 129 System.out.println("qualifier:" + Bytes.toString(kv.getQualifier())); 130 System.out.println("value:" + Bytes.toString(kv.getValue())); 131 System.out.println("timestamp:" + kv.getTimestamp()); 132 System.out.println("-------------------------------------------"); 133 } 134 } 135 } finally { 136 rs.close(); 137 System.out.println("==========================all the table (Base on tablename)==================="); 138 } 139 } 140 141 /* 142 * 遍历查询hbase表 143 * 144 * @tableName 表名 145 */ 146 public static void getResultScann(String tableName, String start_rowkey, String stop_rowkey) throws IOException { 147 Scan scan = new Scan(); 148 scan.setStartRow(Bytes.toBytes(start_rowkey)); 149 scan.setStopRow(Bytes.toBytes(stop_rowkey)); 150 ResultScanner rs = null; 151 HTable table = new HTable(conf, Bytes.toBytes(tableName)); 152 try { 153 rs = table.getScanner(scan); 154 for (Result r : rs) { 155 for (KeyValue kv : r.list()) { 156 System.out.println("row:" + Bytes.toString(kv.getRow())); 157 System.out.println("family:" + Bytes.toString(kv.getFamily())); 158 System.out.println("qualifier:" + Bytes.toString(kv.getQualifier())); 159 System.out.println("value:" + Bytes.toString(kv.getValue())); 160 System.out.println("timestamp:" + kv.getTimestamp()); 161 System.out.println("-------------------------------------------"); 162 } 163 } 164 } finally { 165 rs.close(); 166 System.out.println("==================rowkey reduce to search data"); 167 } 168 } 169 170 /* 171 * 查询表中的某一列 172 * 173 * @tableName 表名 174 * 175 * @rowKey rowKey 176 */ 177 public static 
void getResultByColumn(String tableName, String rowKey, String familyName, String columnName) throws IOException { 178 HTable table = new HTable(conf, Bytes.toBytes(tableName)); 179 Get get = new Get(Bytes.toBytes(rowKey)); 180 get.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(columnName)); // 获取指定列族和列修饰符对应的列 181 Result result = table.get(get); 182 for (KeyValue kv : result.list()) { 183 System.out.println("family:" + Bytes.toString(kv.getFamily())); 184 System.out.println("qualifier:" + Bytes.toString(kv.getQualifier())); 185 System.out.println("value:" + Bytes.toString(kv.getValue())); 186 System.out.println("Timestamp:" + kv.getTimestamp()); 187 System.out.println("-------------------查询最小列中的数据-----------------------"); 188 } 189 } 190 191 /* 192 * 更新表中的某一列 193 * 194 * @tableName 表名 195 * 196 * @rowKey rowKey 197 * 198 * @familyName 列族名 199 * 200 * @columnName 列名 201 * 202 * @value 更新后的值 203 */ 204 public static void updateTable(String tableName, String rowKey, String familyName, String columnName, String value) throws IOException { 205 HTable table = new HTable(conf, Bytes.toBytes(tableName)); 206 Put put = new Put(Bytes.toBytes(rowKey)); 207 put.add(Bytes.toBytes(familyName), Bytes.toBytes(columnName), Bytes.toBytes(value));//只能用于修改最小列中的values值,如果最小列名更改的话,会报错,显示没有找到 208 table.put(put); 209 System.out.println("update table Success! 
table and rowkey"); 210 } 211 212 /* 213 * 查询某列数据的多个版本 214 * 215 * @tableName 表名 216 * 217 * @rowKey rowKey 218 * 219 * @familyName 列族名 220 * 221 * @columnName 列名 222 */ 223 public static void getResultByVersion(String tableName, String rowKey, String familyName, String columnName) throws IOException { 224 HTable table = new HTable(conf, Bytes.toBytes(tableName)); 225 Get get = new Get(Bytes.toBytes(rowKey)); 226 get.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(columnName)); 227 get.setMaxVersions(5); 228 Result result = table.get(get); 229 for (KeyValue kv : result.list()) { 230 System.out.println("family:" + Bytes.toString(kv.getFamily())); 231 System.out.println("qualifier:" + Bytes.toString(kv.getQualifier())); 232 System.out.println("value:" + Bytes.toString(kv.getValue())); 233 System.out.println("Timestamp:" + kv.getTimestamp()); 234 System.out.println("-------------------------------------------"); 235 } 236 /* 237 * List<?> results = table.get(get).list(); Iterator<?> it = 238 * results.iterator(); while (it.hasNext()) { 239 * System.out.println(it.next().toString()); } 240 */ 241 } 242 243 /* 244 * 删除指定的列 245 * 246 * @tableName 表名 247 * 248 * @rowKey rowKey 249 * 250 * @familyName 列族名 251 * 252 * @columnName 列名 253 */ 254 public static void deleteColumn(String tableName, String rowKey, String falilyName, String columnName) throws IOException { 255 HTable table = new HTable(conf, Bytes.toBytes(tableName)); 256 Delete deleteColumn = new Delete(Bytes.toBytes(rowKey)); 257 deleteColumn.deleteColumns(Bytes.toBytes(falilyName), Bytes.toBytes(columnName)); 258 table.delete(deleteColumn); 259 System.out.println(falilyName + ":" + columnName + "is deleted!"); 260 } 261 262 /* 263 * 删除所有的列 264 * 265 * @tableName 表名 266 * 267 * @rowKey rowKey 268 */ 269 public static void deleteAllColumn(String tableName, String rowKey) throws IOException { 270 HTable table = new HTable(conf, Bytes.toBytes(tableName)); 271 Delete deleteAll = new Delete(Bytes.toBytes(rowKey)); 
272 table.delete(deleteAll); 273 System.out.println("all columns are deleted!base on rowkey"); 274 } 275 276 /* 277 * 删除表 278 * 279 * @tableName 表名 280 */ 281 public static void deleteTable(String tableName) throws IOException { 282 HBaseAdmin admin = new HBaseAdmin(conf); 283 admin.disableTable(tableName); 284 admin.deleteTable(tableName); 285 System.out.println(tableName + "is deleted!,disable+delelete"); 286 } 287 288 public static void main(String[] args) throws Exception { 289 290 // 创建表 291 String tableName = "test"; 292 String[] family = {"article", "author"}; 293 creatTable(tableName, family); 294 295 // 为表添加数据 296 297 String[] column1 = {"title", "content", "tag"}; 298 String[] value1 = {"Head First HBase", "HBase is the Hadoop database. Use it when you need random, realtime read/write access to your Big Data.", "Hadoop,HBase,NoSQL"}; 299 String[] column2 = {"name", "nickname"}; 300 String[] value2 = {"nicholas", "lee"}; 301 302 addData("rowkey1", "test", column1, value1, column2, value2); 303 addData("rowkey2", "test", column1, value1, column2, value2); 304 addData("rowkey3", "test", column1, value1, column2, value2); 305 306 // 遍历查询 307 // getResultScann("test", "rowkey1", "rowkey5"); 308 // 根据row key范围遍历查询 309 //getResultScann("test", "rowkey4", "rowkey5"); 310 311 // 查询 312 // getResult("test", "rowkey1"); 313 314 // 查询某一列的值 315 //getResultByColumn("test", "rowkey1", "author", "name"); 316 317 // 更新列 318 // updateTable("test", "rowkey1", "author", "name", "bin"); 319 320 // 查询某一列的值 321 //getResultByColumn("test", "rowkey1", "author", "name"); 322 323 // 查询某列的多版本 324 // getResultByVersion("test", "rowkey1", "author", "name"); 325 326 // 删除一列 327 //deleteColumn("test", "rowkey1", "author", "nickname"); 328 329 // 删除所有列 330 //deleteAllColumn("test", "rowkey1"); 331 332 // 删除表 333 //deleteTable("test"); 334 335 } 336 }
hbase(main):002:0> scan 'test' ROW COLUMN+CELL rowkey1 column=article:content, timestamp=1545322109447, value=HBase is the Hadoop database. Use it when you need random, realtime read/write access to your Big Data. rowkey1 column=article:tag, timestamp=1545322109447, value=Hadoop,HBase,NoSQL rowkey1 column=article:title, timestamp=1545322109447, value=Head First HBase rowkey1 column=author:name, timestamp=1545322109447, value=nicholas rowkey1 column=author:nickname, timestamp=1545322109447, value=lee rowkey2 column=article:content, timestamp=1545322109476, value=HBase is the Hadoop database. Use it when you need random, realtime read/write access to your Big Data. rowkey2 column=article:tag, timestamp=1545322109476, value=Hadoop,HBase,NoSQL rowkey2 column=article:title, timestamp=1545322109476, value=Head First HBase rowkey2 column=author:name, timestamp=1545322109476, value=nicholas rowkey2 column=author:nickname, timestamp=1545322109476, value=lee rowkey3 column=article:content, timestamp=1545322109487, value=HBase is the Hadoop database. Use it when you need random, realtime read/write access to your Big Data. rowkey3 column=article:tag, timestamp=1545322109487, value=Hadoop,HBase,NoSQL rowkey3 column=article:title, timestamp=1545322109487, value=Head First HBase rowkey3 column=author:name, timestamp=1545322109487, value=nicholas rowkey3 column=author:nickname, timestamp=1545322109487, value=lee 3 row(s) in 0.1460 seconds 这是添加数据的结果
具体的结果需要自己去演示。关于 HBase 的 jar 包依赖问题,我尝试了很多办法都不行,后来偶然得到了一个封装好 HBase 依赖的 jar 包,直接添加到项目依赖里就可以了。
https://pan.baidu.com/s/1mn2TbAYJVKKz_kdA3hRLeg jar包的百度云链接