HBase 开发:利用Java操作HBase

打印 上一主题 下一主题

主题 892|帖子 892|积分 2676

1、实战简介

HBase和Hadoop一样,都是用Java进行开发的,本次实训我们就来学习如何利用Java编写代码来操作HBase数据库。
  1. 实验环境:
  2. hadoop-2.7
  3. JDK8.0
  4. HBase2.1.1
复制代码


 2、任务

1、第1关:创建表 

  1. package step1;
  2. import java.io.IOException;
  3. import org.apache.hadoop.conf.Configuration;
  4. import org.apache.hadoop.hbase.HBaseConfiguration;
  5. import org.apache.hadoop.hbase.HColumnDescriptor;
  6. import org.apache.hadoop.hbase.HTableDescriptor;
  7. import org.apache.hadoop.hbase.TableName;
  8. import org.apache.hadoop.hbase.client.Admin;
  9. import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
  10. import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
  11. import org.apache.hadoop.hbase.client.Connection;
  12. import org.apache.hadoop.hbase.client.ConnectionFactory;
  13. import org.apache.hadoop.hbase.client.Get;
  14. import org.apache.hadoop.hbase.client.Put;
  15. import org.apache.hadoop.hbase.client.Result;
  16. import org.apache.hadoop.hbase.client.ResultScanner;
  17. import org.apache.hadoop.hbase.client.Scan;
  18. import org.apache.hadoop.hbase.client.Table;
  19. import org.apache.hadoop.hbase.client.TableDescriptor;
  20. import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
  21. import org.apache.hadoop.hbase.util.Bytes;
  22. /**
  23. * HBase 1.0 version of ExampleClient that uses {@code Connection},
  24. * {@code Admin} and {@code Table}.
  25. */
  26. public class Task{
  27. public void createTable()throws Exception{
  28. /********* Begin *********/
  29. Configuration config = HBaseConfiguration.create();
  30. Connection connection = ConnectionFactory.createConnection(config);
  31. try {
  32. // Create table
  33. Admin admin = connection.getAdmin();
  34. try {
  35. TableName tableName = TableName.valueOf("dept");
  36. // 新 API 构建表
  37. // TableDescriptor 对象通过 TableDescriptorBuilder 构建;
  38. TableDescriptorBuilder tableDescriptor =
  39. TableDescriptorBuilder.newBuilder(tableName);
  40. ColumnFamilyDescriptor family =
  41. ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build();// 构建列族对象
  42. tableDescriptor.setColumnFamily(family); // 设置列族
  43. admin.createTable(tableDescriptor.build()); // 创建表
  44. TableName emp = TableName.valueOf("emp");
  45. // 新 API 构建表
  46. // TableDescriptor 对象通过 TableDescriptorBuilder 构建;
  47. TableDescriptorBuilder empDescriptor =
  48. TableDescriptorBuilder.newBuilder(emp);
  49. ColumnFamilyDescriptor empfamily =
  50. ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("emp")).build();// 构建列族对象
  51. empDescriptor.setColumnFamily(empfamily); // 设置列族
  52. admin.createTable(empDescriptor.build()); // 创建表
  53. } finally {
  54. admin.close();
  55. }
  56. } finally {
  57. connection.close();
  58. }
  59. /********* End *********/
  60. }
  61. }
复制代码
  1. 命令行:
  2. start-dfs.sh ( Hadoop 启动)
  3. 回车
  4. start-hbase.sh ( hbase 启动)
复制代码

2、第2关:添加数据

  1. package step2;
  2. import java.io.IOException;
  3. import org.apache.hadoop.conf.Configuration;
  4. import org.apache.hadoop.hbase.HBaseConfiguration;
  5. import org.apache.hadoop.hbase.HColumnDescriptor;
  6. import org.apache.hadoop.hbase.HTableDescriptor;
  7. import org.apache.hadoop.hbase.TableName;
  8. import org.apache.hadoop.hbase.client.Admin;
  9. import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
  10. import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
  11. import org.apache.hadoop.hbase.client.Connection;
  12. import org.apache.hadoop.hbase.client.ConnectionFactory;
  13. import org.apache.hadoop.hbase.client.Get;
  14. import org.apache.hadoop.hbase.client.Put;
  15. import org.apache.hadoop.hbase.client.Result;
  16. import org.apache.hadoop.hbase.client.ResultScanner;
  17. import org.apache.hadoop.hbase.client.Scan;
  18. import org.apache.hadoop.hbase.client.Table;
  19. import org.apache.hadoop.hbase.client.TableDescriptor;
  20. import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
  21. import org.apache.hadoop.hbase.util.Bytes;
  22. public class Task {
  23. public void insertInfo()throws Exception{
  24. /********* Begin *********/
  25. Configuration config = HBaseConfiguration.create();
  26. Connection connection = ConnectionFactory.createConnection(config);
  27. Admin admin = connection.getAdmin();
  28. TableName tableName = TableName.valueOf("tb_step2");
  29. TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
  30. ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build();//  构建列族对象
  31. tableDescriptor.setColumnFamily(family); //  设置列族
  32. admin.createTable(tableDescriptor.build()); //  创建表
  33. // 添加数据
  34. byte[] row1 = Bytes.toBytes("row1");
  35. Put put1 = new Put(row1);
  36. byte[] columnFamily1 = Bytes.toBytes("data"); // 列
  37. byte[] qualifier1 = Bytes.toBytes(String.valueOf(1)); // 列族修饰词
  38. byte[] value1 = Bytes.toBytes("张三丰"); // 值
  39. put1.addColumn(columnFamily1, qualifier1, value1);
  40. byte[] row2 = Bytes.toBytes("row2");
  41. Put put2 = new Put(row2);
  42. byte[] columnFamily2 = Bytes.toBytes("data"); // 列
  43. byte[] qualifier2 = Bytes.toBytes(String.valueOf(2)); // 列族修饰词
  44. byte[] value2 = Bytes.toBytes("张无忌"); // 值
  45. put2.addColumn(columnFamily2, qualifier2, value2);
  46. Table table = connection.getTable(tableName);
  47. table.put(put1);
  48. table.put(put2);
  49. /********* End *********/
  50. }
  51. }
复制代码

3、第3关:获取数据

  1. package step3;
  2. import java.io.IOException;
  3. import org.apache.hadoop.conf.Configuration;
  4. import org.apache.hadoop.hbase.HBaseConfiguration;
  5. import org.apache.hadoop.hbase.HColumnDescriptor;
  6. import org.apache.hadoop.hbase.HTableDescriptor;
  7. import org.apache.hadoop.hbase.TableName;
  8. import org.apache.hadoop.hbase.client.Admin;
  9. import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
  10. import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
  11. import org.apache.hadoop.hbase.client.Connection;
  12. import org.apache.hadoop.hbase.client.ConnectionFactory;
  13. import org.apache.hadoop.hbase.client.Get;
  14. import org.apache.hadoop.hbase.client.Put;
  15. import org.apache.hadoop.hbase.client.Result;
  16. import org.apache.hadoop.hbase.client.ResultScanner;
  17. import org.apache.hadoop.hbase.client.Scan;
  18. import org.apache.hadoop.hbase.client.Table;
  19. import org.apache.hadoop.hbase.client.TableDescriptor;
  20. import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
  21. import org.apache.hadoop.hbase.util.Bytes;
  22. public class Task {
  23. public void queryTableInfo()throws Exception{
  24. /********* Begin *********/
  25. Configuration config = HBaseConfiguration.create();
  26. Connection connection = ConnectionFactory.createConnection(config);
  27. Admin admin = connection.getAdmin();
  28. TableName tableName = TableName.valueOf("t_step3");
  29. Table table = connection.getTable(tableName);
  30. // 获取数据
  31. Get get = new Get(Bytes.toBytes("row1")); // 定义 get 对象
  32. Result result = table.get(get); // 通过 table 对象获取数据
  33. //System.out.println("Result: " + result);
  34. // 很多时候我们只需要获取“值”   这里表示获取  data:1  列族的值
  35. byte[] valueBytes = result.getValue(Bytes.toBytes("data"), Bytes.toBytes("1")); // 获取到的是字节数组
  36. // 将字节转成字符串
  37. String valueStr = new String(valueBytes,"utf-8");
  38. System.out.println("value:" + valueStr);
  39. TableName tableStep3Name = TableName.valueOf("table_step3");
  40. Table step3Table = connection.getTable(tableStep3Name);
  41. // 批量查询
  42. Scan scan = new Scan();
  43. ResultScanner scanner = step3Table.getScanner(scan);
  44. try {
  45. int i = 0;
  46. for (Result scannerResult: scanner) {
  47. //byte[] value = scannerResult.getValue(Bytes.toBytes("data"), Bytes.toBytes(1));
  48. // System.out.println("Scan: " + scannerResult);
  49. byte[] row = scannerResult.getRow();
  50. System.out.println("rowName:" + new String(row,"utf-8"));
  51. }
  52. } finally {
  53. scanner.close();
  54. }
  55. /********* End *********/
  56. }
  57. }
复制代码

4、第4关:删除表

  1. package step4;
  2. import java.io.IOException;
  3. import org.apache.hadoop.conf.Configuration;
  4. import org.apache.hadoop.hbase.HBaseConfiguration;
  5. import org.apache.hadoop.hbase.HColumnDescriptor;
  6. import org.apache.hadoop.hbase.HTableDescriptor;
  7. import org.apache.hadoop.hbase.TableName;
  8. import org.apache.hadoop.hbase.client.Admin;
  9. import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
  10. import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
  11. import org.apache.hadoop.hbase.client.Connection;
  12. import org.apache.hadoop.hbase.client.ConnectionFactory;
  13. import org.apache.hadoop.hbase.client.Get;
  14. import org.apache.hadoop.hbase.client.Put;
  15. import org.apache.hadoop.hbase.client.Result;
  16. import org.apache.hadoop.hbase.client.ResultScanner;
  17. import org.apache.hadoop.hbase.client.Scan;
  18. import org.apache.hadoop.hbase.client.Table;
  19. import org.apache.hadoop.hbase.client.TableDescriptor;
  20. import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
  21. import org.apache.hadoop.hbase.util.Bytes;
  22. public class Task {
  23. public void deleteTable()throws Exception{
  24. /********* Begin *********/
  25. Configuration config = HBaseConfiguration.create();
  26. Connection connection = ConnectionFactory.createConnection(config);
  27. Admin admin = connection.getAdmin();
  28. TableName tableName = TableName.valueOf("t_step4");
  29. admin.disableTable(tableName);
  30. admin.deleteTable(tableName);
  31. /********* End *********/
  32. }
  33. }
复制代码
免责声明:如果侵犯了您的权益,请联系站长,我们会及时删除侵权内容,谢谢合作!更多信息从访问主页:qidao123.com:ToB企服之家,中国第一个企服评测及商务社交产业平台。
回复

使用道具 举报

0 个回复

倒序浏览

快速回复

您需要登录后才可以回帖 登录 or 立即注册

本版积分规则

民工心事

金牌会员
这个人很懒什么都没写!
快速回复 返回顶部 返回列表