Removing the copy-paste restriction
When the author first hit this restriction it was only mildly irritating. A web search for a workaround then turned up a post by 【碳烤小肥肠】 that pasted no code at all, only links and screenshots, at which point the irritation boiled over and this article was written.
First, open the all-powerful DevTools console and press Ctrl+Shift+F to search all sources for the text of the popup.
Double-click the hit to open the source file; when DevTools asks for a local overrides folder, pick any folder and click Allow.
Then right-click the offending .js file and choose to override it.
Comment out the blocking code in that file and save.
Finally, refresh the page and, as if by magic, copy and paste work again. (Do not close the console.)
Level 1: Create a table with shell commands
start-hbase.sh
hbase shell

# Create the table with two column families
create 'exam_tb1','student_info','course_info'

# Insert student and course data for two rows
put 'exam_tb1', 'row-1', 'student_info:name', 'zhangsan'
put 'exam_tb1', 'row-1', 'student_info:s_no', '2020001'
put 'exam_tb1', 'row-2', 'student_info:name', 'lisi'
put 'exam_tb1', 'row-2', 'student_info:s_no', '2020002'
put 'exam_tb1', 'row-1', 'course_info:c_no', '123001'
put 'exam_tb1', 'row-1', 'course_info:c_name', 'HBase'
put 'exam_tb1', 'row-2', 'course_info:c_no', '123002'
put 'exam_tb1', 'row-2', 'course_info:c_name', 'Hadoop'
exit

# Save a full scan of the table to /root/student.txt (a Java equivalent is sketched below)
echo "scan 'exam_tb1'" | hbase shell >/root/student.txt
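The last command above pipes a scan into a fresh hbase shell and redirects the output to /root/student.txt. For comparison, the same export can be done through the Java client API. The sketch below is illustrative only: the class name ExportExamTb1 is made up, it assumes the HBase client configuration is on the classpath, and its output format is a simplified one rather than the shell's scan layout.

import java.io.IOException;
import java.io.PrintWriter;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

public class ExportExamTb1 {
    public static void main(String[] args) throws IOException {
        // Open a connection, scan exam_tb1, and write every cell to /root/student.txt.
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = conn.getTable(TableName.valueOf("exam_tb1"));
             ResultScanner scanner = table.getScanner(new Scan());
             PrintWriter out = new PrintWriter("/root/student.txt")) {
            for (Result result : scanner) {
                for (Cell cell : result.listCells()) {
                    out.println(Bytes.toString(result.getRow()) + " "
                            + Bytes.toString(CellUtil.cloneFamily(cell)) + ":"
                            + Bytes.toString(CellUtil.cloneQualifier(cell)) + "="
                            + Bytes.toString(CellUtil.cloneValue(cell)));
                }
            }
        }
    }
}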
Level 2: Implement add and delete operations with the Java API
package com.yy;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.ArrayList;
import java.util.List;
import java.io.IOException;

public class HbaseUtil {
    private static Admin admin = null;
    private static Connection connection = null;
    private static Configuration conf = null;

    static {
        // Load the HBase client configuration
        conf = HBaseConfiguration.create();
        try {
            // Obtain the connection object
            connection = ConnectionFactory.createConnection(conf);
            // Obtain the HBase Admin object
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private static void close(Connection conn, Admin admin) throws IOException {
        if (conn != null) {
            conn.close();
        }
        if (admin != null) {
            admin.close();
        }
    }

    // Delete the exam_tb2 table
    public void deleteTable() throws IOException {
        /**********Begin**********/
        admin.disableTable(TableName.valueOf("exam_tb2"));
        admin.deleteTable(TableName.valueOf("exam_tb2"));
        /**********End**********/
    }

    // Create the exam_tb3 table
    public void createTab() throws IOException {
        /**********Begin**********/
        TableName tb3 = TableName.valueOf("exam_tb3");
        TableDescriptorBuilder tbd = TableDescriptorBuilder.newBuilder(tb3);
        tbd.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("user_info")).build());
        tbd.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("address_info")).build());
        admin.createTable(tbd.build());
        /**********End**********/
    }

    // Insert data into the exam_tb3 table
    public void putBatch() throws IOException {
        /**********Begin**********/
        Table table = connection.getTable(TableName.valueOf("exam_tb3"));
        Put p1 = new Put(Bytes.toBytes("1"));
        Put p2 = new Put(Bytes.toBytes("2"));
        Put p3 = new Put(Bytes.toBytes("3"));
        p1.addColumn(Bytes.toBytes("user_info"), Bytes.toBytes("name"), Bytes.toBytes("Avatar"));
        p1.addColumn(Bytes.toBytes("user_info"), Bytes.toBytes("age"), Bytes.toBytes("100"));
        p1.addColumn(Bytes.toBytes("address_info"), Bytes.toBytes("address"), Bytes.toBytes("pandora"));
        p1.addColumn(Bytes.toBytes("address_info"), Bytes.toBytes("email"), Bytes.toBytes("avatar@163.com"));
        p1.addColumn(Bytes.toBytes("address_info"), Bytes.toBytes("phone"), Bytes.toBytes("123456"));
        p2.addColumn(Bytes.toBytes("user_info"), Bytes.toBytes("name"), Bytes.toBytes("change"));
        p2.addColumn(Bytes.toBytes("user_info"), Bytes.toBytes("age"), Bytes.toBytes("50"));
        p2.addColumn(Bytes.toBytes("address_info"), Bytes.toBytes("address"), Bytes.toBytes("moon"));
        p2.addColumn(Bytes.toBytes("address_info"), Bytes.toBytes("email"), Bytes.toBytes("change@163.com"));
        p2.addColumn(Bytes.toBytes("address_info"), Bytes.toBytes("phone"), Bytes.toBytes("234567"));
        p3.addColumn(Bytes.toBytes("user_info"), Bytes.toBytes("name"), Bytes.toBytes("nezha"));
        p3.addColumn(Bytes.toBytes("user_info"), Bytes.toBytes("age"), Bytes.toBytes("6"));
        p3.addColumn(Bytes.toBytes("address_info"), Bytes.toBytes("address"), Bytes.toBytes("earth"));
        p3.addColumn(Bytes.toBytes("address_info"), Bytes.toBytes("email"), Bytes.toBytes("nezha@163.com"));
        p3.addColumn(Bytes.toBytes("address_info"), Bytes.toBytes("phone"), Bytes.toBytes("345678"));
        List<Put> puts = new ArrayList<>();
        puts.add(p1);
        puts.add(p2);
        puts.add(p3);
        table.put(puts);
        /**********End**********/
    }
}
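If the exercise harness does not call these methods itself, a small driver like the one below can exercise them in order. The class HbaseUtilDemo is hypothetical (not part of the exercise, same com.yy package), and it assumes exam_tb2 already exists, since deleteTable disables and drops it.

package com.yy;

import java.io.IOException;

public class HbaseUtilDemo {
    public static void main(String[] args) throws IOException {
        HbaseUtil util = new HbaseUtil();
        util.deleteTable(); // disable and drop exam_tb2
        util.createTab();   // create exam_tb3 with the user_info and address_info families
        util.putBatch();    // insert the three sample rows into exam_tb3
    }
}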
Level 3: HBase scan
package step3;

import java.io.IOException;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.*;

public class Task {
    public void scanTable(String tableName) throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(config);
        Admin admin = conn.getAdmin();
        TableName name = TableName.valueOf(tableName);
        Table table = conn.getTable(name);
        Scan scan = new Scan();
        // Scan only the info:name column for row keys in [row-10, row-30)
        scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"));
        scan.setStartRow(Bytes.toBytes("row-10"));
        scan.setStopRow(Bytes.toBytes("row-30"));
        ResultScanner scanner = table.getScanner(scan);
        for (Result result : scanner) {
            for (Cell cell : result.listCells()) {
                String family = Bytes.toString(CellUtil.cloneFamily(cell));
                String qualifier = Bytes.toString(CellUtil.cloneQualifier(cell));
                String value = Bytes.toString(CellUtil.cloneValue(cell));
                System.out.println("Rowkey:" + Bytes.toString(result.getRow()) + ",ColumuFamily:" + family + ",Column:" + qualifier + ",Value:" + value);
            }
        }
        /********* End *********/
    }
}
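One note on the row range: setStartRow and setStopRow are deprecated in the HBase 2.x client in favour of withStartRow and withStopRow, which cover the same half-open range (start inclusive, stop exclusive). A minimal sketch of the equivalent call, assuming the same Table handle and imports as the class above:

// Equivalent range scan using the non-deprecated HBase 2.x methods.
private ResultScanner rangeScan(Table table) throws IOException {
    Scan scan = new Scan()
            .addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"))
            .withStartRow(Bytes.toBytes("row-10"))  // inclusive by default
            .withStopRow(Bytes.toBytes("row-30"));  // exclusive by default
    return table.getScanner(scan);
}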
Level 4: HBase filters
package step4;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {
    public void query(String tName) throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(conf);
        try {
            Table table = connection.getTable(TableName.valueOf(tName));
            Scan scan = new Scan();
            ResultScanner scanner = table.getScanner(scan);
            for (Result result : scanner) {
                byte[] row = result.getRow();
                String rowKey = Bytes.toString(row);
                result.listCells().forEach(cell -> {
                    String family = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
                    String qualifier = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
                    String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
                    System.out.println(String.format("Rowkey:%s,ColumuFamily:%s,Column:%s,Value:%s", rowKey, family, qualifier, value));
                });
            }
            scanner.close();
            table.close();
        } finally {
            connection.close();
        }
        /********* End *********/
    }
}
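Although the level is titled HBase filters, the accepted answer above scans the whole table without one. For reference, a filter is attached to the Scan before getScanner is called. Below is a minimal sketch using a PrefixFilter to keep only row keys starting with "row-1"; the filter choice and the prefix value are illustrative, not required by the exercise, and it additionally needs import org.apache.hadoop.hbase.filter.PrefixFilter;.

// Same kind of scan as above, but restricted by a row-key prefix filter.
private ResultScanner scanWithPrefix(Table table) throws IOException {
    Scan scan = new Scan();
    scan.setFilter(new PrefixFilter(Bytes.toBytes("row-1"))); // keep rows whose key starts with "row-1"
    return table.getScanner(scan);
}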