

头歌 HBase (the five related labs)

HBase installation and basic operations

Level 1: Installing and configuring the HBase database

mkdir /app

cd /opt

tar -zxvf hbase-2.1.1-bin.tar.gz -C /app

vim /app/hbase-2.1.1/conf/hbase-env.sh

In vi: press i and append the following line at the end of the file (switch to an English input method first):

export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_111

In vi: press Esc, then Shift + : and type wq to save and quit.

The next file is edited the same way:

vi /app/hbase-2.1.1/conf/hbase-site.xml

<configuration>
  <property>
    <!-- directory where HBase stores its data -->
    <name>hbase.rootdir</name>
    <value>file:///root/data/hbase/data</value>
  </property>
  <property>
    <!-- directory for the embedded ZooKeeper's data -->
    <name>hbase.zookeeper.property.dataDir</name>
    <value>/root/data/hbase/zookeeper</value>
  </property>
  <property>
    <!-- disable stream capability checks; needed when rootdir is on a local filesystem -->
    <name>hbase.unsafe.stream.capability.enforce</name>
    <value>false</value>
  </property>
</configuration>

vim /etc/profile

# set HBase environment

export HBASE_HOME=/app/hbase-2.1.1

export PATH=$PATH:$HBASE_HOME/bin

source /etc/profile

start-hbase.sh
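
Once start-hbase.sh returns, a quick way to confirm the standalone instance is reachable is a minimal Java smoke test (a sketch only; it assumes the HBase client jars and the conf directory containing hbase-site.xml are on the classpath):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class SmokeTest {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create(); // picks up hbase-site.xml from the classpath
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            // if this returns without throwing, the master is up
            for (TableName name : admin.listTableNames()) {
                System.out.println(name);
            }
        }
    }
}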

Level 2: Creating tables

hbase shell
press Enter
create 'test','data'
press Enter
create 'dept','data'
press Enter
create 'emp','data'
press Enter
list
Then run the evaluation.

Level 3: Adding and deleting data

Start HBase (run one line at a time):

start-hbase.sh

Enter the hbase shell:

hbase shell
create 'mytable','data'
put 'mytable','row1','data:1','zhangsan'
put 'mytable','row2','data:2','zhangsanfeng'
put 'mytable','row3','data:3','zhangwuji'

The matching delete syntax removes a single cell, should the level ask for it:

delete 'mytable','row1','data:1'

Exit:

exit

HBase pseudo-distributed environment setup

The walkthrough by the blogger below is detailed and verified first-hand:

Fdecad的博客_CSDN博客 (a CSDN blogger covering data structures, 头歌 labs, and environment configuration)

HBase development: operating HBase with Java

Level 1: Create a table

Code file

package step1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {
    public void createTable() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        try {
            Admin admin = connection.getAdmin();
            try {
                // build the "dept" table with the new 2.x API:
                // a TableDescriptor is assembled through TableDescriptorBuilder
                TableName tableName = TableName.valueOf("dept");
                TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
                ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build(); // build the column family descriptor
                tableDescriptor.setColumnFamily(family); // attach the column family
                admin.createTable(tableDescriptor.build()); // create the table
                // same pattern for the "emp" table
                TableName emp = TableName.valueOf("emp");
                TableDescriptorBuilder empDescriptor = TableDescriptorBuilder.newBuilder(emp);
                ColumnFamilyDescriptor empfamily = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("emp")).build(); // build the column family descriptor
                empDescriptor.setColumnFamily(empfamily); // attach the column family
                admin.createTable(empDescriptor.build()); // create the table
            } finally {
                admin.close();
            }
        } finally {
            connection.close();
        }
        /********* End *********/
    }
}
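
The nested try/finally blocks above can be flattened with try-with-resources, since Connection and Admin both implement AutoCloseable. A sketch of the same dept-table creation in that style, reusing the imports above (equivalent logic, not the graded answer):

try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
     Admin admin = connection.getAdmin()) {
    TableDescriptor dept = TableDescriptorBuilder.newBuilder(TableName.valueOf("dept"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("data")) // shorthand for a column family with default settings
            .build();
    admin.createTable(dept); // both resources close automatically, even if createTable throws
}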

Command line

start-dfs.sh

press Enter

start-hbase.sh

press Enter

Level 2: Add data

Code file

package step2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {
    public void insertInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();
        // create the tb_step2 table with one column family named data
        TableName tableName = TableName.valueOf("tb_step2");
        TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build(); // build the column family descriptor
        tableDescriptor.setColumnFamily(family); // attach the column family
        admin.createTable(tableDescriptor.build()); // create the table
        // insert data
        byte[] row1 = Bytes.toBytes("row1");
        Put put1 = new Put(row1);
        byte[] columnFamily1 = Bytes.toBytes("data"); // column family
        byte[] qualifier1 = Bytes.toBytes(String.valueOf(1)); // column qualifier
        byte[] value1 = Bytes.toBytes("张三丰"); // value
        put1.addColumn(columnFamily1, qualifier1, value1);
        byte[] row2 = Bytes.toBytes("row2");
        Put put2 = new Put(row2);
        byte[] columnFamily2 = Bytes.toBytes("data"); // column family
        byte[] qualifier2 = Bytes.toBytes(String.valueOf(2)); // column qualifier
        byte[] value2 = Bytes.toBytes("张无忌"); // value
        put2.addColumn(columnFamily2, qualifier2, value2);
        Table table = connection.getTable(tableName);
        table.put(put1);
        table.put(put2);
        /********* End *********/
    }
}

Command line

start-dfs.sh

press Enter

start-hbase.sh

press Enter

Level 3: Get data

Code file

package step3;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {
    public void queryTableInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        TableName tableName = TableName.valueOf("t_step3");
        Table table = connection.getTable(tableName);
        // single-row read
        Get get = new Get(Bytes.toBytes("row1")); // define the Get object
        Result result = table.get(get); // fetch the row through the Table object
        // often only the value is needed; this reads the cell at family data, qualifier 1
        byte[] valueBytes = result.getValue(Bytes.toBytes("data"), Bytes.toBytes("1")); // returned as a byte array
        String valueStr = new String(valueBytes, "utf-8"); // convert the bytes to a string
        System.out.println("value:" + valueStr);
        // full-table scan of table_step3
        TableName tableStep3Name = TableName.valueOf("table_step3");
        Table step3Table = connection.getTable(tableStep3Name);
        Scan scan = new Scan();
        ResultScanner scanner = step3Table.getScanner(scan);
        try {
            for (Result scannerResult : scanner) {
                byte[] row = scannerResult.getRow();
                System.out.println("rowName:" + new String(row, "utf-8"));
            }
        } finally {
            scanner.close();
        }
        /********* End *********/
    }
}

Command line

start-dfs.sh

press Enter

start-hbase.sh

press Enter

Level 4: Delete a table

Code file

package step4;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class Task {
    public void deleteTable() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();
        TableName tableName = TableName.valueOf("t_step4");
        admin.disableTable(tableName); // a table must be disabled before it can be deleted
        admin.deleteTable(tableName);
        /********* End *********/
    }
}
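
deleteTable throws if the table is still enabled (TableNotDisabledException) or absent (TableNotFoundException). A defensive variant of the body above (a sketch, reusing the admin from the snippet) guards both cases:

TableName tableName = TableName.valueOf("t_step4");
if (admin.tableExists(tableName)) {
    if (admin.isTableEnabled(tableName)) {
        admin.disableTable(tableName); // disable first, as deletion requires
    }
    admin.deleteTable(tableName);
}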

Command line

start-dfs.sh

press Enter

start-hbase.sh

press Enter

HBase development: batch operations

Level 1: Batch-get data

Code file

package step1;

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {
    public void batchGet() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        List<String> rows = new ArrayList<>();
        rows.add("2018");
        //rows.add("2019");
        rows.add("2020");
        TableName tableName = TableName.valueOf(Bytes.toBytes("step1_student"));
        Table table = connection.getTable(tableName);
        getData(table, rows);
        /********* End *********/
    }

    public List<String> getData(Table table, List<String> rows) throws Exception {
        // build one Get per row key and fetch them in a single batched call
        List<Get> gets = new ArrayList<>();
        for (String str : rows) {
            gets.add(new Get(Bytes.toBytes(str)));
        }
        List<String> values = new ArrayList<>();
        Result[] results = table.get(gets);
        for (Result result : results) {
            System.out.println("Row:" + Bytes.toString(result.getRow()));
            for (Cell kv : result.rawCells()) {
                String family = Bytes.toString(CellUtil.cloneFamily(kv));
                String qualifier = Bytes.toString(CellUtil.cloneQualifier(kv));
                String value = Bytes.toString(CellUtil.cloneValue(kv));
                values.add(value);
                System.out.println(family + ":" + qualifier + "\t" + value);
            }
        }
        return values;
    }
}
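
One detail worth knowing: Table.get(List<Get>) does not throw for a missing row; it returns an empty Result in that slot, so Bytes.toString(result.getRow()) would hit a null row key (perhaps why rows.add("2019") is commented out above). A small guard in getData makes the loop safe for missing rows:

for (Result result : results) {
    if (result.isEmpty()) {
        continue; // row not found; skip it instead of dereferencing a null row key
    }
    System.out.println("Row:" + Bytes.toString(result.getRow()));
    // ... cell loop as above ...
}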

Command line

start-dfs.sh

press Enter

start-hbase.sh

press Enter

Level 2: Batch-delete data

Code file

package step2;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {
    public void batchDelete() throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(conf);
        TableName tableName = TableName.valueOf("step2_table");
        Table table = conn.getTable(tableName);
        // delete row1 .. row5
        List<String> rows1 = new ArrayList<>();
        for (int i = 1; i < 6; i++) {
            rows1.add("row" + i);
        }
        delete(table, rows1);
        // delete row7 .. row10
        List<String> rows2 = new ArrayList<>();
        for (int i = 7; i < 11; i++) {
            rows2.add("row" + i);
        }
        delete(table, rows2);
        /********* End *********/
    }

    public void delete(Table table, List<String> rows) throws IOException {
        // build one Delete per row key and apply them in a single batch
        List<Delete> deletes = new ArrayList<>();
        for (String str : rows) {
            deletes.add(new Delete(Bytes.toBytes(str)));
        }
        table.delete(deletes);
    }
}
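
For much larger batches, an alternative worth knowing (a sketch, not required by this level; it reuses conn and the step2_table name from above) is BufferedMutator, which buffers mutations client-side and flushes them in bulk:

try (BufferedMutator mutator = conn.getBufferedMutator(TableName.valueOf("step2_table"))) {
    for (int i = 1; i < 6; i++) {
        mutator.mutate(new Delete(Bytes.toBytes("row" + i))); // buffered, not sent immediately
    }
} // close() flushes anything still buffered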

Command line

start-dfs.sh

press Enter

start-hbase.sh

press Enter

Level 3: Batch-import data into HBase

Code file

package step3;

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {
    public void batchPut() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create(); // loads hbase-default.xml and hbase-site.xml
        Connection conn = ConnectionFactory.createConnection(config);
        Admin admin = conn.getAdmin();
        // create the stu table with two column families
        TableName tableName = TableName.valueOf(Bytes.toBytes("stu"));
        TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("basic_info")).build();
        ColumnFamilyDescriptor family2 = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("school_info")).build();
        builder.setColumnFamily(family);
        builder.setColumnFamily(family2);
        admin.createTable(builder.build());
        // assemble the rows to insert
        List<Put> puts = new ArrayList<>();
        String[] rows = {"20181122", "20181123"};
        String[][] basic_infos = {{"阿克蒙德", "male", "1987-05-23", "tel:139********", "HUNan-ChangSha"}, {"萨格拉斯", "male", "1986-05-23", "tel:187********", "HUNan-ChangSha"}};
        String[] basic_colums = {"name", "gender", "birthday", "connect", "address"};
        String[][] school_infos = {{"ChengXing", "class 1 grade 2", "Software"}, {"ChengXing", "class 2 grade 2", "Software"}};
        String[] school_colums = {"college", "class", "object"};
        for (int x = 0; x < rows.length; x++) {
            // one Put per row, covering every column in both families
            Put put = new Put(Bytes.toBytes(rows[x]));
            for (int i = 0; i < basic_infos[x].length; i++) { // iterate over all five basic_info columns of this row
                byte[] columnFamily = Bytes.toBytes("basic_info");
                byte[] qualifier = Bytes.toBytes(basic_colums[i]);
                byte[] value = Bytes.toBytes(basic_infos[x][i]);
                put.addColumn(columnFamily, qualifier, value);
            }
            for (int i = 0; i < school_infos[x].length; i++) { // iterate over all three school_info columns of this row
                byte[] columnFamily = Bytes.toBytes("school_info");
                byte[] qualifier = Bytes.toBytes(school_colums[i]);
                byte[] value = Bytes.toBytes(school_infos[x][i]);
                put.addColumn(columnFamily, qualifier, value);
            }
            puts.add(put);
        }
        Table table = conn.getTable(tableName);
        table.put(puts);
        /********* End *********/
    }
}
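
To eyeball the result, a scan of the stu table should show two rows with eight cells each (five basic_info columns plus three school_info columns). A sketch of that check, reusing the table handle from above:

try (ResultScanner scanner = table.getScanner(new Scan())) {
    for (Result r : scanner) {
        System.out.println(Bytes.toString(r.getRow()) + " has " + r.rawCells().length + " cells");
    }
}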

Command line

start-dfs.sh

press Enter

start-hbase.sh

press Enter

HBase development: managing tables with the Java API

Level 1: Listing tables via the Java API

Code file

package step1;

import java.util.List;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;

public class Task {
    public void showTableList() throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create(); // the static create() method yields a Configuration object
        Connection conn = ConnectionFactory.createConnection(conf); // conf is the configuration object built above
        Admin admin = conn.getAdmin(); // obtain the Admin object from the connection
        List<TableDescriptor> tableDescriptors = admin.listTableDescriptors();
        for (TableDescriptor tableDescriptor : tableDescriptors) {
            System.out.println("Table:" + tableDescriptor.getTableName());
            System.out.println("\texists:" + admin.tableExists(tableDescriptor.getTableName()));
            System.out.println("\tenabled:" + admin.isTableEnabled(tableDescriptor.getTableName()));
        }
        /********* End *********/
    }
}
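
If only the names are needed, Admin also has listTableNames(), which skips fetching the full descriptors (a minor sketch, same conn/admin as above):

for (TableName name : admin.listTableNames()) {
    System.out.println("Table:" + name);
}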

Level 2: Modify tables

Code file

package step2;

import org.apache.hadoop.conf.*;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.*;

public class Task {
    public void updateTables() throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create(); // the static create() method yields a Configuration object
        Connection conn = ConnectionFactory.createConnection(conf); // conf is the configuration object built above
        Admin admin = conn.getAdmin(); // obtain the Admin object from the connection
        TableName tableName1 = TableName.valueOf("t_emp2");
        TableName tableName2 = TableName.valueOf("t_dept2");
        // rebuild the data family of t_emp2 with new settings
        ColumnFamilyDescriptorBuilder buildFamily = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data"));
        buildFamily.setBlocksize(1024 * 1024); // HFile block size: 1 MB
        buildFamily.setBlockCacheEnabled(false); // turn the block cache off (default is true)
        buildFamily.setCompressionType(Compression.Algorithm.GZ); // default is NONE
        ColumnFamilyDescriptor family = buildFamily.build();
        admin.modifyColumnFamily(tableName1, family); // takes two arguments: a TableName and a ColumnFamilyDescriptor
        admin.deleteColumnFamily(tableName1, Bytes.toBytes("data1")); // drop the column family named data1
        // rebuild the data1 family of t_dept2 with new settings
        ColumnFamilyDescriptorBuilder buildFamily1 = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data1"));
        buildFamily1.setMinVersions(2);
        buildFamily1.setMaxVersions(5);
        buildFamily1.setInMemory(true); // default is false
        buildFamily1.setTimeToLive(60 * 60 * 24); // TTL in seconds; expired cells are dropped at the next major compaction
        ColumnFamilyDescriptor family1 = buildFamily1.build();
        admin.modifyColumnFamily(tableName2, family1); // takes two arguments: a TableName and a ColumnFamilyDescriptor
        admin.deleteColumnFamily(tableName2, Bytes.toBytes("data")); // drop the column family named data
        /********* End *********/
    }
}
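
After modifyColumnFamily it can be worth reading the descriptor back to confirm the settings took effect. A sketch using Admin.getDescriptor, reusing the admin and tableName1 from above:

TableDescriptor desc = admin.getDescriptor(tableName1);
for (ColumnFamilyDescriptor cf : desc.getColumnFamilies()) {
    System.out.println(cf.getNameAsString()
            + " blocksize=" + cf.getBlocksize()
            + " compression=" + cf.getCompressionType());
}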

Level 3: Disable, enable, and delete tables

Code file

package step3;

import org.apache.hadoop.conf.*;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.*;

public class Task {
    /**
     * Delete a table
     * @param tableName table name
     * @throws Exception
     */
    public void deleteTable(String tableName) throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create(); // the static create() method yields a Configuration object
        Connection conn = ConnectionFactory.createConnection(conf); // conf is the configuration object built above
        Admin admin = conn.getAdmin(); // obtain the Admin object from the connection
        TableName testName = TableName.valueOf(Bytes.toBytes(tableName));
        admin.disableTable(testName); // a table must be disabled before deletion
        admin.deleteTable(testName);
        /********* End *********/
    }

    /**
     * Create a table
     * @param tableName table name
     * @param columnNames varargs list of column family names
     * @throws Exception
     */
    public void createTable(String tableName, String... columnNames) throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin();
        // assemble the table descriptor with the 2.x builder API
        TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
        for (String s : columnNames) {
            builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(s));
        }
        admin.createTable(builder.build());
        /********* End *********/
    }

    /**
     * Enable a table
     * @param tableName table name
     * @throws Exception
     */
    public void enableTable(String tableName) throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin();
        TableName demoName = TableName.valueOf(Bytes.toBytes(tableName));
        admin.enableTable(demoName);
        /********* End *********/
    }

    /**
     * Disable a table
     * @param tableName table name
     */
    public void disableTable(String tableName) throws Exception {
        /********* Begin *********/
        Configuration conf = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin();
        TableName testName = TableName.valueOf(Bytes.toBytes(tableName));
        admin.disableTable(testName);
        /********* End *********/
    }
}
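
A plausible end-to-end exercise of the four methods above (a sketch; the table and column family names here are made up for illustration):

Task task = new Task();
task.createTable("demo_table", "info", "data"); // two column families
task.disableTable("demo_table");                // disable it...
task.enableTable("demo_table");                 // ...and re-enable it to show the round trip
task.deleteTable("demo_table");                 // deleteTable disables the (enabled) table itself, then drops it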

Reposted from: https://blog.csdn.net/weixin_61657766/article/details/127362541
Copyright belongs to the original author 海浪~&. If there is any infringement, please contact us and we will remove it.
