头哥实践平台----HBase Development: Using Java to Operate HBase

I. Level 1: Create a Table

1. First, run the shell commands:

start-dfs.sh

start-hbase.sh

hadoop fs -ls /hbase    (optional: just verifies that HBase's root directory exists in HDFS)

2. Then write the code file:


package step1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {
    public void createTable() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        try {
            Admin admin = connection.getAdmin();
            try {
                // Build the "dept" table with the new (HBase 2.x) API:
                // a TableDescriptor is constructed through TableDescriptorBuilder.
                TableName tableName = TableName.valueOf("dept");
                TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
                ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build(); // build the column family descriptor
                tableDescriptor.setColumnFamily(family); // set the column family
                admin.createTable(tableDescriptor.build()); // create the table

                // Build the "emp" table the same way
                TableName emp = TableName.valueOf("emp");
                TableDescriptorBuilder empDescriptor = TableDescriptorBuilder.newBuilder(emp);
                ColumnFamilyDescriptor empFamily = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("emp")).build(); // build the column family descriptor
                empDescriptor.setColumnFamily(empFamily); // set the column family
                admin.createTable(empDescriptor.build()); // create the table
            } finally {
                admin.close();
            }
        } finally {
            connection.close();
        }
        /********* End *********/
    }
}

3. Run.
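Note: the platform calls createTable() itself, so no main method is required. If you want to run the class by hand anyway, a minimal driver along the lines of the sketch below works; the class name Main and the printed message are illustrative assumptions, not part of the exercise.

package step1;

// Hypothetical driver (not part of the exercise): lets Task be run standalone
// once the HBase client jars are on the classpath.
public class Main {
    public static void main(String[] args) throws Exception {
        new Task().createTable(); // creates the dept and emp tables
        System.out.println("Tables dept and emp created.");
    }
}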

II. Level 2: Add Data

1. First, run the shell commands:

start-dfs.sh

start-hbase.sh

2. Then write the code file:


package step2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {

    public void insertInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();

        // Create the target table first
        TableName tableName = TableName.valueOf("tb_step2");
        TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("data")).build(); // build the column family descriptor
        tableDescriptor.setColumnFamily(family); // set the column family
        admin.createTable(tableDescriptor.build()); // create the table

        // Insert data
        byte[] row1 = Bytes.toBytes("row1");
        Put put1 = new Put(row1);
        byte[] columnFamily1 = Bytes.toBytes("data"); // column family
        byte[] qualifier1 = Bytes.toBytes(String.valueOf(1)); // column qualifier
        byte[] value1 = Bytes.toBytes("张三丰"); // value
        put1.addColumn(columnFamily1, qualifier1, value1);

        byte[] row2 = Bytes.toBytes("row2");
        Put put2 = new Put(row2);
        byte[] columnFamily2 = Bytes.toBytes("data"); // column family
        byte[] qualifier2 = Bytes.toBytes(String.valueOf(2)); // column qualifier
        byte[] value2 = Bytes.toBytes("张无忌"); // value
        put2.addColumn(columnFamily2, qualifier2, value2);

        Table table = connection.getTable(tableName);
        table.put(put1);
        table.put(put2);

        // Release client resources
        table.close();
        admin.close();
        connection.close();
        /********* End *********/
    }
}

3. Run.
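A side note on the inserts above: table.put(put1) and table.put(put2) issue one request each. The standard client also accepts a List&lt;Put&gt;, which sends several rows in a single batched call. Below is a minimal sketch, assuming the tb_step2 table from this level already exists; the class name BatchInsert and the extra rows are illustrative, not part of the exercise.

package step2;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical helper (not part of the exercise): inserts several rows with a
// single Table.put(List<Put>) call instead of one request per row.
public class BatchInsert {
    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(config);
             Table table = connection.getTable(TableName.valueOf("tb_step2"))) {
            List<Put> puts = new ArrayList<>();
            for (int i = 3; i <= 5; i++) {
                Put put = new Put(Bytes.toBytes("row" + i));
                put.addColumn(Bytes.toBytes("data"), Bytes.toBytes(String.valueOf(i)),
                        Bytes.toBytes("value" + i));
                puts.add(put);
            }
            table.put(puts); // one batched request for all three rows
        }
    }
}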

III. Level 3: Get Data

1. First, run the shell commands:

start-dfs.sh

start-hbase.sh

2. Then write the code file:

package step3;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {

    public void queryTableInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        TableName tableName = TableName.valueOf("t_step3");
        Table table = connection.getTable(tableName);

        // Single-row read
        Get get = new Get(Bytes.toBytes("row1")); // define the Get object
        Result result = table.get(get); // fetch the row through the Table object
        // Often we only need the value itself; this fetches the cell at data:1
        byte[] valueBytes = result.getValue(Bytes.toBytes("data"), Bytes.toBytes("1")); // returns a byte array
        // Convert the bytes to a string
        String valueStr = new String(valueBytes, "utf-8");
        System.out.println("value:" + valueStr);

        // Batch read with a scan
        TableName tableStep3Name = TableName.valueOf("table_step3");
        Table step3Table = connection.getTable(tableStep3Name);
        Scan scan = new Scan();
        ResultScanner scanner = step3Table.getScanner(scan);
        try {
            for (Result scannerResult : scanner) {
                byte[] row = scannerResult.getRow();
                System.out.println("rowName:" + new String(row, "utf-8"));
            }
        } finally {
            scanner.close();
        }
        /********* End *********/
    }
}

3. Run.
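result.getValue(family, qualifier) works when you know exactly which column you want. To inspect a whole row without knowing its columns in advance, Result.rawCells() plus CellUtil yields every family:qualifier pair. Below is a minimal sketch, assuming the t_step3 table and row1 from this level; the class name DumpRow is an illustrative choice, not part of the exercise.

package step3;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical helper (not part of the exercise): dumps every cell of one row
// instead of asking for a single known column with getValue().
public class DumpRow {
    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(config);
             Table table = connection.getTable(TableName.valueOf("t_step3"))) {
            Result result = table.get(new Get(Bytes.toBytes("row1")));
            for (Cell cell : result.rawCells()) { // every family:qualifier cell in the row
                System.out.println(Bytes.toString(CellUtil.cloneFamily(cell)) + ":"
                        + Bytes.toString(CellUtil.cloneQualifier(cell)) + " = "
                        + Bytes.toString(CellUtil.cloneValue(cell)));
            }
        }
    }
}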

IV. Level 4: Delete a Table

1. First, run the shell commands:

start-dfs.sh

start-hbase.sh

2. Then write the code file:


package step4;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class Task {

    public void deleteTable() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();
        TableName tableName = TableName.valueOf("t_step4");
        admin.disableTable(tableName); // a table must be disabled before it can be deleted
        admin.deleteTable(tableName);
        /********* End *********/
    }
}

3. Run.
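One caveat with the code above: disableTable() throws if the table is already disabled, and deleteTable() throws if it does not exist, so rerunning the level can fail. Below is a hedged variant that guards both calls with Admin checks; the class name SafeDelete is an illustrative assumption, not part of the exercise.

package step4;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

// Hypothetical variant (not part of the exercise): checks that the table exists
// and is enabled before disabling and deleting, so reruns do not throw.
public class SafeDelete {
    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(config);
             Admin admin = connection.getAdmin()) {
            TableName tableName = TableName.valueOf("t_step4");
            if (admin.tableExists(tableName)) {        // skip if already gone
                if (admin.isTableEnabled(tableName)) { // must disable before delete
                    admin.disableTable(tableName);
                }
                admin.deleteTable(tableName);
            }
        }
    }
}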

Tags: hbase, java

This post is reproduced from: https://blog.csdn.net/m0_74459049/article/details/134779333
Copyright belongs to the original author, 不想做程序猿的员. In case of infringement, please contact us for removal.
