Part 1. Challenge 1: Create a Table
1. First, run the shell commands
start-dfs.sh
start-hbase.sh
hadoop fs -ls /hbase   (optional: confirm that the /hbase directory exists in HDFS)
2. Then write the code file
package step1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {

    public void createTable() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        try {
            // Create the tables through the Admin interface
            Admin admin = connection.getAdmin();
            try {
                // Build the "dept" table with the new (HBase 2.x) API:
                // a TableDescriptor is assembled through TableDescriptorBuilder
                TableName tableName = TableName.valueOf("dept");
                TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
                ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder
                        .newBuilder(Bytes.toBytes("data")).build();  // build the column family descriptor
                tableDescriptor.setColumnFamily(family);             // attach the column family
                admin.createTable(tableDescriptor.build());          // create the table

                // Build the "emp" table the same way
                TableName emp = TableName.valueOf("emp");
                TableDescriptorBuilder empDescriptor = TableDescriptorBuilder.newBuilder(emp);
                ColumnFamilyDescriptor empFamily = ColumnFamilyDescriptorBuilder
                        .newBuilder(Bytes.toBytes("emp")).build();   // build the column family descriptor
                empDescriptor.setColumnFamily(empFamily);            // attach the column family
                admin.createTable(empDescriptor.build());            // create the table
            } finally {
                admin.close();
            }
        } finally {
            connection.close();
        }
        /********* End *********/
    }
}
3. Run
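Not required by the challenge, but if you want to confirm that both tables came online, a minimal sketch like the one below (assuming the same default HBase configuration as above; the class name CheckTables is just an illustrative helper, not part of the exercise) checks for them with Admin.tableExists. Running list in the HBase shell works just as well.

package step1;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class CheckTables { // hypothetical helper class, not part of the exercise
    public static void main(String[] args) throws Exception {
        Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
        try {
            Admin admin = connection.getAdmin();
            // tableExists returns true once the table has been created
            System.out.println("dept exists: " + admin.tableExists(TableName.valueOf("dept")));
            System.out.println("emp exists:  " + admin.tableExists(TableName.valueOf("emp")));
            admin.close();
        } finally {
            connection.close();
        }
    }
}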
Part 2. Challenge 2: Insert Data
1. First, run the shell commands
start-dfs.sh
start-hbase.sh
2. Then write the code file
package step2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {

    public void insertInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();

        // Create the "tb_step2" table with a single column family named "data"
        TableName tableName = TableName.valueOf("tb_step2");
        TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder
                .newBuilder(Bytes.toBytes("data")).build();  // build the column family descriptor
        tableDescriptor.setColumnFamily(family);             // attach the column family
        admin.createTable(tableDescriptor.build());          // create the table

        // Insert data
        byte[] row1 = Bytes.toBytes("row1");
        Put put1 = new Put(row1);
        byte[] columnFamily1 = Bytes.toBytes("data");            // column family
        byte[] qualifier1 = Bytes.toBytes(String.valueOf(1));    // column qualifier
        byte[] value1 = Bytes.toBytes("张三丰");                  // value
        put1.addColumn(columnFamily1, qualifier1, value1);

        byte[] row2 = Bytes.toBytes("row2");
        Put put2 = new Put(row2);
        byte[] columnFamily2 = Bytes.toBytes("data");            // column family
        byte[] qualifier2 = Bytes.toBytes(String.valueOf(2));    // column qualifier
        byte[] value2 = Bytes.toBytes("张无忌");                  // value
        put2.addColumn(columnFamily2, qualifier2, value2);

        Table table = connection.getTable(tableName);
        table.put(put1);
        table.put(put2);
        /********* End *********/
    }
}
3. Run
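The challenge passes with the two separate put calls above, but HBase also accepts several Put objects in one request via Table.put(List<Put>). A minimal sketch of that variant, assuming the same tb_step2 table and the same rows (the class name BatchInsert is only illustrative):

package step2;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchInsert { // hypothetical helper class, not part of the exercise
    public static void main(String[] args) throws Exception {
        Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
        try {
            Table table = connection.getTable(TableName.valueOf("tb_step2"));
            List<Put> puts = new ArrayList<>();
            puts.add(new Put(Bytes.toBytes("row1"))
                    .addColumn(Bytes.toBytes("data"), Bytes.toBytes("1"), Bytes.toBytes("张三丰")));
            puts.add(new Put(Bytes.toBytes("row2"))
                    .addColumn(Bytes.toBytes("data"), Bytes.toBytes("2"), Bytes.toBytes("张无忌")));
            table.put(puts);  // Table.put(List<Put>) submits all rows in a single call
            table.close();
        } finally {
            connection.close();
        }
    }
}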
Part 3. Challenge 3: Get Data
1. First, run the shell commands
start-dfs.sh
start-hbase.sh
2. Then write the code file
package step3;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {

    public void queryTableInfo() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        TableName tableName = TableName.valueOf("t_step3");
        Table table = connection.getTable(tableName);

        // Single-row read: define a Get for "row1" and fetch it through the Table
        Get get = new Get(Bytes.toBytes("row1"));
        Result result = table.get(get);
        // System.out.println("Result: " + result);
        // Usually we only need the value itself; here we read the cell data:1
        byte[] valueBytes = result.getValue(Bytes.toBytes("data"), Bytes.toBytes("1")); // returned as a byte array
        String valueStr = new String(valueBytes, "utf-8");  // convert the bytes to a String
        System.out.println("value:" + valueStr);

        // Batch read: scan every row of "table_step3" and print the row keys
        TableName tableStep3Name = TableName.valueOf("table_step3");
        Table step3Table = connection.getTable(tableStep3Name);
        Scan scan = new Scan();
        ResultScanner scanner = step3Table.getScanner(scan);
        try {
            for (Result scannerResult : scanner) {
                // System.out.println("Scan: " + scannerResult);
                byte[] row = scannerResult.getRow();
                System.out.println("rowName:" + new String(row, "utf-8"));
            }
        } finally {
            scanner.close();
        }
        /********* End *********/
    }
}
3. Run
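The challenge only needs the row keys, but if you also want to see the family, qualifier and value of every cell returned by the scan, Result.rawCells() together with CellUtil can decode them. A minimal sketch assuming the same table_step3 table (the class name DumpCells is only illustrative):

package step3;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class DumpCells { // hypothetical helper class, not part of the exercise
    public static void main(String[] args) throws Exception {
        Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
        try {
            Table table = connection.getTable(TableName.valueOf("table_step3"));
            ResultScanner scanner = table.getScanner(new Scan());
            try {
                for (Result result : scanner) {
                    for (Cell cell : result.rawCells()) {
                        // CellUtil.clone* copies each component of the cell into its own byte[]
                        System.out.println(Bytes.toString(CellUtil.cloneRow(cell)) + " "
                                + Bytes.toString(CellUtil.cloneFamily(cell)) + ":"
                                + Bytes.toString(CellUtil.cloneQualifier(cell)) + " = "
                                + Bytes.toString(CellUtil.cloneValue(cell)));
                    }
                }
            } finally {
                scanner.close();
            }
        } finally {
            connection.close();
        }
    }
}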
Part 4. Challenge 4: Delete a Table
1. First, run the shell commands
start-dfs.sh
start-hbase.sh
2. Then write the code file
package step4;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class Task {

    public void deleteTable() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();

        TableName tableName = TableName.valueOf("t_step4");
        admin.disableTable(tableName);  // a table must be disabled before it can be deleted
        admin.deleteTable(tableName);
        /********* End *********/
    }
}
3. Run
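For the grader the two calls above are enough, but note that disableTable fails if the table is already disabled and deleteTable fails if it does not exist. Outside the exercise it can be safer to guard the calls; a minimal sketch assuming the same t_step4 table (the class name SafeDelete is only illustrative):

package step4;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class SafeDelete { // hypothetical helper class, not part of the exercise
    public static void main(String[] args) throws Exception {
        Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
        try {
            Admin admin = connection.getAdmin();
            TableName tableName = TableName.valueOf("t_step4");
            if (admin.tableExists(tableName)) {
                if (admin.isTableEnabled(tableName)) {
                    admin.disableTable(tableName);  // disable first, otherwise deletion is rejected
                }
                admin.deleteTable(tableName);
            }
            admin.close();
        } finally {
            connection.close();
        }
    }
}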