pom.xml
<dependency><groupId>org.apache.hbase</groupId><artifactId>hbase-it</artifactId><version>1.2.0</version>
</dependency>
源码
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;public class HBaseService {public static Configuration configuration;public static Connection connection;static {System.setProperty("hadoop.home.dir", "D:\\Program\\hadoop-common-2.6.0-bin-master");configuration = HBaseConfiguration.create();configuration.set("hbase.zookeeper.property.clientPort", "2181");configuration.set("hbase.zookeeper.quorum", "192.168.91.144");configuration.set("hbase.client.keyvalue.maxsize","500000");try {connection=ConnectionFactory.createConnection(configuration);} catch (IOException e) {e.printStackTrace();}//configuration.set("hbase.master", "192.168.1.100:600000");}public static void main(String[] args) {createTable("tbtest");insertData("tbtest");QueryAll("tbtest");QueryByCondition1("tbtest");QueryByCondition2("tbtest");QueryByCondition3("tbtest");deleteRow("tbtest","rowkey123");dropTable("tbtest");}public static void createTable(String tableName) {System.out.println("start create table ......");try {Admin hBaseAdmin = connection.getAdmin();if (hBaseAdmin.tableExists(TableName.valueOf(tableName))) {// 如果存在要创建的表,那么先删除,再创建hBaseAdmin.disableTable(TableName.valueOf(tableName));hBaseAdmin.deleteTable(TableName.valueOf(tableName));System.out.println(tableName + " is exist,delete....");}HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(tableName));tableDescriptor.addFamily(new HColumnDescriptor("column1"));tableDescriptor.addFamily(new HColumnDescriptor("column2"));tableDescriptor.addFamily(new HColumnDescriptor("column3"));hBaseAdmin.createTable(tableDescriptor);} catch (MasterNotRunningException e) {e.printStackTrace();} catch (ZooKeeperConnectionException e) {e.printStackTrace();} catch (IOException e) {e.printStackTrace();}System.out.println("end create table ......");}public static void insertData(String tableName) {System.out.println("start insert data ......");try {HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));Put put = new 
Put("rowkey123".getBytes());// 一个PUT代表一行数据,再NEW一个PUT表示第二行数据,每行一个唯一的ROWKEY,此处rowkey为put构造方法中传入的值put.addColumn("column1".getBytes(), null, "aaa".getBytes());// 本行数据的第一列put.addColumn("column2".getBytes(), null, "bbb".getBytes());// 本行数据的第三列put.addColumn("column3".getBytes(), null, "ccc".getBytes());// 本行数据的第三列table.put(put);} catch (IOException e) {e.printStackTrace();}System.out.println("end insert data ......");}public static void dropTable(String tableName) {try {Admin admin = connection.getAdmin();;admin.disableTable(TableName.valueOf(tableName));admin.deleteTable(TableName.valueOf(tableName));} catch (MasterNotRunningException e) {e.printStackTrace();} catch (ZooKeeperConnectionException e) {e.printStackTrace();} catch (IOException e) {e.printStackTrace();}}public static void deleteRow(String tableName, String rowkey) {try {Table table = connection.getTable(TableName.valueOf(tableName));List list = new ArrayList();Delete d1 = new Delete(rowkey.getBytes());list.add(d1);table.delete(list);System.out.println("删除行成功!");} catch (IOException e) {e.printStackTrace();}}public static void QueryAll(String tableName) {try {Table table = connection.getTable(TableName.valueOf(tableName));ResultScanner rs = table.getScanner(new Scan());for (Result r : rs) {System.out.println("获得到rowkey:" + new String(r.getRow()));for (KeyValue keyValue : r.raw()) {System.out.println("列:" + new String(keyValue.getFamily())+ "====值:" + new String(keyValue.getValue()));}}} catch (IOException e) {e.printStackTrace();}}public static void QueryByCondition1(String tableName) {try {Table table = connection.getTable(TableName.valueOf(tableName));Get scan = new Get("rowkey123".getBytes());// 根据rowkey查询Result r = table.get(scan);System.out.println("获得到rowkey:" + new String(r.getRow()));for (KeyValue keyValue : r.raw()) {System.out.println("列:" + new String(keyValue.getFamily())+ "====值:" + new String(keyValue.getValue()));}} catch (IOException e) {e.printStackTrace();}}public static void 
QueryByCondition2(String tableName) {try {Table table = connection.getTable(TableName.valueOf(tableName));Filter filter = new SingleColumnValueFilter(Bytes.toBytes("column1"), null, CompareOp.EQUAL, Bytes.toBytes("aaa")); // 当列column1的值为aaa时进行查询Scan s = new Scan();s.setFilter(filter);ResultScanner rs = table.getScanner(s);for (Result r : rs) {System.out.println("获得到rowkey:" + new String(r.getRow()));for (KeyValue keyValue : r.raw()) {System.out.println("列:" + new String(keyValue.getFamily())+ "====值:" + new String(keyValue.getValue()));}}} catch (Exception e) {e.printStackTrace();}}public static void QueryByCondition3(String tableName) {try {Table table = connection.getTable(TableName.valueOf(tableName));List<Filter> filters = new ArrayList<Filter>();Filter filter1 = new SingleColumnValueFilter(Bytes.toBytes("column1"), null, CompareOp.EQUAL, Bytes.toBytes("aaa"));filters.add(filter1);Filter filter2 = new SingleColumnValueFilter(Bytes.toBytes("column2"), null, CompareOp.EQUAL, Bytes.toBytes("bbb"));filters.add(filter2);Filter filter3 = new SingleColumnValueFilter(Bytes.toBytes("column3"), null, CompareOp.EQUAL, Bytes.toBytes("ccc"));filters.add(filter3);FilterList filterList1 = new FilterList(filters);Scan scan = new Scan();scan.setFilter(filterList1);ResultScanner rs = table.getScanner(scan);for (Result r : rs) {System.out.println("获得到rowkey:" + new String(r.getRow()));for (KeyValue keyValue : r.raw()) {System.out.println("列:" + new String(keyValue.getFamily())+ "====值:" + new String(keyValue.getValue()));}}rs.close();} catch (Exception e) {e.printStackTrace();}}
}
异常处理
//报错
org.apache.hadoop.hbase.ipc.RemoteWithExtrasException: org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException: Column family table does not exist in region hbase:meta,,1.1588230740 in table 'hbase:meta', {TABLE_ATTRIBUTES => {IS_META => 'true', coprocessor$1 => '|org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint|536870911|'}, {NAME => 'info', BLOOMFILTER => 'NONE', VERSIONS => '10', IN_MEMORY => 'true', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', COMPRESSION => 'NONE', CACHE_DATA_IN_L1 => 'true', MIN_VERSIONS => '0', BLOCKCACHE => 'true', BLOCKSIZE => '8192', REPLICATION_SCOPE => '0'}
at org.apache.hadoop.hbase.regionserver.HRegion.checkFamily(HRegion.java:7884)
由于HBase和HBase-client(2.x)版本不一致导致,查看下hbase版本
http://192.168.91.144:60010/master-status切换到Show non-RPC Tasks得到HBase Version 1.2.0
修改pom.xml中依赖版本为1.2.0
//报错:java.lang.RuntimeException: Unexpected version format: 11.0.2
Java 版本太高:HBase 用正则 "\d\.\d\..*" 校验 java.version,只能匹配 1.x.y 这种旧格式版本号,Java 11 的 "11.0.2" 无法通过校验;将 JDK 降级到 8(版本号为 1.8.x)即可解决。
static {
final String version = System.getProperty("java.version");
// Verify String looks like this: 1.6.0_29
if (!version.matches("\\d\\.\\d\\..*")) {
throw new RuntimeException("Unexpected version format: " + version);
}
//java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries.
两种处理方式
1.直接设置属性System.setProperty("hadoop.home.dir", "D:\\Program\\hadoop-common-2.6.0-bin-master");
2.设置环境变量HADOOP_HOME
下载winutils地址https://github.com/srccodes/hadoop-common-2.2.0-bin下载解压
或者新版的https://github.com/amihalik/hadoop-common-2.6.0-bin
//报错:o.a.hadoop.hbase.client.ConnectionUtils : Can not resolve node3, please check your network
java.net.UnknownHostException: 不知道这样的主机。 (node3)
设置hosts文件,将hadoop集群各主机ip及机器名映射放进去
参考文章:
HBase Java简单示例 - zhenjing - 博客园
Spring Boot中Java API操作HBase 2.x详解 | zifangsky的个人博客