Code:

1. Maven dependencies

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.hbase</groupId>
    <artifactId>HbaseTest</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>
    <name>HbaseTest</name>
    <url>http://maven.apache.org</url>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>1.2.4</version>
        </dependency>
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.8</version>
            <scope>system</scope>
            <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
        </dependency>
        <!-- JUnit 4 is required for the @Test annotations used in the test classes below -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
            <scope>test</scope>
        </dependency>
    </dependencies>
</project>

2. HBase connection utility class

package com.hbase.utils;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;

/**
 * HBase connection utility class
 * @author linhaiy
 * @date 2019.05.20
 */
public class HBaseConn {

    private static final HBaseConn INSTANCE = new HBaseConn();
    private static Configuration configuration;
    private static Connection connection;

    public HBaseConn() {
        try {
            if (configuration == null) {
                configuration = HBaseConfiguration.create();
                // ZooKeeper quorum of the target HBase cluster
                configuration.set("hbase.zookeeper.quorum", "localhost:2181");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public Connection getConnection() {
        if (connection == null || connection.isClosed()) {
            try {
                connection = ConnectionFactory.createConnection(configuration);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        return connection;
    }

    public static Connection getHBaseConn() {
        return INSTANCE.getConnection();
    }

    public static Table getTable(String tableName) throws IOException {
        return INSTANCE.getConnection().getTable(TableName.valueOf(tableName));
    }

    public static void closeConn() {
        if (connection != null) {
            try {
                connection.close();
            } catch (IOException ioe) {
                ioe.printStackTrace();
            }
        }
    }
}
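
The following is a minimal usage sketch of this utility class (not part of the original post); the table name "DemoTable" is only a placeholder and the code assumes that table already exists on the cluster configured above.

package com.hbase.example;

import org.apache.hadoop.hbase.client.Table;

import com.hbase.utils.HBaseConn;

/**
 * Minimal usage sketch of HBaseConn (placeholder table name "DemoTable").
 */
public class HBaseConnUsage {
    public static void main(String[] args) throws Exception {
        // Table is Closeable, so try-with-resources releases it automatically;
        // the shared Connection stays open until closeConn() is called.
        try (Table table = HBaseConn.getTable("DemoTable")) {
            System.out.println("Got table: " + table.getName().getNameAsString());
        } finally {
            HBaseConn.closeConn();
        }
    }
}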

3. Data operation utility class

package com.hbase.service;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;

import com.hbase.utils.HBaseConn;

/**
 * HBase operation utility class
 * @author linhaiy
 * @date 2019.05.20
 */
public class HBaseService {

    /**
     * Create an HBase table.
     * @param tableName table name
     * @param cfs       array of column family names
     * @return whether the table was created
     */
    public static boolean createTable(String tableName, String[] cfs) {
        try (HBaseAdmin admin = (HBaseAdmin) HBaseConn.getHBaseConn().getAdmin()) {
            if (admin.tableExists(tableName)) {
                return false;
            }
            HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(tableName));
            Arrays.stream(cfs).forEach(cf -> {
                HColumnDescriptor columnDescriptor = new HColumnDescriptor(cf);
                columnDescriptor.setMaxVersions(1);
                tableDescriptor.addFamily(columnDescriptor);
            });
            admin.createTable(tableDescriptor);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return true;
    }

    /**
     * Delete an HBase table.
     * @param tableName table name
     * @return whether the table was deleted
     */
    public static boolean deleteTable(String tableName) {
        try (HBaseAdmin admin = (HBaseAdmin) HBaseConn.getHBaseConn().getAdmin()) {
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return true;
    }

    /**
     * Insert a single cell into HBase.
     * @param tableName table name
     * @param rowKey    row key
     * @param cfName    column family name
     * @param qualifier column qualifier
     * @param data      value to store
     * @return whether the put succeeded
     */
    public static boolean putRow(String tableName, String rowKey, String cfName, String qualifier, String data) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Put put = new Put(Bytes.toBytes(rowKey));
            put.addColumn(Bytes.toBytes(cfName), Bytes.toBytes(qualifier), Bytes.toBytes(data));
            table.put(put);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return true;
    }

    /**
     * Insert multiple rows in one call.
     * @param tableName table name
     * @param puts      list of Put operations
     * @return whether the batch put succeeded
     */
    public static boolean putRows(String tableName, List<Put> puts) {
        try (Table table = HBaseConn.getTable(tableName)) {
            table.put(puts);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return true;
    }

    public static Result getRow(String tableName, String rowKey) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Get get = new Get(Bytes.toBytes(rowKey));
            return table.get(get);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return null;
    }

    public static Result getRow(String tableName, String rowKey, FilterList filterList) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Get get = new Get(Bytes.toBytes(rowKey));
            get.setFilter(filterList);
            return table.get(get);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return null;
    }

    public static ResultScanner getScanner(String tableName) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Scan scan = new Scan();
            scan.setCaching(1000);
            return table.getScanner(scan);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return null;
    }

    /**
     * Scan a range of rows.
     * @param tableName   table name
     * @param startRowKey start row key (inclusive)
     * @param endRowKey   stop row key (exclusive)
     * @return a ResultScanner instance
     */
    public static ResultScanner getScanner(String tableName, String startRowKey, String endRowKey) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Scan scan = new Scan();
            scan.setStartRow(Bytes.toBytes(startRowKey));
            scan.setStopRow(Bytes.toBytes(endRowKey));
            scan.setCaching(1000);
            return table.getScanner(scan);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return null;
    }

    public static ResultScanner getScanner(String tableName, String startRowKey, String endRowKey,
            FilterList filterList) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Scan scan = new Scan();
            scan.setStartRow(Bytes.toBytes(startRowKey));
            scan.setStopRow(Bytes.toBytes(endRowKey));
            scan.setFilter(filterList);
            scan.setCaching(1000);
            return table.getScanner(scan);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return null;
    }

    /**
     * Delete a single row.
     * @param tableName table name
     * @param rowKey    row key
     * @return whether the delete succeeded
     */
    public static boolean deleteRow(String tableName, String rowKey) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Delete delete = new Delete(Bytes.toBytes(rowKey));
            table.delete(delete);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return true;
    }

    /**
     * Delete the specified column family.
     * @param tableName table name
     * @param cfName    column family name
     * @return whether the delete succeeded
     */
    public static boolean deleteColumnFamily(String tableName, String cfName) {
        try (HBaseAdmin admin = (HBaseAdmin) HBaseConn.getHBaseConn().getAdmin()) {
            admin.deleteColumn(tableName, cfName);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return true;
    }

    public static boolean deleteQualifier(String tableName, String rowKey, String cfName, String qualifier) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Delete delete = new Delete(Bytes.toBytes(rowKey));
            delete.addColumn(Bytes.toBytes(cfName), Bytes.toBytes(qualifier));
            table.delete(delete);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return true;
    }
}
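
The putRows method above is not exercised by the tests that follow; the sketch below shows one possible way to use it for batch insertion. It assumes the "FileTable" table with the "fileInfo" column family, as created by the tests later in this post, and uses made-up row keys.

package com.hbase.example;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

import com.hbase.service.HBaseService;

/**
 * Sketch of batch insertion with HBaseService.putRows
 * (assumes "FileTable" with the "fileInfo" column family exists).
 */
public class BatchPutExample {
    public static void main(String[] args) {
        List<Put> puts = new ArrayList<>();
        for (int i = 1; i <= 100; i++) {
            Put put = new Put(Bytes.toBytes("batchRow" + i));
            put.addColumn(Bytes.toBytes("fileInfo"), Bytes.toBytes("name"), Bytes.toBytes("file" + i + ".txt"));
            puts.add(put);
        }
        // A single client call sends all puts instead of one table.put per row.
        HBaseService.putRows("FileTable", puts);
    }
}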

4. Tests

package com.hbase.test;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;
import org.junit.Test;

import com.hbase.utils.HBaseConn;

/**
 * HBase connection tests
 * @author linhaiy
 * @date 2019.05.20
 */
public class HBaseConnTest {

    @Test
    public void getConnTest() {
        Connection conn = HBaseConn.getHBaseConn();
        System.out.println(conn.isClosed());
        HBaseConn.closeConn();
        System.out.println(conn.isClosed());
    }

    @Test
    public void getTableTest() {
        try {
            Table table = HBaseConn.getTable("US_POPULATION");
            System.out.println(table.getName().getNameAsString());
            table.close();
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
    }
}
package com.hbase.test;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;

import com.hbase.service.HBaseService;

/**
 * Basic HBase operation tests
 * @author linhaiy
 * @date 2019.05.20
 */
public class HbaseTest {

    @Test
    public void createTable() {
        HBaseService.createTable("FileTable", new String[] { "fileInfo", "saveInfo" });
    }

    @Test
    public void addFileDetails() {
        HBaseService.putRow("FileTable", "rowkey1", "fileInfo", "name", "file1.txt");
        HBaseService.putRow("FileTable", "rowkey1", "fileInfo", "type", "txt");
        HBaseService.putRow("FileTable", "rowkey1", "fileInfo", "size", "1024");
        HBaseService.putRow("FileTable", "rowkey1", "saveInfo", "creator", "jixin");
        HBaseService.putRow("FileTable", "rowkey2", "fileInfo", "name", "file2.jpg");
        HBaseService.putRow("FileTable", "rowkey2", "fileInfo", "type", "jpg");
        HBaseService.putRow("FileTable", "rowkey2", "fileInfo", "size", "1024");
        HBaseService.putRow("FileTable", "rowkey2", "saveInfo", "creator", "jixin");
    }

    @Test
    public void getFileDetails() {
        Result result = HBaseService.getRow("FileTable", "rowkey1");
        if (result != null) {
            System.out.println("rowkey=" + Bytes.toString(result.getRow()));
            System.out.println("fileName="
                    + Bytes.toString(result.getValue(Bytes.toBytes("fileInfo"), Bytes.toBytes("name"))));
        }
    }

    @Test
    public void scanFileDetails() {
        ResultScanner scanner = HBaseService.getScanner("FileTable", "rowkey2", "rowkey2");
        if (scanner != null) {
            scanner.forEach(result -> {
                System.out.println("rowkey=" + Bytes.toString(result.getRow()));
                System.out.println("fileName="
                        + Bytes.toString(result.getValue(Bytes.toBytes("fileInfo"), Bytes.toBytes("name"))));
            });
            scanner.close();
        }
    }

    @Test
    public void deleteRow() {
        HBaseService.deleteRow("FileTable", "rowkey1");
    }

    @Test
    public void deleteTable() {
        HBaseService.deleteTable("FileTable");
    }
}
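
The single-argument getScanner(tableName) overload is not covered by the tests above; a small sketch of a full-table scan with it (assuming "FileTable" is still populated) might look like this.

package com.hbase.example;

import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.util.Bytes;

import com.hbase.service.HBaseService;

/**
 * Sketch of a full-table scan over "FileTable" using the single-argument
 * getScanner overload from HBaseService (assumes the table is populated).
 */
public class FullScanExample {
    public static void main(String[] args) {
        ResultScanner scanner = HBaseService.getScanner("FileTable");
        if (scanner != null) {
            scanner.forEach(result -> System.out.println("rowkey=" + Bytes.toString(result.getRow())));
            // A ResultScanner holds server-side resources, so always close it.
            scanner.close();
        }
    }
}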
package com.hbase.test;

import java.util.Arrays;

import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;

import com.hbase.service.HBaseService;

/**
 * HBase filter tests
 * @author linhaiy
 * @date 2019.05.18
 */
public class HBaseFilterTest {

    /**
     * Create the table.
     */
    @Test
    public void createTable() {
        HBaseService.createTable("File", new String[] { "fileInfo", "saveInfo" });
    }

    /**
     * Insert test rows one cell at a time.
     */
    @Test
    public void addFileDetails() {
        HBaseService.putRow("File", "rowkey1", "fileInfo", "name", "file1.txt");
        HBaseService.putRow("File", "rowkey1", "fileInfo", "type", "txt");
        HBaseService.putRow("File", "rowkey1", "fileInfo", "size", "1024");
        HBaseService.putRow("File", "rowkey1", "saveInfo", "creator", "jixin");
        HBaseService.putRow("File", "rowkey2", "fileInfo", "name", "file2.jpg");
        HBaseService.putRow("File", "rowkey2", "fileInfo", "type", "jpg");
        HBaseService.putRow("File", "rowkey2", "fileInfo", "size", "1024");
        HBaseService.putRow("File", "rowkey2", "saveInfo", "creator", "jixin");
        HBaseService.putRow("File", "rowkey3", "fileInfo", "name", "file3.jpg");
        HBaseService.putRow("File", "rowkey3", "fileInfo", "type", "jpg");
        HBaseService.putRow("File", "rowkey3", "fileInfo", "size", "1024");
        HBaseService.putRow("File", "rowkey3", "saveInfo", "creator", "jixin");
    }

    /**
     * Filtered scan on the row key.
     */
    @Test
    public void rowFilterTest() {
        // RowFilter filters on the row key; BinaryComparator matches the complete byte array "rowkey1"
        Filter filter = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("rowkey1")));
        // Operator.MUST_PASS_ALL means AND, Operator.MUST_PASS_ONE means OR.
        // A FilterList represents a filter chain: a set of filters applied to the target data set.
        FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, Arrays.asList(filter));
        ResultScanner scanner = HBaseService.getScanner("File", "rowkey1", "rowkey3", filterList);
        if (scanner != null) {
            scanner.forEach(result -> {
                System.out.println("rowkey=" + Bytes.toString(result.getRow()));
                System.out.println("fileName="
                        + Bytes.toString(result.getValue(Bytes.toBytes("fileInfo"), Bytes.toBytes("name"))));
            });
            scanner.close();
        }
    }

    /**
     * Row-key prefix filter scan.
     */
    @Test
    public void prefixFilterTest() {
        Filter filter = new PrefixFilter(Bytes.toBytes("rowkey2"));
        FilterList filterList = new FilterList(Operator.MUST_PASS_ALL, Arrays.asList(filter));
        ResultScanner scanner = HBaseService.getScanner("File", "rowkey1", "rowkey3", filterList);
        if (scanner != null) {
            scanner.forEach(result -> {
                System.out.println("rowkey=" + Bytes.toString(result.getRow()));
                System.out.println("fileName="
                        + Bytes.toString(result.getValue(Bytes.toBytes("fileInfo"), Bytes.toBytes("name"))));
            });
            scanner.close();
        }
    }

    /**
     * Return only the key metadata of each cell, without the values.
     */
    @Test
    public void keyOnlyFilterTest() {
        Filter filter = new KeyOnlyFilter(true);
        FilterList filterList = new FilterList(Operator.MUST_PASS_ALL, Arrays.asList(filter));
        ResultScanner scanner = HBaseService.getScanner("File", "rowkey1", "rowkey3", filterList);
        if (scanner != null) {
            scanner.forEach(result -> {
                System.out.println("rowkey=" + Bytes.toString(result.getRow()));
                System.out.println("fileName="
                        + Bytes.toString(result.getValue(Bytes.toBytes("fileInfo"), Bytes.toBytes("name"))));
            });
            scanner.close();
        }
    }

    /**
     * Filter cells by column-name prefix.
     */
    @Test
    public void columnPrefixFilterTest() {
        Filter filter = new ColumnPrefixFilter(Bytes.toBytes("nam"));
        FilterList filterList = new FilterList(Operator.MUST_PASS_ALL, Arrays.asList(filter));
        ResultScanner scanner = HBaseService.getScanner("File", "rowkey1", "rowkey3", filterList);
        if (scanner != null) {
            scanner.forEach(result -> {
                System.out.println("rowkey=" + Bytes.toString(result.getRow()));
                System.out.println("fileName="
                        + Bytes.toString(result.getValue(Bytes.toBytes("fileInfo"), Bytes.toBytes("name"))));
                System.out.println("fileType="
                        + Bytes.toString(result.getValue(Bytes.toBytes("fileInfo"), Bytes.toBytes("type"))));
            });
            scanner.close();
        }
    }
}
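
Beyond the filters tested above, a value-based filter can be plugged into the same helper. The sketch below is not part of the original tests: it uses SingleColumnValueFilter to keep only rows whose fileInfo:type cell equals "jpg", and the row-key range rowkey1 to rowkey9 is just an illustrative bound over the sample data.

package com.hbase.example;

import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

import com.hbase.service.HBaseService;

/**
 * Sketch of filtering by cell value with SingleColumnValueFilter,
 * reusing HBaseService.getScanner (assumes the "File" table populated above).
 */
public class SingleColumnValueFilterExample {
    public static void main(String[] args) {
        SingleColumnValueFilter filter = new SingleColumnValueFilter(
                Bytes.toBytes("fileInfo"), Bytes.toBytes("type"),
                CompareOp.EQUAL, Bytes.toBytes("jpg"));
        // Skip rows that do not have the fileInfo:type column at all.
        filter.setFilterIfMissing(true);
        FilterList filterList = new FilterList(Operator.MUST_PASS_ALL, filter);
        ResultScanner scanner = HBaseService.getScanner("File", "rowkey1", "rowkey9", filterList);
        if (scanner != null) {
            scanner.forEach(result -> System.out.println("rowkey=" + Bytes.toString(result.getRow())));
            scanner.close();
        }
    }
}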
