HbaseDaoImp HBASE 增删改查
来源:互联网 发布:eclipse加载jar包源码 编辑:程序博客网 时间:2024/05/16 05:48
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.qjzh.bigdata.api.hadoop.hbase.dao.HbaseDaoI;
import com.qjzh.bigdata.api.utils.BigDataUtils;
import com.qjzh.bigdata.api.utils.PropertiesUtils;
public class HbaseDaoImp implements HbaseDaoI {

    private static final Logger log = LoggerFactory
            .getLogger(HbaseDaoImp.class);

    // Cluster configuration and the single shared connection. An HBase Connection
    // is heavyweight and thread-safe, so one instance per JVM is the recommended
    // usage; Table/Admin handles obtained from it are cheap and closed per call.
    private static Configuration configuration;
    private static Connection connection;

    static {
        try {
            configuration = HBaseConfiguration.create();
            configuration.set("hbase.zookeeper.property.clientPort",
                    PropertiesUtils.getProValue("hbase.zookeeper.property.clientPort"));
            configuration.set("hbase.zookeeper.quorum",
                    PropertiesUtils.getProValue("hbase.zookeeper.quorum"));
            User user = User.create(UserGroupInformation
                    .createRemoteUser(PropertiesUtils.getProValue("hbase.user")));
            connection = ConnectionFactory.createConnection(configuration, user);
        } catch (Exception e) {
            // Was e.printStackTrace(): route startup failures through the logger
            // so they are visible in application logs. Subsequent calls will fail
            // with an NPE on 'connection' just as in the original.
            log.error("Failed to initialize HBase connection", e);
        }
    }

    /**
     * Drains a scanner into a list of row maps (one map per row: qualifier -> value
     * plus the "rowKey" entry added by {@link BigDataUtils#dealCell}) and always
     * closes the scanner.
     */
    private static List<Map<String, Object>> collectResults(ResultScanner rs)
            throws IOException {
        List<Map<String, Object>> resultList = new ArrayList<Map<String, Object>>();
        try {
            for (Result r : rs) {
                Map<String, Object> resultMap = new LinkedHashMap<String, Object>();
                for (Cell cell : r.listCells()) {
                    BigDataUtils.dealCell(resultMap, cell);
                }
                resultList.add(resultMap);
            }
        } finally {
            rs.close();
        }
        return resultList;
    }

    /**
     * Creates a table whose single column family is
     * {@link BigDataUtils#commonClomun} ("data").
     *
     * @param tableName table name
     * @param isReplace whether to drop and recreate an existing table; when false
     *                  and the table exists, this method is a no-op
     * @param ttlTime   TTL in seconds for the column family; ignored when &lt;= 0
     */
    public void createTable(String tableName, boolean isReplace, int ttlTime)
            throws Exception {
        log.info("start create table {}", tableName);
        Admin hBaseAdmin = connection.getAdmin();
        try {
            TableName tn = TableName.valueOf(tableName);
            if (hBaseAdmin.tableExists(tn)) {
                if (!isReplace) {
                    return;
                }
                hBaseAdmin.disableTable(tn);
                hBaseAdmin.deleteTable(tn);
                log.info("{} is exist,detele....", tableName);
            }
            HTableDescriptor tableDescriptor = new HTableDescriptor(tn);
            HColumnDescriptor column = new HColumnDescriptor(
                    BigDataUtils.commonClomun);
            if (ttlTime > 0) {
                column.setTimeToLive(ttlTime);
            }
            // BUG FIX: the original added a brand-new HColumnDescriptor here,
            // silently discarding the TTL configured above; add the configured
            // descriptor instead.
            tableDescriptor.addFamily(column);
            hBaseAdmin.createTable(tableDescriptor);
            log.info("end create table {}", tableName);
        } finally {
            hBaseAdmin.close();
        }
    }

    /**
     * Inserts or updates one row (HBase Put semantics: existing cells are
     * overwritten by timestamp).
     *
     * @param tableName table name
     * @param rowKey    row key
     * @param paramMap  column name -> value; values are stored via toString()
     * @return true when the put completed without throwing
     */
    public boolean saveOrUpdateOne(String tableName, String rowKey,
            Map<String, Object> paramMap) throws Exception {
        log.info("start insert data into {}", tableName);
        Table table = connection.getTable(TableName.valueOf(tableName));
        try {
            // Bytes.toBytes (UTF-8) instead of String.getBytes() for consistency
            // with the query methods below, which already use Bytes.toBytes.
            Put put = new Put(Bytes.toBytes(rowKey));
            for (Entry<String, Object> en : paramMap.entrySet()) {
                put.addColumn(Bytes.toBytes(BigDataUtils.commonClomun),
                        Bytes.toBytes(en.getKey()),
                        Bytes.toBytes(en.getValue().toString()));
            }
            table.put(put);
        } finally {
            table.close();
        }
        log.info("end insert data into {}", tableName);
        return true;
    }

    /**
     * Disables and deletes a table.
     *
     * @param tableName table name
     */
    public void dropTable(String tableName) throws Exception {
        Admin hBaseAdmin = connection.getAdmin();
        try {
            TableName tn = TableName.valueOf(tableName);
            hBaseAdmin.disableTable(tn);
            hBaseAdmin.deleteTable(tn);
        } finally {
            hBaseAdmin.close();
        }
    }

    /**
     * Deletes the whole row identified by rowkey.
     *
     * @param tablename table name
     * @param rowkey    row key of the row to delete
     */
    public void deleteRow(String tablename, String rowkey) throws Exception {
        Table table = connection.getTable(TableName.valueOf(tablename));
        try {
            table.delete(new Delete(Bytes.toBytes(rowkey)));
        } finally {
            table.close();
        }
        log.info("删除行成功!");
    }

    /**
     * Returns every row of the table. Full table scan — use with care.
     *
     * @param tableName table name
     */
    public List<Map<String, Object>> queryAll(String tableName)
            throws Exception {
        Table table = connection.getTable(TableName.valueOf(tableName));
        try {
            return collectResults(table.getScanner(new Scan()));
        } finally {
            table.close();
        }
    }

    /**
     * Fetches the single row with the given rowkey.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @return the row as qualifier -> value map (plus "rowKey"), or null when
     *         the row does not exist
     */
    public Map<String, Object> queryByRowKey(String tableName, String rowKey)
            throws Exception {
        log.info("HbaseDaoImp.queryByRowKey tableName={},rowKey={}", tableName, rowKey);
        Table table = connection.getTable(TableName.valueOf(tableName));
        try {
            Result r = table.get(new Get(Bytes.toBytes(rowKey)));
            if (r.isEmpty()) {
                return null;
            }
            Map<String, Object> resultMap = new LinkedHashMap<String, Object>();
            for (Cell cell : r.listCells()) {
                BigDataUtils.dealCell(resultMap, cell);
            }
            return resultMap;
        } finally {
            table.close();
            log.info(" end HbaseDaoImp.queryByRowKey ");
        }
    }

    /**
     * Finds rows whose "data":columnName cell equals value exactly.
     *
     * @param tableName  table name
     * @param columnName column (qualifier) name
     * @param value      exact value to match
     */
    public List<Map<String, Object>> queryByColumn(String tableName,
            String columnName, String value) throws Exception {
        Table table = connection.getTable(TableName.valueOf(tableName));
        try {
            Filter filter = new SingleColumnValueFilter(
                    Bytes.toBytes(BigDataUtils.commonClomun),
                    Bytes.toBytes(columnName), CompareOp.EQUAL,
                    Bytes.toBytes(value));
            Scan s = new Scan();
            s.setFilter(filter);
            return collectResults(table.getScanner(s));
        } finally {
            table.close();
        }
    }

    /**
     * Finds rows matching every (column -> value) condition in paramMap (AND
     * semantics; see {@link BigDataUtils#getSimpleScanByParamMap}).
     *
     * @param tableName table name
     * @param paramMap  column -> exact value conditions
     */
    public List<Map<String, Object>> queryByColumns(String tableName,
            Map<String, Object> paramMap) throws Exception {
        Table table = connection.getTable(TableName.valueOf(tableName));
        try {
            Scan scan = BigDataUtils.getSimpleScanByParamMap(paramMap);
            return collectResults(table.getScanner(scan));
        } finally {
            table.close();
        }
    }

    /**
     * Paged query without column conditions; delegates to the six-argument
     * overload.
     *
     * @param tableName   table name
     * @param rowKeyRegex rowkey regex; null/empty disables it
     * @param startRowKey scan start row; null/blank disables it
     * @param pageNum     row limit; 0 means no PageFilter
     */
    public List<Map<String, Object>> pageQueryByColumns(String tableName,
            String rowKeyRegex, String startRowKey, int pageNum)
            throws Exception {
        return pageQueryByColumns(tableName, rowKeyRegex, startRowKey, pageNum,
                null, null);
    }

    /** Ad-hoc smoke test; requires a reachable HBase cluster. */
    public static void main(String[] args) throws Exception {
        HbaseDaoImp hbaseDaoImp = new HbaseDaoImp();
        List<Map<String, Object>> result =
                hbaseDaoImp.findListByRowKeyRegex("t_test", "1610", false, 200);
        System.out.println(result);
    }

    /**
     * Scans a table keeping only rows where, for each (column -> substrings)
     * entry, the "data":column cell matches the listed substrings. The FilterList
     * is built with its default MUST_PASS_ALL operator, so ALL substrings must
     * match. NOTE(review): if OR semantics were intended for the value list,
     * FilterList.Operator.MUST_PASS_ONE would be required — behavior kept as-is.
     *
     * @param tabaleName table name
     * @param nums       maximum number of rows to return; negative = unlimited
     * @param cellNameAndValueSubString column name -> substrings to match
     * @return matching rows
     */
    public List<Map<String, Object>> scanTableByValueFilterList(String tabaleName, int nums,
            Map<String, List<String>> cellNameAndValueSubString) throws IOException,
            UnsupportedEncodingException {
        Table table = connection.getTable(TableName.valueOf(tabaleName));
        List<Map<String, Object>> resultList;
        try {
            Scan scan = new Scan();
            List<Filter> filters = new ArrayList<Filter>();
            for (Entry<String, List<String>> en : cellNameAndValueSubString.entrySet()) {
                List<String> values = en.getValue();
                if (values == null || values.isEmpty()) {
                    continue;
                }
                for (String str : values) {
                    // Was '"".endsWith(str)', which is only true for the empty
                    // string; str.isEmpty() states the intent directly.
                    if (str == null || str.isEmpty()) {
                        continue;
                    }
                    SingleColumnValueFilter scvf = new SingleColumnValueFilter(
                            Bytes.toBytes("data"), Bytes.toBytes(en.getKey()),
                            CompareFilter.CompareOp.EQUAL,
                            new SubstringComparator(str));
                    // Exclude rows missing the column; examine only the newest
                    // cell version.
                    scvf.setFilterIfMissing(true);
                    scvf.setLatestVersionOnly(true);
                    filters.add(scvf);
                }
            }
            scan.setFilter(new FilterList(filters));
            // NOTE(review): setMaxResultSize caps bytes per scan, not rows; the
            // row limit is actually enforced by the subList truncation below.
            scan.setMaxResultSize(nums);
            resultList = collectResults(table.getScanner(scan));
        } finally {
            table.close();
        }
        if (nums >= 0 && resultList.size() > nums) {
            resultList = resultList.subList(0, nums);
        }
        return resultList;
    }

    /**
     * Paged query combining an optional rowkey regex, optional exact-value column
     * conditions (AND) and an optional row limit.
     *
     * @param tableName   table name
     * @param rowKeyRegex regex applied to the rowkey; null/empty disables it
     * @param startRowKey scan start row; null/blank disables it
     * @param pageNum     row limit (PageFilter); 0 means no PageFilter
     * @param paramMap    column -> exact value conditions; null/empty values skipped
     * @param columns     restrict the scan to these "data" family columns; null = all
     */
    public List<Map<String, Object>> pageQueryByColumns(String tableName,
            String rowKeyRegex, String startRowKey, int pageNum,
            Map<String, Object> paramMap, List<String> columns)
            throws Exception {
        Table table = connection.getTable(TableName.valueOf(tableName));
        try {
            Scan scan = new Scan();
            if (columns != null) {
                for (String column : columns) {
                    scan.addColumn(Bytes.toBytes("data"), Bytes.toBytes(column));
                }
            }
            List<Filter> filters = new ArrayList<Filter>();
            if (rowKeyRegex != null && !"".equals(rowKeyRegex)) {
                filters.add(new RowFilter(CompareFilter.CompareOp.EQUAL,
                        new RegexStringComparator(rowKeyRegex)));
            }
            if (paramMap != null) {
                for (Entry<String, Object> filter : paramMap.entrySet()) {
                    Object value = filter.getValue();
                    if (value == null || "".equals(value)) {
                        continue;
                    }
                    filters.add(new SingleColumnValueFilter(
                            Bytes.toBytes(BigDataUtils.commonClomun),
                            Bytes.toBytes(filter.getKey()), CompareOp.EQUAL,
                            Bytes.toBytes(value.toString())));
                }
            }
            if (0 != pageNum) {
                filters.add(new PageFilter(pageNum));
            }
            scan.setFilter(new FilterList(filters));
            if (startRowKey != null && !"".equals(startRowKey.trim())) {
                scan.setStartRow(Bytes.toBytes(startRowKey.trim()));
            }
            // NOTE(review): setMaxResultSize is a byte budget, not a row count;
            // real paging is done by the PageFilter above. Kept for compatibility.
            scan.setMaxResultSize(pageNum);
            return collectResults(table.getScanner(scan));
        } finally {
            table.close();
        }
    }

    /**
     * Paged range query between startRowKey (inclusive) and endRowKey
     * (exclusive), optionally filtered by exact column values.
     * NOTE(review): the PageFilter is only added when paramMap != null, so a
     * null paramMap disables paging entirely — preserved as-is.
     *
     * @param tableName   table name
     * @param startRowKey inclusive start row; null/empty = from the beginning
     * @param endRowKey   exclusive stop row; null/empty = to the end
     * @param pageNum     row limit for the PageFilter
     * @param paramMap    column -> exact value conditions (may be null)
     */
    public List<Map<String, Object>> pageQuery(String tableName,
            String startRowKey, String endRowKey, int pageNum,
            Map<String, Object> paramMap) throws Exception {
        Table table = connection.getTable(TableName.valueOf(tableName));
        try {
            Scan scan = new Scan();
            if (!StringUtils.isEmpty(startRowKey)) {
                scan.setStartRow(Bytes.toBytes(startRowKey));
            }
            if (!StringUtils.isEmpty(endRowKey)) {
                scan.setStopRow(Bytes.toBytes(endRowKey));
            }
            if (paramMap != null) {
                List<Filter> filters = new ArrayList<Filter>();
                for (Entry<String, Object> filter : paramMap.entrySet()) {
                    Object value = filter.getValue();
                    if (value == null || "".equals(value)) {
                        continue;
                    }
                    filters.add(new SingleColumnValueFilter(
                            Bytes.toBytes(BigDataUtils.commonClomun),
                            Bytes.toBytes(filter.getKey()), CompareOp.EQUAL,
                            Bytes.toBytes(value.toString())));
                }
                filters.add(new PageFilter(pageNum));
                scan.setFilter(new FilterList(filters));
            }
            scan.setMaxResultSize(pageNum);
            return collectResults(table.getScanner(scan));
        } finally {
            table.close();
        }
    }

    /**
     * Fuzzy full-table query on the rowkey.
     *
     * @param tableName       table name
     * @param rowKeyRegex     e.g. "^ABC" (starts with), "ABC$" (ends with),
     *                        "ABC" (contains), "[2-4]{4}" (four digits 2-4)
     * @param isOnlyResultKey when true each result map contains only "rowKey"
     * @param limit           max rows (PageFilter); &lt;= 0 means unlimited
     */
    public List<Map<String, Object>> findListByRowKeyRegex(String tableName,
            String rowKeyRegex, boolean isOnlyResultKey, int limit) throws Exception {
        log.info("HbaseDaoImp.findListByRowKeyRegex start tableName={},rowKeyRegex={},isOnlyResultKey={}",
                tableName, rowKeyRegex, isOnlyResultKey);
        List<Map<String, Object>> resultList = new ArrayList<Map<String, Object>>();
        Table table = connection.getTable(TableName.valueOf(tableName));
        try {
            Scan scan = new Scan();
            List<Filter> filters = new ArrayList<Filter>();
            if (rowKeyRegex != null && !"".equals(rowKeyRegex)) {
                Filter filteRowkey = new RowFilter(CompareFilter.CompareOp.EQUAL,
                        new RegexStringComparator(rowKeyRegex));
                filters.add(filteRowkey);
                log.info(" start to hbase ResultScanner{}", filteRowkey);
            }
            if (limit > 0) {
                filters.add(new PageFilter(limit));
            }
            scan.setFilter(new FilterList(filters));
            ResultScanner rs = table.getScanner(scan);
            try {
                for (Result r : rs) {
                    Map<String, Object> resultMap = new LinkedHashMap<String, Object>();
                    if (isOnlyResultKey) {
                        // Only the rowkey is wanted: decode it from the first cell.
                        Cell cell = r.listCells().get(0);
                        resultMap.put("rowKey", new String(cell.getRowArray(),
                                cell.getRowOffset(), cell.getRowLength(), "UTF-8"));
                    } else {
                        for (Cell cell : r.listCells()) {
                            BigDataUtils.dealCell(resultMap, cell);
                        }
                    }
                    resultList.add(resultMap);
                }
            } finally {
                rs.close();
            }
        } finally {
            table.close();
        }
        log.info(" HbaseDaoImp.findListByRowKeyRegex end ");
        return resultList;
    }

    /**
     * Range scan by rowkey. ASCII ordering note: "#" &lt; "0-9" &lt; ":".
     *
     * @param tableName   table name
     * @param startRowkey inclusive start row; null now means "from the beginning"
     *                    (the original threw a NullPointerException)
     * @param endRowKey   exclusive stop row; null means "to the end"
     * @param limit       advisory limit passed to setMaxResultSize when &gt; 0
     */
    public List<Map<String, Object>> findListByRowKeyRange(String tableName,
            String startRowkey, String endRowKey, int limit) throws Exception {
        log.info("HbaseDaoImp.findListByRowKeyRange start tableName={},startRowkey={},endRowKey={},limit={}",
                tableName, startRowkey, endRowKey, limit);
        Table table = connection.getTable(TableName.valueOf(tableName));
        try {
            Scan scan = new Scan();
            if (startRowkey != null) {
                scan.setStartRow(Bytes.toBytes(startRowkey));
            }
            if (endRowKey != null) {
                scan.setStopRow(Bytes.toBytes(endRowKey));
            }
            if (limit > 0) {
                // NOTE(review): setMaxResultSize limits bytes per scan, not rows.
                scan.setMaxResultSize(limit);
            }
            List<Map<String, Object>> resultList = collectResults(table.getScanner(scan));
            log.info(" HbaseDaoImp.findListByRowKeyRange end, {} rows", resultList.size());
            return resultList;
        } finally {
            table.close();
        }
    }
}
package com.qjzh.bigdata.api.utils;
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import org.apache.commons.codec.binary.Hex;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
public class BigDataUtils {

    /** Separator placed between rowkey segments. */
    public static final String ROWKEY_SPLIT = "-";

    /** The single column family used by all tables ("data"). */
    public static final String commonClomun = "data";

    // Shared PRNG for the 4-digit random rowkey suffix; no security requirement here.
    private static Random random = new Random();

    /**
     * Copies one HBase cell into resultMap: qualifier -> value, and records the
     * row key under the fixed key "rowKey". All byte[] -> String decoding is UTF-8.
     *
     * NOTE(review): a column literally named "rowKey" would be clobbered by the
     * synthetic "rowKey" entry — kept as-is for compatibility.
     */
    public static void dealCell(Map<String, Object> resultMap, Cell cell) throws UnsupportedEncodingException {
        String rowKey = new String(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), "UTF-8");
        String qualifier = new String(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
                "UTF-8");
        String value = new String(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), "UTF-8");
        resultMap.put(qualifier, value);
        resultMap.put("rowKey", rowKey);
    }

    /**
     * Builds a Scan whose filter list requires every (column -> value) pair in
     * paramMap to match exactly (FilterList default MUST_PASS_ALL) within the
     * "data" column family.
     */
    public static Scan getSimpleScanByParamMap(Map<String, Object> paramMap) {
        List<Filter> filters = new ArrayList<Filter>();
        for (Entry<String, Object> filter : paramMap.entrySet()) {
            filters.add(new SingleColumnValueFilter(Bytes.toBytes(commonClomun),
                    Bytes.toBytes(filter.getKey()), CompareOp.EQUAL,
                    Bytes.toBytes(filter.getValue().toString())));
        }
        Scan scan = new Scan();
        scan.setFilter(new FilterList(filters));
        return scan;
    }

    /**
     * Converts a "yyyyMMdd" date string into the HDFS directory form "yyyy/MM/dd".
     *
     * @throws ParseException when the input is not a parseable yyyyMMdd date
     */
    public static String formatHdfsDirDate(String yyyyMMdd) throws ParseException {
        // SimpleDateFormat is not thread-safe; fresh local instances are used per call.
        Date d = new SimpleDateFormat("yyyyMMdd").parse(yyyyMMdd);
        return new SimpleDateFormat("yyyy/MM/dd").format(d);
    }

    /**
     * 生成rowkey第1部分 — builds the rowkey prefix: the first 7 hex chars of
     * MD5(id), right-shifted one bit and zero-padded back to 7 chars, then
     * ROWKEY_SPLIT + id. The shift keeps keys inside the pre-split region range
     * 00000000~7fffffff, avoiding a hot last region on bulk loads.
     *
     * NOTE(review): the original design note speaks of the first 8 md5 chars but
     * the code has always used substring(0, 7); kept as-is so existing rowkeys
     * remain addressable.
     *
     * @param id primary id component
     * @return "xxxxxxx-id" where xxxxxxx is the shifted md5 prefix
     */
    public static String makeRowKeyPart1(String id) {
        String md5_content;
        try {
            MessageDigest messageDigest = MessageDigest.getInstance("MD5");
            messageDigest.reset();
            // NOTE(review): id.getBytes() uses the platform default charset;
            // non-ASCII ids may hash differently across platforms. Preserved.
            messageDigest.update(id.getBytes());
            md5_content = toHex(messageDigest.digest());
        } catch (NoSuchAlgorithmException e1) {
            // MD5 is mandatory in every JDK. The original swallowed this and then
            // threw a NullPointerException on the next line; fail loudly instead.
            throw new IllegalStateException("MD5 algorithm not available", e1);
        }
        // turn right md5
        String right_md5_id = Integer.toHexString(Integer.parseInt(md5_content.substring(0, 7), 16) >> 1);
        while (right_md5_id.length() < 7) {
            right_md5_id = "0" + right_md5_id;
        }
        return right_md5_id + ROWKEY_SPLIT + id;
    }

    /**
     * Lowercase hex encoding — stdlib replacement for commons-codec's
     * Hex.encodeHex, producing byte-identical output.
     */
    private static String toHex(byte[] bytes) {
        StringBuilder sb = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            sb.append(Character.forDigit((b >> 4) & 0xF, 16));
            sb.append(Character.forDigit(b & 0xF, 16));
        }
        return sb.toString();
    }

    /**
     * 生成rowkey — prefix from {@link #makeRowKeyPart1} plus each extra argument
     * appended with ROWKEY_SPLIT.
     *
     * @param id   primary id component
     * @param args additional components appended via toString()
     */
    public static String makeRowKey(String id, Object... args) {
        StringBuilder rs = new StringBuilder(makeRowKeyPart1(id));
        for (int i = 0; i < args.length; i++) {
            rs.append(ROWKEY_SPLIT).append(args[i]);
        }
        return rs.toString();
    }

    /**
     * 生成rowkey with a 4-digit zero-padded random suffix, for callers that need
     * to avoid key collisions.
     */
    public static String makeRowKeyRandom(String id, Object... args) {
        String rnd = "" + random.nextInt(10000);
        while (rnd.length() < 4) {
            rnd = "0" + rnd;
        }
        return makeRowKey(id, args) + ROWKEY_SPLIT + rnd;
    }

    /**
     * Derives an (exclusive) end rowkey from a start rowkey by appending byte
     * 0xff, which sorts after any UTF-8 continuation — covers Chinese rowkeys.
     * NOTE(review): new String(byte[]) uses the platform default charset;
     * preserved for compatibility with existing scans.
     */
    public static String makeEndRowKey(String startRowKey) {
        return startRowKey + new String(new byte[] { (byte) 0xff });
    }

    /** Ad-hoc demo of the rowkey helpers. */
    public static void main(String[] args) throws UnsupportedEncodingException {
        System.out.println(makeRowKey("l123j", "sdf", 12, "ddd"));
        System.out.println(makeRowKey("l1231"));
        System.out.println(makeRowKeyRandom("l1232", 3));
        System.out.println(makeRowKeyRandom("l1232"));
        System.out.println(makeRowKeyRandom("l1232"));
        System.out.println(Long.MAX_VALUE);
        System.out.println(Integer.MAX_VALUE);
        System.out.println(Integer.toHexString(22));
        byte[] b = new byte[1];
        b[0] = (byte) (0xff & Integer.parseInt("F1", 16));
        System.out.println("a" + new String(b, "utf-8"));
        byte[] b2 = new byte[] { (byte) 0xff };
        System.out.println("a" + new String(b2));
    }
}
package com.qjzh.bigdata.api.utils;
import java.io.InputStream;
import java.util.Properties;
public class PropertiesUtils {

    /** Classpath resource holding the big-data connection settings. */
    private static final String CONFIG_FILE = "bigdata_config.properties";

    // Loaded once and cached. The original opened a new, never-closed InputStream
    // and re-parsed the whole file on every getProValue call.
    private static Properties cachedProps;

    /**
     * Loads the properties resource, closing the stream afterwards. A missing
     * resource yields an empty Properties, which makes getProValue return null —
     * the same observable result as the original's swallowed exception path.
     */
    private static Properties loadProperties() throws Exception {
        Properties p = new Properties();
        InputStream inputStream = PropertiesUtils.class.getClassLoader()
                .getResourceAsStream(CONFIG_FILE);
        if (inputStream == null) {
            return p;
        }
        try {
            p.load(inputStream);
        } finally {
            inputStream.close();
        }
        return p;
    }

    /**
     * Returns the configured value for proName, or null when the key (or the
     * whole config file) is absent.
     *
     * @param proName property key to look up
     */
    public static synchronized String getProValue(String proName) throws Exception {
        if (cachedProps == null) {
            cachedProps = loadProperties();
        }
        return cachedProps.getProperty(proName);
    }

    /** Ad-hoc smoke test. */
    public static void main(String[] args) throws Exception {
        System.out.println(getProValue("hdfs_url"));
    }
}
- HbaseDaoImp HBASE 增删改查
- hbase 增删改查
- hbase数据增删改查
- hbase 增删查改操作
- HBase初学-增删查改
- Hbase常用操作(增删改查)
- 关于hbase增删改查的操作
- hbase 增删改查 api 简单操作
- Hbase shell 操作:增删改查小结
- Hbase常用操作(增删改查)
- hbase 增删改查 java示例
- HBase java 增删改查操作
- hbase 增删改查基本操作
- hbase 1.1.4增删查改demo
- java操作HBase增删改查基础
- Hbase 的java 增删改查操作
- HBase java 增删改查操作
- HBASE 常用shell命令,增删改查
- 导入employee测试数据
- 手把手教你用C写游程编码
- 神箭手云爬虫平台 如何在1小时内编写简单爬虫
- springAOP中的各种通知
- split和block的区别以及maptask和reducetask个数设定
- HbaseDaoImp HBASE 增删改查
- Jquery实现多选框的基本功能
- C语言编译数组地址分配问题
- MOS管知识
- 小案例-健康栏目的实现分析
- Codeforces Round #379 (Div. 2)E. Anton and Tree(dfs缩点,想法题)
- Origin中做fillarea填充图
- 二维码系列(五)综合DEMO演练,工具类效能提升
- 健康栏目的实现