HBase_Eclipse基本操作
来源:互联网 发布:开淘宝天猫要多少钱 编辑:程序博客网 时间:2024/06/01 22:13
HBase_Eclipse基本操作
查看HBase中的表
创建HTable
删除HTable
添加表列族
获取表描述信息
添加数据
根据rowKey 获取列键值
遍历表内容
1.准备工作
1.新建一个普通java项目,在项目根目录下新建lib文件夹。
2.将$HBase/lib下的所有jar包拷贝到项目lib目录下。
3.选中所有jar包,右击选择 Build Path -> Add to Build Path
4.在项目根目录下新建hbase-site.xml和log4j.properties两个文件。
hbase-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>hbase.cluster.distributed</name>
    <value>true</value>
  </property>
  <property>
    <name>hbase.rootdir</name>
    <!-- zhx01是hostname,也可以写IP -->
    <value>hdfs://zhx01:9000/hbase</value>
  </property>
  <property>
    <name>hbase.zookeeper.property.dataDir</name>
    <!-- ZooKeeper data目录 -->
    <value>/home/zouhongxue/data/zkdata</value>
  </property>
  <property>
    <name>hbase.zookeeper.quorum</name>
    <!-- 这是自己的机器IP -->
    <value>192.168.50.100</value>
  </property>
  <property>
    <name>hbase.zookeeper.property.clientPort</name>
    <value>2181</value>
  </property>
</configuration>
log4j.properties
这个日志配置文件内容直接复制就OK
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.security.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log

# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}

# Logging Threshold
log4j.threshold=ALL

#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}

# Rollver at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd

# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout

# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n

# Rolling File Appender properties
hbase.log.maxfilesize=256MB
hbase.log.maxbackupindex=20

# Rolling File Appender
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n

#
# Security audit appender
#
hbase.security.log.file=SecurityAuth.audit
hbase.security.log.maxfilesize=256MB
hbase.security.log.maxbackupindex=20
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.category.SecurityLogger=${hbase.security.logger}
log4j.additivity.SecurityLogger=false
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE

#
# Null Appender
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender

#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n

log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender
log4j.appender.asyncconsole.target=System.err

# Custom Logging levels

log4j.logger.org.apache.zookeeper=INFO
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.hbase=INFO
# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
#log4j.logger.org.apache.hadoop.dfs=DEBUG
# Set this class to log INFO only otherwise its OTT
# Enable this to get detailed connection error/retry logging.
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE
# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
# Uncomment the below if you want to remove logging of client region caching'
# and scan of hbase:meta messages
# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO
# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO
# Prevent metrics subsystem start/stop messages (HBASE-17722)
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
5.新建一个包,新建一个文件HBase.java。完成后如下图。
2.HBase.java里面封装了一些操作,包括对HTable,HColumn还有遍历等。
package com.demo.hbase;import java.io.IOException;import java.util.ArrayList;import java.util.List;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.Cell;import org.apache.hadoop.hbase.CellUtil;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.HColumnDescriptor;import org.apache.hadoop.hbase.HTableDescriptor;import org.apache.hadoop.hbase.TableName;import org.apache.hadoop.hbase.client.Admin;import org.apache.hadoop.hbase.client.Connection;import org.apache.hadoop.hbase.client.ConnectionFactory;import org.apache.hadoop.hbase.client.Get;import org.apache.hadoop.hbase.client.Put;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.client.Table;import org.apache.hadoop.hbase.filter.BinaryComparator;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.filter.FilterList;import org.apache.hadoop.hbase.filter.RegexStringComparator;import org.apache.hadoop.hbase.filter.RowFilter;import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;import org.apache.hadoop.hbase.filter.SubstringComparator;import org.apache.hadoop.hbase.util.Bytes;public class HBase { private static Connection conn = null; static{ //创建配置对象 Configuration configuration = HBaseConfiguration.create(); //创建Hbase连接 try { conn = ConnectionFactory.createConnection(configuration); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } /** * 查看HBase中的表 * @throws IOException */ public static void list(){ //获取HBase管理实例对象 Admin admin = null; try { admin = conn.getAdmin(); for (TableName tn: admin.listTableNames()) { System.out.println("表:"+tn.getNameAsString()); } } catch (IOException e) { e.printStackTrace(); }finally { if (admin!=null) { try { admin.close(); } catch (IOException e) { e.printStackTrace(); } } } } /** * 创建HTable * @param tableName * @param familys * 
@return * @throws IOException */ public static boolean create(String tableName,String ...familys){ Admin admin = null; try { admin = conn.getAdmin(); if (admin.tableExists(TableName.valueOf(tableName))) { System.out.println("表已存在"); return false; }else { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName)); for (String family : familys) { HColumnDescriptor hcd = new HColumnDescriptor(family); htd.addFamily(hcd); } admin.createTable(htd); System.out.println("表创建成功"); return true; } } catch (IOException e) { e.printStackTrace(); }finally { if (admin!=null) { try { admin.close(); } catch (IOException e) { e.printStackTrace(); } } } return false; } /** * 删除HTable * @param tableName * @throws IOException */ public static void delTable(String tableName) { Admin admin = null; try { admin = conn.getAdmin(); if (admin.tableExists(TableName.valueOf(tableName))) { admin.disableTable(TableName.valueOf(tableName)); admin.deleteTable(TableName.valueOf(tableName)); } } catch (IOException e) { e.printStackTrace(); }finally { if (admin!=null) { try { admin.close(); } catch (IOException e) { e.printStackTrace(); } } } } /** * 获取表描述信息 * @param tableName */ public static void describe(String tableName){ try { Admin admin = conn.getAdmin(); HTableDescriptor htd= admin.getTableDescriptor(TableName.valueOf(tableName)); System.out.println("===describe "+tableName+"==="); for(HColumnDescriptor hcd:htd.getColumnFamilies()) { System.out.println(hcd.getNameAsString()); } System.out.println("======================="); } catch (IOException e) { e.printStackTrace(); } } /** * 添加表列族 * @param tableName * @param familys * @throws IOException */ public static void addFamily(String tableName,String ...familys) { Admin admin = null; try { admin = conn.getAdmin(); if (admin.tableExists(TableName.valueOf(tableName))) { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName)); for (String family : familys) { HColumnDescriptor hcd = new HColumnDescriptor(family); 
htd.addFamily(hcd); } admin.modifyTable(TableName.valueOf(tableName), htd); System.out.println("列族添加成功"); }else { System.out.println("表不存在"); } } catch (IOException e) { e.printStackTrace(); }finally { if (admin!=null) { try { admin.close(); } catch (IOException e) { e.printStackTrace(); } } } } /** * 添加数据 * @param tableName * @param key * @param kvs * @throws IOException */ public static void put(String tableName,String key,String[][] kvs){ Table table = null; try { table = conn.getTable(TableName.valueOf(tableName)); List<Put> lp = new ArrayList<Put>(); for (String[] kv : kvs) { Put put = new Put(Bytes.toBytes(key)); put.addColumn(Bytes.toBytes(kv[0]), Bytes.toBytes(kv[1]), Bytes.toBytes(kv[2])); lp.add(put); } table.put(lp); System.out.println("添加成功"); } catch (IOException e) { e.printStackTrace(); }finally { if (table!=null) { try { table.close(); } catch (IOException e) { e.printStackTrace(); } } } } /** * 根据rowKey 获取列键值 * @param tableName * @param rowKey */ public static void get(String tableName,String rowKey){ Table table = null; try { table = conn.getTable(TableName.valueOf(tableName)); Get get = new Get(Bytes.toBytes(rowKey)); Result result = table.get(get); for (Cell cell : result.listCells()) { String family = Bytes.toString(CellUtil.cloneFamily(cell)); String qualifier = Bytes.toString(CellUtil.cloneQualifier(cell)); String value = Bytes.toString(CellUtil.cloneValue(cell)); System.out.println(family+"\t"+qualifier+"\t"+value); } } catch (IOException e) { e.printStackTrace(); }finally { if (table!=null) { try { table.close(); } catch (IOException e) { e.printStackTrace(); } } } } /** * 遍历表内容 * @param tableName */ public static void scan(String tableName){ Table table = null; try { table = conn.getTable(TableName.valueOf(tableName)); Scan scan = new Scan(); //全表扫描,可以指定列族、列键、不能有行健 ResultScanner rs = table.getScanner(scan); for (Result row : rs) { for (Cell cell : row.listCells()) { System.out.println("Rowkey:"+Bytes.toString(row.getRow())+"\t" 
+"Family:"+Bytes.toString(CellUtil.cloneFamily(cell))+"\t" +"Quilifier:"+Bytes.toString(CellUtil.cloneQualifier(cell))+"\t" +"Value:"+Bytes.toString(CellUtil.cloneValue(cell))); } } } catch (IOException e) { e.printStackTrace(); }finally { if (table!=null) { try { table.close(); } catch (IOException e) { e.printStackTrace(); } } } } /** * 遍历表(带过滤器) * @param tableName * @param filterList */ public static void scan(String tableName,Scan scan){ Table table = null; try { table = conn.getTable(TableName.valueOf(tableName)); //全表扫描,可以指定列族、列键、不能有行健 ResultScanner rs = table.getScanner(scan); for (Result row : rs) { for (Cell cell : row.listCells()) { System.out.println("Rowkey:"+Bytes.toString(row.getRow())+"\t" +"Family:"+Bytes.toString(CellUtil.cloneFamily(cell))+"\t" +"Quilifier:"+Bytes.toString(CellUtil.cloneQualifier(cell))+"\t" +"Value:"+Bytes.toString(CellUtil.cloneValue(cell))); } } } catch (IOException e) { e.printStackTrace(); }finally { if (table!=null) { try { table.close(); } catch (IOException e) { e.printStackTrace(); } } } } /** * 查询条件 * @return */ public static Scan scan(){ Scan scan = new Scan(); //列值过滤器 SingleColumnValueFilter scvf = new SingleColumnValueFilter(Bytes.toBytes("info"), Bytes.toBytes("song1"), CompareOp.EQUAL, Bytes.toBytes("冰雨")); //行键过滤器 BinaryPrefixComparator | SubstringComparator | RegexStringComparator RowFilter rf = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("刘德华")));// scan.setFilter(scvf);// scan.setFilter(rf); //过滤器链 FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL); filterList.addFilter(scvf); filterList.addFilter(rf); scan.setFilter(filterList);// 设置查询起止rowkey scan.setStartRow(Bytes.toBytes("a1")); scan.setStopRow(Bytes.toBytes("刘德华")); return scan; } /** * 测试入口 * @param args * @throws IOException */ public static void main(String[] args) { //设置连接参数:HBase数据库所在的主机IP// configuration.set("hbase.zookeeper.quorum", "192.168.50.100"); //设置连接参数:HBase数据库使用的端口// 
configuration.set("hbase.zookeeper.property.clientPort", "2181");// delTable("student");// create("student", "info","addr");// list();// conn.close();// list();// get("music", "刘德华");// describe("music");// String [][] column = {{"info","song1","冰雨"}};// put("music", "刘德华", column); scan("music"); }}
阅读全文
0 0
- HBase_Eclipse基本操作
- 基本操作
- 基本操作
- 基本操作
- 基本操作
- 基本操作
- 基本操作
- sparkSQL操作基本操作
- gitlab基本一些基本操作
- Matlab 基本命令、基本操作
- MySQL常用操作基本操作
- C#操作XML(基本操作)
- MySQL常用操作基本操作
- Python -- Json基本操作操作
- MySQL操作数据库基本操作
- MySQL基本操作 结构操作
- 操作mysql数据库基本操作
- Java 操作MongoDB 基本操作
- 欢迎使用CSDN-markdown编辑器
- 自定义类型:结构体,枚举,联合体
- 基本图表
- 国内yum源的安装(163,阿里云,epel)
- 浅析企业信息化变革中多种 IT 架构并存背后的原因
- HBase_Eclipse基本操作
- hdu 1325
- SSM框架搭建(Spring+SpringMVC+MyBatis)与easyui集成并实现增删改查实现
- linux mint安装中文字体
- HAProxy系列—配置文件详解
- Java核心技术点之动态代理
- CLOUT —— 媒体去中心化
- 解决安装Docker CE for Windows之后VirtualBox无法启动虚拟机的问题
- 使用命令安装gcc/g++, gdb, vim 以及配置