squirrel-sql通过phoenix-4.7.0连接windows中的hbase-1.1.3(三)
来源:互联网 发布:淘宝低价包邮怎么赚钱 编辑:程序博客网 时间:2024/05/21 11:01
Intellij IDEA 15.0.2 操作 phoenix-4.7.0
项目目录
项目执行结果
SQuirreL SQL 客户端
Hbase网页界面
Config
package phoenix;

import com.typesafe.config.ConfigFactory;

import java.util.Properties;

/**
 * Builds the DBCP connection-pool {@link Properties} for the Phoenix/HBase
 * data source from the application's Typesafe Config (application.conf).
 *
 * Created by yuhui on 2016/1/15.
 */
class Config {

    // Loaded once at class init; reads application.conf from the classpath.
    final static com.typesafe.config.Config conf = ConfigFactory.load();

    /**
     * Assembles the property set consumed by
     * {@code BasicDataSourceFactory.createDataSource(Properties)}.
     *
     * @param userId tenant id — currently unused; kept for interface
     *               compatibility (presumably intended to select a
     *               per-tenant configuration — TODO confirm)
     * @return DBCP-compatible pool properties
     */
    static Properties hbaseTenantDbConf(long userId) {
        Properties properties = new Properties();
        properties.setProperty("url", conf.getString("Engine.Hbase.url"));
        properties.setProperty("driverClassName", conf.getString("jdbc.driverClassName"));
        properties.setProperty("initialSize", conf.getString("dataSource.initialSize"));
        properties.setProperty("minIdle", conf.getString("dataSource.minIdle"));
        properties.setProperty("maxIdle", conf.getString("dataSource.maxIdle"));
        properties.setProperty("maxWait", conf.getString("dataSource.maxWait"));
        properties.setProperty("maxActive", conf.getString("dataSource.maxActive"));
        properties.setProperty("removeAbandoned", conf.getString("dataSource.removeAbandoned"));
        properties.setProperty("removeAbandonedTimeout", conf.getString("dataSource.removeAbandonedTimeout"));
        // Fix: logAbandoned is declared in application.conf but was never
        // forwarded to the pool, so abandoned-connection stack traces were lost.
        properties.setProperty("logAbandoned", conf.getString("dataSource.logAbandoned"));
        return properties;
    }
}
Hbase
package phoenix;

import com.typesafe.config.ConfigFactory;
import io.vertx.core.json.JsonObject;

import java.sql.*;
import java.util.List;
import java.util.Properties;

/**
 * Thin JDBC facade over Phoenix/HBase: DDL/DML via {@link #update(String)},
 * SELECTs via {@link #query(String)} returning rows as Vert.x JsonObjects.
 *
 * Bug fix vs. the original: update()/query() used to close the shared
 * Connection in their finally blocks, so any second call on the same Hbase
 * instance failed on a closed connection, and the per-call Statement and
 * ResultSet were never closed. Now only the per-call JDBC resources are
 * closed; the connection is released explicitly via {@link #close()}.
 *
 * Created by yuhui on 2016/5/3.
 */
public class Hbase {

    final static com.typesafe.config.Config conf = ConfigFactory.load();

    Connection conn;
    Statement ps = null; // retained for source compatibility; no longer used internally

    /**
     * Opens a pooled connection using the supplied DBCP properties.
     * On failure the error is logged and {@code conn} stays null.
     */
    public Hbase(Properties properties) {
        try {
            conn = HbaseConnectionSource.getConnection(properties);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Executes UPSERT / DELETE / CREATE TABLE / DROP TABLE statements.
     * Errors are logged and swallowed (best-effort, as before).
     */
    public void update(String sql) {
        // try-with-resources closes the Statement even on error;
        // the Connection stays open for further calls.
        try (Statement st = conn.createStatement()) {
            st.executeUpdate(sql);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Runs a SELECT and converts the result set into a list of JsonObjects.
     *
     * @return the rows, or null if the query failed (logged)
     */
    public List<JsonObject> query(String sql) {
        try (Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery(sql)) {
            return Utils.asList(rs).getRows();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Releases the underlying connection back to the pool. */
    public void close() {
        try {
            conn.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) throws Exception {
        Hbase h = new Hbase(Config.hbaseTenantDbConf(1));
        // 增、删、改、建表、删除表都用update就可以,改的话就是更具主键改。
//        h.update("CREATE TABLE USERINFO( TIMESTAMP BIGINT not null primary key, DATE VARCHAR , UUID VARCHAR , EVENT integer, USEDURATION integer , APPKEY VARCHAR )");
//        h.update("UPSERT INTO USERINFO VALUES('4140309326' ,'1460404549000', '1460404549000' ,'2', '1' , '2000' , 'xinlan')");
//        h.update("UPSERT INTO USERINFO VALUES('4140309326' ,'222222222', '33333333' ,'2', '1' , '1501' , 'baidu')");
//        h.update("DELETE from USERINFO WHERE UUID='4140309326'");
//        h.update("DROP TABLE USERINFO");
        // 查询使用query方法
        List<JsonObject> rows = h.query("SELECT * from USERINFO");
        for (JsonObject row : rows) {
            System.out.println(row.toString());
        }
        h.close(); // release the connection now that update/query no longer auto-close it
    }
}
HbaseConnectionSource
package phoenix;

import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.dbcp.BasicDataSourceFactory;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;

/**
 * Lazily-initialized singleton DBCP pool for Phoenix/HBase connections.
 *
 * Created by yuhui on 2016/4/29.
 */
public class HbaseConnectionSource {

    // Shared pool; null until the first getConnection()/init() call.
    private static BasicDataSource dataSource = null;

    /**
     * (Re)creates the pool from the given properties, closing any existing
     * pool first. Creation failures are logged and leave {@code dataSource}
     * null, in which case {@link #getConnection(Properties)} returns null.
     *
     * Fix: now synchronized — the original mutated the shared field without
     * a lock while getConnection() was synchronized, so a direct init() call
     * could race with getConnection().
     */
    public static synchronized void init(Properties properties) {
        if (dataSource != null) {
            try {
                dataSource.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
            dataSource = null;
        }
        try {
            dataSource = (BasicDataSource) BasicDataSourceFactory.createDataSource(properties);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Borrows a connection from the pool, creating the pool on first use.
     *
     * @return a pooled connection, or null if pool creation failed
     * @throws SQLException if borrowing from an initialized pool fails
     */
    public static synchronized Connection getConnection(Properties p) throws SQLException {
        if (dataSource == null) {
            init(p);
        }
        Connection con = null;
        if (dataSource != null) {
            con = dataSource.getConnection();
        }
        return con;
    }
}
Utils
package phoenix;

import io.vertx.core.json.JsonArray;

import java.math.BigDecimal;
import java.sql.*;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.List;

import static java.time.format.DateTimeFormatter.ISO_OFFSET_DATE_TIME;

/**
 * Converts a JDBC {@link ResultSet} into a Vert.x SQL ResultSet, mapping
 * SQL values onto JSON-compatible Java types.
 *
 * Created by yuhui on 2016/5/5.
 */
public class Utils {

    /**
     * Drains the whole result set into a Vert.x ResultSet.
     * The caller remains responsible for closing {@code rs}.
     *
     * @throws SQLException if reading metadata or rows fails
     */
    public static io.vertx.ext.sql.ResultSet asList(ResultSet rs) throws SQLException {
        List<String> columnNames = new ArrayList<>();
        ResultSetMetaData metaData = rs.getMetaData();
        int cols = metaData.getColumnCount();
        for (int i = 1; i <= cols; i++) {
            columnNames.add(metaData.getColumnLabel(i));
        }

        List<JsonArray> results = new ArrayList<>();
        while (rs.next()) {
            JsonArray result = new JsonArray();
            for (int i = 1; i <= cols; i++) {
                Object res = convertSqlValue(rs.getObject(i));
                if (res != null) {
                    result.add(res);
                } else {
                    result.addNull();
                }
            }
            results.add(result);
        }

        return new io.vertx.ext.sql.ResultSet(columnNames, results);
    }

    /**
     * Maps one SQL value to a JSON-friendly type: booleans/strings/byte[]
     * pass through, BigDecimal collapses to BigInteger or double, temporal
     * types become ISO-8601 UTC strings, Clob/Blob are materialized, arrays
     * recurse, everything else falls back to toString().
     *
     * Fix vs. the original: Clob/Blob/Array.free() is now called in a
     * finally block, so the server-side resource is released even when the
     * read throws (and for Array, even when getArray() returns null).
     */
    public static Object convertSqlValue(Object value) {
        if (value == null) {
            return null;
        }

        // valid json types are just returned as is
        if (value instanceof Boolean || value instanceof String || value instanceof byte[]) {
            return value;
        }

        // numeric values
        if (value instanceof Number) {
            if (value instanceof BigDecimal) {
                BigDecimal d = (BigDecimal) value;
                if (d.scale() == 0) {
                    return d.toBigInteger();
                } else {
                    // we might lose precision here
                    return d.doubleValue();
                }
            }
            return value;
        }

        // temporal values — rendered as ISO-8601 offset date-time in UTC
        if (value instanceof Date || value instanceof Time || value instanceof Timestamp) {
            return OffsetDateTime
                    .ofInstant(Instant.ofEpochMilli(((java.util.Date) value).getTime()), ZoneOffset.UTC)
                    .format(ISO_OFFSET_DATE_TIME);
        }

        // large objects
        if (value instanceof Clob) {
            Clob c = (Clob) value;
            try {
                // result might be truncated due to downcasting to int
                return c.getSubString(1, (int) c.length());
            } catch (SQLException e) {
                throw new RuntimeException(e);
            } finally {
                try {
                    c.free();
                } catch (SQLException ignored) {
                    // best-effort release; the value was already read
                }
            }
        }

        if (value instanceof Blob) {
            Blob b = (Blob) value;
            try {
                // result might be truncated due to downcasting to int
                return b.getBytes(1, (int) b.length());
            } catch (SQLException e) {
                throw new RuntimeException(e);
            } finally {
                try {
                    b.free();
                } catch (SQLException ignored) {
                    // best-effort release; the value was already read
                }
            }
        }

        // arrays
        if (value instanceof Array) {
            Array a = (Array) value;
            try {
                Object[] arr = (Object[]) a.getArray();
                if (arr != null) {
                    JsonArray jsonArray = new JsonArray();
                    for (Object o : arr) {
                        jsonArray.add(convertSqlValue(o));
                    }
                    return jsonArray;
                }
                // null array contents: fall through to toString() fallback
            } catch (SQLException e) {
                throw new RuntimeException(e);
            } finally {
                try {
                    a.free();
                } catch (SQLException ignored) {
                    // best-effort release
                }
            }
        }

        // fallback to String
        return value.toString();
    }
}
application.conf
# Connection Pool settings
db.default.poolInitialSize=4
db.default.poolMaxSize=4
db.default.poolConnectionTimeoutMillis=1000
db.default.autoReconnect=true
hbase.zookeeper.quorum="localhost"
hbase.master="localhost:60000"
Engine.Hbase.url="jdbc:phoenix:localhost:2181"
jdbc.driverClassName=org.apache.phoenix.jdbc.PhoenixDriver
#<!-- 初始化连接 -->
dataSource.initialSize=10
#<!-- 最大空闲连接 -->
dataSource.maxIdle=20
#<!-- 最小空闲连接 -->
dataSource.minIdle=5
#最大连接数量
dataSource.maxActive=50
#是否在自动回收超时连接的时候打印连接的超时错误
dataSource.logAbandoned=true
#是否自动回收超时连接
dataSource.removeAbandoned=true
#超时时间(以秒数为单位)
dataSource.removeAbandonedTimeout=180
#<!-- 最大等待时间,以毫秒为单位(当前值 1000 毫秒 = 1 秒) -->
dataSource.maxWait=1000
pom.xml
<?xml version="1.0" encoding="UTF-8"?><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>groupId</groupId> <artifactId>LogAnalysis</artifactId> <version>1.0</version> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <vertix.version>3.2.1</vertix.version> </properties> <dependencies> <dependency> <groupId>org.apache.phoenix</groupId> <artifactId>phoenix-core</artifactId> <version>4.7.0-HBase-1.1</version> <!--<scope>provided</scope>--> </dependency> <dependency> <groupId>commons-dbcp</groupId> <artifactId>commons-dbcp</artifactId> <version>1.4</version> </dependency> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>5.1.38</version> </dependency> <dependency> <groupId>io.vertx</groupId> <artifactId>vertx-web</artifactId> <version>${vertix.version}</version> </dependency> <dependency> <groupId>io.vertx</groupId> <artifactId>vertx-jdbc-client</artifactId> <version>${vertix.version}</version> </dependency> <dependency> <groupId>io.vertx</groupId> <artifactId>vertx-service-factory</artifactId> <version>${vertix.version}</version> </dependency> <dependency> <groupId>io.vertx</groupId> <artifactId>vertx-service-proxy</artifactId> <version>${vertix.version}</version> </dependency> <dependency> <groupId>io.vertx</groupId> <artifactId>vertx-sync</artifactId> <version>${vertix.version}</version> </dependency> <dependency> <groupId>it.sauronsoftware.cron4j</groupId> <artifactId>cron4j</artifactId> <version>2.2.5</version> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-sql_2.11</artifactId> <version>1.6.0</version> </dependency> </dependencies> <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> 
<version>3.3</version> <configuration> <source>1.8</source> <target>1.8</target> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-shade-plugin</artifactId> <version>2.4</version> <executions> <execution> <phase>package</phase> <goals> <goal>shade</goal> </goals> <configuration> <transformers> <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> <manifestEntries> <Main-Class>io.vertx.core.Launcher</Main-Class> <Main-Verticle>com.donews.loganalysis.LogAnalysisMain</Main-Verticle> </manifestEntries> </transformer> <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer"> <resource>META-INF/services/io.vertx.core.spi.VerticleFactory</resource> </transformer> </transformers> <artifactSet> </artifactSet> <outputFile>${project.build.directory}/${project.artifactId}-${project.version}-fat.jar</outputFile> </configuration> </execution> </executions> </plugin> </plugins> </build></project>
0 0
- squirrel-sql通过phoenix-4.7.0连接windows中的hbase-1.1.3(三)
- squirrel-sql通过phoenix-4.7.0连接windows中的hbase-1.1.3(一)
- squirrel-sql通过phoenix-4.7.0连接windows中的hbase-1.1.3(二)
- 安装SQuirrel SQL Client连接Phoenix操作HBase
- Phoenix安装使用及使用 SQuirrel客户端连接操作Hbase
- Phoenix(六)配置使用Squirrel GUI连接Phoenix
- Phoenix ,SQuirrel 与Hbase 搭建
- 用SQuirreL工具连接phoenix
- Phoenix通过sql语句更新操作hbase
- squirrel安装(连接hbase,整合phonenix)
- SQuirrel 连不上 Phoenix Hbase ---> 可能是因为zookeeper 集群中的一个zookeeper 启动有问题
- 使用SQuirreL客户端工具配置连接 Phoenix
- 使用Phoenix连接HBase-0.96.0
- Phoenix Jdbc 连接HBase
- 使用Phoenix连接Hbase
- 使用Phoenix通过sql语句更新操作hbase数据
- 使用Phoenix通过sql语句更新操作hbase数据
- 使用 Phoenix 通过 sql 语句 更新操作 hbase 数据
- 物联网与移动宽带——未来5G的两大着眼点
- Android studio 开发百度地图
- 【数据结构与算法】字典代码示例
- DTS修改
- 关于单片机电路中NPN三极管与PNP三极管的接法问题
- squirrel-sql通过phoenix-4.7.0连接windows中的hbase-1.1.3(三)
- 开源虚拟化管理平台Ovirt简介和配置环境搭建 http://xiaoli110.blog.51cto.com/1724/784874
- linux下mysql忘记密码
- Matlab中plot函数全功能解析
- CentOS安装配置JDK-7
- activity的生命周期管理。
- 判断一个数是否是2的整数次幂
- java 继承与多态练习题目
- runtime的使用