Connecting squirrel-sql to hbase-1.1.3 on Windows through phoenix-4.7.0 (Part 3)


Working with phoenix-4.7.0 from IntelliJ IDEA 15.0.2


Project directory (screenshot)

Project execution result (screenshot)

SQuirreL SQL client (screenshot)

HBase web UI (screenshot)



Config

package phoenix;

import com.typesafe.config.ConfigFactory;

import java.util.Properties;

/**
 * Created by yuhui on 2016/1/15.
 */
class Config {

    final static com.typesafe.config.Config conf = ConfigFactory.load();

    // Builds the connection-pool properties from application.conf. The key names
    // (url, driverClassName, initialSize, ...) are the ones commons-dbcp's
    // BasicDataSourceFactory expects. The userId parameter is not used yet.
    static Properties hbaseTenantDbConf(long userId) {
        Properties properties = new Properties();
        properties.setProperty("url", conf.getString("Engine.Hbase.url"));
        properties.setProperty("driverClassName", conf.getString("jdbc.driverClassName"));
        properties.setProperty("initialSize", conf.getString("dataSource.initialSize"));
        properties.setProperty("minIdle", conf.getString("dataSource.minIdle"));
        properties.setProperty("maxIdle", conf.getString("dataSource.maxIdle"));
        properties.setProperty("maxWait", conf.getString("dataSource.maxWait"));
        properties.setProperty("maxActive", conf.getString("dataSource.maxActive"));
        properties.setProperty("removeAbandoned", conf.getString("dataSource.removeAbandoned"));
        properties.setProperty("removeAbandonedTimeout", conf.getString("dataSource.removeAbandonedTimeout"));
        return properties;
    }
}

Hbase

package phoenix;

import com.typesafe.config.ConfigFactory;
import io.vertx.core.json.JsonObject;

import java.sql.*;
import java.util.List;
import java.util.Properties;

/**
 * Created by yuhui on 2016/5/3.
 */
public class Hbase {

    final static com.typesafe.config.Config conf = ConfigFactory.load();

    Connection conn;
    Statement ps = null;

    public Hbase(Properties properties) {
        try {
            conn = HbaseConnectionSource.getConnection(properties);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Handles UPSERT, DELETE, CREATE TABLE and DROP TABLE statements.
    public void update(String sql) {
        try {
            ps = conn.createStatement();
            ps.executeUpdate(sql);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            closeStatement();
        }
    }

    // Runs a SELECT and returns the rows as Vert.x JsonObjects.
    public List<JsonObject> query(String sql) {
        ResultSet rs = null;
        try {
            ps = conn.createStatement();
            rs = ps.executeQuery(sql);
            return Utils.asList(rs).getRows();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            closeStatement();
        }
        return null;
    }

    // Close only the statement after each operation, so the pooled connection stays usable.
    private void closeStatement() {
        try {
            if (ps != null) {
                ps.close();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Return the connection to the pool when the caller is done.
    public void close() {
        try {
            conn.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) throws Exception {
        Hbase h = new Hbase(Config.hbaseTenantDbConf(1));

        // Insert, delete, update, create table and drop table all go through update();
        // an "update" in Phoenix is an UPSERT keyed on the primary key.
//        h.update("CREATE TABLE USERINFO( TIMESTAMP BIGINT not null primary key, DATE VARCHAR , UUID VARCHAR , EVENT integer, USEDURATION integer , APPKEY VARCHAR )");
//        h.update("UPSERT INTO USERINFO VALUES('4140309326' ,'1460404549000', '1460404549000' ,'2', '1' , '2000' , 'xinlan')");
//        h.update("UPSERT INTO USERINFO VALUES('4140309326' ,'222222222', '33333333' ,'2', '1' , '1501' , 'baidu')");
//        h.update("DELETE from USERINFO WHERE UUID='4140309326'");
//        h.update("DROP TABLE  USERINFO");

        // Queries go through query().
        List<JsonObject> rows = h.query("SELECT * from USERINFO");
        for (JsonObject row : rows) {
            System.out.println(row.toString());
        }
        h.close();
    }
}
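
For comparison, the same table can also be reached without the DBCP pool, through plain Phoenix JDBC. This is only a hedged sketch, assuming the jdbc:phoenix:localhost:2181 URL from application.conf; the class name is illustrative and not part of the project:

package phoenix;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Illustrative only: pool-free Phoenix access via plain JDBC.
public class PhoenixPlainJdbcDemo {
    public static void main(String[] args) throws Exception {
        // With phoenix-core on the classpath the Phoenix driver is normally auto-registered;
        // if not, call Class.forName("org.apache.phoenix.jdbc.PhoenixDriver") first.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost:2181");
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("SELECT * FROM USERINFO")) {
            while (rs.next()) {
                // print the first column of every row
                System.out.println(rs.getObject(1));
            }
        }
    }
}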


HbaseConnectionSource

package phoenix;

import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.dbcp.BasicDataSourceFactory;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;

/**
 * Created by yuhui on 2016/4/29.
 */
public class HbaseConnectionSource {

    private static BasicDataSource dataSource = null;

    // (Re)creates the DBCP pool from the given properties, closing any existing pool first.
    public static void init(Properties properties) {
        if (dataSource != null) {
            try {
                dataSource.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
            dataSource = null;
        }
        try {
            dataSource = (BasicDataSource) BasicDataSourceFactory.createDataSource(properties);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Lazily initializes the pool and hands out a pooled connection.
    public static synchronized Connection getConnection(Properties p) throws SQLException {
        if (dataSource == null) {
            init(p);
        }
        Connection con = null;
        if (dataSource != null) {
            con = dataSource.getConnection();
        }
        return con;
    }
}
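
BasicDataSourceFactory.createDataSource(properties) maps those property names onto the corresponding BasicDataSource setters. The sketch below shows the programmatic equivalent, with values copied from application.conf; the class name is illustrative only and not part of the project:

package phoenix;

import org.apache.commons.dbcp.BasicDataSource;

// Illustrative only: configuring the DBCP pool in code instead of via Properties.
public class PooledDataSourceSketch {
    public static BasicDataSource build() {
        BasicDataSource ds = new BasicDataSource();
        ds.setDriverClassName("org.apache.phoenix.jdbc.PhoenixDriver");
        ds.setUrl("jdbc:phoenix:localhost:2181");
        ds.setInitialSize(10);              // dataSource.initialSize
        ds.setMinIdle(5);                   // dataSource.minIdle
        ds.setMaxIdle(20);                  // dataSource.maxIdle
        ds.setMaxActive(50);                // dataSource.maxActive
        ds.setMaxWait(1000);                // dataSource.maxWait, in milliseconds
        ds.setRemoveAbandoned(true);        // reclaim abandoned connections
        ds.setRemoveAbandonedTimeout(180);  // in seconds
        return ds;
    }
}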


Utils

package phoenix;

import io.vertx.core.json.JsonArray;

import java.math.BigDecimal;
import java.sql.*;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.List;

import static java.time.format.DateTimeFormatter.ISO_OFFSET_DATE_TIME;

/**
 * Created by yuhui on 2016/5/5.
 */
public class Utils {

    // Converts a JDBC ResultSet into a Vert.x ResultSet (column labels plus JSON rows).
    public static io.vertx.ext.sql.ResultSet asList(ResultSet rs) throws SQLException {
        List<String> columnNames = new ArrayList<>();
        ResultSetMetaData metaData = rs.getMetaData();
        int cols = metaData.getColumnCount();
        for (int i = 1; i <= cols; i++) {
            columnNames.add(metaData.getColumnLabel(i));
        }
        List<JsonArray> results = new ArrayList<>();
        while (rs.next()) {
            JsonArray result = new JsonArray();
            for (int i = 1; i <= cols; i++) {
                Object res = convertSqlValue(rs.getObject(i));
                if (res != null) {
                    result.add(res);
                } else {
                    result.addNull();
                }
            }
            results.add(result);
        }
        return new io.vertx.ext.sql.ResultSet(columnNames, results);
    }

    public static Object convertSqlValue(Object value) {
        if (value == null) {
            return null;
        }
        // valid json types are just returned as is
        if (value instanceof Boolean || value instanceof String || value instanceof byte[]) {
            return value;
        }
        // numeric values
        if (value instanceof Number) {
            if (value instanceof BigDecimal) {
                BigDecimal d = (BigDecimal) value;
                if (d.scale() == 0) {
                    return d.toBigInteger();
                } else {
                    // we might lose precision here
                    return d.doubleValue();
                }
            }
            return value;
        }
        // temporal values
        if (value instanceof Date || value instanceof Time || value instanceof Timestamp) {
            return OffsetDateTime.ofInstant(Instant.ofEpochMilli(((java.util.Date) value).getTime()), ZoneOffset.UTC).format(ISO_OFFSET_DATE_TIME);
        }
        // large objects
        if (value instanceof Clob) {
            Clob c = (Clob) value;
            try {
                // result might be truncated due to downcasting to int
                String tmp = c.getSubString(1, (int) c.length());
                c.free();
                return tmp;
            } catch (SQLException e) {
                throw new RuntimeException(e);
            }
        }
        if (value instanceof Blob) {
            Blob b = (Blob) value;
            try {
                // result might be truncated due to downcasting to int
                byte[] tmp = b.getBytes(1, (int) b.length());
                b.free();
                return tmp;
            } catch (SQLException e) {
                throw new RuntimeException(e);
            }
        }
        // arrays
        if (value instanceof Array) {
            Array a = (Array) value;
            try {
                Object[] arr = (Object[]) a.getArray();
                if (arr != null) {
                    JsonArray jsonArray = new JsonArray();
                    for (Object o : arr) {
                        jsonArray.add(convertSqlValue(o));
                    }
                    a.free();
                    return jsonArray;
                }
            } catch (SQLException e) {
                throw new RuntimeException(e);
            }
        }
        // fallback to String
        return value.toString();
    }
}
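
To make the conversion rules concrete, here is a small illustrative sketch (the class name is hypothetical) of what convertSqlValue returns for typical column values:

package phoenix;

import java.math.BigDecimal;
import java.sql.Timestamp;

// Illustrative only: exercising Utils.convertSqlValue with common SQL values.
public class ConvertDemo {
    public static void main(String[] args) {
        System.out.println(Utils.convertSqlValue(new BigDecimal("42")));           // 42  (BigInteger, because scale == 0)
        System.out.println(Utils.convertSqlValue(new BigDecimal("3.14")));         // 3.14 (double, possible precision loss)
        System.out.println(Utils.convertSqlValue(new Timestamp(1460404549000L)));  // ISO-8601 string in UTC
        System.out.println(Utils.convertSqlValue(null));                           // null
    }
}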


application.conf

# Connection pool settings
db.default.poolInitialSize=4
db.default.poolMaxSize=4
db.default.poolConnectionTimeoutMillis=1000
db.default.autoReconnect=true

hbase.zookeeper.quorum="localhost"
hbase.master="localhost:60000"

Engine.Hbase.url="jdbc:phoenix:localhost:2181"
jdbc.driverClassName=org.apache.phoenix.jdbc.PhoenixDriver

# note: Config.hbaseTenantDbConf only reads Engine.Hbase.url, jdbc.driverClassName
# and most of the dataSource.* keys; the remaining entries are kept here for reference.

# initial number of pooled connections
dataSource.initialSize=10
# maximum number of idle connections
dataSource.maxIdle=20
# minimum number of idle connections
dataSource.minIdle=5
# maximum number of active connections
dataSource.maxActive=50
# log an error when an abandoned connection is reclaimed
dataSource.logAbandoned=true
# automatically reclaim abandoned (timed-out) connections
dataSource.removeAbandoned=true
# abandoned-connection timeout, in seconds
dataSource.removeAbandonedTimeout=180
# maximum time to wait for a connection, in milliseconds
dataSource.maxWait=1000
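
ConfigFactory.load() resolves this file from the classpath, so in a standard Maven layout it lives under src/main/resources/application.conf. A small hedged sanity check (the class name is illustrative; the expected values follow from the file above):

package phoenix;

import com.typesafe.config.ConfigFactory;

// Illustrative only: verify that application.conf is on the classpath and the keys resolve.
public class ConfCheck {
    public static void main(String[] args) {
        com.typesafe.config.Config conf = ConfigFactory.load();
        System.out.println(conf.getString("Engine.Hbase.url"));      // jdbc:phoenix:localhost:2181
        System.out.println(conf.getString("jdbc.driverClassName"));  // org.apache.phoenix.jdbc.PhoenixDriver
        System.out.println(conf.getInt("dataSource.maxActive"));     // 50
    }
}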


pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>groupId</groupId>
    <artifactId>LogAnalysis</artifactId>
    <version>1.0</version>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <vertx.version>3.2.1</vertx.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.phoenix</groupId>
            <artifactId>phoenix-core</artifactId>
            <version>4.7.0-HBase-1.1</version>
            <!--<scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>commons-dbcp</groupId>
            <artifactId>commons-dbcp</artifactId>
            <version>1.4</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.38</version>
        </dependency>
        <dependency>
            <groupId>io.vertx</groupId>
            <artifactId>vertx-web</artifactId>
            <version>${vertx.version}</version>
        </dependency>
        <dependency>
            <groupId>io.vertx</groupId>
            <artifactId>vertx-jdbc-client</artifactId>
            <version>${vertx.version}</version>
        </dependency>
        <dependency>
            <groupId>io.vertx</groupId>
            <artifactId>vertx-service-factory</artifactId>
            <version>${vertx.version}</version>
        </dependency>
        <dependency>
            <groupId>io.vertx</groupId>
            <artifactId>vertx-service-proxy</artifactId>
            <version>${vertx.version}</version>
        </dependency>
        <dependency>
            <groupId>io.vertx</groupId>
            <artifactId>vertx-sync</artifactId>
            <version>${vertx.version}</version>
        </dependency>
        <dependency>
            <groupId>it.sauronsoftware.cron4j</groupId>
            <artifactId>cron4j</artifactId>
            <version>2.2.5</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>1.6.0</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.3</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.4</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <transformers>
                                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                    <manifestEntries>
                                        <Main-Class>io.vertx.core.Launcher</Main-Class>
                                        <Main-Verticle>com.donews.loganalysis.LogAnalysisMain</Main-Verticle>
                                    </manifestEntries>
                                </transformer>
                                <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
                                    <resource>META-INF/services/io.vertx.core.spi.VerticleFactory</resource>
                                </transformer>
                            </transformers>
                            <artifactSet>
                            </artifactSet>
                            <outputFile>${project.build.directory}/${project.artifactId}-${project.version}-fat.jar</outputFile>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>

