Accessing Hive from Java in Hadoop
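First, point HADOOP_HOME, JAVA_HOME and HIVE_HOME at your installation and put the Hive, Hadoop and supporting jars on the CLASSPATH so the JDBC client below can be compiled and run. The paths and jar versions shown match Hadoop 1.0.3 / Hive 0.8.1; adjust them to your environment.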

export HADOOP_HOME=/opt/hadoop/hadoop-1.0.3
export JAVA_HOME=/opt/hadoop/jdk1.7.0_06
export HIVE_HOME=/opt/hadoop/hive-0.8.1
export PATH=$PATH:$HADOOP_HOME/bin:$JAVA_HOME/bin:$HIVE_HOME/bin
export CLASSPATH=
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/mysql-connector-java-5.1.18-bin.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/hive-exec-0.8.1.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/hive-jdbc-0.8.1.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/hive-metastore-0.8.1.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/hive-service-0.8.1.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/libfb303.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/log4j-1.2.16.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/slf4j-log4j12-1.6.1.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/slf4j-api-1.6.1.jar
export CLASSPATH=$CLASSPATH:$HADOOP_HOME/lib/commons-configuration-1.6.jar
export CLASSPATH=$CLASSPATH:$HADOOP_HOME/lib/commons-logging-1.1.1.jar
export CLASSPATH=$CLASSPATH:$HADOOP_HOME/lib/commons-io-2.1.jar
export CLASSPATH=$CLASSPATH:$HADOOP_HOME/hadoop-core-1.0.3.jar

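Before running the client, start the Hive Thrift server (HiveServer1 in Hive 0.8.1), which by default listens on port 10000:

hive --service hiveserver &

The following JDBC client connects to that server, lists the table, describes it, and runs a couple of queries against it: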

import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;


public class HiveJdbcClient {
  private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

  /**
   * @param args
   * @throws SQLException
   */
  public static void main(String[] args) throws SQLException {
    try {
      Class.forName(driverName);
    } catch (ClassNotFoundException e) {
      // the Hive JDBC driver jar is missing from the classpath
      e.printStackTrace();
      System.exit(1);
    }
    Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();
    String tableName = "ghh";
    //stmt.executeQuery("drop table " + tableName);
    //ResultSet res = stmt.executeQuery("create table " + tableName + " (key int, value string)");
    ResultSet res;

    // show tables
    String sql = "show tables '" + tableName + "'";
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);
    if (res.next()) {
      System.out.println(res.getString(1));
    }

    // describe table
    sql = "describe " + tableName;
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);
    while (res.next()) {
      System.out.println(res.getString(1) + "\t" + res.getString(2));
    }

    // load data into table
    // NOTE: filepath has to be local to the hive server
    // NOTE: /tmp/a.txt is a ctrl-A separated file with two fields per line
    //String filepath = "/tmp/a.txt";
    //sql = "load data local inpath '" + filepath + "' into table " + tableName;
    //System.out.println("Running: " + sql);
    //res = stmt.executeQuery(sql);

    // select * query
    sql = "select * from " + tableName;
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);
    while (res.next()) {
      System.out.println(String.valueOf(res.getInt(1)) + "\t" + res.getString(2));
    }

    // regular hive query (runs a MapReduce job)
    sql = "select count(1) from " + tableName;
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);
    while (res.next()) {
      System.out.println(res.getString(1));
    }

    con.close();
  }
}
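With the CLASSPATH exports above in effect, a minimal sketch of compiling and running the client (assuming HiveJdbcClient.java is in the current directory):

javac HiveJdbcClient.java
java -cp .:$CLASSPATH HiveJdbcClient

The example assumes the table ghh already exists; the commented-out create table line shows how to create it. If you also want to try the commented-out load-data step, the notes in the code expect a ctrl-A separated file such as /tmp/a.txt, which could be produced with, for example:

echo -e '1\x01foo' > /tmp/a.txt
echo -e '2\x01bar' >> /tmp/a.txt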
