Hive JDBC Connection

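The snippet below connects to HiveServer2 through its JDBC driver (org.apache.hive.jdbc.HiveDriver), runs a simple SELECT, and prints the first two columns of every row. The connection URL follows the jdbc:hive2://host:port/database format, here pointing at 10.200.42.1:10000 and the default database, with "hadoop"/"hadoop" as the username and password.
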
package com.mycompany.app.my_simple_app;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class HiveTest {

    private static final Logger log = LoggerFactory.getLogger(HiveTest.class);

    private static void getQueryLogFromHive() throws Exception {
        String sql = "select * from wx_recmdresult_1 limit 10";

        Connection conn = null;
        Statement stmt = null;
        ResultSet res = null;
        try {
            // Register the HiveServer2 JDBC driver.
            Class.forName("org.apache.hive.jdbc.HiveDriver");

            log.info("connecting to HiveServer2...");
            conn = DriverManager.getConnection(
                    "jdbc:hive2://10.200.42.1:10000/default", "hadoop", "hadoop");
            stmt = conn.createStatement();

            log.info("executing query: {}", sql);
            res = stmt.executeQuery(sql);

            // Print the first two columns of each row.
            while (res.next()) {
                String name = res.getString(1);
                String dt = res.getString(2);
                System.out.println("name:" + name);
                System.out.println("dt:" + dt);
            }

            log.info("query executed successfully");
        } finally {
            // Close resources in reverse order of creation.
            if (res != null) {
                res.close();
            }
            if (stmt != null) {
                stmt.close();
            }
            if (conn != null) {
                conn.close();
            }
        }
    }

    public static void main(String[] args) throws Exception {
        getQueryLogFromHive();
    }
}
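
Since Java 7, the same flow can be written with try-with-resources, which closes the ResultSet, Statement, and Connection automatically even if an exception is thrown. With a JDBC 4 driver the explicit Class.forName call is also usually unnecessary, since the driver registers itself. Below is a minimal sketch, assuming the same HiveServer2 host, table, and credentials as above:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveTestTryWithResources {

    public static void main(String[] args) throws Exception {
        // Same query and endpoint as the example above (assumed, adjust as needed).
        String sql = "select * from wx_recmdresult_1 limit 10";

        // Resources declared here are closed automatically, in reverse order.
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:hive2://10.200.42.1:10000/default", "hadoop", "hadoop");
             Statement stmt = conn.createStatement();
             ResultSet res = stmt.executeQuery(sql)) {
            while (res.next()) {
                System.out.println("name:" + res.getString(1));
                System.out.println("dt:" + res.getString(2));
            }
        }
    }
}

To compile and run either version, the HiveServer2 JDBC driver must be on the classpath; in a Maven build this is typically the org.apache.hive:hive-jdbc artifact.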