Calling a Custom Hive UDF from Java


Hive ships with an optional component called HiveServer (HiveServer2), which exposes Hive on a network port (10000 by default) so that remote clients can connect to it, for example over JDBC.

Copy the jar containing the custom UDF to the server where Hive is installed (for example, under /home/hadoop).
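The UDF implementation itself is not shown in the post. For context, here is a minimal sketch of what a class like com.hive.utf.CityUDF could look like, using Hive's classic org.apache.hadoop.hive.ql.exec.UDF base class; the prefix-to-city rule below is invented purely for illustration and is not the author's actual logic.

package com.hive.utf;

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;

// Illustrative sketch only; the real CityUDF implementation is not shown in the post.
public class CityUDF extends UDF {

    // Hive calls evaluate() once per row with the value of the "key" column.
    public Text evaluate(Text key) {
        if (key == null) {
            return null;
        }
        // Hypothetical rule: checkpoint keys starting with "1522301" map to Harbin.
        if (key.toString().startsWith("1522301")) {
            return new Text("哈尔滨");
        }
        return new Text("未知");
    }
}

A class like this is packaged into cityudf.jar and copied to the server as described above.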

With that in place, the JDBC client code follows:

package com.hive.client;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class HiveClientUDF {

    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive2://192.168.150.162:10000/default";
    private static String sql = "";
    private static ResultSet res;

    public static void main(String[] args) throws SQLException, ClassNotFoundException {
        Class.forName(driverName);
        // HiveServer2 listens on port 10000 by default; connect to the default
        // database with the server login user "hadoop" and password "dh!123456".
        Connection conn = DriverManager.getConnection(url, "hadoop", "dh!123456");
        String tableName = "vehicle_pass";
        Statement stmt = conn.createStatement();

        // Add cityudf.jar to Hive's classpath for this session.
        sql = "add jar /home/hadoop/cityudf.jar";
        stmt.execute(sql);

        // Register the UDF as a temporary function named cityudf.
        sql = "create temporary function cityudf as 'com.hive.utf.CityUDF'";
        stmt.execute(sql);

        // Use the custom UDF cityudf in a query.
        sql = "select key, kkNO, cityudf(key) as city from " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);

        System.out.println("Query results:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + ":" + res.getString(3));
        }

        stmt.close();
        conn.close();
    }
}
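Note that ADD JAR and CREATE TEMPORARY FUNCTION are scoped to the current session, so every new JDBC connection has to repeat them. Since Hive 0.13 a UDF can instead be registered once as a permanent function with CREATE FUNCTION ... USING JAR, provided the jar has been uploaded to HDFS. The sketch below shows that one-time registration under the same connection settings as above; the HDFS path is hypothetical.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// One-time registration of the UDF as a permanent function (Hive 0.13+).
// The HDFS location of the jar is hypothetical.
public class RegisterCityUdfOnce {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://192.168.150.162:10000/default", "hadoop", "dh!123456");
             Statement stmt = conn.createStatement()) {
            stmt.execute("create function cityudf as 'com.hive.utf.CityUDF' "
                    + "using jar 'hdfs:///user/hadoop/cityudf.jar'");
        }
    }
}

After that, any session can call cityudf() directly without the ADD JAR / CREATE TEMPORARY FUNCTION steps.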
Output:

1522301_20150417103242404_黑A01BCI:哈尔滨
1522301_20150417103242494_黑A013AA:哈尔滨
1522301_20150417103242614_黑A01GHI:哈尔滨
1522301_20150417103242616_黑A01GLJ:哈尔滨
1522301_20150417103242617_黑A01E5G:哈尔滨
1522301_20150417103242623_黑A01HDK:哈尔滨
1522301_20150417103242625_黑A018MM:哈尔滨
1522301_20150417103242758_黑A015KD:哈尔滨

Original post: http://blog.csdn.net/allen_oscar/article/details/45146913
