使用JDBC访问Hive表中的数据
来源:互联网 发布:一级建造师网络 编辑:程序博客网 时间:2024/05/06 03:33
package hive;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
import java.util.logging.Logger;
public class HiveJdbcClient {
    // NOTE(review): this is the legacy HiveServer1 driver; HiveServer2 uses
    // org.apache.hive.jdbc.HiveDriver with the jdbc:hive2:// URL scheme.
    private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive://hadoop2:10000/hive_metadata";
    private static String user = "hive";
    private static String password = "hive";
    private static final Logger log = Logger.getLogger(HiveJdbcClient.class.getName());

    /**
     * Demo client: connects to Hive over JDBC, (re)creates a test table,
     * loads a local data file into it, then runs "show tables", "describe",
     * "select *" and a count query, printing results to stdout.
     * Exits with status 1 if the driver is missing or a SQL error occurs.
     */
    public static void main(String[] args) {
        try {
            Class.forName(driverName);
            // try-with-resources guarantees the connection and statement are
            // closed even when a query throws (the original leaked both on
            // every error path).
            try (Connection conn = DriverManager.getConnection(url, user, password);
                 Statement stmt = conn.createStatement()) {
                // Name of the table created by this demo.
                String tableName = "testHiveDriverTable";

                // Step 1: drop the table if it already exists.
                // DDL must go through execute(): executeQuery() throws when the
                // statement produces no ResultSet (JDBC contract), which is the
                // case for drop/create/load.
                String sql = "drop table if exists " + tableName;
                stmt.execute(sql);

                // Step 2: create the table.
                sql = "create table " + tableName + " (key int, value string) row format delimited fields terminated by '\t'";
                stmt.execute(sql);

                // "show tables" returns a result set, so executeQuery is correct here.
                sql = "show tables '" + tableName + "'";
                System.out.println("Running:" + sql);
                ResultSet res = stmt.executeQuery(sql);
                System.out.println("执行“show tables”运行结果:");
                if (res.next()) {
                    System.out.println(res.getString(1));
                }

                // "describe table": one row per column (name, type).
                sql = "describe " + tableName;
                System.out.println("Running:" + sql);
                res = stmt.executeQuery(sql);
                System.out.println("执行“describe table”运行结果:");
                while (res.next()) {
                    System.out.println(res.getString(1) + "\t" + res.getString(2));
                }

                // "load data into table" is DML with no result set -> execute().
                String filepath = "/home/gaomeng/practice/t_hive2.txt";
                sql = "load data local inpath '" + filepath + "' into table " + tableName;
                System.out.println("Running:" + sql);
                stmt.execute(sql);

                // Full table scan.
                sql = "select * from " + tableName;
                System.out.println("Running:" + sql);
                res = stmt.executeQuery(sql);
                System.out.println("执行“select * query”运行结果:");
                while (res.next()) {
                    System.out.println(res.getInt(1) + "\t" + res.getString(2));
                }

                // Aggregate query (runs as a MapReduce job on classic Hive).
                sql = "select count(1) from " + tableName;
                System.out.println("Running:" + sql);
                res = stmt.executeQuery(sql);
                System.out.println("执行“regular hive query”运行结果:");
                while (res.next()) {
                    System.out.println(res.getString(1));
                }
            }
        } catch (ClassNotFoundException e) {
            // Exception is passed to the logger; no separate printStackTrace().
            log.log(Level.SEVERE, driverName + " not found!", e);
            System.exit(1);
        } catch (SQLException e) {
            log.log(Level.SEVERE, "Connection error!", e);
            System.exit(1);
        }
    }
}
import java.sql.Connection;
public class HiveJdbcClient {
    // NOTE(review): this is the legacy HiveServer1 driver; HiveServer2 uses
    // org.apache.hive.jdbc.HiveDriver with the jdbc:hive2:// URL scheme.
    private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive://hadoop2:10000/hive_metadata";
    private static String user = "hive";
    private static String password = "hive";
    private static final Logger log = Logger.getLogger(HiveJdbcClient.class.getName());

    /**
     * Demo client: connects to Hive over JDBC, (re)creates a test table,
     * loads a local data file into it, then runs "show tables", "describe",
     * "select *" and a count query, printing results to stdout.
     * Exits with status 1 if the driver is missing or a SQL error occurs.
     */
    public static void main(String[] args) {
        try {
            Class.forName(driverName);
            // try-with-resources guarantees the connection and statement are
            // closed even when a query throws (the original leaked both on
            // every error path).
            try (Connection conn = DriverManager.getConnection(url, user, password);
                 Statement stmt = conn.createStatement()) {
                // Name of the table created by this demo.
                String tableName = "testHiveDriverTable";

                // Step 1: drop the table if it already exists.
                // DDL must go through execute(): executeQuery() throws when the
                // statement produces no ResultSet (JDBC contract), which is the
                // case for drop/create/load.
                String sql = "drop table if exists " + tableName;
                stmt.execute(sql);

                // Step 2: create the table.
                sql = "create table " + tableName + " (key int, value string) row format delimited fields terminated by '\t'";
                stmt.execute(sql);

                // "show tables" returns a result set, so executeQuery is correct here.
                sql = "show tables '" + tableName + "'";
                System.out.println("Running:" + sql);
                ResultSet res = stmt.executeQuery(sql);
                System.out.println("执行“show tables”运行结果:");
                if (res.next()) {
                    System.out.println(res.getString(1));
                }

                // "describe table": one row per column (name, type).
                sql = "describe " + tableName;
                System.out.println("Running:" + sql);
                res = stmt.executeQuery(sql);
                System.out.println("执行“describe table”运行结果:");
                while (res.next()) {
                    System.out.println(res.getString(1) + "\t" + res.getString(2));
                }

                // "load data into table" is DML with no result set -> execute().
                String filepath = "/home/gaomeng/practice/t_hive2.txt";
                sql = "load data local inpath '" + filepath + "' into table " + tableName;
                System.out.println("Running:" + sql);
                stmt.execute(sql);

                // Full table scan.
                sql = "select * from " + tableName;
                System.out.println("Running:" + sql);
                res = stmt.executeQuery(sql);
                System.out.println("执行“select * query”运行结果:");
                while (res.next()) {
                    System.out.println(res.getInt(1) + "\t" + res.getString(2));
                }

                // Aggregate query (runs as a MapReduce job on classic Hive).
                sql = "select count(1) from " + tableName;
                System.out.println("Running:" + sql);
                res = stmt.executeQuery(sql);
                System.out.println("执行“regular hive query”运行结果:");
                while (res.next()) {
                    System.out.println(res.getString(1));
                }
            }
        } catch (ClassNotFoundException e) {
            // Exception is passed to the logger; no separate printStackTrace().
            log.log(Level.SEVERE, driverName + " not found!", e);
            System.exit(1);
        } catch (SQLException e) {
            log.log(Level.SEVERE, "Connection error!", e);
            System.exit(1);
        }
    }
}
0 0
- 使用JDBC访问Hive表中的数据
- 使用JDBC访问Hive
- Spark SQL读取Hive数据配置及使用Thrift JDBC/ODBC Server访问Spark SQL
- 使用JDBC进行数据访问
- Hive在Java中的jdbc使用
- 使用hive访问elasticsearch的数据
- 如何使用Java程序通过JDBC访问HDInsight Hive Server
- Hive数据仓库--Java代码JDBC方式访问Hive中表的数据
- 通过JDBC访问hive集群
- 使用JDBC进行数据访问【spring framwork】
- Spring之使用JDBC访问关系数据
- 使用Spring进行JDBC数据访问
- Spring Boot使用JDBC Template访问数据
- jdbc通过hive server2访问hive
- spring中的jdbc访问类JdbcTemplate使用
- Hive使用JDBC连接
- hive jdbc使用
- 1008-Hive访问HBase表数据
- [Java教程]JavaEE框架Bootstrap、HTML5、jQuery、SpringMVC
- 生命周期
- ssh2+注解+filter+cookie实现自动登录功能。
- Android中的Activity启动的方式分析
- 用FTP修改网站服务器内容
- 使用JDBC访问Hive表中的数据
- lichee编译u-boot(Allwinner A80)
- SPFA最短路 建图模板 POJ1511&&POJ3159
- DWR的简单使用
- 【Spring】Spring MVC文件上传--整合bootstrap-fileinput和jQuery-File-Upload
- IPsec VPN知多少--(1)IPsec VPN简介
- C#判断文件的真实格式
- 【Unity Shader】在Asset创建面板中添加新的shader模板
- 澄清P问题、NP问题、NPC问题的概念