hadoop中用java访问hive
来源:互联网 发布:美工需要的电脑配置 编辑:程序博客网 时间:2024/05/21 17:45
# Environment setup for compiling/running the Hive JDBC client below.
# NOTE(review): $HADOOP_HOME is referenced in PATH/CLASSPATH but never exported
# here — presumably set elsewhere (e.g. /etc/profile); confirm before use.
export JAVA_HOME=/opt/hadoop/jdk1.7.0_06
export HIVE_HOME=/opt/hadoop/hive-0.8.1
export PATH=$PATH:$HADOOP_HOME/bin:$JAVA_HOME/bin:$HIVE_HOME/bin

# Reset CLASSPATH, then append each required jar.
export CLASSPATH=
# FIX: original read "expor tCLASSPATH=..." — the mangled keyword made the shell
# fail with "expor: command not found", so the MySQL connector jar (needed by the
# Hive metastore) was never added to the classpath.
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/mysql-connector-java-5.1.18-bin.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/hive-exec-0.8.1.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/hive-jdbc-0.8.1.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/hive-metastore-0.8.1.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/hive-service-0.8.1.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/libfb303.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/log4j-1.2.16.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/slf4j-log4j12-1.6.1.jar
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib/slf4j-api-1.6.1.jar
export CLASSPATH=$CLASSPATH:$HADOOP_HOME/lib/commons-configuration-1.6.jar
export CLASSPATH=$CLASSPATH:$HADOOP_HOME/lib/commons-io-2.1.jar
export CLASSPATH=$CLASSPATH:$HADOOP_HOME/hadoop-core-1.0.3.jar
import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;
/**
 * Minimal JDBC client for Apache Hive (HiveServer1, hive-jdbc 0.8.x).
 *
 * <p>Connects to a local HiveServer on port 10000 and runs, against table
 * {@code ghh}: SHOW TABLES, DESCRIBE, SELECT *, and SELECT COUNT(1),
 * printing each statement and its result rows to stdout.
 */
public class HiveJdbcClient {
  // HiveServer1 driver class; HiveServer2 would use "org.apache.hive.jdbc.HiveDriver".
  private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

  /**
   * Entry point.
   *
   * @param args unused
   * @throws SQLException if connecting to Hive or executing any query fails
   */
  public static void main(String[] args) throws SQLException {
    try {
      Class.forName(driverName);
    } catch (ClassNotFoundException e) {
      // Driver jar missing from the classpath — nothing else can work, so exit.
      e.printStackTrace();
      System.exit(1);
    }
    // FIX: the original never closed Connection/Statement/ResultSet (resource
    // leak). JDK 7 (see env setup) supports try-with-resources, which closes
    // them even when a query throws.
    try (Connection con =
            DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
        Statement stmt = con.createStatement()) {
      String tableName = "ghh";

      // show tables
      String sql = "show tables '" + tableName + "'";
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        if (res.next()) {
          System.out.println(res.getString(1));
        }
      }

      // describe table
      sql = "describe " + tableName;
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        while (res.next()) {
          System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
      }

      // load data into table
      // NOTE: filepath has to be local to the hive server
      // NOTE: /tmp/a.txt is a ctrl-A separated file with two fields per line
      //String filepath = "/tmp/a.txt";
      //sql = "load data local inpath '" + filepath + "' into table " + tableName;
      //System.out.println("Running: " + sql);
      //res = stmt.executeQuery(sql);

      // select * query — assumes column 1 is int, column 2 is string
      // (matches the commented-out "create table ... (key int, value string)").
      sql = "select * from " + tableName;
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        while (res.next()) {
          System.out.println(String.valueOf(res.getInt(1)) + "\t" + res.getString(2));
        }
      }

      // regular hive query (launches a MapReduce job on the server side)
      sql = "select count(1) from " + tableName;
      System.out.println("Running: " + sql);
      try (ResultSet res = stmt.executeQuery(sql)) {
        while (res.next()) {
          System.out.println(res.getString(1));
        }
      }
    }
  }
}
- hadoop中用java访问hive
- hadoop+hive多用户访问支持
- php通过 thrift访问hadoop的hive
- php通过 thrift访问hadoop的hive
- php通过 thrift访问hadoop的hive
- php通过 thrift访问hadoop的hive
- java访问hive的方式
- Java访问Hadoop实践
- 在Java中用Type4驱动访问DB2
- JAVA中用POI组件访问EXCEL文档
- hive中用java实现的udaf做线性回归
- hadoop + hive apache nginx 访问日志分析 (一)
- Hadoop之通过Java Api连接Hive
- linux中用eclipse开发Hadoop mapreduce与Hive UDF实例详解及其配置说明
- Hadoop --> Hive
- Hadoop+Hive
- hadoop-hive
- Hive-命令行基本操作和java API访问hive数据库
- Linux文件删除原理
- DataSet
- 使用ortp windows
- iphone开发 耗资源
- C/C++写日志代码
- hadoop中用java访问hive
- [分享]linux Y480安装显卡驱动经历!
- 转 -- DBA的职责
- classpath与JVM的关系
- Multiple selection button name
- 在LINQ to SQL中使用Translate方法以及修改查询用SQL
- myeclipse自动生成POJO类时一直找不到java src folder--解决方案
- linq to sql 的DataContext的 ExecuteCommand()方法
- Java Web笔记 – Servlet多线程同步问题及其解决方法