ubuntu14.04+hadoop2.6.2+hive1.1.1
来源:互联网 发布:影视包装培训软件 编辑:程序博客网 时间:2024/05/17 02:38
1.Hive是基于Hadoop构建的一套数据仓库分析系统,它提供了丰富的SQL查询方式来分析存储在Hadoop 分布式文件系统中的数据。其在Hadoop的架构体系中承担了一个SQL解析的过程,它提供了对外的入口来获取用户的指令然后对指令进行分析,解析出一个MapReduce程序组成可执行计划,并按照该计划生成对应的MapReduce任务提交给Hadoop集群处理,获取最终的结果。元数据——如表模式——存储在名为metastore的数据库中。
2.下载http://mirror.bjtu.edu.cn/apache/hive/
解压到/usr/local/hadoop/hive
3.拷贝mysql-connector-java-5.1.6-bin.jar 到hive 的lib下面
4.将hive下的新版本jline的JAR包拷贝到hadoop下:
cp /usr/local/hadoop/hive/lib/jline-2.12.jar /usr/local/hadoop/share/hadoop/yarn/lib 并且移除$HADOOP_HOME/share/hadoop/yarn/lib/下的jline-0.9.94.jar文件
5.gedit ~/.bashrc
#HADOOP VARIABLES START
export JAVA_HOME=/jdk/jdk1.8.0_91
export HADOOP_INSTALL=/usr/local/hadoop
export PATH=$PATH:$HADOOP_INSTALL/bin
export PATH=$PATH:$JAVA_HOME/bin
export PATH=$PATH:$HADOOP_INSTALL/sbin
export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_HOME=$HADOOP_INSTALL
export HADOOP_HDFS_HOME=$HADOOP_INSTALL
export YARN_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_INSTALL/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_INSTALL/lib"
#HADOOP VARIABLES END
#set Hive environment
export HIVE_HOME=/usr/local/hadoop/hive
export PATH=$PATH:$HIVE_HOME/bin
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib
6.gedit /etc/profile
#set java environment
export JAVA_HOME=/jdk/jdk1.8.0_91
export JRE_HOME=$JAVA_HOME/jre
export CLASSPATH=$CLASSPATH:$JAVA_HOME/lib:$JRE_HOME/lib
export PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
#set hive
export HIVE_HOME=/usr/local/hadoop/hive
export PATH=$HIVE_HOME/bin:$PATH
7.重命名配置文件
对conf 目录下的配置文件重命名,执行如下命令:
mv hive-env.sh.template hive-env.sh
mv hive-default.xml.template hive-site.xml
8.在hive-env.sh中
HADOOP_HOME=/usr/local/hadoop
export HIVE_CONF_DIR=/usr/local/hadoop/hive/conf
9.修改hive-site.xml
<property>
<name>hive.exec.scratchdir</name>
<value>/usr/local/hadoop/hive/tmp</value>
<description>HDFS root scratch dir for Hive jobs which gets created with write all (733) permission. For each connecting user, an HDFS scratch dir: ${hive.exec.scratchdir}/&lt;username&gt; is created, with ${hive.scratch.dir.permission}.</description>
</property>
<property>
<name>hive.exec.local.scratchdir</name>
<value>/usr/local/hadoop/hive/tmp</value>
<description>Local scratch space for Hive jobs</description>
</property>
<property>
<name>hive.downloaded.resources.dir</name>
<value>/usr/local/hadoop/hive/tmp</value>
<description>Temporary local directory for added resources in the remote file system.</description>
</property>
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/usr/local/hadoop/hive/warehouse</value>
<description>location of default database for the warehouse</description>
</property>
<property>
<name>hive.metastore.uris</name>
<value>thrift://127.0.0.1:9083</value>
<description>Thrift URI for the remote metastore. Used by metastore client to connect to remote metastore.</description>
</property>
<property>
<name>hive.metastore.local</name>
<value>true</value>
<description></description>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>myhive</value>
<description>password to use against metastore database</description>
</property>
<property>
<name>javax.jdo.option.ConnectionURL</name> <value>jdbc:mysql://localhost:3306/hive?createDatabaseIfNotExist=true&amp;characterEncoding=UTF-8</value>
<description>JDBC connect string for a JDBC metastore</description>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>myhive</value>
<description>Username to use against metastore database</description>
</property>
2.下载http://mirror.bjtu.edu.cn/apache/hive/
解压到/usr/local/hadoop/hive
3.拷贝mysql-connector-java-5.1.6-bin.jar 到hive 的lib下面
4.将hive下的新版本jline的JAR包拷贝到hadoop下:
cp /usr/local/hadoop/hive/lib/jline-2.12.jar /usr/local/hadoop/share/hadoop/yarn/lib 并且移除$HADOOP_HOME/share/hadoop/yarn/lib/下的jline-0.9.94.jar文件
5.gedit ~/.bashrc
#HADOOP VARIABLES START
export JAVA_HOME=/jdk/jdk1.8.0_91
export HADOOP_INSTALL=/usr/local/hadoop
export PATH=$PATH:$HADOOP_INSTALL/bin
export PATH=$PATH:$JAVA_HOME/bin
export PATH=$PATH:$HADOOP_INSTALL/sbin
export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_HOME=$HADOOP_INSTALL
export HADOOP_HDFS_HOME=$HADOOP_INSTALL
export YARN_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_INSTALL/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_INSTALL/lib"
#HADOOP VARIABLES END
#set Hive environment
export HIVE_HOME=/usr/local/hadoop/hive
export PATH=$PATH:$HIVE_HOME/bin
export CLASSPATH=$CLASSPATH:$HIVE_HOME/lib
6.gedit /etc/profile
#set java environment
export JAVA_HOME=/jdk/jdk1.8.0_91
export JRE_HOME=$JAVA_HOME/jre
export CLASSPATH=$CLASSPATH:$JAVA_HOME/lib:$JRE_HOME/lib
export PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
#set hive
export HIVE_HOME=/usr/local/hadoop/hive
export PATH=$HIVE_HOME/bin:$PATH
7.重命名配置文件
对conf 目录下的配置文件重命名,执行如下命令:
mv hive-env.sh.template hive-env.sh
mv hive-default.xml.template hive-site.xml
8.在hive-env.sh中
HADOOP_HOME=/usr/local/hadoop
export HIVE_CONF_DIR=/usr/local/hadoop/hive/conf
9.修改hive-site.xml
<property>
<name>hive.exec.scratchdir</name>
<value>/usr/local/hadoop/hive/tmp</value>
<description>HDFS root scratch dir for Hive jobs which gets created with write all (733) permission. For each connecting user, an HDFS scratch dir: ${hive.exec.scratchdir}/&lt;username&gt; is created, with ${hive.scratch.dir.permission}.</description>
</property>
<property>
<name>hive.exec.local.scratchdir</name>
<value>/usr/local/hadoop/hive/tmp</value>
<description>Local scratch space for Hive jobs</description>
</property>
<property>
<name>hive.downloaded.resources.dir</name>
<value>/usr/local/hadoop/hive/tmp</value>
<description>Temporary local directory for added resources in the remote file system.</description>
</property>
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/usr/local/hadoop/hive/warehouse</value>
<description>location of default database for the warehouse</description>
</property>
<property>
<name>hive.metastore.uris</name>
<value>thrift://127.0.0.1:9083</value>
<description>Thrift URI for the remote metastore. Used by metastore client to connect to remote metastore.</description>
</property>
<property>
<name>hive.metastore.local</name>
<value>true</value>
<description></description>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>myhive</value>
<description>password to use against metastore database</description>
</property>
<property>
<name>javax.jdo.option.ConnectionURL</name> <value>jdbc:mysql://localhost:3306/hive?createDatabaseIfNotExist=true&amp;characterEncoding=UTF-8</value>
<description>JDBC connect string for a JDBC metastore</description>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>myhive</value>
<description>Username to use against metastore database</description>
</property>
0 0
- ubuntu14.04+hadoop2.6.2+hive1.1.1
- ubuntu14.04下Hadoop2.6.0+Hive1.1.1安装
- ubuntu14.04配置Hive1.2.1
- CentOS6.9+Hadoop2.7.3+Hive1.2.1+Hbase1.3.1+Spark2.1.1
- ubuntu16.04+hadoop2.7.2+hive1.2.1 server2通过jdbc连接
- hadoop2.6 和hive1.2
- ubuntu14.04 安装 hadoop2.4.0
- ubuntu14.04安装hadoop2.6.03
- ubuntu14.04安装hadoop2.7.1
- Hadoop2.6.4、zookeeper3.4.6、HBase1.2.2、Hive1.2.1、sqoop1.99.7、spark1.6.2安装
- Hive1.1.0+Hadoop2.6.0启动异常
- hive安装 (hive1.2.1+hadoop2.7+mysql)
- Hadoop2.6.4分布式下安装 hive1.2.1
- ubuntu14.04下hadoop2.2.0集群安装
- ubuntu14.04 搭建hadoop2.5.1环境
- Ubuntu14.04安装Hadoop2.5.2(单机模式)
- 64位Ubuntu14.04编译hadoop2.6
- Ubuntu14.04下Hadoop2.6源码编译
- 32位与64位下各类型长度对比
- Linux syslog机制
- 【图形学与游戏编程】开发笔记-基础篇2:DX11初始化
- 南歌子.记2016年仲夏独上平江石牛寨过玻璃天桥
- POJ 3468 A Simple Problem with Integers 线段树区间更新 纯模板题
- ubuntu14.04+hadoop2.6.2+hive1.1.1
- java-基础-强、软、弱、虚引用
- gcc常用命令
- Java变量
- data table使用
- 去掉chrome记住密码后自动填充表单的黄色背景
- HDU 2191悼念512汶川大地震遇难同胞——珍惜现在,感恩生活
- HDU 5015 233 Matrix 【矩阵快速幂】
- javaweb学习总结(三十四)——使用JDBC处理MySQL大数据