sqoop mysql 抽到hive
来源:互联网 发布:淘宝联盟结算第三方 编辑:程序博客网 时间:2024/05/16 01:01
# Default to extracting yesterday's data; if a date argument is passed,
# extract that day's data instead (any format accepted by `date -d`).
dt=$(date -d "1 days ago" +"%Y%m%d")
if [ $# -eq 1 ]; then
  dt=$(date -d "$1" +"%Y%m%d")
fi
echo "$dt"
# Hive target database and MySQL source connection settings.
# NOTE(review): the password is hardcoded in the script and will appear in
# `ps` output of the sqoop command — prefer `--password-file` or an env var.
HIVE_DB_NAME=bdpa
MYSQL_HOST="10.202.12.12"
MYSQL_USER="ss"
MYSQL_PASSWORD="password"
MYSQL_DB="eee"
# MYSQL_URL deliberately contains spaces: it bundles --username/--password
# and is expanded UNQUOTED at the sqoop call site so word-splitting turns
# them into separate arguments. Do not quote it there.
MYSQL_URL="jdbc:mysql://${MYSQL_HOST}:3306/${MYSQL_DB} --username ${MYSQL_USER} --password ${MYSQL_PASSWORD}"
#######################################
# Import one day's rows of a MySQL table straight into the table's Hive
# warehouse partition directory on HDFS.
# Globals:   MYSQL_URL (read), HIVE_DB_NAME (read)
# Arguments: $1 - MySQL/Hive table name
#            $2 - partition day, yyyymmdd
#            $3 - datetime column used to filter rows to that day
#######################################
sqoop_mysql_to_hdfs() {
  local table_name="$1"
  local partition_day="$2"
  local append_field="$3"
  # Earlier cleanup commands kept for reference:
  #hadoop fs -rm -r /user/hive/warehouse/${HIVE_DB_NAME}.db/${table_name}/${partition_day}
  #hadoop fs -rm -r /user/hdfs/${table_name}
  # ${MYSQL_URL} is intentionally unquoted: it embeds --username/--password
  # and relies on word-splitting to pass them as separate sqoop arguments.
  sqoop import -m 1 \
    --connect ${MYSQL_URL} \
    --query "select * from ${table_name} where DATE_FORMAT(${append_field},'%Y%m%d') = '${partition_day}' and \$CONDITIONS" \
    --target-dir "/user/hive/warehouse/${HIVE_DB_NAME}.db/${table_name}/${partition_day}" \
    --fields-terminated-by '\002' \
    --hive-drop-import-delims \
    --null-string '\\N' \
    --null-non-string '\\N'
}
#######################################
# Re-import one day's data for a table and (re)register the Hive
# partition inc_day=${dt} pointing at the imported HDFS directory.
# Globals:   HIVE_DB_NAME (read), dt (read)
# Arguments: $1 - table name
#            $2 - datetime column used to filter rows to day ${dt}
#######################################
sqoop_yesterday_append() {
  local table_name="$1"
  local append_field="$2"
  # Drop the partition first so re-running the same day replaces it
  # instead of stacking duplicate data.
  hive -e "alter table ${HIVE_DB_NAME}.${table_name} drop partition(inc_day='${dt}');"
  sqoop_mysql_to_hdfs "${table_name}" "${dt}" "${append_field}"
  hive -e "alter table ${HIVE_DB_NAME}.${table_name} add partition(inc_day='${dt}') location '/user/hive/warehouse/${HIVE_DB_NAME}.db/${table_name}/${dt}';"
}
# Daily incremental import for each invoice table, filtered on create_tm.
for invoice_table in invoice_path invoice_result; do
  echo "begin sqoop ${invoice_table}"
  sqoop_yesterday_append "${invoice_table}" create_tm
done
# NOTE(review): everything from here to the end of the script is a verbatim
# duplicate of the script above (likely a copy/paste or page-scrape
# artifact). Executing the file runs every hive/sqoop job TWICE — confirm
# and remove the duplicate section.
dt=`date -d "1 days ago" +"%Y%m%d"`
if [ $# -eq 1 ];then
dt=`date -d "$1" +"%Y%m%d"`
fi
echo $dt
# NOTE(review): duplicate of the configuration block earlier in the file;
# reassigns identical values. Harmless on its own, but see the duplication
# note above this section.
HIVE_DB_NAME=bdpa
MYSQL_HOST="10.202.12.12"
MYSQL_USER="ss"
MYSQL_PASSWORD="password"
MYSQL_DB="eee"
# Deliberately contains spaces; expanded unquoted at the sqoop call site.
MYSQL_URL="jdbc:mysql://${MYSQL_HOST}:3306/${MYSQL_DB} --username ${MYSQL_USER} --password ${MYSQL_PASSWORD}"
# NOTE(review): verbatim redefinition of sqoop_mysql_to_hdfs from earlier in
# the file (scrape/paste artifact); this definition silently replaces the
# first one. Imports one day ($2, yyyymmdd) of table $1 filtered on datetime
# column $3 into the Hive warehouse partition directory on HDFS.
function sqoop_mysql_to_hdfs(){
TABLE_NAME="$1"
PATITION_TIME="$2"
APPEND_FIELD="$3"
#hadoop fs -rm -r /user/hive/warehouse/${HIVE_DB_NAME}.db/${TABLE_NAME}/${PATITION_TIME}
#hadoop fs -rm -r /user/hdfs/${TABLE_NAME}
# ${MYSQL_URL} is unquoted on purpose: word-splitting supplies
# --username/--password as separate arguments.
sqoop import -m 1 \
--connect ${MYSQL_URL} \
--query "select * from ${TABLE_NAME} where DATE_FORMAT(${APPEND_FIELD},'%Y%m%d') = '${PATITION_TIME}' and \$CONDITIONS" \
--target-dir "/user/hive/warehouse/${HIVE_DB_NAME}.db/${TABLE_NAME}/${PATITION_TIME}" \
--fields-terminated-by '\002' \
--hive-drop-import-delims \
--null-string '\\N' \
--null-non-string '\\N'
}
# NOTE(review): verbatim redefinition of sqoop_yesterday_append from earlier
# in the file (scrape/paste artifact). Drops, re-imports, and re-adds the
# Hive partition inc_day=${dt} for table $1 filtered on column $2.
function sqoop_yesterday_append(){
TABLE_NAME="$1"
APPEND_FIELD="$2"
# Leftover debug output — remove along with the duplicate section.
echo "aa"
hive -e "alter table ${HIVE_DB_NAME}.${TABLE_NAME} drop partition(inc_day='${dt}');"
sqoop_mysql_to_hdfs "${TABLE_NAME}" "${dt}" "${APPEND_FIELD}"
hive -e "alter table ${HIVE_DB_NAME}.${TABLE_NAME} add partition(inc_day='${dt}') location '/user/hive/warehouse/${HIVE_DB_NAME}.db/${TABLE_NAME}/${dt}';"
}
# NOTE(review): duplicate driver invocations — these re-run both imports a
# second time in the same execution. Remove with the rest of the duplicate
# section after confirming the duplication is unintentional.
echo "begin sqoop invoice_path"
sqoop_yesterday_append invoice_path create_tm
echo "begin sqoop invoice_result"
sqoop_yesterday_append invoice_result create_tm
阅读全文
0 0
- sqoop mysql 抽到hive
- Sqoop MySQL 导入到Hive
- sqoop 导入mysql数据到hive
- sqoop同步mysql数据到hive中
- sqoop导入mysql数据到hive中报错
- sqoop从mysql导入到hive
- [Sqoop]将Mysql数据表导入到Hive
- [Sqoop]将Hive数据表导出到Mysql
- sqoop 导出 hive分区表 数据到 mysql
- sqoop导出hive表到mysql中
- sqoop 实现mysql 到hive数据抽取
- sqoop导出hive表数据到mysql
- SQOOP从MySQL导入数据到Hive
- 【hadoop Sqoop】Sqoop从mysql导数据到hive
- 利用sqoop将hive数据导入导出数据到mysql
- sqoop 从 hive 导到mysql遇到的问题
- sqoop导oracle.mysql数据到hdfs hive
- 使用sqoop将mysql中数据导入到hive中
- Redis 讲解系列之 Redis的持久化
- linux crontab 命令 定时处理脚本文件
- 排序的Java实现
- Java学习笔记之IO(十七):转换流
- C语言除法”四舍五入“与“进一法”的实现
- sqoop mysql 抽到hive
- 云栖大会之异构计算,12日,B-3-5不见不散。
- bzoj 4423: [AMPPZ2013]Bytehattan
- CodeForces
- 给定中序-先序,中序-后序序列构建二叉树的算法
- C语言和C++ C#的区别在什么地方?
- retrofit的使用
- C++ STL系列之 STL标准入门必读
- 485芯片中slew-rate-limited是什么意思(转)