使用sqoop1,将sqlserver数据导入hive
来源:互联网 发布:php 制作扇形统计图 编辑:程序博客网 时间:2024/04/29 22:18
#!/bin/sh
# Incrementally imports rows from SQL Server state-info tables into Hive:
# Sqoop pulls each source table into an HDFS staging directory, an external
# Hive staging table is created over those files, and the rows are inserted
# into the partitioned target table (dynamic partitions on the
# year/month/day of RecoTime). The staging table and files are then dropped.
#
# NOTE(review): this copy of the script is corrupted (looks like a bad
# paste/scrape) and will not run as-is:
#   - the WRSI query string below is never closed with a matching double
#     quote, and its select list repeats (RecoTime/ProjID/... appear four
#     times), suggesting several query variables (presumably RSI, RSW, DTU,
#     WRSW) were fused into one during copying;
#   - RSI, RSW, DTU and WRSW, referenced by the `case` below, are never
#     defined in this copy;
#   - the `done` near the bottom has no matching `for ... do`; a loop such
#     as `for dstTabName in "${dstTabArray[@]}"; do` appears to have been
#     lost, so ${dstTabName} is never set — confirm against the original;
#   - the SA password is hard-coded in plain text; credentials should be
#     moved to a protected options file (sqoop --options-file) before use.
# SQL Server JDBC connection string (host:port; user; password; database)
sqlConnect="\jdbc:sqlserver://172.16.177.45:1433;\
username=sa;\
password=123456;\
database=Test;"
# Column Sqoop uses with --split-by to parallelize the import
checkColumn="RecoID"
# Source-side filter column ([记录时间] = record time)
condiColumn="[记录时间]"
# Import range: only rows with record time >= endTime are pulled
endTime="2016-08-15"
# Source tables to import; all but the first are currently disabled
dstTabArray=(
"000B_7F14_IDUStateInfo_RSimulateData"
#"000B_7F14_IDUStateInfo_RSwitchData"
#"000B_7F14_ODUStateInfo_DTUWRSimulateData"
#"000B_7F14_ODUStateInfo_WRSimulateData"
#"000B_7F14_ODUStateInfo_WRSwitchData"
)
# Target Hive database
hiveDbName=BIZ1
# Target Hive table name
hiveTableName=000B_data
# Name of the temporary (staging) Hive table
tempTabName=000B_dataTemp
# HDFS directory backing the staging table
tempTabPath=/user/hive/biz1/extend/wp04/${tempTabName}
# HDFS directory backing the final table
hiveTablePath=/user/hive/biz1/extend/wp04/${hiveTableName}
# Column name/type list used in the staging table DDL
hiveTableCols="RowKey string,\
RecoTime timestamp,\
ProjID bigint,\
DevID bigint,\
DevAddr int,\
FrameNO int,\
ReceiveTime timestamp,\
ModifyFlag string,\
TableName string,\
RecoID bigint,\
ProtocolVer_DB string,\
ModelID_DB string,\
RemoteOnOffFunc smallint,\
ForbidComp1 smallint,\
ForbidComp2 smallint,\
OnOffModeSet smallint,\
RunModeSet smallint,\
Chill_LWT_Set float,\
Comp2FreqProAlarm smallint"
# T-SQL select list for the WR simulate-data table. RowKey is built from the
# reversed zero-padded device id + record time (hhmmss) + frame id '7F14'
# + zero-padded record id. ([机组条码] = unit barcode, [记录时间] = record
# time, [协议版本] = protocol version, [机型ID] = model id.)
# NOTE(review): this string is never terminated in this copy — see header.
WRSI="select reverse(right('0000000000'+ltrim((select top 1 c.[ProjDev_ID] from Proj_Dev c where c.Proj_ID=b.Proj_ID and c.[机组条码]='FFFFFFFFFFFFFFF')),10))+replace(CONVERT(varchar(12),a.[记录时间],108),':','')+'7F14'+right('000000'+ltrim([Reco_ID]),6)as RowKey\
,a.[记录时间] as RecoTime\
,b.Proj_ID as ProjID\
,(select top 1 c.[ProjDev_ID] from Proj_Dev c where c.Proj_ID=b.Proj_ID and c.[机组条码]='FFFFFFFFFFFFFFF') as DevID\
,'224' as DevAddr\
,'' as FrameNO\
,'' as ReceiveTime\
,'' as ModifyFlag\
,'7F14' as TableName\
,a.[Reco_ID] as RecoID\
,a.[协议版本] as ProtocolVer_DB\
,a.[机型ID] as ModelID_DB\
,a.[记录时间] as RecoTime\
,b.Proj_ID as ProjID\
,(select top 1 c.[ProjDev_ID] from Proj_Dev c where c.Proj_ID=b.Proj_ID and c.[机组条码]='FFFFFFFFFFFFFFF') as DevID\
,'224' as DevAddr\
,'' as FrameNO\
,'' as ReceiveTime\
,'' as ModifyFlag\
,'7F14' as TableName\
,a.[Reco_ID] as RecoID\
,'' as ProtocolVer_DB\
,a.[记录时间] as RecoTime\
,b.Proj_ID as ProjID\
,a.[ProjDev_ID] as DevID\
,'224' as DevAddr\
,'' as FrameNO\
,'' as ReceiveTime\
,'' as ModifyFlag\
,'7F14' as TableName\
,a.[Reco_ID] as RecoID\
,'' as ProtocolVer_DB\
,a.[记录时间] as RecoTime\
,b.Proj_ID as ProjID\
,(select top 1 c.[ProjDev_ID] from Proj_Dev c where c.Proj_ID=b.Proj_ID and c.[机组条码]='FFFFFFFFFFFFFFF') as DevID\
,'224' as DevAddr\
,'' as FrameNO\
,'' as ReceiveTime\
,'' as ModifyFlag\
,'7F14' as TableName\
,a.[Reco_ID] as RecoID\
# Shared FROM/JOIN clause appended to whichever select list is chosen
whereStr=" from [dbo].[${dstTabName}] a left join [dbo].[Proj_Dev] b on a.ProjDev_ID=b.ProjDev_ID"
sql=""
# Pick the select list matching the current source table
# NOTE(review): RSI/RSW/DTU/WRSW are undefined in this copy — see header.
case ${dstTabName} in
"000B_7F14_IDUStateInfo_RSimulateData" )
sql=${RSI}${whereStr};;
"000B_7F14_IDUStateInfo_RSwitchData" )
sql=${RSW}${whereStr};;
"000B_7F14_ODUStateInfo_DTUWRSimulateData" )
sql=${DTU}${whereStr};;
"000B_7F14_ODUStateInfo_WRSimulateData" )
sql=${WRSI}${whereStr};;
"000B_7F14_ODUStateInfo_WRSwitchData" )
sql=${WRSW}${whereStr};;
esac
# Sqoop the selected rows (record time >= endTime) into the HDFS staging
# directory, 20 parallel mappers, on the 'production' queue
sqoop import -D mapred.job.queue.name=production --connect "${sqlConnect}" \
--query "select t.* from (${sql} where ${condiColumn}>='${endTime}') t WHERE \$CONDITIONS" \
--split-by ${checkColumn} \
--fields-terminated-by '\t' \
--lines-terminated-by '\n' \
--delete-target-dir \
--target-dir ${tempTabPath} \
-m 20
# Create an external staging table over the imported files, then insert the
# rows into the partitioned target table (dynamic partitions on
# year/month/day derived from RecoTime), and drop the staging table
hive -e "\
use ${hiveDbName}; \
drop table if exists ${tempTabName}; \
create external table ${tempTabName} (${hiveTableCols}) \
row format delimited fields terminated by '\t' \
location \"${tempTabPath}\"; \
use ${hiveDbName};\
set mapreduce.job.queuename=production; \
set hive.execution.engine=mr;\
set hive.exec.dynamic.partition=true; \
set hive.exec.dynamic.partition.mode=nonstrict; \
set hive.exec.compress.output=true;\
set mapred.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec;\
set PARQUET_COMPRESSION_CODE=snappy;\
set mapred.max.split.size=10000000;\
insert into table ${hiveTableName} partition(year,month,day) \
select * ,year(RecoTime),month(RecoTime),day(RecoTime) from ${tempTabName};\
drop table ${tempTabName};"
# Remove the staging files (dropping an external table keeps its data)
hadoop fs -rm -r ${tempTabPath}
echo "load ${dstTabName} Data over"
echo ""
echo ""
echo "------------------------------------------------------------------------------------------"
# NOTE(review): no matching `for ... do` is visible above this `done`
done
echo "load Data over"
echo "hive完成----------------------"
# Second copy of the same SQL Server -> Hive import script (the page pastes
# it twice). Sqoop stages each source table into HDFS, an external Hive
# staging table is created over the files, and rows are inserted into the
# partitioned target table (dynamic partitions on year/month/day of
# RecoTime).
#
# NOTE(review): this copy is also corrupted by the paste/scrape:
#   - four query-string fragments below each end with a closing quote, but
#     only the first still has its assignment (WRSI="select ..."); the
#     leading `NAME="select ..."` lines of the other three (presumably RSW,
#     DTU and WRSW) were stripped, and RSI is not defined at all;
#   - the `done` near the bottom has no matching `for ... do`; a loop such
#     as `for dstTabName in "${dstTabArray[@]}"; do` appears to have been
#     lost, so ${dstTabName} is never set — confirm against the original;
#   - the SA password is hard-coded in plain text; credentials should be
#     moved to a protected options file (sqoop --options-file) before use.
# SQL Server JDBC connection string (host:port; user; password; database)
sqlConnect="\jdbc:sqlserver://172.16.177.45:1433;\
username=sa;\
password=123456;\
database=Test;"
# Column Sqoop uses with --split-by to parallelize the import
checkColumn="RecoID"
# Source-side filter column ([记录时间] = record time)
condiColumn="[记录时间]"
# Import range: only rows with record time >= endTime are pulled
endTime="2016-08-15"
# Source tables to import; all but the first are currently disabled
dstTabArray=(
"000B_7F14_IDUStateInfo_RSimulateData"
#"000B_7F14_IDUStateInfo_RSwitchData"
#"000B_7F14_ODUStateInfo_DTUWRSimulateData"
#"000B_7F14_ODUStateInfo_WRSimulateData"
#"000B_7F14_ODUStateInfo_WRSwitchData"
)
# Target Hive database
hiveDbName=BIZ1
# Target Hive table name
hiveTableName=000B_data
# Name of the temporary (staging) Hive table
tempTabName=000B_dataTemp
# HDFS directory backing the staging table
tempTabPath=/user/hive/biz1/extend/wp04/${tempTabName}
# HDFS directory backing the final table
hiveTablePath=/user/hive/biz1/extend/wp04/${hiveTableName}
# Column name/type list used in the staging table DDL
hiveTableCols="RowKey string,\
RecoTime timestamp,\
ProjID bigint,\
DevID bigint,\
DevAddr int,\
FrameNO int,\
ReceiveTime timestamp,\
ModifyFlag string,\
TableName string,\
RecoID bigint,\
ProtocolVer_DB string,\
ModelID_DB string,\
RemoteOnOffFunc smallint,\
ForbidComp1 smallint,\
ForbidComp2 smallint,\
OnOffModeSet smallint,\
RunModeSet smallint,\
Chill_LWT_Set float,\
Comp2FreqProAlarm smallint"
# T-SQL select list for the WR simulate-data table. RowKey is built from the
# reversed zero-padded device id + record time (hhmmss) + frame id '7F14'
# + zero-padded record id. ([机组条码] = unit barcode, [记录时间] = record
# time, [协议版本] = protocol version, [机型ID] = model id.)
WRSI="select reverse(right('0000000000'+ltrim((select top 1 c.[ProjDev_ID] from Proj_Dev c where c.Proj_ID=b.Proj_ID and c.[机组条码]='FFFFFFFFFFFFFFF')),10))+replace(CONVERT(varchar(12),a.[记录时间],108),':','')+'7F14'+right('000000'+ltrim([Reco_ID]),6)as RowKey\
,a.[记录时间] as RecoTime\
,b.Proj_ID as ProjID\
,(select top 1 c.[ProjDev_ID] from Proj_Dev c where c.Proj_ID=b.Proj_ID and c.[机组条码]='FFFFFFFFFFFFFFF') as DevID\
,'224' as DevAddr\
,'' as FrameNO\
,'' as ReceiveTime\
,'' as ModifyFlag\
,'7F14' as TableName\
,a.[Reco_ID] as RecoID\
,a.[协议版本] as ProtocolVer_DB\
,a.[机型ID] as ModelID_DB\
,'' as Comp2FreqProAlarm"
# NOTE(review): orphaned query fragment — its leading `NAME="select ..."`
# assignment line was lost in this copy (see header note)
,a.[记录时间] as RecoTime\
,b.Proj_ID as ProjID\
,(select top 1 c.[ProjDev_ID] from Proj_Dev c where c.Proj_ID=b.Proj_ID and c.[机组条码]='FFFFFFFFFFFFFFF') as DevID\
,'224' as DevAddr\
,'' as FrameNO\
,'' as ReceiveTime\
,'' as ModifyFlag\
,'7F14' as TableName\
,a.[Reco_ID] as RecoID\
,'' as ProtocolVer_DB\
,'' as Comp2FreqProAlarm"
# NOTE(review): orphaned query fragment — leading assignment line lost;
# this one reads DevID directly from a.[ProjDev_ID] instead of the subquery
,a.[记录时间] as RecoTime\
,b.Proj_ID as ProjID\
,a.[ProjDev_ID] as DevID\
,'224' as DevAddr\
,'' as FrameNO\
,'' as ReceiveTime\
,'' as ModifyFlag\
,'7F14' as TableName\
,a.[Reco_ID] as RecoID\
,'' as ProtocolVer_DB\
,'' as Comp2FreqProAlarm"
# NOTE(review): orphaned query fragment — leading assignment line lost;
# this one reads the real alarm column ([压缩机二频繁保护报警] =
# compressor-2 frequent-protection alarm)
,a.[记录时间] as RecoTime\
,b.Proj_ID as ProjID\
,(select top 1 c.[ProjDev_ID] from Proj_Dev c where c.Proj_ID=b.Proj_ID and c.[机组条码]='FFFFFFFFFFFFFFF') as DevID\
,'224' as DevAddr\
,'' as FrameNO\
,'' as ReceiveTime\
,'' as ModifyFlag\
,'7F14' as TableName\
,a.[Reco_ID] as RecoID\
,[压缩机二频繁保护报警] as Comp2FreqProAlarm"
# Shared FROM/JOIN clause appended to whichever select list is chosen
whereStr=" from [dbo].[${dstTabName}] a left join [dbo].[Proj_Dev] b on a.ProjDev_ID=b.ProjDev_ID"
sql=""
# Pick the select list matching the current source table
# NOTE(review): RSI/RSW/DTU/WRSW are undefined in this copy — see header.
case ${dstTabName} in
"000B_7F14_IDUStateInfo_RSimulateData" )
sql=${RSI}${whereStr};;
"000B_7F14_IDUStateInfo_RSwitchData" )
sql=${RSW}${whereStr};;
"000B_7F14_ODUStateInfo_DTUWRSimulateData" )
sql=${DTU}${whereStr};;
"000B_7F14_ODUStateInfo_WRSimulateData" )
sql=${WRSI}${whereStr};;
"000B_7F14_ODUStateInfo_WRSwitchData" )
sql=${WRSW}${whereStr};;
esac
# Sqoop the selected rows (record time >= endTime) into the HDFS staging
# directory, 20 parallel mappers, on the 'production' queue
sqoop import -D mapred.job.queue.name=production --connect "${sqlConnect}" \
--query "select t.* from (${sql} where ${condiColumn}>='${endTime}') t WHERE \$CONDITIONS" \
--split-by ${checkColumn} \
--fields-terminated-by '\t' \
--lines-terminated-by '\n' \
--delete-target-dir \
--target-dir ${tempTabPath} \
-m 20
# Create an external staging table over the imported files, then insert the
# rows into the partitioned target table (dynamic partitions on
# year/month/day derived from RecoTime), and drop the staging table
hive -e "\
use ${hiveDbName}; \
drop table if exists ${tempTabName}; \
create external table ${tempTabName} (${hiveTableCols}) \
row format delimited fields terminated by '\t' \
location \"${tempTabPath}\"; \
use ${hiveDbName};\
set mapreduce.job.queuename=production; \
set hive.execution.engine=mr;\
set hive.exec.dynamic.partition=true; \
set hive.exec.dynamic.partition.mode=nonstrict; \
set hive.exec.compress.output=true;\
set mapred.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec;\
set PARQUET_COMPRESSION_CODE=snappy;\
set mapred.max.split.size=10000000;\
insert into table ${hiveTableName} partition(year,month,day) \
select * ,year(RecoTime),month(RecoTime),day(RecoTime) from ${tempTabName};\
drop table ${tempTabName};"
# Remove the staging files (dropping an external table keeps its data)
hadoop fs -rm -r ${tempTabPath}
echo "load ${dstTabName} Data over"
echo ""
echo ""
echo "------------------------------------------------------------------------------------------"
# NOTE(review): no matching `for ... do` is visible above this `done`
done
echo "load Data over"
echo "hive完成----------------------"
0 0
- 使用sqoop1,将sqlserver数据导入hive
- 使用sqoop1将hive导入mysql
- sqoop1,将sqlserver导入hbase
- 利用sqoop1将mysql数据导入至hive多分区
- 使用sqoop1.99.6将mysql数据导入到hdfs
- 【sqoop1】sqlserver 同步 hive
- Sqoop1.4.4将MySQL中数据导入到Hive表中
- Sqoop1.4.4使用增量导入模式将MySQL数据库中数据导入到HDFS中
- Sqoop1.4.6使用数据导入导出
- OOzie调度sqoop1 Action 从mysql导入数据到hive
- 【甘道夫】Sqoop1.4.4 实现将 Oracle10g 中的增量数据导入 Hive0.13.1 ,并更新Hive中的主表
- 使用Sqoop1.4.4将MySQL数据库表中数据导入到HDFS中
- Sqoop1.4.4使用SQL语句形式将MySQL数据库表中数据导入到HDFS中
- sqoop1.4.2数据导入
- Sqoop1.4.4 实现将 Oracle10g 中的增量数据导入
- sqoop1.4.5将mysql中的数据导入到HBase中
- Flume 将数据导入Hive
- 使用sqoop将mysql中数据导入到hive中
- Android程序如何在任意处完全退出应用
- POJ1465:Multiple(BFS)
- 史上最全的 Sublime Text 汉化、插件安装合集
- webgis主界面及查询功能实现
- 钓鱼问题
- 使用sqoop1,将sqlserver数据导入hive
- Android团队协作中正确的启动Activity方法
- android自定义控件之圆形进度条(带动画)
- Android不使用第三方升级库实现应用升级
- Java注解的使用
- 线性表学习
- hdoj 3440 house man 差分约束+spfa
- 希尔排序的简单实现
- PAT L2-010. 排座位 并查集+数组