python脚本用sqoop把mysql数据导入hive数据仓库中

来源:互联网 发布:安徽网络大学马鞍山 编辑:程序博客网 时间:2024/05/29 18:36

使用说明:由于项目需要将mysql中的业务数据导入到hive中,这里采用sqoop来做中间桥梁,并且通过supervisor来做时间控制,让python定时启动,导入数据。
安装sqoop1.4.6和supervisor

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# --------------------------------
# Created by coco on 16/2/23
# --------------------------------
# Purpose: initialise the business database in Hive by importing MySQL
# tables through Sqoop. Meant to be launched on a schedule (e.g. by
# supervisor); re-running is safe because imports use --hive-overwrite.
import os
import subprocess

import pyhs2

# Shared HiveServer2 connection used by every query in this script.
conn = pyhs2.connect(host="192.168.8.94", port=10000,
                     authMechanism="PLAIN", user="hdfs")

# Source MySQL credentials.
# NOTE(review): hard-coded credentials should move to a config file or
# environment variables rather than live in source control.
mysql_info = {"host": "192.168.1.200", "port": 3306,
              "user": "root", "passwd": "123456"}


def run_hive_query(sql):
    """Execute *sql* on the shared Hive connection and return all rows.

    pyhs2 returns each row as a list, e.g. ``show databases`` yields
    ``[['default'], ['wwn'], ...]`` — callers probe with ``[name] in rows``.
    """
    with conn.cursor() as cursor:
        cursor.execute(sql)
        return cursor.fetchall()


def mysql_to_hive(host, port, user, passwd, database, table):
    """Import one MySQL table into the Hive database of the same name.

    Creates the Hive database (and, on first import, the Hive table);
    subsequent runs refresh the data in place via --hive-overwrite.

    :param host:     MySQL server host
    :param port:     MySQL server port
    :param user:     MySQL user name
    :param passwd:   MySQL password (passed to sqoop as-is; no shell
                     escaping required — see below)
    :param database: source MySQL database, also the target Hive database
    :param table:    table name to import
    """
    # Optional HDFS cleanup before import (left disabled, as in the
    # original script):
    # os.system("hadoop fs -rm -r /user/task/%s" % table)

    # Create the target Hive database if it does not exist yet.
    if [database] not in run_hive_query("show databases"):
        with conn.cursor() as cursor:
            cursor.execute("create database " + database)
    with conn.cursor() as cursor:
        cursor.execute("use " + database)

    # Build the sqoop command once; only --create-hive-table differs
    # between the first import and later refreshes.
    cmd = ["sqoop", "import",
           "--connect", "jdbc:mysql://%s:%s/%s" % (host, port, database),
           "--username", user,
           "--password", passwd,
           "--table", table,
           "--hive-database", database,
           "-m", "10",
           "--hive-import", "--hive-overwrite"]
    if [table] not in run_hive_query("show tables"):
        cmd.append("--create-hive-table")

    # An argument list with subprocess bypasses the shell entirely, so
    # special characters in the password need no manual escaping (the
    # old os.system() string required hand-escaping "(" at the call site
    # and was open to shell injection).
    subprocess.call(cmd)


if __name__ == "__main__":
    # Log the connection target without echoing the password.
    print({k: v for k, v in mysql_info.items() if k != "passwd"})
    mysql_to_hive(mysql_info["host"], mysql_info["port"],
                  mysql_info["user"], mysql_info["passwd"],
                  "wwn", "cm_vip")

转载自:http://blog.csdn.net/ljphilp/article/details/53884090

原创粉丝点击