一些hadoop脚本事例--64

来源:互联网 发布:淘宝网落地晒衣架 编辑:程序博客网 时间:2024/05/21 11:21

1. 用于启动jar包读取HBase表数据并导出到HDFS

#!/bin/bash
# Export an HBase table's data to HDFS via a MapReduce job.
# Usage: aaa.sh <table>
# Requires: hadoop on PATH (loaded via /etc/profile) and ./aasss.jar
# in the working directory.
source /etc/profile
set -u  # abort on unset variables; no 'set -e' so cleanup may fail softly

if [ $# -ne 1 ]; then
  echo "aaa.sh table" >&2
  exit 1
fi
table=$1

# Best-effort cleanup of output from a previous run; intentionally
# unchecked — it fails harmlessly when the path does not exist yet.
hadoop fs -rm -r "/user/ads/hbase_tests/$table"

# Run the export job on the 'regular' queue and report the outcome.
if hadoop jar ./aasss.jar hbase.hfile.aa.aaExport \
    -Dmapreduce.job.queuename=regular \
    "$table" "/user/ads/hbase_tests/$table"; then
  echo "export table success:$table"
  exit 0
else
  echo "export table fail:$table" >&2
  exit 1
fi

2. 用于在两个HDFS集群之间传递数据（distcp）

#!/bin/bash
# Copy one HBase table's HFiles between clusters with distcp:
# precheck the source, skip if destination already matches in size,
# otherwise clean the destination, copy, and verify the size again.
# Usage: distcp.sh <table>
source /etc/profile
set -u  # abort on unset variables; '$?'-style checks below handle job failures

readonly hadoop_home="/home/hbase/bigdate/hadoop"
readonly src_hdfs="hftp://11.1.114.111/user/aa/hbase_aatables/"
#dst_hdfs="hdfs://aaa/home/hbaaaaart/hfileaaaaaaaaaaaaaaaaaaaaaaaaas/"
readonly dst_hdfs="as.as.sa01:/home/hbase/hfiles/"
# Number of distcp attempts before giving up (original looped exactly once).
readonly max_attempts=1

if [ $# -ne 1 ]; then
  echo "distcp.sh table" >&2
  exit 1
fi
table=$1

# Fail fast if the source path is missing or malformed.
if ! "$hadoop_home/bin/hadoop" jar ew.jar hbase.ew.Checker \
    precheck "$src_hdfs$table" debug; then
  echo "precheck src path error:$src_hdfs$table" >&2
  exit 1
fi

# If source and destination already agree in size, a prior run finished.
if "$hadoop_home/bin/hadoop" jar ew.jar hbase.ew.Checker \
    sizecheck "$src_hdfs$table" "$dst_hdfs$table" debug; then
  echo "table has been copied:$table"
  exit 0
else
  # NOTE(review): removal targets /home/hfiles/ while dst_hdfs points at
  # /home/hbase/hfiles/ — confirm the two paths are intentionally different.
  echo "remove /home/hfiles/$table"
  "$hadoop_home/bin/hadoop" fs -rm -r "/home/hfiles/$table"
fi

for (( attempt = 0; attempt < max_attempts; attempt++ )); do
  if ! "$hadoop_home/bin/hadoop" distcp \
      -Dmapreduce.job.acl-view-job=dr.who \
      -Dmapreduce.job.queuename=offline.aaaa.normal \
      -pb -m 20 "$src_hdfs$table" "$dst_hdfs"; then
    # Copy failed: remove partial output so the next attempt starts clean.
    echo "distcp table fail:$table" >&2
    echo "remove /home/hfiles/$table"
    "$hadoop_home/bin/hadoop" fs -rm -r "/home/hfiles/$table"
  else
    echo "distcp table success:$table"
    echo "checksize /home/hfiles/$table"
    # Only a size-verified copy counts as success.
    if "$hadoop_home/bin/hadoop" jar ew.jar hbase.ew.Checker \
        sizecheck "$src_hdfs$table" "$dst_hdfs$table" debug; then
      echo "sizecheck table success:$table"
      exit 0
    fi
  fi
done
exit 1
3. 用于将HDFS上的数据批量导入HBase表

#!/bin/bash
# Bulk-load HFiles from HDFS into an HBase table.
# Usage: import.sh <table>
# Expects the table's HFiles under /home/hfiles/<table> on HDFS.
source /etc/profile
set -u  # abort on unset variables

if [ $# -ne 1 ]; then
  echo "import.sh table" >&2
  exit 1
fi
table=$1
readonly hadoop_home="/home/hbase/bigdate/hadoop/"

# Verify the HFile directory exists and looks sane before loading.
if ! "$hadoop_home/bin/hadoop" jar hbase.importer.jar hbase.importer.Checker \
    precheck "/home/hfiles/$table" debug; then
  echo "precheck rowfile path error:/home/hfiles/$table" >&2
  exit 1
fi

# Raise the per-region/per-family HFile cap so large exports load in one pass.
if "$hadoop_home/bin/hadoop" jar hbase.importer.jar hbase.importer.Import \
    -Dhbase.mapreduce.bulkload.max.hfiles.perRegion.perFamily=1024 \
    "/home/hfiles/$table" "$table"; then
  echo "import table success:$table"
  exit 0
else
  echo "import table fail:$table" >&2
  exit 1
fi


0 0