# Useful shell snippets (原标题: "shell 有用的")
#
# Source: internet / 程序博客网 (scraped blog post), retrieved 2024/06/05 12:00



# Export (database, table) name pairs from the Hive metastore MySQL DB to a
# dated CSV file named gettbnameYYMMDD.
# NOTE(review): the password is passed on the command line, which leaks via
# `ps` and shell history — prefer --defaults-extra-file or an interactive -p
# for anything beyond an ad-hoc run.
mysql -h192.168.206.191 -uhive -phadoophive \
    -e "select a.NAME,b.tbl_name from DBS a join TBLS b on a.DB_ID=b.DB_ID" \
    hive > 1.txt
# Squash mysql's tab-separated output into "db,table" CSV.
awk '{print $1","$2}' 1.txt > "gettbname$(date +%y%m%d)"



# set up configurations
source benchmark.conf

# Rotate any log left over from a previous run: stamp it with its own
# modification time (GNU date --reference) and move it into $LOG_DIR so the
# new run starts with a clean $LOG_FILE.
if [ -e "$LOG_FILE" ]; then
        timestamp=$(date "+%F-%R" --reference="$LOG_FILE")
        backupFile="$LOG_FILE.$timestamp"
        mkdir -p "$LOG_DIR"                    # destination may not exist yet
        mv "$LOG_FILE" "$LOG_DIR/$backupFile"
fi


# Print the run banner; the Hive/Hadoop locations are also appended to the log.
echo ""
echo "***********************************************"
echo "*           TPC-H benchmark on Hive           *"
echo "***********************************************"
echo ""
echo "Running Hive from $HIVE_HOME" | tee -a "$LOG_FILE"
echo "Running Hadoop from $HADOOP_HOME" | tee -a "$LOG_FILE"
echo "See $LOG_FILE for more details of query errors."
echo ""


# Run the whole query suite NUM_OF_TRIALS times, timing each query.
trial=0
while [ "$trial" -lt "$NUM_OF_TRIALS" ]; do
        trial=$((trial + 1))
        echo "Executing Trial #$trial of $NUM_OF_TRIALS trial(s)..."

        for query in "${HIVE_TPCH_QUERIES_ALL[@]}"; do
                echo "Running Hive query: $query" | tee -a "$LOG_FILE"
                # Full query output goes to the log; only the "Time:" line
                # emitted by $TIME_CMD reaches the console.  $TIME_CMD and
                # $HIVE_CMD stay unquoted on purpose: they may carry arguments
                # that must word-split (e.g. "/usr/bin/time -f Time:%e").
                $TIME_CMD $HIVE_CMD -f "$BASE_DIR/$query" 2>&1 | tee -a "$LOG_FILE" | grep '^Time:'
                # PIPESTATUS[0] is the hive command's exit code, not grep's.
                returncode=${PIPESTATUS[0]}
                if [ "$returncode" -ne 0 ]; then
                        echo "ABOVE QUERY FAILED:$returncode" >&2
                fi
        done
done # TRIAL
echo "***********************************************"
echo ""




########################################################
#!/usr/bin/env bash
# Configuration for the TPC-H-on-Hive benchmark driver (sourced by the runner).
# Requires HADOOP_HOME and HIVE_HOME to be set in the environment.

# Directory the benchmark is launched from; query paths are resolved from it.
BASE_DIR=$(pwd)

# GNU time, printing only elapsed wall-clock seconds as "Time:<secs>".
# Deliberately left unquoted at the call site so its options word-split.
TIME_CMD="/usr/bin/time -f Time:%e"

# Number of times the full query suite is executed.
NUM_OF_TRIALS=1

# Current run log, and the directory previous logs are rotated into.
LOG_FILE="benchmark.log"
LOG_DIR="$BASE_DIR/logs"

# hadoop
HADOOP_CMD="$HADOOP_HOME/bin/hadoop"

# hive
HIVE_CMD="$HIVE_HOME/bin/hive"


# hive tpch queries
# hive all benchmark queries


# All 22 TPC-H benchmark query files, relative to BASE_DIR.
# Fixed: the variable was misspelled IVE_TPCH_QUERIES_ALL (the runner reads
# HIVE_TPCH_QUERIES_ALL), a stray "\ " injected a bogus whitespace element,
# and "\)" escaped the closing paren so the array literal never terminated.
# Newlines inside ( ... ) need no backslash continuation.
HIVE_TPCH_QUERIES_ALL=(
        "tpch/q1_pricing_summary_report.hive"
        "tpch/q2_minimum_cost_supplier.hive"
        "tpch/q3_shipping_priority.hive"
        "tpch/q4_order_priority.hive"
        "tpch/q5_local_supplier_volume.hive"
        "tpch/q6_forecast_revenue_change.hive"
        "tpch/q7_volume_shipping.hive"
        "tpch/q8_national_market_share.hive"
        "tpch/q9_product_type_profit.hive"
        "tpch/q10_returned_item.hive"
        "tpch/q11_important_stock.hive"
        "tpch/q12_shipping.hive"
        "tpch/q13_customer_distribution.hive"
        "tpch/q14_promotion_effect.hive"
        "tpch/q15_top_supplier.hive"
        "tpch/q16_parts_supplier_relationship.hive"
        "tpch/q17_small_quantity_order_revenue.hive"
        "tpch/q18_large_volume_customer.hive"
        "tpch/q19_discounted_revenue.hive"
        "tpch/q20_potential_part_promotion.hive"
        "tpch/q21_suppliers_who_kept_orders_waiting.hive"
        "tpch/q22_global_sales_opportunity.hive"
)

# NOTE(review): the scraped page ended with a broken example and vote-count
# residue ("0 0"), removed here:
#   $HIVE_HOME/bin/hive -e 'select a.col from ${HIVE_TPCH_QUERIES_ALL[@]};'
# Single quotes prevent the array from expanding, and query *file names* are
# not table names anyway.  To run a query file, use: $HIVE_CMD -f <file>