Analysis of the $HADOOP_HOME/bin/hadoop Script


1. $HADOOP_HOME/bin/hadoop

#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# The Hadoop command script
#
# Environment Variables
#
#   JAVA_HOME        The java implementation to use.  Overrides JAVA_HOME.
#
#   HADOOP_CLASSPATH Extra Java CLASSPATH entries.
#
#   HADOOP_USER_CLASSPATH_FIRST      When defined, the HADOOP_CLASSPATH is
#                                    added in the beginning of the global
#                                    classpath. Can be defined, for example,
#                                    by doing
#                                    export HADOOP_USER_CLASSPATH_FIRST=true
#
#   HADOOP_HEAPSIZE  The maximum amount of heap to use, in MB.
#                    Default is 1000.
#
#   HADOOP_OPTS      Extra Java runtime options.
#
#   HADOOP_NAMENODE_OPTS       These options are added to HADOOP_OPTS
#   HADOOP_CLIENT_OPTS         when the respective command is run.
#   HADOOP_{COMMAND}_OPTS etc  HADOOP_JT_OPTS applies to JobTracker
#                              for e.g.  HADOOP_CLIENT_OPTS applies to
#                              more than one command (fs, dfs, fsck,
#                              dfsadmin etc)
#
#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
#
#   HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
#

bin=`dirname "$0"`   # (1) directory containing this script ($HADOOP_HOME/bin)
bin=`cd "$bin"; pwd` # (2) resolved to an absolute path

if [ -e "$bin"/../libexec/hadoop-config.sh ]; then  # (3) source hadoop-config.sh to set up configuration
  . "$bin"/../libexec/hadoop-config.sh
else
  . "$bin"/hadoop-config.sh
fi

cygwin=false
case "`uname`" in
CYGWIN*) cygwin=true;;
esac

# if no args specified, show usage
if [ $# = 0 ]; then
  echo "Usage: hadoop [--config confdir] COMMAND"
  echo "where COMMAND is one of:"
  echo "  namenode -format     format the DFS filesystem"
  echo "  secondarynamenode    run the DFS secondary namenode"
  echo "  namenode             run the DFS namenode"
  echo "  datanode             run a DFS datanode"
  echo "  dfsadmin             run a DFS admin client"
  echo "  mradmin              run a Map-Reduce admin client"
  echo "  fsck                 run a DFS filesystem checking utility"
  echo "  fs                   run a generic filesystem user client"
  echo "  balancer             run a cluster balancing utility"
  echo "  fetchdt              fetch a delegation token from the NameNode"
  echo "  jobtracker           run the MapReduce job Tracker node"
  echo "  pipes                run a Pipes job"
  echo "  tasktracker          run a MapReduce task Tracker node"
  echo "  historyserver        run job history servers as a standalone daemon"
  echo "  job                  manipulate MapReduce jobs"
  echo "  queue                get information regarding JobQueues"
  echo "  version              print the version"
  echo "  jar <jar>            run a jar file"
  echo "  distcp <srcurl> <desturl> copy file or directories recursively"
  echo "  archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
  echo "  classpath            prints the class path needed to get the"
  echo "                       Hadoop jar and the required libraries"
  echo "  daemonlog            get/set the log level for each daemon"
  echo " or"
  echo "  CLASSNAME            run the class named CLASSNAME"
  echo "Most commands print help when invoked w/o parameters."
  exit 1
fi

# get arguments
COMMAND=$1
shift

# Determine if we're starting a secure datanode, and if so, redefine appropriate variables
if [ "$COMMAND" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
  HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
  HADOOP_LOG_DIR=$HADOOP_SECURE_DN_LOG_DIR
  HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER
  starting_secure_dn="true"
fi

# some Java parameters
if [ "$JAVA_HOME" != "" ]; then
  #echo "run java in $JAVA_HOME"
  JAVA_HOME=$JAVA_HOME
fi

if [ "$JAVA_HOME" = "" ]; then
  echo "Error: JAVA_HOME is not set."
  exit 1
fi

JAVA=$JAVA_HOME/bin/java
JAVA_HEAP_MAX=-Xmx1000m

# check envvars which might override default args
if [ "$HADOOP_HEAPSIZE" != "" ]; then
  #echo "run with heapsize $HADOOP_HEAPSIZE"
  JAVA_HEAP_MAX="-Xmx""$HADOOP_HEAPSIZE""m"
  #echo $JAVA_HEAP_MAX
fi

# (4) build the CLASSPATH; it initially contains $HADOOP_CONF_DIR
CLASSPATH="${HADOOP_CONF_DIR}"
if [ "$HADOOP_USER_CLASSPATH_FIRST" != "" ] && [ "$HADOOP_CLASSPATH" != "" ] ; then
  CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
fi
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar

# for developers, add Hadoop classes to CLASSPATH
if [ -d "$HADOOP_HOME/build/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
fi
if [ -d "$HADOOP_HOME/build/webapps" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
fi
if [ -d "$HADOOP_HOME/build/test/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
fi
if [ -d "$HADOOP_HOME/build/tools" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/tools
fi

# so that filenames w/ spaces are handled correctly in loops below
IFS=

# for releases, add core hadoop jar & webapps to CLASSPATH
if [ -e $HADOOP_PREFIX/share/hadoop/hadoop-core-* ]; then
  # binary layout
  if [ -d "$HADOOP_PREFIX/share/hadoop/webapps" ]; then
    CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop
  fi
  for f in $HADOOP_PREFIX/share/hadoop/hadoop-core-*.jar; do
    CLASSPATH=${CLASSPATH}:$f;
  done
  # add libs to CLASSPATH
  for f in $HADOOP_PREFIX/share/hadoop/lib/*.jar; do
    CLASSPATH=${CLASSPATH}:$f;
  done
  for f in $HADOOP_PREFIX/share/hadoop/lib/jsp-2.1/*.jar; do
    CLASSPATH=${CLASSPATH}:$f;
  done
  for f in $HADOOP_PREFIX/share/hadoop/hadoop-tools-*.jar; do
    TOOL_PATH=${TOOL_PATH}:$f;
  done
else
  # tarball layout
  if [ -d "$HADOOP_HOME/webapps" ]; then
    CLASSPATH=${CLASSPATH}:$HADOOP_HOME
  fi
  for f in $HADOOP_HOME/hadoop-core-*.jar; do
    CLASSPATH=${CLASSPATH}:$f;
  done
  # add libs to CLASSPATH
  for f in $HADOOP_HOME/lib/*.jar; do
    CLASSPATH=${CLASSPATH}:$f;
  done
  if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
    for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
      CLASSPATH=${CLASSPATH}:$f;
    done
  fi
  for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
    CLASSPATH=${CLASSPATH}:$f;
  done
  for f in $HADOOP_HOME/hadoop-tools-*.jar; do
    TOOL_PATH=${TOOL_PATH}:$f;
  done
  for f in $HADOOP_HOME/build/hadoop-tools-*.jar; do
    TOOL_PATH=${TOOL_PATH}:$f;
  done
fi

# add user-specified CLASSPATH last
if [ "$HADOOP_USER_CLASSPATH_FIRST" = "" ] && [ "$HADOOP_CLASSPATH" != "" ]; then
  CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
fi

# default log directory & file
if [ "$HADOOP_LOG_DIR" = "" ]; then
  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
fi
if [ "$HADOOP_LOGFILE" = "" ]; then
  HADOOP_LOGFILE='hadoop.log'
fi

# default policy file for service-level authorization
if [ "$HADOOP_POLICYFILE" = "" ]; then
  HADOOP_POLICYFILE="hadoop-policy.xml"
fi

# restore ordinary behaviour
unset IFS

# figure out which class to run -- (5) select CLASS according to COMMAND
if [ "$COMMAND" = "classpath" ] ; then
  if $cygwin; then
    CLASSPATH=`cygpath -p -w "$CLASSPATH"`
  fi
  echo $CLASSPATH
  exit
elif [ "$COMMAND" = "namenode" ] ; then
  CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
elif [ "$COMMAND" = "secondarynamenode" ] ; then
  CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
elif [ "$COMMAND" = "datanode" ] ; then
  CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
  if [ "$starting_secure_dn" = "true" ]; then
    HADOOP_OPTS="$HADOOP_OPTS -jvm server $HADOOP_DATANODE_OPTS"
  else
    HADOOP_OPTS="$HADOOP_OPTS -server $HADOOP_DATANODE_OPTS"
  fi
elif [ "$COMMAND" = "fs" ] ; then
  CLASS=org.apache.hadoop.fs.FsShell
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "dfs" ] ; then
  CLASS=org.apache.hadoop.fs.FsShell
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "dfsadmin" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "mradmin" ] ; then
  CLASS=org.apache.hadoop.mapred.tools.MRAdmin
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "fsck" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.DFSck
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "balancer" ] ; then
  CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
elif [ "$COMMAND" = "fetchdt" ] ; then
  CLASS=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
elif [ "$COMMAND" = "jobtracker" ] ; then
  CLASS=org.apache.hadoop.mapred.JobTracker
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS"
elif [ "$COMMAND" = "historyserver" ] ; then
  CLASS=org.apache.hadoop.mapred.JobHistoryServer
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOB_HISTORYSERVER_OPTS"
elif [ "$COMMAND" = "tasktracker" ] ; then
  CLASS=org.apache.hadoop.mapred.TaskTracker
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS"
elif [ "$COMMAND" = "job" ] ; then
  CLASS=org.apache.hadoop.mapred.JobClient
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "queue" ] ; then
  CLASS=org.apache.hadoop.mapred.JobQueueClient
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "pipes" ] ; then
  CLASS=org.apache.hadoop.mapred.pipes.Submitter
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "version" ] ; then
  CLASS=org.apache.hadoop.util.VersionInfo
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "jar" ] ; then
  CLASS=org.apache.hadoop.util.RunJar
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "distcp" ] ; then
  CLASS=org.apache.hadoop.tools.DistCp
  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "daemonlog" ] ; then
  CLASS=org.apache.hadoop.log.LogLevel
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "archive" ] ; then
  CLASS=org.apache.hadoop.tools.HadoopArchives
  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "sampler" ] ; then
  CLASS=org.apache.hadoop.mapred.lib.InputSampler
  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
else
  CLASS=$COMMAND
fi

# cygwin path translation
if $cygwin; then
  CLASSPATH=`cygpath -p -w "$CLASSPATH"`
  HADOOP_HOME=`cygpath -w "$HADOOP_HOME"`
  HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
  TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
fi

# Determine the JAVA_PLATFORM -- (6) detect the host platform
JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m ${HADOOP_JAVA_PLATFORM_OPTS} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
if [ "$JAVA_PLATFORM" = "Linux-amd64-64" ]; then
  JSVC_ARCH="amd64"
else
  JSVC_ARCH="i386"
fi

# setup 'java.library.path' for native-hadoop code if necessary
JAVA_LIBRARY_PATH=''
if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" -o -e "${HADOOP_PREFIX}/lib/libhadoop.a" ]; then
  if [ -d "$HADOOP_HOME/build/native" ]; then
    JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
  fi
  if [ -d "${HADOOP_HOME}/lib/native" ]; then
    if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
    else
      JAVA_LIBRARY_PATH=${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
    fi
  fi
  if [ -e "${HADOOP_PREFIX}/lib/libhadoop.a" ]; then
    JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/lib
  fi
fi

# cygwin path translation
if $cygwin; then
  JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
fi

HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"

# turn security logger on the namenode and jobtracker only
if [ $COMMAND = "namenode" ] || [ $COMMAND = "jobtracker" ]; then
  HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,DRFAS}"
else
  HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
fi

if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
  HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"  # (7) point the JVM at the Hadoop native libraries
fi
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"

# Check to see if we should start a secure datanode
if [ "$starting_secure_dn" = "true" ]; then
  if [ "$HADOOP_PID_DIR" = "" ]; then
    HADOOP_SECURE_DN_PID="/tmp/hadoop_secure_dn.pid"
  else
    HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
  fi

  exec "$HADOOP_HOME/libexec/jsvc.${JSVC_ARCH}" -Dproc_$COMMAND -outfile "$HADOOP_LOG_DIR/jsvc.out" \
                                                -errfile "$HADOOP_LOG_DIR/jsvc.err" \
                                                -pidfile "$HADOOP_SECURE_DN_PID" \
                                                -nodetach \
                                                -user "$HADOOP_SECURE_DN_USER" \
                                                -cp "$CLASSPATH" \
                                                $JAVA_HEAP_MAX $HADOOP_OPTS \
                                                org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter "$@"
else
  # run it
  exec "$JAVA" -Dproc_$COMMAND $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"  # run the selected Java class
fi
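The script boils down to three steps: assemble CLASSPATH, map the COMMAND argument to a Java class while accumulating the matching HADOOP_*_OPTS, and exec the JVM. The following is a minimal sketch of that dispatch pattern, with simplified variable handling and only a few of the real class names; it is not the actual script, which uses an elif chain instead of case:

#!/usr/bin/env bash
# Sketch: the command-to-class dispatch at the heart of bin/hadoop.
# The class names are real Hadoop 1.x classes; everything else is simplified.
COMMAND=$1
shift                                 # the remaining "$@" goes to the Java class

case "$COMMAND" in
  fs)       CLASS=org.apache.hadoop.fs.FsShell ;;
  namenode) CLASS=org.apache.hadoop.hdfs.server.namenode.NameNode ;;
  jar)      CLASS=org.apache.hadoop.util.RunJar ;;
  *)        CLASS=$COMMAND ;;         # fallthrough: treat the argument as a class name
esac

JAVA_HEAP_MAX="-Xmx${HADOOP_HEAPSIZE:-1000}m"   # env var overrides the 1000 MB default
exec "$JAVA_HOME/bin/java" $JAVA_HEAP_MAX -classpath "$CLASSPATH" $CLASS "$@"

The fallthrough branch is why "hadoop some.package.SomeClass args" works: any class on the assembled classpath can be launched directly, exactly as the CLASSNAME entry in the usage text promises.
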
2. $HADOOP_HOME/bin/hadoop-config.sh

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# included in all the hadoop scripts with source command
# should not be executable directly
# also should not be passed any arguments, since we need original $*

# resolve links - $0 may be a softlink
this="${BASH_SOURCE-$0}"
common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P)
script="$(basename -- "$this")"
this="$common_bin/$script"

# convert relative path to absolute path
config_bin=`dirname "$this"`
script=`basename "$this"`
config_bin=`cd "$config_bin"; pwd`
this="$config_bin/$script"

# the root of the Hadoop installation
export HADOOP_PREFIX=`dirname "$this"`/..

# check to see if the conf dir is given as an optional argument
if [ $# -gt 1 ]
then
    if [ "--config" = "$1" ]
    then
        shift
        confdir=$1
        shift
        HADOOP_CONF_DIR=$confdir
    fi
fi

# Allow alternate conf dir location.
if [ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]; then
  DEFAULT_CONF_DIR="conf"
else
  DEFAULT_CONF_DIR="etc/hadoop"
fi
HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_PREFIX/$DEFAULT_CONF_DIR}"

# check to see it is specified whether to use the slaves or the
# masters file
if [ $# -gt 1 ]
then
    if [ "--hosts" = "$1" ]
    then
        shift
        slavesfile=$1
        shift
        export HADOOP_SLAVES="${HADOOP_CONF_DIR}/$slavesfile"
    fi
fi

if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  . "${HADOOP_CONF_DIR}/hadoop-env.sh"  # source hadoop-env.sh to apply user-defined settings
fi

if [ "$HADOOP_HOME_WARN_SUPPRESS" = "" ] && [ "$HADOOP_HOME" != "" ]; then
  echo "Warning: \$HADOOP_HOME is deprecated." 1>&2
  echo 1>&2
fi

# Newer versions of glibc use an arena memory allocator that causes virtual
# memory usage to explode. This interacts badly with the many threads that
# we use in Hadoop. Tune the variable down to prevent vmem explosion.
export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}

export HADOOP_HOME=${HADOOP_PREFIX}
export HADOOP_HOME_WARN_SUPPRESS=1
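A subtle point here: bin/hadoop sources this file with no arguments, and in bash a script sourced without arguments sees, and can shift, the caller's positional parameters. That is how hadoop-config.sh can peel a leading "--config confdir" off bin/hadoop's own argument list before COMMAND=$1 runs, and it is why the header warns that the file must be sourced, never executed. A tiny self-contained demonstration of the mechanism, with hypothetical file names:

# --- peel.sh: meant to be sourced, never executed ---
if [ $# -gt 1 ] && [ "--config" = "$1" ]; then
  shift              # shift here moves the CALLER's positional parameters
  CONF_DIR=$1
  shift
fi

# --- main.sh (executable) ---
. ./peel.sh          # sourced with no arguments: peel.sh sees main.sh's "$@"
echo "conf=${CONF_DIR:-default} remaining: $*"

# $ ./main.sh --config /tmp/conf fs -ls /
# conf=/tmp/conf remaining: fs -ls /
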
3. $HADOOP_HOME/conf/hadoop-env.sh
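hadoop-env.sh is the file hadoop-config.sh sources in the step above: a plain list of environment-variable exports, where any of the variables documented at the top of bin/hadoop can be set. A minimal illustration with example values (not defaults shipped with any particular release):

# Illustrative hadoop-env.sh entries (example values; adjust per installation)
export JAVA_HOME=/usr/lib/jvm/java-6-sun   # the JDK all Hadoop scripts should use
export HADOOP_HEAPSIZE=2000                # daemon heap in MB (script default: 1000)
export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_OPTS"
export HADOOP_LOG_DIR=/var/log/hadoop      # instead of $HADOOP_HOME/logs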


In summary, when the hadoop command runs, it sources hadoop-config.sh and then hadoop-env.sh, in that order, to set up its configuration and parameters. It then hands the resulting settings to a Java class for execution, selecting a different class and behavior according to the command-line arguments.
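The whole chain can be observed from the command line. The classpath command prints the CLASSPATH the script assembled and exits before any dispatch; HADOOP_HEAPSIZE flows into the -Xmx setting (assuming hadoop-env.sh does not itself set it); and the else branch (CLASS=$COMMAND) runs an arbitrary class:

hadoop classpath                            # print the assembled CLASSPATH and exit
HADOOP_HEAPSIZE=2000 hadoop fs -ls /        # HADOOP_HEAPSIZE feeds JAVA_HEAP_MAX (-Xmx2000m)
hadoop org.apache.hadoop.util.VersionInfo   # fallthrough branch: run a class directly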
