New Contributor
Posts: 2
Registered: 06-26-2017

Unable to start the NameNodes after enabling HDFS High Availability

Hi Team,

I am unable to start the NameNodes after enabling HDFS High Availability in my environment. I would appreciate your help with this.

Here are the logs:

Mon Jul 17 02:47:24 EDT 2017
+ source_parcel_environment
+ '[' '!' -z '' ']'
+ locate_cdh_java_home
+ '[' -z '' ']'
+ '[' -z /usr/libexec/bigtop-utils ']'
+ local BIGTOP_DETECT_JAVAHOME=
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/libexec/bigtop-utils/bigtop-detect-javahome ']'
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/libexec/bigtop-utils/../bigtop-detect-javahome ']'
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/lib/bigtop-utils/bigtop-detect-javahome ']'
+ BIGTOP_DETECT_JAVAHOME=/usr/lib/bigtop-utils/bigtop-detect-javahome
+ break
+ '[' -z /usr/lib/bigtop-utils/bigtop-detect-javahome ']'
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
++ BIGTOP_DEFAULTS_DIR=/etc/default
++ '[' -n /etc/default -a -r /etc/default/bigtop-utils ']'
++ . /etc/default/bigtop-utils
++ JAVA6_HOME_CANDIDATES=('/usr/lib/j2sdk1.6-sun' '/usr/lib/jvm/java-6-sun' '/usr/lib/jvm/java-1.6.0-sun-1.6.0' '/usr/lib/jvm/j2sdk1.6-oracle' '/usr/lib/jvm/j2sdk1.6-oracle/jre' '/usr/java/jdk1.6' '/usr/java/jre1.6')
++ OPENJAVA6_HOME_CANDIDATES=('/usr/lib/jvm/java-1.6.0-openjdk' '/usr/lib/jvm/jre-1.6.0-openjdk')
++ JAVA7_HOME_CANDIDATES=('/usr/java/jdk1.7' '/usr/java/jre1.7' '/usr/lib/jvm/j2sdk1.7-oracle' '/usr/lib/jvm/j2sdk1.7-oracle/jre' '/usr/lib/jvm/java-7-oracle')
++ OPENJAVA7_HOME_CANDIDATES=('/usr/lib/jvm/java-1.7.0-openjdk' '/usr/lib/jvm/java-7-openjdk')
++ JAVA8_HOME_CANDIDATES=('/usr/java/jdk1.8' '/usr/java/jre1.8' '/usr/lib/jvm/j2sdk1.8-oracle' '/usr/lib/jvm/j2sdk1.8-oracle/jre' '/usr/lib/jvm/java-8-oracle')
++ OPENJAVA8_HOME_CANDIDATES=('/usr/lib/jvm/java-1.8.0-openjdk' '/usr/lib/jvm/java-8-openjdk')
++ MISCJAVA_HOME_CANDIDATES=('/Library/Java/Home' '/usr/java/default' '/usr/lib/jvm/default-java' '/usr/lib/jvm/java-openjdk' '/usr/lib/jvm/jre-openjdk')
++ case ${BIGTOP_JAVA_MAJOR} in
++ JAVA_HOME_CANDIDATES=(${JAVA7_HOME_CANDIDATES[@]} ${JAVA8_HOME_CANDIDATES[@]} ${MISCJAVA_HOME_CANDIDATES[@]} ${OPENJAVA7_HOME_CANDIDATES[@]} ${OPENJAVA8_HOME_CANDIDATES[@]})
++ '[' -z '' ']'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/java/jdk1.7*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/java/jre1.7*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/j2sdk1.7-oracle*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/j2sdk1.7-oracle/jre*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/java-7-oracle*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd /usr/java/jdk1.8.0_112
++ for candidate in '`ls -rvd ${candidate_regex}* 2>/dev/null`'
++ '[' -e /usr/java/jdk1.8.0_112/bin/java ']'
++ export JAVA_HOME=/usr/java/jdk1.8.0_112
++ JAVA_HOME=/usr/java/jdk1.8.0_112
++ break 2
+ verify_java_home
+ '[' -z /usr/java/jdk1.8.0_112 ']'
+ echo JAVA_HOME=/usr/java/jdk1.8.0_112
+ . /usr/lib64/cmf/service/common/cdh-default-hadoop
++ [[ -z 5 ]]
++ '[' 5 = 3 ']'
++ '[' 5 = -3 ']'
++ '[' 5 -ge 4 ']'
++ export HADOOP_HOME_WARN_SUPPRESS=true
++ HADOOP_HOME_WARN_SUPPRESS=true
++ export HADOOP_PREFIX=/usr/lib/hadoop
++ HADOOP_PREFIX=/usr/lib/hadoop
++ export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
++ HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
++ export HADOOP_CONF_DIR=/run/cloudera-scm-agent/process/165-hdfs-NAMENODE
++ HADOOP_CONF_DIR=/run/cloudera-scm-agent/process/165-hdfs-NAMENODE
++ export HADOOP_COMMON_HOME=/usr/lib/hadoop
++ HADOOP_COMMON_HOME=/usr/lib/hadoop
++ export HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
++ HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
++ export HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
++ HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
++ '[' 5 = 4 ']'
++ '[' 5 = 5 ']'
++ export HADOOP_YARN_HOME=/usr/lib/hadoop-yarn
++ HADOOP_YARN_HOME=/usr/lib/hadoop-yarn
++ replace_pid -Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh
++ sed 's#{{PID}}#4086#g'
++ echo -Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh
+ export 'HADOOP_NAMENODE_OPTS=-Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh'
+ HADOOP_NAMENODE_OPTS='-Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh'
++ replace_pid
++ sed 's#{{PID}}#4086#g'
++ echo
+ export HADOOP_DATANODE_OPTS=
+ HADOOP_DATANODE_OPTS=
++ replace_pid
++ sed 's#{{PID}}#4086#g'
++ echo
+ export HADOOP_SECONDARYNAMENODE_OPTS=
+ HADOOP_SECONDARYNAMENODE_OPTS=
++ replace_pid
++ sed 's#{{PID}}#4086#g'
++ echo
+ export HADOOP_NFS3_OPTS=
+ HADOOP_NFS3_OPTS=
++ replace_pid
++ echo
++ sed 's#{{PID}}#4086#g'
+ export HADOOP_JOURNALNODE_OPTS=
+ HADOOP_JOURNALNODE_OPTS=
+ '[' 5 -ge 4 ']'
+ HDFS_BIN=/usr/lib/hadoop-hdfs/bin/hdfs
+ export 'HADOOP_OPTS=-Djava.net.preferIPv4Stack=true '
+ HADOOP_OPTS='-Djava.net.preferIPv4Stack=true '
+ echo 'using /usr/java/jdk1.8.0_112 as JAVA_HOME'
+ echo 'using 5 as CDH_VERSION'
+ echo 'using /run/cloudera-scm-agent/process/165-hdfs-NAMENODE as CONF_DIR'
+ echo 'using  as SECURE_USER'
+ echo 'using  as SECURE_GROUP'
+ set_hadoop_classpath
+ set_classpath_in_var HADOOP_CLASSPATH
+ '[' -z HADOOP_CLASSPATH ']'
+ [[ -n /usr/share/cmf ]]
++ tr '\n' :
++ find /usr/share/cmf/lib/plugins -maxdepth 1 -name '*.jar'
+ ADD_TO_CP=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:
+ [[ -n navigator/cdh57 ]]
+ for DIR in '$CM_ADD_TO_CP_DIRS'
++ find /usr/share/cmf/lib/plugins/navigator/cdh57 -maxdepth 1 -name '*.jar'
++ tr '\n' :
+ PLUGIN=/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ ADD_TO_CP=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ eval 'OLD_VALUE=$HADOOP_CLASSPATH'
++ OLD_VALUE=
+ NEW_VALUE=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ export HADOOP_CLASSPATH=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar
+ HADOOP_CLASSPATH=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar
+ set -x
+ replace_conf_dir
+ find /run/cloudera-scm-agent/process/165-hdfs-NAMENODE -type f '!' -path '/run/cloudera-scm-agent/process/165-hdfs-NAMENODE/logs/*' '!' -name '*.log' '!' -name '*.keytab' '!' -name '*jceks' -exec perl -pi -e 's#{{CMF_CONF_DIR}}#/run/cloudera-scm-agent/process/165-hdfs-NAMENODE#g' '{}' ';'
Can't open /run/cloudera-scm-agent/process/165-hdfs-NAMENODE/supervisor.conf: Permission denied.
+ make_scripts_executable
+ find /run/cloudera-scm-agent/process/165-hdfs-NAMENODE -regex '.*\.\(py\|sh\)$' -exec chmod u+x '{}' ';'
+ '[' DATANODE_MAX_LOCKED_MEMORY '!=' '' ']'
+ ulimit -l
+ export HADOOP_IDENT_STRING=hdfs
+ HADOOP_IDENT_STRING=hdfs
+ '[' -n '' ']'
+ acquire_kerberos_tgt hdfs.keytab
+ '[' -z hdfs.keytab ']'
+ '[' -n '' ']'
+ '[' validate-writable-empty-dirs = namenode ']'
+ '[' file-operation = namenode ']'
+ '[' bootstrap = namenode ']'
+ '[' failover = namenode ']'
+ '[' transition-to-active = namenode ']'
+ '[' initializeSharedEdits = namenode ']'
+ '[' initialize-znode = namenode ']'
+ '[' format-namenode = namenode ']'
+ '[' monitor-decommission = namenode ']'
+ '[' jnSyncWait = namenode ']'
+ '[' nnRpcWait = namenode ']'
+ '[' -safemode = '' -a get = '' ']'
+ '[' monitor-upgrade = namenode ']'
+ '[' finalize-upgrade = namenode ']'
+ '[' rolling-upgrade-prepare = namenode ']'
+ '[' rolling-upgrade-finalize = namenode ']'
+ '[' nnDnLiveWait = namenode ']'
+ '[' refresh-datanode = namenode ']'
+ '[' mkdir = namenode ']'
+ '[' nfs3 = namenode ']'
+ '[' namenode = namenode -o secondarynamenode = namenode -o datanode = namenode ']'
+ HADOOP_OPTS='-Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ export 'HADOOP_OPTS=-Dhdfs.audit.logger=INFO,RFAAUDIT -Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ HADOOP_OPTS='-Dhdfs.audit.logger=INFO,RFAAUDIT -Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ '[' namenode = namenode -a rollingUpgrade = '' ']'
+ exec /usr/lib/hadoop-hdfs/bin/hdfs --config /run/cloudera-scm-agent/process/165-hdfs-NAMENODE namenode
Mon Jul 17 02:47:31 EDT 2017
+ source_parcel_environment
+ '[' '!' -z '' ']'
+ locate_cdh_java_home
+ '[' -z '' ']'
+ '[' -z /usr/libexec/bigtop-utils ']'
+ local BIGTOP_DETECT_JAVAHOME=
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/libexec/bigtop-utils/bigtop-detect-javahome ']'
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/libexec/bigtop-utils/../bigtop-detect-javahome ']'
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/lib/bigtop-utils/bigtop-detect-javahome ']'
+ BIGTOP_DETECT_JAVAHOME=/usr/lib/bigtop-utils/bigtop-detect-javahome
+ break
+ '[' -z /usr/lib/bigtop-utils/bigtop-detect-javahome ']'
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
++ BIGTOP_DEFAULTS_DIR=/etc/default
++ '[' -n /etc/default -a -r /etc/default/bigtop-utils ']'
++ . /etc/default/bigtop-utils
++ JAVA6_HOME_CANDIDATES=('/usr/lib/j2sdk1.6-sun' '/usr/lib/jvm/java-6-sun' '/usr/lib/jvm/java-1.6.0-sun-1.6.0' '/usr/lib/jvm/j2sdk1.6-oracle' '/usr/lib/jvm/j2sdk1.6-oracle/jre' '/usr/java/jdk1.6' '/usr/java/jre1.6')
++ OPENJAVA6_HOME_CANDIDATES=('/usr/lib/jvm/java-1.6.0-openjdk' '/usr/lib/jvm/jre-1.6.0-openjdk')
++ JAVA7_HOME_CANDIDATES=('/usr/java/jdk1.7' '/usr/java/jre1.7' '/usr/lib/jvm/j2sdk1.7-oracle' '/usr/lib/jvm/j2sdk1.7-oracle/jre' '/usr/lib/jvm/java-7-oracle')
++ OPENJAVA7_HOME_CANDIDATES=('/usr/lib/jvm/java-1.7.0-openjdk' '/usr/lib/jvm/java-7-openjdk')
++ JAVA8_HOME_CANDIDATES=('/usr/java/jdk1.8' '/usr/java/jre1.8' '/usr/lib/jvm/j2sdk1.8-oracle' '/usr/lib/jvm/j2sdk1.8-oracle/jre' '/usr/lib/jvm/java-8-oracle')
++ OPENJAVA8_HOME_CANDIDATES=('/usr/lib/jvm/java-1.8.0-openjdk' '/usr/lib/jvm/java-8-openjdk')
++ MISCJAVA_HOME_CANDIDATES=('/Library/Java/Home' '/usr/java/default' '/usr/lib/jvm/default-java' '/usr/lib/jvm/java-openjdk' '/usr/lib/jvm/jre-openjdk')
++ case ${BIGTOP_JAVA_MAJOR} in
++ JAVA_HOME_CANDIDATES=(${JAVA7_HOME_CANDIDATES[@]} ${JAVA8_HOME_CANDIDATES[@]} ${MISCJAVA_HOME_CANDIDATES[@]} ${OPENJAVA7_HOME_CANDIDATES[@]} ${OPENJAVA8_HOME_CANDIDATES[@]})
++ '[' -z '' ']'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/java/jdk1.7*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/java/jre1.7*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/j2sdk1.7-oracle*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/j2sdk1.7-oracle/jre*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/java-7-oracle*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd /usr/java/jdk1.8.0_112
++ for candidate in '`ls -rvd ${candidate_regex}* 2>/dev/null`'
++ '[' -e /usr/java/jdk1.8.0_112/bin/java ']'
++ export JAVA_HOME=/usr/java/jdk1.8.0_112
++ JAVA_HOME=/usr/java/jdk1.8.0_112
++ break 2
+ verify_java_home
+ '[' -z /usr/java/jdk1.8.0_112 ']'
+ echo JAVA_HOME=/usr/java/jdk1.8.0_112
+ . /usr/lib64/cmf/service/common/cdh-default-hadoop
++ [[ -z 5 ]]
++ '[' 5 = 3 ']'
++ '[' 5 = -3 ']'
++ '[' 5 -ge 4 ']'
++ export HADOOP_HOME_WARN_SUPPRESS=true
++ HADOOP_HOME_WARN_SUPPRESS=true
++ export HADOOP_PREFIX=/usr/lib/hadoop
++ HADOOP_PREFIX=/usr/lib/hadoop
++ export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
++ HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
++ export HADOOP_CONF_DIR=/run/cloudera-scm-agent/process/165-hdfs-NAMENODE
++ HADOOP_CONF_DIR=/run/cloudera-scm-agent/process/165-hdfs-NAMENODE
++ export HADOOP_COMMON_HOME=/usr/lib/hadoop
++ HADOOP_COMMON_HOME=/usr/lib/hadoop
++ export HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
++ HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
++ export HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
++ HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
++ '[' 5 = 4 ']'
++ '[' 5 = 5 ']'
++ export HADOOP_YARN_HOME=/usr/lib/hadoop-yarn
++ HADOOP_YARN_HOME=/usr/lib/hadoop-yarn
++ replace_pid -Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh
++ sed 's#{{PID}}#4220#g'
++ echo -Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh
+ export 'HADOOP_NAMENODE_OPTS=-Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh'
+ HADOOP_NAMENODE_OPTS='-Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh'
++ replace_pid
++ echo
++ sed 's#{{PID}}#4220#g'
+ export HADOOP_DATANODE_OPTS=
+ HADOOP_DATANODE_OPTS=
++ replace_pid
++ echo
++ sed 's#{{PID}}#4220#g'
+ export HADOOP_SECONDARYNAMENODE_OPTS=
+ HADOOP_SECONDARYNAMENODE_OPTS=
++ replace_pid
++ echo
++ sed 's#{{PID}}#4220#g'
+ export HADOOP_NFS3_OPTS=
+ HADOOP_NFS3_OPTS=
++ replace_pid
++ echo
++ sed 's#{{PID}}#4220#g'
+ export HADOOP_JOURNALNODE_OPTS=
+ HADOOP_JOURNALNODE_OPTS=
+ '[' 5 -ge 4 ']'
+ HDFS_BIN=/usr/lib/hadoop-hdfs/bin/hdfs
+ export 'HADOOP_OPTS=-Djava.net.preferIPv4Stack=true '
+ HADOOP_OPTS='-Djava.net.preferIPv4Stack=true '
+ echo 'using /usr/java/jdk1.8.0_112 as JAVA_HOME'
+ echo 'using 5 as CDH_VERSION'
+ echo 'using /run/cloudera-scm-agent/process/165-hdfs-NAMENODE as CONF_DIR'
+ echo 'using  as SECURE_USER'
+ echo 'using  as SECURE_GROUP'
+ set_hadoop_classpath
+ set_classpath_in_var HADOOP_CLASSPATH
+ '[' -z HADOOP_CLASSPATH ']'
+ [[ -n /usr/share/cmf ]]
++ tr '\n' :
++ find /usr/share/cmf/lib/plugins -maxdepth 1 -name '*.jar'
+ ADD_TO_CP=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:
+ [[ -n navigator/cdh57 ]]
+ for DIR in '$CM_ADD_TO_CP_DIRS'
++ find /usr/share/cmf/lib/plugins/navigator/cdh57 -maxdepth 1 -name '*.jar'
++ tr '\n' :
+ PLUGIN=/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ ADD_TO_CP=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ eval 'OLD_VALUE=$HADOOP_CLASSPATH'
++ OLD_VALUE=
+ NEW_VALUE=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ export HADOOP_CLASSPATH=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar
+ HADOOP_CLASSPATH=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar
+ set -x
+ replace_conf_dir
+ find /run/cloudera-scm-agent/process/165-hdfs-NAMENODE -type f '!' -path '/run/cloudera-scm-agent/process/165-hdfs-NAMENODE/logs/*' '!' -name '*.log' '!' -name '*.keytab' '!' -name '*jceks' -exec perl -pi -e 's#{{CMF_CONF_DIR}}#/run/cloudera-scm-agent/process/165-hdfs-NAMENODE#g' '{}' ';'
Can't open /run/cloudera-scm-agent/process/165-hdfs-NAMENODE/supervisor.conf: Permission denied.
+ make_scripts_executable
+ find /run/cloudera-scm-agent/process/165-hdfs-NAMENODE -regex '.*\.\(py\|sh\)$' -exec chmod u+x '{}' ';'
+ '[' DATANODE_MAX_LOCKED_MEMORY '!=' '' ']'
+ ulimit -l
+ export HADOOP_IDENT_STRING=hdfs
+ HADOOP_IDENT_STRING=hdfs
+ '[' -n '' ']'
+ acquire_kerberos_tgt hdfs.keytab
+ '[' -z hdfs.keytab ']'
+ '[' -n '' ']'
+ '[' validate-writable-empty-dirs = namenode ']'
+ '[' file-operation = namenode ']'
+ '[' bootstrap = namenode ']'
+ '[' failover = namenode ']'
+ '[' transition-to-active = namenode ']'
+ '[' initializeSharedEdits = namenode ']'
+ '[' initialize-znode = namenode ']'
+ '[' format-namenode = namenode ']'
+ '[' monitor-decommission = namenode ']'
+ '[' jnSyncWait = namenode ']'
+ '[' nnRpcWait = namenode ']'
+ '[' -safemode = '' -a get = '' ']'
+ '[' monitor-upgrade = namenode ']'
+ '[' finalize-upgrade = namenode ']'
+ '[' rolling-upgrade-prepare = namenode ']'
+ '[' rolling-upgrade-finalize = namenode ']'
+ '[' nnDnLiveWait = namenode ']'
+ '[' refresh-datanode = namenode ']'
+ '[' mkdir = namenode ']'
+ '[' nfs3 = namenode ']'
+ '[' namenode = namenode -o secondarynamenode = namenode -o datanode = namenode ']'
+ HADOOP_OPTS='-Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ export 'HADOOP_OPTS=-Dhdfs.audit.logger=INFO,RFAAUDIT -Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ HADOOP_OPTS='-Dhdfs.audit.logger=INFO,RFAAUDIT -Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ '[' namenode = namenode -a rollingUpgrade = '' ']'
+ exec /usr/lib/hadoop-hdfs/bin/hdfs --config /run/cloudera-scm-agent/process/165-hdfs-NAMENODE namenode
Mon Jul 17 02:47:39 EDT 2017
+ source_parcel_environment
+ '[' '!' -z '' ']'
+ locate_cdh_java_home
+ '[' -z '' ']'
+ '[' -z /usr/libexec/bigtop-utils ']'
+ local BIGTOP_DETECT_JAVAHOME=
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/libexec/bigtop-utils/bigtop-detect-javahome ']'
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/libexec/bigtop-utils/../bigtop-detect-javahome ']'
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/lib/bigtop-utils/bigtop-detect-javahome ']'
+ BIGTOP_DETECT_JAVAHOME=/usr/lib/bigtop-utils/bigtop-detect-javahome
+ break
+ '[' -z /usr/lib/bigtop-utils/bigtop-detect-javahome ']'
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
++ BIGTOP_DEFAULTS_DIR=/etc/default
++ '[' -n /etc/default -a -r /etc/default/bigtop-utils ']'
++ . /etc/default/bigtop-utils
++ JAVA6_HOME_CANDIDATES=('/usr/lib/j2sdk1.6-sun' '/usr/lib/jvm/java-6-sun' '/usr/lib/jvm/java-1.6.0-sun-1.6.0' '/usr/lib/jvm/j2sdk1.6-oracle' '/usr/lib/jvm/j2sdk1.6-oracle/jre' '/usr/java/jdk1.6' '/usr/java/jre1.6')
++ OPENJAVA6_HOME_CANDIDATES=('/usr/lib/jvm/java-1.6.0-openjdk' '/usr/lib/jvm/jre-1.6.0-openjdk')
++ JAVA7_HOME_CANDIDATES=('/usr/java/jdk1.7' '/usr/java/jre1.7' '/usr/lib/jvm/j2sdk1.7-oracle' '/usr/lib/jvm/j2sdk1.7-oracle/jre' '/usr/lib/jvm/java-7-oracle')
++ OPENJAVA7_HOME_CANDIDATES=('/usr/lib/jvm/java-1.7.0-openjdk' '/usr/lib/jvm/java-7-openjdk')
++ JAVA8_HOME_CANDIDATES=('/usr/java/jdk1.8' '/usr/java/jre1.8' '/usr/lib/jvm/j2sdk1.8-oracle' '/usr/lib/jvm/j2sdk1.8-oracle/jre' '/usr/lib/jvm/java-8-oracle')
++ OPENJAVA8_HOME_CANDIDATES=('/usr/lib/jvm/java-1.8.0-openjdk' '/usr/lib/jvm/java-8-openjdk')
++ MISCJAVA_HOME_CANDIDATES=('/Library/Java/Home' '/usr/java/default' '/usr/lib/jvm/default-java' '/usr/lib/jvm/java-openjdk' '/usr/lib/jvm/jre-openjdk')
++ case ${BIGTOP_JAVA_MAJOR} in
++ JAVA_HOME_CANDIDATES=(${JAVA7_HOME_CANDIDATES[@]} ${JAVA8_HOME_CANDIDATES[@]} ${MISCJAVA_HOME_CANDIDATES[@]} ${OPENJAVA7_HOME_CANDIDATES[@]} ${OPENJAVA8_HOME_CANDIDATES[@]})
++ '[' -z '' ']'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/java/jdk1.7*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/java/jre1.7*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/j2sdk1.7-oracle*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/j2sdk1.7-oracle/jre*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/java-7-oracle*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd /usr/java/jdk1.8.0_112
++ for candidate in '`ls -rvd ${candidate_regex}* 2>/dev/null`'
++ '[' -e /usr/java/jdk1.8.0_112/bin/java ']'
++ export JAVA_HOME=/usr/java/jdk1.8.0_112
++ JAVA_HOME=/usr/java/jdk1.8.0_112
++ break 2
+ verify_java_home
+ '[' -z /usr/java/jdk1.8.0_112 ']'
+ echo JAVA_HOME=/usr/java/jdk1.8.0_112
+ . /usr/lib64/cmf/service/common/cdh-default-hadoop
++ [[ -z 5 ]]
++ '[' 5 = 3 ']'
++ '[' 5 = -3 ']'
++ '[' 5 -ge 4 ']'
++ export HADOOP_HOME_WARN_SUPPRESS=true
++ HADOOP_HOME_WARN_SUPPRESS=true
++ export HADOOP_PREFIX=/usr/lib/hadoop
++ HADOOP_PREFIX=/usr/lib/hadoop
++ export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
++ HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
++ export HADOOP_CONF_DIR=/run/cloudera-scm-agent/process/165-hdfs-NAMENODE
++ HADOOP_CONF_DIR=/run/cloudera-scm-agent/process/165-hdfs-NAMENODE
++ export HADOOP_COMMON_HOME=/usr/lib/hadoop
++ HADOOP_COMMON_HOME=/usr/lib/hadoop
++ export HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
++ HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
++ export HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
++ HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
++ '[' 5 = 4 ']'
++ '[' 5 = 5 ']'
++ export HADOOP_YARN_HOME=/usr/lib/hadoop-yarn
++ HADOOP_YARN_HOME=/usr/lib/hadoop-yarn
++ replace_pid -Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh
++ echo -Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh
++ sed 's#{{PID}}#4328#g'
+ export 'HADOOP_NAMENODE_OPTS=-Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh'
+ HADOOP_NAMENODE_OPTS='-Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh'
++ replace_pid
++ sed 's#{{PID}}#4328#g'
++ echo
+ export HADOOP_DATANODE_OPTS=
+ HADOOP_DATANODE_OPTS=
++ replace_pid
++ sed 's#{{PID}}#4328#g'
++ echo
+ export HADOOP_SECONDARYNAMENODE_OPTS=
+ HADOOP_SECONDARYNAMENODE_OPTS=
++ replace_pid
++ echo
++ sed 's#{{PID}}#4328#g'
+ export HADOOP_NFS3_OPTS=
+ HADOOP_NFS3_OPTS=
++ replace_pid
++ sed 's#{{PID}}#4328#g'
++ echo
+ export HADOOP_JOURNALNODE_OPTS=
+ HADOOP_JOURNALNODE_OPTS=
+ '[' 5 -ge 4 ']'
+ HDFS_BIN=/usr/lib/hadoop-hdfs/bin/hdfs
+ export 'HADOOP_OPTS=-Djava.net.preferIPv4Stack=true '
+ HADOOP_OPTS='-Djava.net.preferIPv4Stack=true '
+ echo 'using /usr/java/jdk1.8.0_112 as JAVA_HOME'
+ echo 'using 5 as CDH_VERSION'
+ echo 'using /run/cloudera-scm-agent/process/165-hdfs-NAMENODE as CONF_DIR'
+ echo 'using  as SECURE_USER'
+ echo 'using  as SECURE_GROUP'
+ set_hadoop_classpath
+ set_classpath_in_var HADOOP_CLASSPATH
+ '[' -z HADOOP_CLASSPATH ']'
+ [[ -n /usr/share/cmf ]]
++ tr '\n' :
++ find /usr/share/cmf/lib/plugins -maxdepth 1 -name '*.jar'
+ ADD_TO_CP=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:
+ [[ -n navigator/cdh57 ]]
+ for DIR in '$CM_ADD_TO_CP_DIRS'
++ tr '\n' :
++ find /usr/share/cmf/lib/plugins/navigator/cdh57 -maxdepth 1 -name '*.jar'
+ PLUGIN=/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ ADD_TO_CP=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ eval 'OLD_VALUE=$HADOOP_CLASSPATH'
++ OLD_VALUE=
+ NEW_VALUE=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ export HADOOP_CLASSPATH=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar
+ HADOOP_CLASSPATH=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar
+ set -x
+ replace_conf_dir
+ find /run/cloudera-scm-agent/process/165-hdfs-NAMENODE -type f '!' -path '/run/cloudera-scm-agent/process/165-hdfs-NAMENODE/logs/*' '!' -name '*.log' '!' -name '*.keytab' '!' -name '*jceks' -exec perl -pi -e 's#{{CMF_CONF_DIR}}#/run/cloudera-scm-agent/process/165-hdfs-NAMENODE#g' '{}' ';'
Can't open /run/cloudera-scm-agent/process/165-hdfs-NAMENODE/supervisor.conf: Permission denied.
+ make_scripts_executable
+ find /run/cloudera-scm-agent/process/165-hdfs-NAMENODE -regex '.*\.\(py\|sh\)$' -exec chmod u+x '{}' ';'
+ '[' DATANODE_MAX_LOCKED_MEMORY '!=' '' ']'
+ ulimit -l
+ export HADOOP_IDENT_STRING=hdfs
+ HADOOP_IDENT_STRING=hdfs
+ '[' -n '' ']'
+ acquire_kerberos_tgt hdfs.keytab
+ '[' -z hdfs.keytab ']'
+ '[' -n '' ']'
+ '[' validate-writable-empty-dirs = namenode ']'
+ '[' file-operation = namenode ']'
+ '[' bootstrap = namenode ']'
+ '[' failover = namenode ']'
+ '[' transition-to-active = namenode ']'
+ '[' initializeSharedEdits = namenode ']'
+ '[' initialize-znode = namenode ']'
+ '[' format-namenode = namenode ']'
+ '[' monitor-decommission = namenode ']'
+ '[' jnSyncWait = namenode ']'
+ '[' nnRpcWait = namenode ']'
+ '[' -safemode = '' -a get = '' ']'
+ '[' monitor-upgrade = namenode ']'
+ '[' finalize-upgrade = namenode ']'
+ '[' rolling-upgrade-prepare = namenode ']'
+ '[' rolling-upgrade-finalize = namenode ']'
+ '[' nnDnLiveWait = namenode ']'
+ '[' refresh-datanode = namenode ']'
+ '[' mkdir = namenode ']'
+ '[' nfs3 = namenode ']'
+ '[' namenode = namenode -o secondarynamenode = namenode -o datanode = namenode ']'
+ HADOOP_OPTS='-Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ export 'HADOOP_OPTS=-Dhdfs.audit.logger=INFO,RFAAUDIT -Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ HADOOP_OPTS='-Dhdfs.audit.logger=INFO,RFAAUDIT -Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ '[' namenode = namenode -a rollingUpgrade = '' ']'
+ exec /usr/lib/hadoop-hdfs/bin/hdfs --config /run/cloudera-scm-agent/process/165-hdfs-NAMENODE namenode
Mon Jul 17 02:47:48 EDT 2017
+ source_parcel_environment
+ '[' '!' -z '' ']'
+ locate_cdh_java_home
+ '[' -z '' ']'
+ '[' -z /usr/libexec/bigtop-utils ']'
+ local BIGTOP_DETECT_JAVAHOME=
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/libexec/bigtop-utils/bigtop-detect-javahome ']'
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/libexec/bigtop-utils/../bigtop-detect-javahome ']'
+ for candidate in '"${JSVC_HOME}"' '"${JSVC_HOME}/.."' '"/usr/lib/bigtop-utils"' '"/usr/libexec"'
+ '[' -e /usr/lib/bigtop-utils/bigtop-detect-javahome ']'
+ BIGTOP_DETECT_JAVAHOME=/usr/lib/bigtop-utils/bigtop-detect-javahome
+ break
+ '[' -z /usr/lib/bigtop-utils/bigtop-detect-javahome ']'
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
++ BIGTOP_DEFAULTS_DIR=/etc/default
++ '[' -n /etc/default -a -r /etc/default/bigtop-utils ']'
++ . /etc/default/bigtop-utils
++ JAVA6_HOME_CANDIDATES=('/usr/lib/j2sdk1.6-sun' '/usr/lib/jvm/java-6-sun' '/usr/lib/jvm/java-1.6.0-sun-1.6.0' '/usr/lib/jvm/j2sdk1.6-oracle' '/usr/lib/jvm/j2sdk1.6-oracle/jre' '/usr/java/jdk1.6' '/usr/java/jre1.6')
++ OPENJAVA6_HOME_CANDIDATES=('/usr/lib/jvm/java-1.6.0-openjdk' '/usr/lib/jvm/jre-1.6.0-openjdk')
++ JAVA7_HOME_CANDIDATES=('/usr/java/jdk1.7' '/usr/java/jre1.7' '/usr/lib/jvm/j2sdk1.7-oracle' '/usr/lib/jvm/j2sdk1.7-oracle/jre' '/usr/lib/jvm/java-7-oracle')
++ OPENJAVA7_HOME_CANDIDATES=('/usr/lib/jvm/java-1.7.0-openjdk' '/usr/lib/jvm/java-7-openjdk')
++ JAVA8_HOME_CANDIDATES=('/usr/java/jdk1.8' '/usr/java/jre1.8' '/usr/lib/jvm/j2sdk1.8-oracle' '/usr/lib/jvm/j2sdk1.8-oracle/jre' '/usr/lib/jvm/java-8-oracle')
++ OPENJAVA8_HOME_CANDIDATES=('/usr/lib/jvm/java-1.8.0-openjdk' '/usr/lib/jvm/java-8-openjdk')
++ MISCJAVA_HOME_CANDIDATES=('/Library/Java/Home' '/usr/java/default' '/usr/lib/jvm/default-java' '/usr/lib/jvm/java-openjdk' '/usr/lib/jvm/jre-openjdk')
++ case ${BIGTOP_JAVA_MAJOR} in
++ JAVA_HOME_CANDIDATES=(${JAVA7_HOME_CANDIDATES[@]} ${JAVA8_HOME_CANDIDATES[@]} ${MISCJAVA_HOME_CANDIDATES[@]} ${OPENJAVA7_HOME_CANDIDATES[@]} ${OPENJAVA8_HOME_CANDIDATES[@]})
++ '[' -z '' ']'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/java/jdk1.7*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/java/jre1.7*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/j2sdk1.7-oracle*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/j2sdk1.7-oracle/jre*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd '/usr/lib/jvm/java-7-oracle*'
++ for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}'
+++ ls -rvd /usr/java/jdk1.8.0_112
++ for candidate in '`ls -rvd ${candidate_regex}* 2>/dev/null`'
++ '[' -e /usr/java/jdk1.8.0_112/bin/java ']'
++ export JAVA_HOME=/usr/java/jdk1.8.0_112
++ JAVA_HOME=/usr/java/jdk1.8.0_112
++ break 2
+ verify_java_home
+ '[' -z /usr/java/jdk1.8.0_112 ']'
+ echo JAVA_HOME=/usr/java/jdk1.8.0_112
+ . /usr/lib64/cmf/service/common/cdh-default-hadoop
++ [[ -z 5 ]]
++ '[' 5 = 3 ']'
++ '[' 5 = -3 ']'
++ '[' 5 -ge 4 ']'
++ export HADOOP_HOME_WARN_SUPPRESS=true
++ HADOOP_HOME_WARN_SUPPRESS=true
++ export HADOOP_PREFIX=/usr/lib/hadoop
++ HADOOP_PREFIX=/usr/lib/hadoop
++ export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
++ HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
++ export HADOOP_CONF_DIR=/run/cloudera-scm-agent/process/165-hdfs-NAMENODE
++ HADOOP_CONF_DIR=/run/cloudera-scm-agent/process/165-hdfs-NAMENODE
++ export HADOOP_COMMON_HOME=/usr/lib/hadoop
++ HADOOP_COMMON_HOME=/usr/lib/hadoop
++ export HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
++ HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
++ export HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
++ HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
++ '[' 5 = 4 ']'
++ '[' 5 = 5 ']'
++ export HADOOP_YARN_HOME=/usr/lib/hadoop-yarn
++ HADOOP_YARN_HOME=/usr/lib/hadoop-yarn
++ replace_pid -Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh
++ sed 's#{{PID}}#4438#g'
++ echo -Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh
+ export 'HADOOP_NAMENODE_OPTS=-Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh'
+ HADOOP_NAMENODE_OPTS='-Xms395313152 -Xmx395313152 -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=70 -XX:+CMSParallelRemarkEnabled -XX:OnOutOfMemoryError=/usr/lib64/cmf/service/common/killparent.sh'
++ replace_pid
++ echo
++ sed 's#{{PID}}#4438#g'
+ export HADOOP_DATANODE_OPTS=
+ HADOOP_DATANODE_OPTS=
++ replace_pid
++ sed 's#{{PID}}#4438#g'
++ echo
+ export HADOOP_SECONDARYNAMENODE_OPTS=
+ HADOOP_SECONDARYNAMENODE_OPTS=
++ replace_pid
++ sed 's#{{PID}}#4438#g'
++ echo
+ export HADOOP_NFS3_OPTS=
+ HADOOP_NFS3_OPTS=
++ replace_pid
++ sed 's#{{PID}}#4438#g'
++ echo
+ export HADOOP_JOURNALNODE_OPTS=
+ HADOOP_JOURNALNODE_OPTS=
+ '[' 5 -ge 4 ']'
+ HDFS_BIN=/usr/lib/hadoop-hdfs/bin/hdfs
+ export 'HADOOP_OPTS=-Djava.net.preferIPv4Stack=true '
+ HADOOP_OPTS='-Djava.net.preferIPv4Stack=true '
+ echo 'using /usr/java/jdk1.8.0_112 as JAVA_HOME'
+ echo 'using 5 as CDH_VERSION'
+ echo 'using /run/cloudera-scm-agent/process/165-hdfs-NAMENODE as CONF_DIR'
+ echo 'using  as SECURE_USER'
+ echo 'using  as SECURE_GROUP'
+ set_hadoop_classpath
+ set_classpath_in_var HADOOP_CLASSPATH
+ '[' -z HADOOP_CLASSPATH ']'
+ [[ -n /usr/share/cmf ]]
++ tr '\n' :
++ find /usr/share/cmf/lib/plugins -maxdepth 1 -name '*.jar'
+ ADD_TO_CP=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:
+ [[ -n navigator/cdh57 ]]
+ for DIR in '$CM_ADD_TO_CP_DIRS'
++ tr '\n' :
++ find /usr/share/cmf/lib/plugins/navigator/cdh57 -maxdepth 1 -name '*.jar'
+ PLUGIN=/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ ADD_TO_CP=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ eval 'OLD_VALUE=$HADOOP_CLASSPATH'
++ OLD_VALUE=
+ NEW_VALUE=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar:
+ export HADOOP_CLASSPATH=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar
+ HADOOP_CLASSPATH=/usr/share/cmf/lib/plugins/event-publish-5.8.3-shaded.jar:/usr/share/cmf/lib/plugins/tt-instrumentation-5.8.3.jar:/usr/share/cmf/lib/plugins/navigator/cdh57/audit-plugin-cdh57-2.7.3-shaded.jar
+ set -x
+ replace_conf_dir
+ find /run/cloudera-scm-agent/process/165-hdfs-NAMENODE -type f '!' -path '/run/cloudera-scm-agent/process/165-hdfs-NAMENODE/logs/*' '!' -name '*.log' '!' -name '*.keytab' '!' -name '*jceks' -exec perl -pi -e 's#{{CMF_CONF_DIR}}#/run/cloudera-scm-agent/process/165-hdfs-NAMENODE#g' '{}' ';'
Can't open /run/cloudera-scm-agent/process/165-hdfs-NAMENODE/supervisor.conf: Permission denied.
+ make_scripts_executable
+ find /run/cloudera-scm-agent/process/165-hdfs-NAMENODE -regex '.*\.\(py\|sh\)$' -exec chmod u+x '{}' ';'
+ '[' DATANODE_MAX_LOCKED_MEMORY '!=' '' ']'
+ ulimit -l
+ export HADOOP_IDENT_STRING=hdfs
+ HADOOP_IDENT_STRING=hdfs
+ '[' -n '' ']'
+ acquire_kerberos_tgt hdfs.keytab
+ '[' -z hdfs.keytab ']'
+ '[' -n '' ']'
+ '[' validate-writable-empty-dirs = namenode ']'
+ '[' file-operation = namenode ']'
+ '[' bootstrap = namenode ']'
+ '[' failover = namenode ']'
+ '[' transition-to-active = namenode ']'
+ '[' initializeSharedEdits = namenode ']'
+ '[' initialize-znode = namenode ']'
+ '[' format-namenode = namenode ']'
+ '[' monitor-decommission = namenode ']'
+ '[' jnSyncWait = namenode ']'
+ '[' nnRpcWait = namenode ']'
+ '[' -safemode = '' -a get = '' ']'
+ '[' monitor-upgrade = namenode ']'
+ '[' finalize-upgrade = namenode ']'
+ '[' rolling-upgrade-prepare = namenode ']'
+ '[' rolling-upgrade-finalize = namenode ']'
+ '[' nnDnLiveWait = namenode ']'
+ '[' refresh-datanode = namenode ']'
+ '[' mkdir = namenode ']'
+ '[' nfs3 = namenode ']'
+ '[' namenode = namenode -o secondarynamenode = namenode -o datanode = namenode ']'
+ HADOOP_OPTS='-Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ export 'HADOOP_OPTS=-Dhdfs.audit.logger=INFO,RFAAUDIT -Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ HADOOP_OPTS='-Dhdfs.audit.logger=INFO,RFAAUDIT -Dsecurity.audit.logger=INFO,RFAS -Djava.net.preferIPv4Stack=true '
+ '[' namenode = namenode -a rollingUpgrade = '' ']'
+ exec /usr/lib/hadoop-hdfs/bin/hdfs --config /run/cloudera-scm-agent/process/165-hdfs-NAMENODE namenode

 

Posts: 566
Topics: 3
Kudos: 79
Solutions: 52
Registered: 08-16-2016

Re: Unable to start the NameNodes after enabling HDFS High Availability

There isn't anything noteworthy in the stdout log. Take a look at either the stderr log or the Hadoop logs under /var/log/hadoop-hdfs/.
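
For example, assuming a default Cloudera Manager / CDH layout, something along these lines should surface the real failure. The process directory name 165-hdfs-NAMENODE is taken from the trace you pasted, and the exact log file names under /var/log/hadoop-hdfs/ will differ on your hosts, so adjust as needed:

# stderr captured by the Cloudera Manager agent for this NameNode role instance
sudo tail -n 100 /run/cloudera-scm-agent/process/165-hdfs-NAMENODE/logs/stderr.log

# find the NameNode's own log file in the standard CDH log directory,
# then pull the most recent errors out of it
sudo ls -lt /var/log/hadoop-hdfs/
sudo grep -iE 'ERROR|FATAL|Exception' /var/log/hadoop-hdfs/*NAMENODE*.log.out | tail -n 50

The last ERROR/FATAL entries before the process exits usually point to the actual cause.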