stderr:
Traceback (most recent call last):
  File "/var/lib/ambari-agent/cache/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py", line 348, in <module>
    NameNode().execute()
  File "/usr/lib/ambari-agent/lib/resource_management/libraries/script/script.py", line 375, in execute
    method(env)
  File "/usr/lib/ambari-agent/lib/resource_management/libraries/script/script.py", line 978, in restart
    self.start(env, upgrade_type=upgrade_type)
  File "/var/lib/ambari-agent/cache/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py", line 90, in start
    upgrade_suspended=params.upgrade_suspended, env=env)
  File "/usr/lib/ambari-agent/lib/ambari_commons/os_family_impl.py", line 89, in thunk
    return fn(*args, **kwargs)
  File "/var/lib/ambari-agent/cache/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py", line 234, in namenode
    create_hdfs_directories()
  File "/var/lib/ambari-agent/cache/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py", line 301, in create_hdfs_directories
    mode=0777,
  File "/usr/lib/ambari-agent/lib/resource_management/core/base.py", line 166, in __init__
    self.env.run()
  File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 160, in run
    self.run_action(resource, action)
  File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 124, in run_action
    provider_action()
  File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 606, in action_create_on_execute
    self.action_delayed("create")
  File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 603, in action_delayed
    self.get_hdfs_resource_executor().action_delayed(action_name, self)
  File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 330, in action_delayed
    self._assert_valid()
  File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 289, in _assert_valid
    self.target_status = self._get_file_status(target)
  File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 432, in _get_file_status
    list_status = self.util.run_command(target, 'GETFILESTATUS', method='GET', ignore_status_codes=['404'], assertable_result=False)
  File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 177, in run_command
    return self._run_command(*args, **kwargs)
  File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 237, in _run_command
    _, out, err = get_user_call_output(cmd, user=self.run_user, logoutput=self.logoutput, quiet=False)
  File "/usr/lib/ambari-agent/lib/resource_management/libraries/functions/get_user_call_output.py", line 61, in get_user_call_output
    raise ExecutionFailed(err_msg, code, files_output[0], files_output[1])
resource_management.core.exceptions.ExecutionFailed: Execution of 'curl -sS -L -w '%{http_code}' -X GET -k 'https://lhdcsi04v.production.local:50470/webhdfs/v1/tmp?op=GETFILESTATUS&user.name=hdfs' 1>/tmp/tmpc8gtz_ 2>/tmp/tmp4HCXoq' returned 35.
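The root failure here is curl exit code 35, an SSL/TLS connect error: the NameNode's HTTPS port (50470) accepted the TCP connection but ended it during the TLS handshake ("Encountered end of file", HTTP code 000), so the WebHDFS GETFILESTATUS request never received a response. One way to confirm what that port is actually speaking is a raw TLS probe; the following is a minimal sketch using only the Python standard library, with host and port taken from the failing curl command above (it is a diagnostic aid, not part of Ambari):

import socket
import ssl

HOST, PORT = "lhdcsi04v.production.local", 50470  # from the failing curl command

ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE  # same effect as curl's -k flag

try:
    with socket.create_connection((HOST, PORT), timeout=10) as raw:
        with ctx.wrap_socket(raw, server_hostname=HOST) as tls:
            print("TLS handshake OK:", tls.version())
except OSError as exc:  # ssl.SSLError and ConnectionError both derive from OSError
    # An EOF during the handshake reproduces curl's exit code 35: the port
    # accepts TCP connections but nothing on it is speaking TLS.
    print("TLS probe failed:", exc)

If this probe also fails with an EOF, the port is open but not serving TLS; plausible causes include the NameNode's HTTPS listener not having come up, or the dfs.http.policy / ssl-server.xml settings (and the keystore they reference) not matching what Ambari assumes.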
curl: (35) Encountered end of file 000

stdout:
2019-01-05 07:47:45,341 - Stack Feature Version Info: Cluster Stack=2.6, Command Stack=None, Command Version=2.6.5.0-292 -> 2.6.5.0-292
2019-01-05 07:47:45,367 - Using hadoop conf dir: /usr/hdp/2.6.5.0-292/hadoop/conf
2019-01-05 07:47:45,809 - Stack Feature Version Info: Cluster Stack=2.6, Command Stack=None, Command Version=2.6.5.0-292 -> 2.6.5.0-292
2019-01-05 07:47:45,818 - Using hadoop conf dir: /usr/hdp/2.6.5.0-292/hadoop/conf
2019-01-05 07:47:45,820 - Group['livy'] {}
2019-01-05 07:47:45,888 - Group['spark'] {}
2019-01-05 07:47:45,922 - Group['ranger'] {}
2019-01-05 07:47:45,994 - Group['hdfs'] {}
2019-01-05 07:47:46,029 - Group['zeppelin'] {}
2019-01-05 07:47:46,099 - Group['hadoop'] {}
2019-01-05 07:47:46,102 - Group['users'] {}
2019-01-05 07:47:46,103 - Group['knox'] {}
2019-01-05 07:47:46,168 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': None}
2019-01-05 07:48:02,682 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': None}
2019-01-05 07:48:02,777 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': None}
2019-01-05 07:48:02,871 - User['oozie'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users'], 'uid': None}
2019-01-05 07:48:02,903 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': None}
2019-01-05 07:48:03,000 - User['ranger'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'ranger'], 'uid': None}
2019-01-05 07:48:03,076 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users'], 'uid': None}
2019-01-05 07:48:03,191 - User['zeppelin'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'zeppelin', u'hadoop'], 'uid': None}
2019-01-05 07:48:03,254 - User['livy'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': None}
2019-01-05 07:48:03,323 - User['spark'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': None}
2019-01-05 07:48:03,360 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users'], 'uid': None}
2019-01-05 07:48:03,396 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hdfs'], 'uid': None}
2019-01-05 07:48:03,432 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': None}
2019-01-05 07:48:03,528 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': None}
2019-01-05 07:48:03,626 - User['knox'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': None}
2019-01-05 07:48:03,704 - User['hcat'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': None}
2019-01-05 07:48:03,799 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2019-01-05 07:48:03,806 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2019-01-05 07:48:03,856 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] due to not_if
2019-01-05 07:48:03,858 - Group['hdfs'] {}
2019-01-05 07:48:03,860 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': ['hdfs', u'hdfs']}
2019-01-05 07:48:03,889 - FS Type:
2019-01-05 07:48:03,890 - Directory['/etc/hadoop'] {'mode': 0755}
2019-01-05 07:48:03,948 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2019-01-05 07:48:03,954 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2019-01-05 07:48:04,017 - Execute[('setenforce', '0')] {'not_if': '(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)', 'sudo': True, 'only_if': 'test -f /selinux/enforce'}
2019-01-05 07:48:04,058 - Skipping Execute[('setenforce', '0')] due to only_if
2019-01-05 07:48:04,060 - Directory['/var/log/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'hadoop', 'mode': 0775, 'cd_access': 'a'}
2019-01-05 07:48:04,075 - Directory['/var/run/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'root', 'cd_access': 'a'}
2019-01-05 07:48:04,077 - Directory['/tmp/hadoop-hdfs'] {'owner': 'hdfs', 'create_parents': True, 'cd_access': 'a'}
2019-01-05 07:48:04,096 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/commons-logging.properties'] {'content': Template('commons-logging.properties.j2'), 'owner': 'hdfs'}
2019-01-05 07:48:04,104 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/health_check'] {'content': Template('health_check.j2'), 'owner': 'hdfs'}
2019-01-05 07:48:04,129 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/log4j.properties'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop', 'mode': 0644}
2019-01-05 07:48:04,170 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/hadoop-metrics2.properties'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2019-01-05 07:48:04,175 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/task-log4j.properties'] {'content': StaticFile('task-log4j.properties'), 'mode': 0755}
2019-01-05 07:48:04,180 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/configuration.xsl'] {'owner': 'hdfs', 'group': 'hadoop'}
2019-01-05 07:48:04,198 - File['/etc/hadoop/conf/topology_mappings.data'] {'owner': 'hdfs', 'content': Template('topology_mappings.data.j2'), 'only_if': 'test -d /etc/hadoop/conf', 'group': 'hadoop', 'mode': 0644}
2019-01-05 07:48:04,221 - File['/etc/hadoop/conf/topology_script.py'] {'content': StaticFile('topology_script.py'), 'only_if': 'test -d /etc/hadoop/conf', 'mode': 0755}
2019-01-05 07:48:05,349 - Using hadoop conf dir: /usr/hdp/2.6.5.0-292/hadoop/conf
2019-01-05 07:48:05,350 - Stack Feature Version Info: Cluster Stack=2.6, Command Stack=None, Command Version=2.6.5.0-292 -> 2.6.5.0-292
2019-01-05 07:48:05,384 - Using hadoop conf dir: /usr/hdp/2.6.5.0-292/hadoop/conf
2019-01-05 07:48:05,410 - Execute['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'ulimit -c unlimited ; /usr/hdp/2.6.5.0-292/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/2.6.5.0-292/hadoop/conf stop namenode''] {'environment': {'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.6.5.0-292/hadoop/libexec'}, 'only_if': 'ambari-sudo.sh -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid'}
2019-01-05 07:48:10,941 - call['! ( ambari-sudo.sh -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid )'] {'tries': 6, 'try_sleep': 10, 'env': {'PATH': '/usr/sbin:/sbin:/usr/lib/ambari-server/*:/usr/sbin:/sbin:/usr/lib/ambari-server/*:/usr/lib64/qt-3.3/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/opt/local/bin:/opt/local/sbin:/var/lib/ambari-agent:/var/lib/ambari-agent', 'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.6.5.0-292/hadoop/libexec'}}
2019-01-05 07:48:10,967 - call returned (0, '')
2019-01-05 07:48:10,969 - File['/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid'] {'action': ['delete']}
2019-01-05 07:48:10,974 - Pid file /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid is empty or does not exist
2019-01-05 07:48:10,985 - Directory['/etc/security/limits.d'] {'owner': 'root', 'create_parents': True, 'group': 'root'}
2019-01-05 07:48:11,005 - File['/etc/security/limits.d/hdfs.conf'] {'content': Template('hdfs.conf.j2'), 'owner': 'root', 'group': 'root', 'mode': 0644}
2019-01-05 07:48:11,008 - XmlConfig['hadoop-policy.xml'] {'owner': 'hdfs', 'group': 'hadoop', 'conf_dir': '/usr/hdp/2.6.5.0-292/hadoop/conf', 'configuration_attributes': {}, 'configurations': ...}
2019-01-05 07:48:11,042 - Generating config: /usr/hdp/2.6.5.0-292/hadoop/conf/hadoop-policy.xml
2019-01-05 07:48:11,042 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/hadoop-policy.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': None, 'encoding': 'UTF-8'}
2019-01-05 07:48:11,074 - XmlConfig['ssl-client.xml'] {'owner': 'hdfs', 'group': 'hadoop', 'conf_dir': '/usr/hdp/2.6.5.0-292/hadoop/conf', 'configuration_attributes': {}, 'configurations': ...}
2019-01-05 07:48:11,094 - Generating config: /usr/hdp/2.6.5.0-292/hadoop/conf/ssl-client.xml
2019-01-05 07:48:11,094 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/ssl-client.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': None, 'encoding': 'UTF-8'}
2019-01-05 07:48:11,105 - Directory['/usr/hdp/2.6.5.0-292/hadoop/conf/secure'] {'owner': 'root', 'create_parents': True, 'group': 'hadoop', 'cd_access': 'a'}
2019-01-05 07:48:11,110 - XmlConfig['ssl-client.xml'] {'owner': 'hdfs', 'group': 'hadoop', 'conf_dir': '/usr/hdp/2.6.5.0-292/hadoop/conf/secure', 'configuration_attributes': {}, 'configurations': ...}
2019-01-05 07:48:11,136 - Generating config: /usr/hdp/2.6.5.0-292/hadoop/conf/secure/ssl-client.xml
2019-01-05 07:48:11,138 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/secure/ssl-client.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': None, 'encoding': 'UTF-8'}
2019-01-05 07:48:11,164 - XmlConfig['ssl-server.xml'] {'owner': 'hdfs', 'group': 'hadoop', 'conf_dir': '/usr/hdp/2.6.5.0-292/hadoop/conf', 'configuration_attributes': {}, 'configurations': ...}
2019-01-05 07:48:11,190 - Generating config: /usr/hdp/2.6.5.0-292/hadoop/conf/ssl-server.xml
2019-01-05 07:48:11,191 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/ssl-server.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': None, 'encoding': 'UTF-8'}
2019-01-05 07:48:11,216 - XmlConfig['hdfs-site.xml'] {'owner': 'hdfs', 'group': 'hadoop', 'conf_dir': '/usr/hdp/2.6.5.0-292/hadoop/conf', 'configuration_attributes': {u'final': {u'dfs.support.append': u'true', u'dfs.datanode.data.dir': u'true', u'dfs.namenode.http-address': u'true', u'dfs.namenode.name.dir': u'true', u'dfs.webhdfs.enabled': u'true', u'dfs.datanode.failed.volumes.tolerated': u'true'}}, 'configurations': ...}
2019-01-05 07:48:11,246 - Generating config: /usr/hdp/2.6.5.0-292/hadoop/conf/hdfs-site.xml
2019-01-05 07:48:11,247 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/hdfs-site.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': None, 'encoding': 'UTF-8'}
2019-01-05 07:48:11,397 - XmlConfig['core-site.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/2.6.5.0-292/hadoop/conf', 'mode': 0644, 'configuration_attributes': {u'final': {u'fs.defaultFS': u'true'}}, 'owner': 'hdfs', 'configurations': ...}
2019-01-05 07:48:11,422 - Generating config: /usr/hdp/2.6.5.0-292/hadoop/conf/core-site.xml
2019-01-05 07:48:11,422 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/core-site.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2019-01-05 07:48:11,521 - File['/usr/hdp/2.6.5.0-292/hadoop/conf/slaves'] {'content': Template('slaves.j2'), 'owner': 'hdfs'}
2019-01-05 07:48:11,525 - Stack Feature Version Info: Cluster Stack=2.6, Command Stack=None, Command Version=2.6.5.0-292 -> 2.6.5.0-292
2019-01-05 07:48:11,553 - Directory['/u02/hadoop/hdfs/namenode'] {'owner': 'hdfs', 'group': 'hadoop', 'create_parents': True, 'mode': 0755, 'cd_access': 'a'}
2019-01-05 07:48:11,557 - Skipping setting up secure ZNode ACL for HFDS as it's supported only for NameNode HA mode.
2019-01-05 07:48:11,570 - Called service start with upgrade_type: None
2019-01-05 07:48:11,570 - Ranger Hdfs plugin is not enabled
2019-01-05 07:48:11,575 - File['/etc/hadoop/conf/dfs.exclude'] {'owner': 'hdfs', 'content': Template('exclude_hosts_list.j2'), 'group': 'hadoop'}
2019-01-05 07:48:11,585 - /u02/hadoop/hdfs/namenode/namenode-formatted/ exists. Namenode DFS already formatted
2019-01-05 07:48:11,586 - Directory['/u02/hadoop/hdfs/namenode/namenode-formatted/'] {'create_parents': True}
2019-01-05 07:48:11,586 - Options for start command are:
2019-01-05 07:48:11,588 - Directory['/var/run/hadoop'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 0755}
2019-01-05 07:48:11,591 - Changing owner for /var/run/hadoop from 0 to hdfs
2019-01-05 07:48:11,593 - Changing group for /var/run/hadoop from 0 to hadoop
2019-01-05 07:48:11,594 - Directory['/var/run/hadoop/hdfs'] {'owner': 'hdfs', 'group': 'hadoop', 'create_parents': True}
2019-01-05 07:48:11,598 - Directory['/var/log/hadoop/hdfs'] {'owner': 'hdfs', 'group': 'hadoop', 'create_parents': True}
2019-01-05 07:48:11,602 - File['/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid'] {'action': ['delete'], 'not_if': 'ambari-sudo.sh -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid'}
2019-01-05 07:48:11,626 - Execute['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'ulimit -c unlimited ; /usr/hdp/2.6.5.0-292/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/2.6.5.0-292/hadoop/conf start namenode''] {'environment': {'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.6.5.0-292/hadoop/libexec'}, 'not_if': 'ambari-sudo.sh -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid'}
2019-01-05 07:48:16,173 - Waiting for this NameNode to leave Safemode due to the following conditions: HA: False, isActive: True, upgradeType: None
2019-01-05 07:48:16,173 - Waiting up to 19 minutes for the NameNode to leave Safemode...
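For context on the "up to 19 minutes" line: Ambari polls hdfs dfsadmin -safemode get with tries=115 and try_sleep=10 (about 19 minutes), as the Execute parameters in the next log entry show. A rough Python equivalent of that polling loop, as a sketch only (not Ambari's actual implementation; the pipe to grep is replaced by a substring check):

import subprocess
import time

# Command taken from the Execute[...] log entry below.
CMD = ["/usr/hdp/current/hadoop-hdfs-namenode/bin/hdfs", "dfsadmin",
       "-fs", "hdfs://lhdcsi04v.production.local:8020", "-safemode", "get"]

def wait_for_safemode_off(tries=115, try_sleep=10):
    """Poll until 'Safe mode is OFF', mirroring tries=115 / try_sleep=10."""
    for _ in range(tries):
        result = subprocess.run(CMD, capture_output=True, text=True)
        if "Safe mode is OFF" in result.stdout:
            return True
        time.sleep(try_sleep)
    return False

In the log that follows, the first attempts fail with Connection refused while the NameNode's RPC port (8020) is still coming up; the loop then sees "Safe mode is OFF" and the start sequence proceeds.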
2019-01-05 07:48:16,174 - Execute['/usr/hdp/current/hadoop-hdfs-namenode/bin/hdfs dfsadmin -fs hdfs://lhdcsi04v.production.local:8020 -safemode get | grep 'Safe mode is OFF''] {'logoutput': True, 'tries': 115, 'user': 'hdfs', 'try_sleep': 10}
safemode: Call From lhdcsi04v.production.local/10.237.16.24 to lhdcsi04v.production.local:8020 failed on connection exception: java.net.ConnectException: Connection refused; For more details see: http://wiki.apache.org/hadoop/ConnectionRefused
2019-01-05 07:48:21,032 - Retrying after 10 seconds. Reason: Execution of '/usr/hdp/current/hadoop-hdfs-namenode/bin/hdfs dfsadmin -fs hdfs://lhdcsi04v.production.local:8020 -safemode get | grep 'Safe mode is OFF'' returned 1. safemode: Call From lhdcsi04v.production.local/10.237.16.24 to lhdcsi04v.production.local:8020 failed on connection exception: java.net.ConnectException: Connection refused; For more details see: http://wiki.apache.org/hadoop/ConnectionRefused
2019-01-05 07:48:35,361 - Retrying after 10 seconds. Reason: Execution of '/usr/hdp/current/hadoop-hdfs-namenode/bin/hdfs dfsadmin -fs hdfs://lhdcsi04v.production.local:8020 -safemode get | grep 'Safe mode is OFF'' returned 1.
2019-01-05 07:48:49,815 - Retrying after 10 seconds. Reason: Execution of '/usr/hdp/current/hadoop-hdfs-namenode/bin/hdfs dfsadmin -fs hdfs://lhdcsi04v.production.local:8020 -safemode get | grep 'Safe mode is OFF'' returned 1.
2019-01-05 07:49:04,075 - Retrying after 10 seconds. Reason: Execution of '/usr/hdp/current/hadoop-hdfs-namenode/bin/hdfs dfsadmin -fs hdfs://lhdcsi04v.production.local:8020 -safemode get | grep 'Safe mode is OFF'' returned 1.
2019-01-05 07:49:18,839 - Retrying after 10 seconds. Reason: Execution of '/usr/hdp/current/hadoop-hdfs-namenode/bin/hdfs dfsadmin -fs hdfs://lhdcsi04v.production.local:8020 -safemode get | grep 'Safe mode is OFF'' returned 1.
2019-01-05 07:49:34,239 - Retrying after 10 seconds. Reason: Execution of '/usr/hdp/current/hadoop-hdfs-namenode/bin/hdfs dfsadmin -fs hdfs://lhdcsi04v.production.local:8020 -safemode get | grep 'Safe mode is OFF'' returned 1.
2019-01-05 07:49:48,846 - Retrying after 10 seconds. Reason: Execution of '/usr/hdp/current/hadoop-hdfs-namenode/bin/hdfs dfsadmin -fs hdfs://lhdcsi04v.production.local:8020 -safemode get | grep 'Safe mode is OFF'' returned 1.
Safe mode is OFF
2019-01-05 07:50:03,348 - HdfsResource['/tmp'] {'security_enabled': False, 'hadoop_bin_dir': '/usr/hdp/2.6.5.0-292/hadoop/bin', 'keytab': [EMPTY], 'dfs_type': '', 'default_fs': 'hdfs://lhdcsi04v.production.local:8020', 'hdfs_resource_ignore_file': '/var/lib/ambari-agent/data/.hdfs_resource_ignore', 'hdfs_site': ..., 'kinit_path_local': '/usr/bin/kinit', 'principal_name': None, 'user': 'hdfs', 'owner': 'hdfs', 'hadoop_conf_dir': '/usr/hdp/2.6.5.0-292/hadoop/conf', 'type': 'directory', 'action': ['create_on_execute'], 'immutable_paths': [u'/apps/hive/warehouse', u'/mr-history/done', u'/app-logs', u'/tmp'], 'mode': 0777}
2019-01-05 07:50:03,361 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X GET -k '"'"'https://lhdcsi04v.production.local:50470/webhdfs/v1/tmp?op=GETFILESTATUS&user.name=hdfs'"'"' 1>/tmp/tmpc8gtz_ 2>/tmp/tmp4HCXoq''] {'logoutput': None, 'quiet': False}
2019-01-05 07:50:03,774 - call returned (35, '')

Command failed after 1 tries
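To reproduce the failing WebHDFS call outside of Ambari, the same request can be issued directly from the NameNode host. The sketch below mirrors the logged curl command, including -k (no certificate verification), using only the Python standard library:

import ssl
import urllib.request

# URL copied from the failing curl command in the log above.
URL = ("https://lhdcsi04v.production.local:50470"
       "/webhdfs/v1/tmp?op=GETFILESTATUS&user.name=hdfs")

ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE  # equivalent of curl -k

try:
    with urllib.request.urlopen(URL, context=ctx, timeout=10) as resp:
        print(resp.status, resp.read().decode())
except OSError as exc:
    # In the state captured above this fails the same way curl did: the
    # connection is dropped before any HTTP response arrives.
    print("request failed:", exc)

If the equivalent request succeeds over plain HTTP on the default WebHDFS port (50070 on HDP 2.x) while the HTTPS call on 50470 still dies with an EOF, the NameNode is not actually serving HTTPS, and the dfs.http.policy and HTTPS address values in the hdfs-site.xml Ambari generated above are worth comparing against what the NameNode actually loaded.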