[root@master ~]# less /var/log/ambari-agent/ambari-agent.log
INFO 2020-02-12 16:46:53,497 __init__.py:57 - Event from server at /user/ (correlation_id=36): {u'status': u'OK', u'id': 19}
INFO 2020-02-12 16:46:54,569 security.py:135 - Event to server at /reports/alerts_status (correlation_id=37): [{'name': u'hive_metastore_process', 'timestamp': 1581506213667L, 'clusterId': '2', 'definitionId': 248, 'state': 'CRITICAL', 'text': '...'}, {'name': u'ambari_agent_disk_usage', 'timestamp': 1581506213510L, 'clusterId': '2', 'definitionId': 16, 'state': 'OK', 'text': '...'}]
INFO 2020-02-12 16:46:54,589 __init__.py:57 - Event from server at /user/ (correlation_id=37): {u'status': u'OK'}
INFO 2020-02-12 16:47:03,543 security.py:135 - Event to server at /heartbeat (correlation_id=38): {'id': 19}
INFO 2020-02-12 16:47:03,567 __init__.py:57 - Event from server at /user/ (correlation_id=38): {u'status': u'OK', u'id': 20}
INFO 2020-02-12 16:47:09,589 security.py:135 - Event to server at /reports/alerts_status (correlation_id=39): [{'name': u'hive_server_process', 'timestamp': 1581506227127L, 'clusterId': '2', 'definitionId': 251, 'state': 'CRITICAL', 'text': '...'}]
INFO 2020-02-12 16:47:09,601 __init__.py:57 - Event from server at /user/ (correlation_id=39): {u'status': u'OK'}
INFO 2020-02-12 16:47:13,573 security.py:135 - Event to server at /heartbeat (correlation_id=40): {'id': 20}
INFO 2020-02-12 16:47:13,584 __init__.py:57 - Event from server at /user/ (correlation_id=40): {u'status': u'OK', u'id': 21}
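The agent-to-server transport above is healthy (every event gets an OK acknowledgement), so the useful signal is the CRITICAL alert payloads for hive_metastore_process and hive_server_process. A quick way to isolate just those, using the agent log path shown above:

# List the most recent CRITICAL alert events reported by this agent.
grep CRITICAL /var/log/ambari-agent/ambari-agent.log | tail -n 5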
[root@master ~]# less /var/log/hadoop-yarn/embedded-yarn-ats-hbase/hs_err_pid21696.log
#
# There is insufficient memory for the Java Runtime Environment to continue.
# Native memory allocation (mmap) failed to map 3086221312 bytes for committing reserved memory.
# Possible reasons:
# The system is out of physical RAM or swap space
# In 32 bit mode, the process size limit was hit
# Possible solutions:
# Reduce memory load on the system
# Increase physical memory or swap space
# Check if swap backing store is full
# Use 64 bit Java on a 64 bit OS
# Decrease Java heap size (-Xmx/-Xms)
# Decrease number of Java threads
# Decrease Java thread stack sizes (-Xss)
# Set larger code cache with -XX:ReservedCodeCacheSize=
# This output file may be truncated or incomplete.
#
# Out of Memory Error (os_linux.cpp:2627), pid=21696, tid=0x00007f39deddf700
#
# JRE version: (8.0_112-b15) (build )
# Java VM: Java HotSpot(TM) 64-Bit Server VM (25.112-b15 mixed mode linux-amd64 compressed oops)
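The crash header says the embedded ATS HBase JVM asked the kernel to commit about 3 GB (3086221312 bytes) and the mmap failed, which on a small master node usually means physical RAM plus swap is exhausted. A minimal sanity check, using the embedded-HBase conf directory that appears in the install log below (the grep assumes the heap is set via -Xmx in hbase-env.sh, as in stock HDP 3.x templates):

# How much RAM and swap does the host actually have left?
free -h
# Which heap size is the embedded ATS HBase configured to reserve?
grep -i 'Xmx' /usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase/hbase-env.sh

If the host is short on memory, lowering that heap or adding swap addresses the first two "possible solutions" the JVM itself suggests.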
YARN service logs captured while installing:
1) The Timeline Service 2.0 start log is as follows:
stderr: /var/lib/ambari-agent/data/errors-902.txt
Traceback (most recent call last):
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/YARN/package/scripts/timelinereader.py", line 108, in
ApplicationTimelineReader().execute()
File "/usr/lib/ambari-agent/lib/resource_management/libraries/script/script.py", line 353, in execute
method(env)
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/YARN/package/scripts/timelinereader.py", line 49, in start
self.configure(env) # FOR SECURITY
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/YARN/package/scripts/timelinereader.py", line 66, in configure
configure_hbase(env)
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/YARN/package/scripts/hbase_service.py", line 89, in configure_hbase
owner=params.yarn_hbase_user
File "/usr/lib/ambari-agent/lib/resource_management/core/base.py", line 166, in __init__
self.env.run()
File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 160, in run
self.run_action(resource, action)
File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 124, in run_action
provider_action()
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 654, in action_create_on_execute
self.action_delayed("create")
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 651, in action_delayed
self.get_hdfs_resource_executor().action_delayed(action_name, self)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 350, in action_delayed
self.action_delayed_for_nameservice(None, action_name, main_resource)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 372, in action_delayed_for_nameservice
self._assert_valid()
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 311, in _assert_valid
self.target_status = self._get_file_status(target)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 474, in _get_file_status
list_status = self.util.run_command(target, 'GETFILESTATUS', method='GET', ignore_status_codes=['404'], assertable_result=False)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 199, in run_command
return self._run_command(*args, **kwargs)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 259, in _run_command
_, out, err = get_user_call_output(cmd, user=self.run_user, logoutput=self.logoutput, quiet=False)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/functions/get_user_call_output.py", line 62, in get_user_call_output
raise ExecutionFailed(err_msg, code, files_output[0], files_output[1])
resource_management.core.exceptions.ExecutionFailed: Execution of 'curl -sS -L -w '%{http_code}' -X GET 'http://master.localdomain:50070/webhdfs/v1/atsv2/hbase/data?op=GETFILESTATUS&user.name=hdfs' 1>/tmp/tmp3UMLTY 2>/tmp/tmpw9Xv6a' returned 7. curl: (7) Failed connect to master.localdomain:50070; Connection refused
000
stdout: /var/lib/ambari-agent/data/output-902.txt
2020-02-13 10:31:43,660 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:31:43,723 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:31:44,040 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:31:44,051 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:31:44,065 - Group['livy'] {}
2020-02-13 10:31:44,068 - Group['spark'] {}
2020-02-13 10:31:44,068 - Group['hdfs'] {}
2020-02-13 10:31:44,069 - Group['hadoop'] {}
2020-02-13 10:31:44,069 - Group['users'] {}
2020-02-13 10:31:44,070 - User['yarn-ats'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:31:44,073 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:31:44,077 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:31:44,080 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:31:44,084 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:31:44,086 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'users'], 'uid': None}
2020-02-13 10:31:44,087 - User['livy'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['livy', 'hadoop'], 'uid': None}
2020-02-13 10:31:44,091 - User['spark'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['spark', 'hadoop'], 'uid': None}
2020-02-13 10:31:44,095 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'users'], 'uid': None}
2020-02-13 10:31:44,097 - User['kafka'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:31:44,098 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hdfs', 'hadoop'], 'uid': None}
2020-02-13 10:31:44,099 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:31:44,100 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:31:44,100 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:31:44,101 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:31:44,102 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:31:44,103 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2020-02-13 10:31:44,117 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] due to not_if
2020-02-13 10:31:44,118 - Directory['/tmp/hbase-hbase'] {'owner': 'hbase', 'create_parents': True, 'mode': 0775, 'cd_access': 'a'}
2020-02-13 10:31:44,119 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:31:44,121 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:31:44,124 - call['/var/lib/ambari-agent/tmp/changeUid.sh hbase'] {}
2020-02-13 10:31:44,148 - call returned (0, '1005')
2020-02-13 10:31:44,150 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1005'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2020-02-13 10:31:44,162 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1005'] due to not_if
2020-02-13 10:31:44,164 - Group['hdfs'] {}
2020-02-13 10:31:44,165 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': ['hdfs', 'hadoop', u'hdfs']}
2020-02-13 10:31:44,167 - FS Type: HDFS
2020-02-13 10:31:44,167 - Directory['/etc/hadoop'] {'mode': 0755}
2020-02-13 10:31:44,208 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:31:44,210 - Writing File['/usr/hdp/3.0.1.0-187/hadoop/conf/hadoop-env.sh'] because contents don't match
2020-02-13 10:31:44,212 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2020-02-13 10:31:44,247 - Execute[('setenforce', '0')] {'not_if': '(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)', 'sudo': True, 'only_if': 'test -f /selinux/enforce'}
2020-02-13 10:31:44,260 - Skipping Execute[('setenforce', '0')] due to not_if
2020-02-13 10:31:44,261 - Directory['/var/log/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'hadoop', 'mode': 0775, 'cd_access': 'a'}
2020-02-13 10:31:44,263 - Directory['/var/run/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'root', 'cd_access': 'a'}
2020-02-13 10:31:44,263 - Directory['/var/run/hadoop/hdfs'] {'owner': 'hdfs', 'cd_access': 'a'}
2020-02-13 10:31:44,264 - Directory['/tmp/hadoop-hdfs'] {'owner': 'hdfs', 'create_parents': True, 'cd_access': 'a'}
2020-02-13 10:31:44,269 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/commons-logging.properties'] {'content': Template('commons-logging.properties.j2'), 'owner': 'hdfs'}
2020-02-13 10:31:44,272 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/health_check'] {'content': Template('health_check.j2'), 'owner': 'hdfs'}
2020-02-13 10:31:44,279 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/log4j.properties'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:31:44,291 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/hadoop-metrics2.properties'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:31:44,292 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/task-log4j.properties'] {'content': StaticFile('task-log4j.properties'), 'mode': 0755}
2020-02-13 10:31:44,293 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/configuration.xsl'] {'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:31:44,300 - File['/etc/hadoop/conf/topology_mappings.data'] {'owner': 'hdfs', 'content': Template('topology_mappings.data.j2'), 'only_if': 'test -d /etc/hadoop/conf', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:31:44,304 - File['/etc/hadoop/conf/topology_script.py'] {'content': StaticFile('topology_script.py'), 'only_if': 'test -d /etc/hadoop/conf', 'mode': 0755}
2020-02-13 10:31:44,307 - Skipping unlimited key JCE policy check and setup since it is not required
2020-02-13 10:31:44,668 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:31:44,669 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:31:44,700 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:31:44,717 - Directory['/var/log/hadoop-yarn'] {'group': 'hadoop', 'cd_access': 'a', 'create_parents': True, 'ignore_failures': True, 'mode': 0775, 'owner': 'yarn'}
2020-02-13 10:31:44,718 - Directory['/var/run/hadoop-yarn'] {'owner': 'yarn', 'create_parents': True, 'group': 'hadoop', 'cd_access': 'a'}
2020-02-13 10:31:44,719 - Creating directory Directory['/var/run/hadoop-yarn'] since it doesn't exist.
2020-02-13 10:31:44,719 - Changing owner for /var/run/hadoop-yarn from 0 to yarn
2020-02-13 10:31:44,719 - Changing group for /var/run/hadoop-yarn from 0 to hadoop
2020-02-13 10:31:44,719 - Directory['/var/run/hadoop-yarn/yarn'] {'owner': 'yarn', 'create_parents': True, 'group': 'hadoop', 'cd_access': 'a'}
2020-02-13 10:31:44,719 - Creating directory Directory['/var/run/hadoop-yarn/yarn'] since it doesn't exist.
2020-02-13 10:31:44,720 - Changing owner for /var/run/hadoop-yarn/yarn from 0 to yarn
2020-02-13 10:31:44,720 - Changing group for /var/run/hadoop-yarn/yarn from 0 to hadoop
2020-02-13 10:31:44,720 - Directory['/var/log/hadoop-yarn/yarn'] {'owner': 'yarn', 'group': 'hadoop', 'create_parents': True, 'cd_access': 'a'}
2020-02-13 10:31:44,720 - Directory['/var/run/hadoop-mapreduce'] {'owner': 'mapred', 'create_parents': True, 'group': 'hadoop', 'cd_access': 'a'}
2020-02-13 10:31:44,721 - Creating directory Directory['/var/run/hadoop-mapreduce'] since it doesn't exist.
2020-02-13 10:31:44,721 - Changing owner for /var/run/hadoop-mapreduce from 0 to mapred
2020-02-13 10:31:44,721 - Changing group for /var/run/hadoop-mapreduce from 0 to hadoop
2020-02-13 10:31:44,721 - Directory['/var/run/hadoop-mapreduce/mapred'] {'owner': 'mapred', 'create_parents': True, 'group': 'hadoop', 'cd_access': 'a'}
2020-02-13 10:31:44,721 - Creating directory Directory['/var/run/hadoop-mapreduce/mapred'] since it doesn't exist.
2020-02-13 10:31:44,722 - Changing owner for /var/run/hadoop-mapreduce/mapred from 0 to mapred
2020-02-13 10:31:44,722 - Changing group for /var/run/hadoop-mapreduce/mapred from 0 to hadoop
2020-02-13 10:31:44,722 - Directory['/var/log/hadoop-mapreduce'] {'owner': 'mapred', 'create_parents': True, 'group': 'hadoop', 'cd_access': 'a'}
2020-02-13 10:31:44,723 - Directory['/var/log/hadoop-mapreduce/mapred'] {'owner': 'mapred', 'group': 'hadoop', 'create_parents': True, 'cd_access': 'a'}
2020-02-13 10:31:44,723 - Directory['/usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase'] {'owner': 'yarn-ats', 'group': 'hadoop', 'create_parents': True, 'cd_access': 'a'}
2020-02-13 10:31:44,724 - Directory['/var/run/hadoop-yarn-hbase'] {'owner': 'yarn-ats', 'create_parents': True, 'group': 'hadoop', 'cd_access': 'a'}
2020-02-13 10:31:44,724 - Creating directory Directory['/var/run/hadoop-yarn-hbase'] since it doesn't exist.
2020-02-13 10:31:44,725 - Changing owner for /var/run/hadoop-yarn-hbase from 0 to yarn-ats
2020-02-13 10:31:44,725 - Changing group for /var/run/hadoop-yarn-hbase from 0 to hadoop
2020-02-13 10:31:44,727 - Directory['/var/run/hadoop-yarn-hbase/yarn-ats'] {'owner': 'yarn-ats', 'create_parents': True, 'group': 'hadoop', 'cd_access': 'a'}
2020-02-13 10:31:44,727 - Creating directory Directory['/var/run/hadoop-yarn-hbase/yarn-ats'] since it doesn't exist.
2020-02-13 10:31:44,728 - Changing owner for /var/run/hadoop-yarn-hbase/yarn-ats from 0 to yarn-ats
2020-02-13 10:31:44,729 - Changing group for /var/run/hadoop-yarn-hbase/yarn-ats from 0 to hadoop
2020-02-13 10:31:44,730 - Directory['/var/log/hadoop-yarn/embedded-yarn-ats-hbase'] {'owner': 'yarn-ats', 'group': 'hadoop', 'create_parents': True, 'cd_access': 'a'}
2020-02-13 10:31:44,733 - Directory['/tmp'] {'create_parents': True, 'cd_access': 'a'}
2020-02-13 10:31:44,734 - Execute[('chmod', '1777', u'/tmp')] {'sudo': True}
2020-02-13 10:31:44,744 - XmlConfig['hbase-policy.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase', 'mode': 0644, 'configuration_attributes': {}, 'owner': 'yarn-ats', 'configurations': {u'security.admin.protocol.acl': u'*', u'security.masterregion.protocol.acl': u'*', u'security.client.protocol.acl': u'*'}}
2020-02-13 10:31:44,758 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase/hbase-policy.xml
2020-02-13 10:31:44,758 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase/hbase-policy.xml'] {'owner': 'yarn-ats', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:31:44,770 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase/hbase-env.sh'] {'content': InlineTemplate(...), 'owner': 'yarn-ats', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:31:44,775 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase/hbase_grant_permissions.sh'] {'content': Template('yarn_hbase_grant_permissions.j2'), 'owner': 'yarn-ats', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:31:44,785 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase/log4j.properties'] {'content': InlineTemplate(...), 'owner': 'yarn-ats', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:31:44,798 - Directory['/usr/lib/ambari-logsearch-logfeeder/conf'] {'create_parents': True, 'mode': 0755, 'cd_access': 'a'}
2020-02-13 10:31:44,798 - Generate Log Feeder config file: /usr/lib/ambari-logsearch-logfeeder/conf/input.config-yarn.json
2020-02-13 10:31:44,798 - File['/usr/lib/ambari-logsearch-logfeeder/conf/input.config-yarn.json'] {'content': Template('input.config-yarn.json.j2'), 'mode': 0644}
2020-02-13 10:31:44,800 - XmlConfig['core-site.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'mode': 0644, 'configuration_attributes': {u'final': {u'fs.defaultFS': u'true'}}, 'owner': 'hdfs', 'configurations': ...}
2020-02-13 10:31:44,812 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/core-site.xml
2020-02-13 10:31:44,813 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/core-site.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:31:44,866 - Writing File['/usr/hdp/3.0.1.0-187/hadoop/conf/core-site.xml'] because contents don't match
2020-02-13 10:31:44,868 - XmlConfig['hdfs-site.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'mode': 0644, 'configuration_attributes': {u'final': {u'dfs.datanode.failed.volumes.tolerated': u'true', u'dfs.datanode.data.dir': u'true', u'dfs.namenode.http-address': u'true', u'dfs.namenode.name.dir': u'true', u'dfs.webhdfs.enabled': u'true'}}, 'owner': 'hdfs', 'configurations': ...}
2020-02-13 10:31:44,875 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/hdfs-site.xml
2020-02-13 10:31:44,876 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/hdfs-site.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:31:44,968 - XmlConfig['mapred-site.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'mode': 0644, 'configuration_attributes': {}, 'owner': 'yarn', 'configurations': ...}
2020-02-13 10:31:44,987 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/mapred-site.xml
2020-02-13 10:31:44,988 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/mapred-site.xml'] {'owner': 'yarn', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:31:45,085 - Changing owner for /usr/hdp/3.0.1.0-187/hadoop/conf/mapred-site.xml from 995 to yarn
2020-02-13 10:31:45,086 - XmlConfig['yarn-site.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'mode': 0644, 'configuration_attributes': {u'hidden': {u'hadoop.registry.dns.bind-port': u'true'}}, 'owner': 'yarn', 'configurations': ...}
2020-02-13 10:31:45,095 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/yarn-site.xml
2020-02-13 10:31:45,096 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/yarn-site.xml'] {'owner': 'yarn', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:31:45,345 - XmlConfig['capacity-scheduler.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'mode': 0644, 'configuration_attributes': {}, 'owner': 'yarn', 'configurations': ...}
2020-02-13 10:31:45,360 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/capacity-scheduler.xml
2020-02-13 10:31:45,361 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/capacity-scheduler.xml'] {'owner': 'yarn', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:31:45,403 - Changing owner for /usr/hdp/3.0.1.0-187/hadoop/conf/capacity-scheduler.xml from 1004 to yarn
2020-02-13 10:31:45,403 - XmlConfig['hbase-site.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase', 'mode': 0644, 'configuration_attributes': {}, 'owner': 'yarn-ats', 'configurations': ...}
2020-02-13 10:31:45,421 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase/hbase-site.xml
2020-02-13 10:31:45,421 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/embedded-yarn-ats-hbase/hbase-site.xml'] {'owner': 'yarn-ats', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:31:45,495 - XmlConfig['resource-types.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'mode': 0644, 'configuration_attributes': {}, 'owner': 'yarn', 'configurations': {u'yarn.resource-types.yarn.io_gpu.maximum-allocation': u'8', u'yarn.resource-types': u''}}
2020-02-13 10:31:45,507 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/resource-types.xml
2020-02-13 10:31:45,508 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/resource-types.xml'] {'owner': 'yarn', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:31:45,517 - File['/etc/security/limits.d/yarn.conf'] {'content': Template('yarn.conf.j2'), 'mode': 0644}
2020-02-13 10:31:45,520 - File['/etc/security/limits.d/mapreduce.conf'] {'content': Template('mapreduce.conf.j2'), 'mode': 0644}
2020-02-13 10:31:45,538 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/yarn-env.sh'] {'content': InlineTemplate(...), 'owner': 'yarn', 'group': 'hadoop', 'mode': 0755}
2020-02-13 10:31:45,539 - File['/usr/hdp/3.0.1.0-187/hadoop-yarn/bin/container-executor'] {'group': 'hadoop', 'mode': 02050}
2020-02-13 10:31:45,553 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/container-executor.cfg'] {'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:31:45,554 - Directory['/cgroups_test/cpu'] {'group': 'hadoop', 'create_parents': True, 'mode': 0755, 'cd_access': 'a'}
2020-02-13 10:31:45,560 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/mapred-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'mode': 0755}
2020-02-13 10:31:45,566 - Directory['/var/log/hadoop-yarn/nodemanager/recovery-state'] {'owner': 'yarn', 'group': 'hadoop', 'create_parents': True, 'mode': 0755, 'cd_access': 'a'}
2020-02-13 10:31:45,572 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/taskcontroller.cfg'] {'content': Template('taskcontroller.cfg.j2'), 'owner': 'hdfs'}
2020-02-13 10:31:45,573 - XmlConfig['mapred-site.xml'] {'owner': 'mapred', 'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'configuration_attributes': {}, 'configurations': ...}
2020-02-13 10:31:45,589 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/mapred-site.xml
2020-02-13 10:31:45,589 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/mapred-site.xml'] {'owner': 'mapred', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': None, 'encoding': 'UTF-8'}
2020-02-13 10:31:45,662 - Changing owner for /usr/hdp/3.0.1.0-187/hadoop/conf/mapred-site.xml from 996 to mapred
2020-02-13 10:31:45,663 - XmlConfig['capacity-scheduler.xml'] {'owner': 'hdfs', 'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'configuration_attributes': {}, 'configurations': ...}
2020-02-13 10:31:45,685 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/capacity-scheduler.xml
2020-02-13 10:31:45,685 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/capacity-scheduler.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': None, 'encoding': 'UTF-8'}
2020-02-13 10:31:45,715 - Changing owner for /usr/hdp/3.0.1.0-187/hadoop/conf/capacity-scheduler.xml from 996 to hdfs
2020-02-13 10:31:45,716 - XmlConfig['ssl-client.xml'] {'owner': 'hdfs', 'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'configuration_attributes': {}, 'configurations': ...}
2020-02-13 10:31:45,737 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/ssl-client.xml
2020-02-13 10:31:45,738 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/ssl-client.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': None, 'encoding': 'UTF-8'}
2020-02-13 10:31:45,755 - Directory['/usr/hdp/3.0.1.0-187/hadoop/conf/secure'] {'owner': 'root', 'create_parents': True, 'group': 'hadoop', 'cd_access': 'a'}
2020-02-13 10:31:45,756 - XmlConfig['ssl-client.xml'] {'owner': 'hdfs', 'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf/secure', 'configuration_attributes': {}, 'configurations': ...}
2020-02-13 10:31:45,786 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/secure/ssl-client.xml
2020-02-13 10:31:45,786 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/secure/ssl-client.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': None, 'encoding': 'UTF-8'}
2020-02-13 10:31:45,803 - XmlConfig['ssl-server.xml'] {'owner': 'hdfs', 'group': 'hadoop', 'conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'configuration_attributes': {}, 'configurations': ...}
2020-02-13 10:31:45,828 - Generating config: /usr/hdp/3.0.1.0-187/hadoop/conf/ssl-server.xml
2020-02-13 10:31:45,829 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/ssl-server.xml'] {'owner': 'hdfs', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': None, 'encoding': 'UTF-8'}
2020-02-13 10:31:45,853 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/ssl-client.xml.example'] {'owner': 'mapred', 'group': 'hadoop'}
2020-02-13 10:31:45,853 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/ssl-server.xml.example'] {'owner': 'mapred', 'group': 'hadoop'}
2020-02-13 10:31:45,858 - HdfsResource['/atsv2/hbase/data'] {'security_enabled': False, 'hadoop_bin_dir': '/usr/hdp/3.0.1.0-187/hadoop/bin', 'keytab': [EMPTY], 'dfs_type': 'HDFS', 'default_fs': 'hdfs://master.localdomain:8020', 'hdfs_resource_ignore_file': '/var/lib/ambari-agent/data/.hdfs_resource_ignore', 'hdfs_site': ..., 'kinit_path_local': 'kinit', 'principal_name': [EMPTY], 'user': 'hdfs', 'owner': 'yarn-ats', 'hadoop_conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'type': 'directory', 'action': ['create_on_execute'], 'immutable_paths': [u'/mr-history/done', u'/warehouse/tablespace/managed/hive', u'/warehouse/tablespace/external/hive', u'/app-logs', u'/tmp']}
2020-02-13 10:31:45,868 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X GET '"'"'http://master.localdomain:50070/webhdfs/v1/atsv2/hbase/data?op=GETFILESTATUS&user.name=hdfs'"'"' 1>/tmp/tmp3UMLTY 2>/tmp/tmpw9Xv6a''] {'logoutput': None, 'quiet': False}
2020-02-13 10:31:46,113 - call returned (7, '')
Command failed after 1 tries
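curl exit code 7 is "connection refused": nothing is listening on master.localdomain:50070, the default NameNode HTTP port that WebHDFS rides on. The NameNode is down, rather than the timeline reader being misconfigured. Replaying the agent's probe by hand and checking the listener confirms it (ss is assumed available; netstat -ltnp works on older images):

# Replay the exact WebHDFS call the agent made; expect "000" and exit code 7 while the NameNode is down.
curl -sS -L -w '%{http_code}' -X GET \
  'http://master.localdomain:50070/webhdfs/v1/atsv2/hbase/data?op=GETFILESTATUS&user.name=hdfs'
# Is anything bound to the NameNode HTTP port?
ss -ltnp | grep ':50070'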
2) The Hive Metastore start log from the same installation is as follows:
stderr: /var/lib/ambari-agent/data/errors-903.txt
Traceback (most recent call last):
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive_metastore.py", line 200, in
HiveMetastore().execute()
File "/usr/lib/ambari-agent/lib/resource_management/libraries/script/script.py", line 353, in execute
method(env)
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive_metastore.py", line 56, in start
create_hive_hdfs_dirs()
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive.py", line 269, in create_hive_hdfs_dirs
mode=params.hive_hdfs_user_mode
File "/usr/lib/ambari-agent/lib/resource_management/core/base.py", line 166, in __init__
self.env.run()
File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 160, in run
self.run_action(resource, action)
File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 124, in run_action
provider_action()
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 654, in action_create_on_execute
self.action_delayed("create")
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 651, in action_delayed
self.get_hdfs_resource_executor().action_delayed(action_name, self)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 350, in action_delayed
self.action_delayed_for_nameservice(None, action_name, main_resource)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 372, in action_delayed_for_nameservice
self._assert_valid()
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 311, in _assert_valid
self.target_status = self._get_file_status(target)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 474, in _get_file_status
list_status = self.util.run_command(target, 'GETFILESTATUS', method='GET', ignore_status_codes=['404'], assertable_result=False)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 199, in run_command
return self._run_command(*args, **kwargs)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 259, in _run_command
_, out, err = get_user_call_output(cmd, user=self.run_user, logoutput=self.logoutput, quiet=False)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/functions/get_user_call_output.py", line 62, in get_user_call_output
raise ExecutionFailed(err_msg, code, files_output[0], files_output[1])
resource_management.core.exceptions.ExecutionFailed: Execution of 'curl -sS -L -w '%{http_code}' -X GET 'http://master.localdomain:50070/webhdfs/v1/user/hive?op=GETFILESTATUS&user.name=hdfs' 1>/tmp/tmpDe4U26 2>/tmp/tmpczqnbH' returned 7. curl: (7) Failed connect to master.localdomain:50070; Connection refused
000
stdout: /var/lib/ambari-agent/data/output-903.txt
2020-02-13 10:33:40,391 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:33:40,437 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:33:40,728 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:33:40,734 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:33:40,736 - Group['livy'] {}
2020-02-13 10:33:40,738 - Group['spark'] {}
2020-02-13 10:33:40,739 - Group['hdfs'] {}
2020-02-13 10:33:40,740 - Group['hadoop'] {}
2020-02-13 10:33:40,741 - Group['users'] {}
2020-02-13 10:33:40,742 - User['yarn-ats'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:33:40,744 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:33:40,744 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:33:40,745 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:33:40,746 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:33:40,746 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'users'], 'uid': None}
2020-02-13 10:33:40,747 - User['livy'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['livy', 'hadoop'], 'uid': None}
2020-02-13 10:33:40,748 - User['spark'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['spark', 'hadoop'], 'uid': None}
2020-02-13 10:33:40,750 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'users'], 'uid': None}
2020-02-13 10:33:40,753 - User['kafka'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:33:40,755 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hdfs', 'hadoop'], 'uid': None}
2020-02-13 10:33:40,757 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:33:40,758 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:33:40,760 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:33:40,762 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:33:40,763 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:33:40,765 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2020-02-13 10:33:40,778 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] due to not_if
2020-02-13 10:33:40,778 - Directory['/tmp/hbase-hbase'] {'owner': 'hbase', 'create_parents': True, 'mode': 0775, 'cd_access': 'a'}
2020-02-13 10:33:40,779 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:33:40,780 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:33:40,781 - call['/var/lib/ambari-agent/tmp/changeUid.sh hbase'] {}
2020-02-13 10:33:40,797 - call returned (0, '1005')
2020-02-13 10:33:40,798 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1005'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2020-02-13 10:33:40,812 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1005'] due to not_if
2020-02-13 10:33:40,812 - Group['hdfs'] {}
2020-02-13 10:33:40,813 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': ['hdfs', 'hadoop', u'hdfs']}
2020-02-13 10:33:40,814 - FS Type: HDFS
2020-02-13 10:33:40,814 - Directory['/etc/hadoop'] {'mode': 0755}
2020-02-13 10:33:40,849 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:33:40,850 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2020-02-13 10:33:40,894 - Execute[('setenforce', '0')] {'not_if': '(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)', 'sudo': True, 'only_if': 'test -f /selinux/enforce'}
2020-02-13 10:33:40,908 - Skipping Execute[('setenforce', '0')] due to not_if
2020-02-13 10:33:40,909 - Directory['/var/log/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'hadoop', 'mode': 0775, 'cd_access': 'a'}
2020-02-13 10:33:40,913 - Directory['/var/run/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'root', 'cd_access': 'a'}
2020-02-13 10:33:40,914 - Directory['/var/run/hadoop/hdfs'] {'owner': 'hdfs', 'cd_access': 'a'}
2020-02-13 10:33:40,915 - Directory['/tmp/hadoop-hdfs'] {'owner': 'hdfs', 'create_parents': True, 'cd_access': 'a'}
2020-02-13 10:33:40,928 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/commons-logging.properties'] {'content': Template('commons-logging.properties.j2'), 'owner': 'hdfs'}
2020-02-13 10:33:40,930 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/health_check'] {'content': Template('health_check.j2'), 'owner': 'hdfs'}
2020-02-13 10:33:40,935 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/log4j.properties'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:33:40,952 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/hadoop-metrics2.properties'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:33:40,956 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/task-log4j.properties'] {'content': StaticFile('task-log4j.properties'), 'mode': 0755}
2020-02-13 10:33:40,959 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/configuration.xsl'] {'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:33:40,965 - File['/etc/hadoop/conf/topology_mappings.data'] {'owner': 'hdfs', 'content': Template('topology_mappings.data.j2'), 'only_if': 'test -d /etc/hadoop/conf', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:33:40,977 - File['/etc/hadoop/conf/topology_script.py'] {'content': StaticFile('topology_script.py'), 'only_if': 'test -d /etc/hadoop/conf', 'mode': 0755}
2020-02-13 10:33:40,981 - Skipping unlimited key JCE policy check and setup since it is not required
2020-02-13 10:33:41,422 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:33:41,436 - call['ambari-python-wrap /usr/bin/hdp-select status hive-server2'] {'timeout': 20}
2020-02-13 10:33:41,468 - call returned (0, 'hive-server2 - 3.0.1.0-187')
2020-02-13 10:33:41,471 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:33:41,516 - File['/var/lib/ambari-agent/cred/lib/CredentialUtil.jar'] {'content': DownloadSource('http://master.localdomain:8080/resources/CredentialUtil.jar'), 'mode': 0755}
2020-02-13 10:33:41,518 - Not downloading the file from http://master.localdomain:8080/resources/CredentialUtil.jar, because /var/lib/ambari-agent/tmp/CredentialUtil.jar already exists
2020-02-13 10:33:42,972 - Yarn already refreshed
2020-02-13 10:33:42,974 - HdfsResource['/user/hive'] {'security_enabled': False, 'hadoop_bin_dir': '/usr/hdp/3.0.1.0-187/hadoop/bin', 'keytab': [EMPTY], 'dfs_type': 'HDFS', 'default_fs': 'hdfs://master.localdomain:8020', 'hdfs_resource_ignore_file': '/var/lib/ambari-agent/data/.hdfs_resource_ignore', 'hdfs_site': ..., 'kinit_path_local': 'kinit', 'principal_name': 'missing_principal', 'user': 'hdfs', 'owner': 'hive', 'hadoop_conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'type': 'directory', 'action': ['create_on_execute'], 'immutable_paths': [u'/mr-history/done', u'/warehouse/tablespace/managed/hive', u'/warehouse/tablespace/external/hive', u'/app-logs', u'/tmp'], 'mode': 0755}
2020-02-13 10:33:42,979 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X GET '"'"'http://master.localdomain:50070/webhdfs/v1/user/hive?op=GETFILESTATUS&user.name=hdfs'"'"' 1>/tmp/tmpDe4U26 2>/tmp/tmpczqnbH''] {'logoutput': None, 'quiet': False}
2020-02-13 10:33:43,092 - call returned (7, '')
Command failed after 1 tries
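The Hive Metastore fails on the very same WebHDFS GETFILESTATUS probe, so both failures share one root cause: HDFS (specifically the NameNode, plausibly killed by the memory pressure shown in the hs_err log above) must be up before any dependent service start can succeed. A hedged sketch of starting HDFS through the Ambari REST API; CLUSTER_NAME and the admin:admin credentials are placeholders to substitute:

# Ask Ambari to start the HDFS service before retrying the dependent service installs.
curl -u admin:admin -H 'X-Requested-By: ambari' -X PUT \
  -d '{"RequestInfo":{"context":"Start HDFS"},"Body":{"ServiceInfo":{"state":"STARTED"}}}' \
  'http://master.localdomain:8080/api/v1/clusters/CLUSTER_NAME/services/HDFS'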
3) The HiveServer2 start log is as follows:
stderr: /var/lib/ambari-agent/data/errors-904.txt
Traceback (most recent call last):
File "/usr/lib/ambari-agent/lib/resource_management/core/source.py", line 195, in get_content
web_file = opener.open(req)
File "/usr/lib64/python2.7/urllib2.py", line 437, in open
response = meth(req, response)
File "/usr/lib64/python2.7/urllib2.py", line 550, in http_response
'http', request, response, code, msg, hdrs)
File "/usr/lib64/python2.7/urllib2.py", line 475, in error
return self._call_chain(*args)
File "/usr/lib64/python2.7/urllib2.py", line 409, in _call_chain
result = func(*args)
File "/usr/lib64/python2.7/urllib2.py", line 558, in http_error_default
raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
HTTPError: HTTP Error 404: Not Found
The above exception was the cause of the following exception:
Traceback (most recent call last):
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive_server.py", line 137, in
HiveServer().execute()
File "/usr/lib/ambari-agent/lib/resource_management/libraries/script/script.py", line 353, in execute
method(env)
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive_server.py", line 50, in start
self.configure(env) # FOR SECURITY
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive_server.py", line 45, in configure
hive(name='hiveserver2')
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive.py", line 114, in hive
jdbc_connector(params.hive_jdbc_target, params.hive_previous_jdbc_jar)
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive.py", line 628, in jdbc_connector
File(params.downloaded_custom_connector, content = DownloadSource(params.driver_curl_source))
File "/usr/lib/ambari-agent/lib/resource_management/core/base.py", line 166, in __init__
self.env.run()
File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 160, in run
self.run_action(resource, action)
File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 124, in run_action
provider_action()
File "/usr/lib/ambari-agent/lib/resource_management/core/providers/system.py", line 123, in action_create
content = self._get_content()
File "/usr/lib/ambari-agent/lib/resource_management/core/providers/system.py", line 160, in _get_content
return content()
File "/usr/lib/ambari-agent/lib/resource_management/core/source.py", line 52, in __call__
return self.get_content()
File "/usr/lib/ambari-agent/lib/resource_management/core/source.py", line 197, in get_content
raise Fail("Failed to download file from {0} due to HTTP error: {1}".format(self.url, str(ex)))
resource_management.core.exceptions.Fail: Failed to download file from http://master.localdomain:8080/resources/mysql-connector-java.jar due to HTTP error: HTTP Error 404: Not Found
stdout: /var/lib/ambari-agent/data/output-904.txt
2020-02-13 10:35:24,347 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:35:24,373 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:35:24,667 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:35:24,682 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:35:24,684 - Group['livy'] {}
2020-02-13 10:35:24,685 - Group['spark'] {}
2020-02-13 10:35:24,685 - Group['hdfs'] {}
2020-02-13 10:35:24,685 - Group['hadoop'] {}
2020-02-13 10:35:24,685 - Group['users'] {}
2020-02-13 10:35:24,686 - User['yarn-ats'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:35:24,687 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:35:24,688 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:35:24,689 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:35:24,690 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:35:24,691 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'users'], 'uid': None}
2020-02-13 10:35:24,693 - User['livy'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['livy', 'hadoop'], 'uid': None}
2020-02-13 10:35:24,694 - User['spark'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['spark', 'hadoop'], 'uid': None}
2020-02-13 10:35:24,695 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'users'], 'uid': None}
2020-02-13 10:35:24,697 - User['kafka'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:35:24,698 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hdfs', 'hadoop'], 'uid': None}
2020-02-13 10:35:24,699 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:35:24,700 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:35:24,701 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:35:24,701 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:35:24,702 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:35:24,704 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2020-02-13 10:35:24,712 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] due to not_if
2020-02-13 10:35:24,712 - Directory['/tmp/hbase-hbase'] {'owner': 'hbase', 'create_parents': True, 'mode': 0775, 'cd_access': 'a'}
2020-02-13 10:35:24,713 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:35:24,715 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:35:24,716 - call['/var/lib/ambari-agent/tmp/changeUid.sh hbase'] {}
2020-02-13 10:35:24,733 - call returned (0, '1005')
2020-02-13 10:35:24,733 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1005'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2020-02-13 10:35:24,745 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1005'] due to not_if
2020-02-13 10:35:24,746 - Group['hdfs'] {}
2020-02-13 10:35:24,748 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': ['hdfs', 'hadoop', u'hdfs']}
2020-02-13 10:35:24,749 - FS Type: HDFS
2020-02-13 10:35:24,749 - Directory['/etc/hadoop'] {'mode': 0755}
2020-02-13 10:35:24,783 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:35:24,783 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2020-02-13 10:35:24,805 - Execute[('setenforce', '0')] {'not_if': '(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)', 'sudo': True, 'only_if': 'test -f /selinux/enforce'}
2020-02-13 10:35:24,817 - Skipping Execute[('setenforce', '0')] due to not_if
2020-02-13 10:35:24,817 - Directory['/var/log/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'hadoop', 'mode': 0775, 'cd_access': 'a'}
2020-02-13 10:35:24,826 - Directory['/var/run/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'root', 'cd_access': 'a'}
2020-02-13 10:35:24,827 - Directory['/var/run/hadoop/hdfs'] {'owner': 'hdfs', 'cd_access': 'a'}
2020-02-13 10:35:24,829 - Directory['/tmp/hadoop-hdfs'] {'owner': 'hdfs', 'create_parents': True, 'cd_access': 'a'}
2020-02-13 10:35:24,837 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/commons-logging.properties'] {'content': Template('commons-logging.properties.j2'), 'owner': 'hdfs'}
2020-02-13 10:35:24,843 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/health_check'] {'content': Template('health_check.j2'), 'owner': 'hdfs'}
2020-02-13 10:35:24,859 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/log4j.properties'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:24,884 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/hadoop-metrics2.properties'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:35:24,885 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/task-log4j.properties'] {'content': StaticFile('task-log4j.properties'), 'mode': 0755}
2020-02-13 10:35:24,886 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/configuration.xsl'] {'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:35:24,898 - File['/etc/hadoop/conf/topology_mappings.data'] {'owner': 'hdfs', 'content': Template('topology_mappings.data.j2'), 'only_if': 'test -d /etc/hadoop/conf', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:24,906 - File['/etc/hadoop/conf/topology_script.py'] {'content': StaticFile('topology_script.py'), 'only_if': 'test -d /etc/hadoop/conf', 'mode': 0755}
2020-02-13 10:35:24,915 - Skipping unlimited key JCE policy check and setup since it is not required
2020-02-13 10:35:25,383 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:35:25,403 - call['ambari-python-wrap /usr/bin/hdp-select status hive-server2'] {'timeout': 20}
2020-02-13 10:35:25,442 - call returned (0, 'hive-server2 - 3.0.1.0-187')
2020-02-13 10:35:25,445 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:35:25,487 - File['/var/lib/ambari-agent/cred/lib/CredentialUtil.jar'] {'content': DownloadSource('http://master.localdomain:8080/resources/CredentialUtil.jar'), 'mode': 0755}
2020-02-13 10:35:25,489 - Not downloading the file from http://master.localdomain:8080/resources/CredentialUtil.jar, because /var/lib/ambari-agent/tmp/CredentialUtil.jar already exists
2020-02-13 10:35:27,317 - Directories to fill with configs: [u'/usr/hdp/current/hive-server2/conf', u'/usr/hdp/current/hive-server2/conf/']
2020-02-13 10:35:27,321 - Directory['/etc/hive/3.0.1.0-187/0'] {'owner': 'hive', 'group': 'hadoop', 'create_parents': True, 'mode': 0755}
2020-02-13 10:35:27,324 - XmlConfig['mapred-site.xml'] {'group': 'hadoop', 'conf_dir': '/etc/hive/3.0.1.0-187/0', 'mode': 0644, 'configuration_attributes': {}, 'owner': 'hive', 'configurations': ...}
2020-02-13 10:35:27,350 - Generating config: /etc/hive/3.0.1.0-187/0/mapred-site.xml
2020-02-13 10:35:27,350 - File['/etc/hive/3.0.1.0-187/0/mapred-site.xml'] {'owner': 'hive', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:35:27,459 - File['/etc/hive/3.0.1.0-187/0/hive-default.xml.template'] {'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,461 - File['/etc/hive/3.0.1.0-187/0/hive-env.sh.template'] {'owner': 'hive', 'group': 'hadoop', 'mode': 0755}
2020-02-13 10:35:27,465 - File['/etc/hive/3.0.1.0-187/0/llap-daemon-log4j2.properties'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,471 - File['/etc/hive/3.0.1.0-187/0/llap-cli-log4j2.properties'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,480 - File['/etc/hive/3.0.1.0-187/0/hive-log4j2.properties'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,484 - File['/etc/hive/3.0.1.0-187/0/hive-exec-log4j2.properties'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,493 - File['/etc/hive/3.0.1.0-187/0/beeline-log4j2.properties'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,495 - XmlConfig['beeline-site.xml'] {'owner': 'hive', 'group': 'hadoop', 'mode': 0644, 'conf_dir': '/etc/hive/3.0.1.0-187/0', 'configurations': {'beeline.hs2.jdbc.url.container': u'jdbc:hive2://master.localdomain:2181/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2', 'beeline.hs2.jdbc.url.default': 'container'}}
2020-02-13 10:35:27,515 - Generating config: /etc/hive/3.0.1.0-187/0/beeline-site.xml
2020-02-13 10:35:27,515 - File['/etc/hive/3.0.1.0-187/0/beeline-site.xml'] {'owner': 'hive', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:35:27,519 - File['/etc/hive/3.0.1.0-187/0/parquet-logging.properties'] {'content': ..., 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,522 - Directory['/etc/hive/3.0.1.0-187/0'] {'owner': 'hive', 'group': 'hadoop', 'create_parents': True, 'mode': 0755}
2020-02-13 10:35:27,523 - XmlConfig['mapred-site.xml'] {'group': 'hadoop', 'conf_dir': '/etc/hive/3.0.1.0-187/0', 'mode': 0644, 'configuration_attributes': {}, 'owner': 'hive', 'configurations': ...}
2020-02-13 10:35:27,536 - Generating config: /etc/hive/3.0.1.0-187/0/mapred-site.xml
2020-02-13 10:35:27,536 - File['/etc/hive/3.0.1.0-187/0/mapred-site.xml'] {'owner': 'hive', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:35:27,614 - File['/etc/hive/3.0.1.0-187/0/hive-default.xml.template'] {'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,615 - File['/etc/hive/3.0.1.0-187/0/hive-env.sh.template'] {'owner': 'hive', 'group': 'hadoop', 'mode': 0755}
2020-02-13 10:35:27,622 - File['/etc/hive/3.0.1.0-187/0/llap-daemon-log4j2.properties'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,629 - File['/etc/hive/3.0.1.0-187/0/llap-cli-log4j2.properties'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,632 - File['/etc/hive/3.0.1.0-187/0/hive-log4j2.properties'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,637 - File['/etc/hive/3.0.1.0-187/0/hive-exec-log4j2.properties'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,640 - File['/etc/hive/3.0.1.0-187/0/beeline-log4j2.properties'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,656 - XmlConfig['beeline-site.xml'] {'owner': 'hive', 'group': 'hadoop', 'mode': 0644, 'conf_dir': '/etc/hive/3.0.1.0-187/0', 'configurations': {'beeline.hs2.jdbc.url.container': u'jdbc:hive2://master.localdomain:2181/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2', 'beeline.hs2.jdbc.url.default': 'container'}}
2020-02-13 10:35:27,680 - Generating config: /etc/hive/3.0.1.0-187/0/beeline-site.xml
2020-02-13 10:35:27,680 - File['/etc/hive/3.0.1.0-187/0/beeline-site.xml'] {'owner': 'hive', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:35:27,685 - File['/etc/hive/3.0.1.0-187/0/parquet-logging.properties'] {'content': ..., 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:35:27,687 - File['/usr/hdp/current/hive-server2/conf/hive-site.jceks'] {'content': StaticFile('/var/lib/ambari-agent/cred/conf/hive_server/hive-site.jceks'), 'owner': 'hive', 'group': 'hadoop', 'mode': 0640}
2020-02-13 10:35:27,691 - Writing File['/usr/hdp/current/hive-server2/conf/hive-site.jceks'] because contents don't match
2020-02-13 10:35:27,694 - XmlConfig['hive-site.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/current/hive-server2/conf/', 'mode': 0644, 'configuration_attributes': {u'hidden': {u'javax.jdo.option.ConnectionPassword': u'HIVE_CLIENT,CONFIG_DOWNLOAD'}}, 'owner': 'hive', 'configurations': ...}
2020-02-13 10:35:27,714 - Generating config: /usr/hdp/current/hive-server2/conf/hive-site.xml
2020-02-13 10:35:27,715 - File['/usr/hdp/current/hive-server2/conf/hive-site.xml'] {'owner': 'hive', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2020-02-13 10:35:28,032 - Writing File['/usr/hdp/current/hive-server2/conf/hive-site.xml'] because contents don't match
2020-02-13 10:35:28,045 - File['/usr/hdp/current/hive-server2/conf//hive-env.sh'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop', 'mode': 0755}
2020-02-13 10:35:28,046 - Writing File['/usr/hdp/current/hive-server2/conf//hive-env.sh'] because contents don't match
2020-02-13 10:35:28,047 - Directory['/etc/security/limits.d'] {'owner': 'root', 'create_parents': True, 'group': 'root'}
2020-02-13 10:35:28,052 - File['/etc/security/limits.d/hive.conf'] {'content': Template('hive.conf.j2'), 'owner': 'root', 'group': 'root', 'mode': 0644}
2020-02-13 10:35:28,053 - File['/usr/lib/ambari-agent/DBConnectionVerification.jar'] {'content': DownloadSource('http://master.localdomain:8080/resources/DBConnectionVerification.jar'), 'mode': 0644}
2020-02-13 10:35:28,053 - Not downloading the file from http://master.localdomain:8080/resources/DBConnectionVerification.jar, because /var/lib/ambari-agent/tmp/DBConnectionVerification.jar already exists
2020-02-13 10:35:28,055 - File['/var/lib/ambari-agent/tmp/mysql-connector-java.jar'] {'content': DownloadSource('http://master.localdomain:8080/resources/mysql-connector-java.jar')}
2020-02-13 10:35:28,055 - Downloading the file from http://master.localdomain:8080/resources/mysql-connector-java.jar
Command failed after 1 tries
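This is where the Hive start actually fails: the agent cannot download mysql-connector-java.jar from the Ambari server's /resources endpoint, which typically means the MySQL JDBC driver was never registered on the Ambari server. A minimal sketch of the checks and the documented registration step, assuming the connector jar lives at /usr/share/java/mysql-connector-java.jar (adjust to wherever it really is):
[root@master ~]# # Ambari serves /resources out of /var/lib/ambari-server/resources; is the jar staged there?
[root@master ~]# ls -l /var/lib/ambari-server/resources/mysql-connector-java.jar
[root@master ~]# # Is the endpoint reachable from this agent host?
[root@master ~]# curl -I http://master.localdomain:8080/resources/mysql-connector-java.jar
[root@master ~]# # If the jar is missing, register it on the Ambari server host and retry the Hive install
[root@master ~]# ambari-server setup --jdbc-db=mysql --jdbc-driver=/usr/share/java/mysql-connector-java.jar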
2) Spark2 Job History Server log is as follows..
stderr: /var/lib/ambari-agent/data/errors-905.txt
Traceback (most recent call last):
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/SPARK2/package/scripts/job_history_server.py", line 102, in
JobHistoryServer().execute()
File "/usr/lib/ambari-agent/lib/resource_management/libraries/script/script.py", line 353, in execute
method(env)
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/SPARK2/package/scripts/job_history_server.py", line 54, in start
self.configure(env)
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/SPARK2/package/scripts/job_history_server.py", line 48, in configure
setup_spark(env, 'server', upgrade_type=upgrade_type, action = 'config')
File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/SPARK2/package/scripts/setup_spark.py", line 60, in setup_spark
mode=0775
File "/usr/lib/ambari-agent/lib/resource_management/core/base.py", line 166, in __init__
self.env.run()
File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 160, in run
self.run_action(resource, action)
File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 124, in run_action
provider_action()
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 654, in action_create_on_execute
self.action_delayed("create")
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 651, in action_delayed
self.get_hdfs_resource_executor().action_delayed(action_name, self)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 350, in action_delayed
self.action_delayed_for_nameservice(None, action_name, main_resource)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 372, in action_delayed_for_nameservice
self._assert_valid()
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 311, in _assert_valid
self.target_status = self._get_file_status(target)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 474, in _get_file_status
list_status = self.util.run_command(target, 'GETFILESTATUS', method='GET', ignore_status_codes=['404'], assertable_result=False)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 199, in run_command
return self._run_command(*args, **kwargs)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 259, in _run_command
_, out, err = get_user_call_output(cmd, user=self.run_user, logoutput=self.logoutput, quiet=False)
File "/usr/lib/ambari-agent/lib/resource_management/libraries/functions/get_user_call_output.py", line 62, in get_user_call_output
raise ExecutionFailed(err_msg, code, files_output[0], files_output[1])
resource_management.core.exceptions.ExecutionFailed: Execution of 'curl -sS -L -w '%{http_code}' -X GET 'http://master.localdomain:50070/webhdfs/v1/user/spark?op=GETFILESTATUS&user.name=hdfs' 1>/tmp/tmpBaVXIV 2>/tmp/tmpMaGSZn' returned 7. curl: (7) Failed connect to master.localdomain:50070; Connection refused
000
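curl exit status 7 is "failed to connect to host", and the trailing 000 is the -w '%{http_code}' write-out when no HTTP response arrives at all: nothing is listening on master.localdomain:50070, so the NameNode's WebHDFS endpoint is down rather than returning an error. A quick hedged check (9870 is the stock Hadoop 3 default port; this stack is evidently configured for the legacy 50070):
[root@master ~]# ss -ltn | grep -E ':(50070|9870)'
[root@master ~]# curl -sS 'http://master.localdomain:50070/webhdfs/v1/?op=LISTSTATUS&user.name=hdfs'
The stdout from the same run (below) ends with the identical call returning 7.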
stdout: /var/lib/ambari-agent/data/output-905.txt
2020-02-13 10:36:37,908 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:36:37,923 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:36:38,056 - Stack Feature Version Info: Cluster Stack=3.0, Command Stack=None, Command Version=3.0.1.0-187 -> 3.0.1.0-187
2020-02-13 10:36:38,061 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:36:38,062 - Group['livy'] {}
2020-02-13 10:36:38,063 - Group['spark'] {}
2020-02-13 10:36:38,063 - Group['hdfs'] {}
2020-02-13 10:36:38,063 - Group['hadoop'] {}
2020-02-13 10:36:38,063 - Group['users'] {}
2020-02-13 10:36:38,064 - User['yarn-ats'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:36:38,065 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:36:38,066 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:36:38,067 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:36:38,068 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:36:38,068 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'users'], 'uid': None}
2020-02-13 10:36:38,069 - User['livy'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['livy', 'hadoop'], 'uid': None}
2020-02-13 10:36:38,070 - User['spark'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['spark', 'hadoop'], 'uid': None}
2020-02-13 10:36:38,071 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'users'], 'uid': None}
2020-02-13 10:36:38,071 - User['kafka'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:36:38,072 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hdfs', 'hadoop'], 'uid': None}
2020-02-13 10:36:38,072 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:36:38,073 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:36:38,073 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:36:38,074 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop'], 'uid': None}
2020-02-13 10:36:38,074 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:36:38,075 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2020-02-13 10:36:38,089 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] due to not_if
2020-02-13 10:36:38,090 - Directory['/tmp/hbase-hbase'] {'owner': 'hbase', 'create_parents': True, 'mode': 0775, 'cd_access': 'a'}
2020-02-13 10:36:38,090 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:36:38,091 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2020-02-13 10:36:38,092 - call['/var/lib/ambari-agent/tmp/changeUid.sh hbase'] {}
2020-02-13 10:36:38,102 - call returned (0, '1005')
2020-02-13 10:36:38,103 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1005'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2020-02-13 10:36:38,107 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1005'] due to not_if
2020-02-13 10:36:38,108 - Group['hdfs'] {}
2020-02-13 10:36:38,108 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': ['hdfs', 'hadoop', u'hdfs']}
2020-02-13 10:36:38,108 - FS Type: HDFS
2020-02-13 10:36:38,109 - Directory['/etc/hadoop'] {'mode': 0755}
2020-02-13 10:36:38,120 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:36:38,120 - Writing File['/usr/hdp/3.0.1.0-187/hadoop/conf/hadoop-env.sh'] because contents don't match
2020-02-13 10:36:38,121 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2020-02-13 10:36:38,137 - Execute[('setenforce', '0')] {'not_if': '(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)', 'sudo': True, 'only_if': 'test -f /selinux/enforce'}
2020-02-13 10:36:38,145 - Skipping Execute[('setenforce', '0')] due to not_if
2020-02-13 10:36:38,145 - Directory['/var/log/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'hadoop', 'mode': 0775, 'cd_access': 'a'}
2020-02-13 10:36:38,147 - Directory['/var/run/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'root', 'cd_access': 'a'}
2020-02-13 10:36:38,147 - Directory['/var/run/hadoop/hdfs'] {'owner': 'hdfs', 'cd_access': 'a'}
2020-02-13 10:36:38,148 - Directory['/tmp/hadoop-hdfs'] {'owner': 'hdfs', 'create_parents': True, 'cd_access': 'a'}
2020-02-13 10:36:38,151 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/commons-logging.properties'] {'content': Template('commons-logging.properties.j2'), 'owner': 'hdfs'}
2020-02-13 10:36:38,153 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/health_check'] {'content': Template('health_check.j2'), 'owner': 'hdfs'}
2020-02-13 10:36:38,158 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/log4j.properties'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:36:38,166 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/hadoop-metrics2.properties'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:36:38,167 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/task-log4j.properties'] {'content': StaticFile('task-log4j.properties'), 'mode': 0755}
2020-02-13 10:36:38,168 - File['/usr/hdp/3.0.1.0-187/hadoop/conf/configuration.xsl'] {'owner': 'hdfs', 'group': 'hadoop'}
2020-02-13 10:36:38,171 - File['/etc/hadoop/conf/topology_mappings.data'] {'owner': 'hdfs', 'content': Template('topology_mappings.data.j2'), 'only_if': 'test -d /etc/hadoop/conf', 'group': 'hadoop', 'mode': 0644}
2020-02-13 10:36:38,174 - File['/etc/hadoop/conf/topology_script.py'] {'content': StaticFile('topology_script.py'), 'only_if': 'test -d /etc/hadoop/conf', 'mode': 0755}
2020-02-13 10:36:38,176 - Skipping unlimited key JCE policy check and setup since it is not required
2020-02-13 10:36:38,513 - Using hadoop conf dir: /usr/hdp/3.0.1.0-187/hadoop/conf
2020-02-13 10:36:38,538 - Directory['/var/run/spark2'] {'owner': 'spark', 'create_parents': True, 'group': 'hadoop', 'mode': 0775}
2020-02-13 10:36:38,539 - Creating directory Directory['/var/run/spark2'] since it doesn't exist.
2020-02-13 10:36:38,540 - Changing owner for /var/run/spark2 from 0 to spark
2020-02-13 10:36:38,540 - Changing group for /var/run/spark2 from 0 to hadoop
2020-02-13 10:36:38,540 - Changing permission for /var/run/spark2 from 755 to 775
2020-02-13 10:36:38,540 - Directory['/var/log/spark2'] {'owner': 'spark', 'group': 'hadoop', 'create_parents': True, 'mode': 0775}
2020-02-13 10:36:38,541 - HdfsResource['/user/spark'] {'security_enabled': False, 'hadoop_bin_dir': '/usr/hdp/3.0.1.0-187/hadoop/bin', 'keytab': [EMPTY], 'dfs_type': 'HDFS', 'default_fs': 'hdfs://master.localdomain:8020', 'hdfs_resource_ignore_file': '/var/lib/ambari-agent/data/.hdfs_resource_ignore', 'hdfs_site': ..., 'kinit_path_local': 'kinit', 'principal_name': [EMPTY], 'user': 'hdfs', 'owner': 'spark', 'hadoop_conf_dir': '/usr/hdp/3.0.1.0-187/hadoop/conf', 'type': 'directory', 'action': ['create_on_execute'], 'immutable_paths': [u'/mr-history/done', u'/warehouse/tablespace/managed/hive', u'/warehouse/tablespace/external/hive', u'/app-logs', u'/tmp'], 'mode': 0775}
2020-02-13 10:36:38,582 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X GET '"'"'http://master.localdomain:50070/webhdfs/v1/user/spark?op=GETFILESTATUS&user.name=hdfs'"'"' 1>/tmp/tmpBaVXIV 2>/tmp/tmpMaGSZn''] {'logoutput': None, 'quiet': False}
2020-02-13 10:36:38,820 - call returned (7, '')
Command failed after 1 tries
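Both failures trace back to HDFS rather than to Spark2 or Hive themselves: HdfsResource['/user/spark'] simply cannot reach the NameNode, and the hs_err dump earlier shows this host failing a ~3 GB native mmap, so the NameNode has plausibly been killed by memory exhaustion. A hedged recovery order: relieve memory pressure first, start HDFS, confirm WebHDFS answers, then retry the Spark2 and Hive component starts from Ambari.
[root@master ~]# free -h                               # confirm whether RAM/swap are actually exhausted
[root@master ~]# su - hdfs -c 'hdfs dfsadmin -report'  # NameNode reachable once HDFS is started?
[root@master ~]# # Optional manual equivalent of what HdfsResource['/user/spark'] would create on retry
[root@master ~]# su - hdfs -c 'hdfs dfs -mkdir -p /user/spark && hdfs dfs -chown spark /user/spark'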