stderr: /var/lib/ambari-agent/data/errors-384.txt

Traceback (most recent call last):
  File "/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py", line 68, in <module>
    HiveClient().execute()
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 280, in execute
    method(env)
  File "/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py", line 35, in install
    self.configure(env)
  File "/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py", line 43, in configure
    hive(name='client')
  File "/usr/lib/python2.6/site-packages/ambari_commons/os_family_impl.py", line 89, in thunk
    return fn(*args, **kwargs)
  File "/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py", line 282, in hive
    mode = 0644,
  File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 155, in __init__
    self.env.run()
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 160, in run
    self.run_action(resource, action)
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 124, in run_action
    provider_action()
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/system.py", line 123, in action_create
    content = self._get_content()
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/system.py", line 160, in _get_content
    return content()
  File "/usr/lib/python2.6/site-packages/resource_management/core/source.py", line 51, in __call__
    return self.get_content()
  File "/usr/lib/python2.6/site-packages/resource_management/core/source.py", line 193, in get_content
    web_file = opener.open(req)
  File "/usr/lib64/python2.6/urllib2.py", line 391, in open
    response = self._open(req, data)
  File "/usr/lib64/python2.6/urllib2.py", line 409, in _open
    '_open', req)
  File "/usr/lib64/python2.6/urllib2.py", line 369, in _call_chain
    result = func(*args)
  File "/usr/lib64/python2.6/urllib2.py", line 1190, in http_open
    return self.do_open(httplib.HTTPConnection, req)
  File "/usr/lib64/python2.6/urllib2.py", line 1165, in do_open
    raise URLError(err)
urllib2.URLError:

stdout: /var/lib/ambari-agent/data/output-384.txt

2016-10-31 18:52:15,495 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.0.0-1245
2016-10-31 18:52:15,495 - Checking if need to create versioned conf dir /etc/hadoop/2.5.0.0-1245/0
2016-10-31 18:52:15,496 - call[('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2016-10-31 18:52:15,522 - call returned (1, '/etc/hadoop/2.5.0.0-1245/0 exist already', '')
2016-10-31 18:52:15,523 - checked_call[('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2016-10-31 18:52:15,546 - checked_call returned (0, '')
2016-10-31 18:52:15,547 - Ensuring that hadoop has the correct symlink structure
2016-10-31 18:52:15,547 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-10-31 18:52:15,549 - Group['hadoop'] {}
2016-10-31 18:52:15,551 - Group['users'] {}
2016-10-31 18:52:15,551 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-10-31 18:52:15,551 - User['logsearch'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-10-31 18:52:15,552 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-10-31 18:52:15,552 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-10-31 18:52:15,553 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-10-31 18:52:15,553 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['users']}
2016-10-31 18:52:15,554 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['users']}
2016-10-31 18:52:15,554 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-10-31 18:52:15,554 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-10-31 18:52:15,555 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-10-31 18:52:15,555 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-10-31 18:52:15,556 - User['hcat'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-10-31 18:52:15,557 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-10-31 18:52:15,558 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2016-10-31 18:52:15,565 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] due to not_if
2016-10-31 18:52:15,565 - Group['hdfs'] {}
2016-10-31 18:52:15,566 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'hdfs']}
2016-10-31 18:52:15,567 - FS Type:
2016-10-31 18:52:15,567 - Directory['/etc/hadoop'] {'mode': 0755}
2016-10-31 18:52:15,584 - File['/usr/hdp/current/hadoop-client/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2016-10-31 18:52:15,584 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2016-10-31 18:52:15,597 - Initializing 2 repositories
2016-10-31 18:52:15,598 - Repository['HDP-2.5'] {'base_url': 'http://3.209.124.205/HDP/centos6', 'action': ['create'], 'components': ['HDP', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'HDP', 'mirror_list': None}
2016-10-31 18:52:15,608 - File['/etc/yum.repos.d/HDP.repo'] {'content': '[HDP-2.5]\nname=HDP-2.5\nbaseurl=http://3.209.124.205/HDP/centos6\n\npath=/\nenabled=1\ngpgcheck=0'}
2016-10-31 18:52:15,609 - Repository['HDP-UTILS-1.1.0.21'] {'base_url': 'http://3.209.124.205/HDP-UTILS-1.1.0.21/repos/centos6', 'action': ['create'], 'components': ['HDP-UTILS', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'HDP-UTILS', 'mirror_list': None}
2016-10-31 18:52:15,611 - File['/etc/yum.repos.d/HDP-UTILS.repo'] {'content': '[HDP-UTILS-1.1.0.21]\nname=HDP-UTILS-1.1.0.21\nbaseurl=http://3.209.124.205/HDP-UTILS-1.1.0.21/repos/centos6\n\npath=/\nenabled=1\ngpgcheck=0'}
2016-10-31 18:52:15,611 - Package['unzip'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-10-31 18:52:15,656 - Skipping installation of existing package unzip
2016-10-31 18:52:15,656 - Package['curl'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-10-31 18:52:15,666 - Skipping installation of existing package curl
2016-10-31 18:52:15,666 - Package['hdp-select'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-10-31 18:52:15,676 - Skipping installation of existing package hdp-select
2016-10-31 18:52:15,857 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.0.0-1245
2016-10-31 18:52:15,857 - Checking if need to create versioned conf dir /etc/hadoop/2.5.0.0-1245/0
2016-10-31 18:52:15,857 - call[('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2016-10-31 18:52:15,883 - call returned (1, '/etc/hadoop/2.5.0.0-1245/0 exist already', '')
2016-10-31 18:52:15,884 - checked_call[('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2016-10-31 18:52:15,904 - checked_call returned (0, '')
2016-10-31 18:52:15,905 - Ensuring that hadoop has the correct symlink structure
2016-10-31 18:52:15,905 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-10-31 18:52:15,910 - call['ambari-python-wrap /usr/bin/hdp-select status hive-server2'] {'timeout': 20}
2016-10-31 18:52:15,934 - call returned (0, 'hive-server2 - 2.5.0.0-1245')
2016-10-31 18:52:15,936 - Stack Feature Version Info: stack_version=2.5, version=2.5.0.0-1245, current_cluster_version=2.5.0.0-1245 -> 2.5.0.0-1245
2016-10-31 18:52:15,947 - Version 2.5.0.0-1245 was provided as effective cluster version. Using package version 2_5_0_0_1245
2016-10-31 18:52:15,948 - Package['hive_2_5_0_0_1245'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-10-31 18:52:15,993 - Skipping installation of existing package hive_2_5_0_0_1245
2016-10-31 18:52:15,994 - Version 2.5.0.0-1245 was provided as effective cluster version. Using package version 2_5_0_0_1245
2016-10-31 18:52:15,995 - Package['hive_2_5_0_0_1245-hcatalog'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-10-31 18:52:16,005 - Skipping installation of existing package hive_2_5_0_0_1245-hcatalog
2016-10-31 18:52:16,006 - Version 2.5.0.0-1245 was provided as effective cluster version. Using package version 2_5_0_0_1245
2016-10-31 18:52:16,006 - Package['hive_2_5_0_0_1245-webhcat'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-10-31 18:52:16,017 - Skipping installation of existing package hive_2_5_0_0_1245-webhcat
2016-10-31 18:52:16,017 - Version 2.5.0.0-1245 was provided as effective cluster version. Using package version 2_5_0_0_1245
2016-10-31 18:52:16,018 - Package['hive2_2_5_0_0_1245'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-10-31 18:52:16,028 - Skipping installation of existing package hive2_2_5_0_0_1245
2016-10-31 18:52:16,029 - Version 2.5.0.0-1245 was provided as effective cluster version. Using package version 2_5_0_0_1245
2016-10-31 18:52:16,030 - Package['tez_hive2_2_5_0_0_1245'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-10-31 18:52:16,040 - Skipping installation of existing package tez_hive2_2_5_0_0_1245
2016-10-31 18:52:16,042 - Directory['/etc/hive'] {'mode': 0755}
2016-10-31 18:52:16,042 - Directories to fill with configs: ['/usr/hdp/current/hive-client/conf']
2016-10-31 18:52:16,042 - Directory['/usr/hdp/current/hive-client/conf'] {'owner': 'hive', 'group': 'hadoop', 'create_parents': True}
2016-10-31 18:52:16,043 - XmlConfig['mapred-site.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/current/hive-client/conf', 'mode': 0644, 'configuration_attributes': {}, 'owner': 'hive', 'configurations': ...}
2016-10-31 18:52:16,053 - Generating config: /usr/hdp/current/hive-client/conf/mapred-site.xml
2016-10-31 18:52:16,053 - File['/usr/hdp/current/hive-client/conf/mapred-site.xml'] {'owner': 'hive', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2016-10-31 18:52:16,085 - File['/usr/hdp/current/hive-client/conf/hive-default.xml.template'] {'owner': 'hive', 'group': 'hadoop'}
2016-10-31 18:52:16,086 - File['/usr/hdp/current/hive-client/conf/hive-env.sh.template'] {'owner': 'hive', 'group': 'hadoop'}
2016-10-31 18:52:16,086 - File['/usr/hdp/current/hive-client/conf/hive-exec-log4j.properties'] {'content': ..., 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2016-10-31 18:52:16,087 - File['/usr/hdp/current/hive-client/conf/hive-log4j.properties'] {'content': ..., 'owner': 'hive', 'group': 'hadoop', 'mode': 0644}
2016-10-31 18:52:16,087 - XmlConfig['hive-site.xml'] {'group': 'hadoop', 'conf_dir': '/usr/hdp/current/hive-client/conf', 'mode': 0644, 'configuration_attributes': {'hidden': {'javax.jdo.option.ConnectionPassword': 'HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD'}}, 'owner': 'hive', 'configurations': ...}
2016-10-31 18:52:16,094 - Generating config: /usr/hdp/current/hive-client/conf/hive-site.xml
2016-10-31 18:52:16,094 - File['/usr/hdp/current/hive-client/conf/hive-site.xml'] {'owner': 'hive', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644, 'encoding': 'UTF-8'}
2016-10-31 18:52:16,198 - File['/usr/hdp/current/hive-client/conf/hive-env.sh'] {'content': InlineTemplate(...), 'owner': 'hive', 'group': 'hadoop'}
2016-10-31 18:52:16,199 - Directory['/etc/security/limits.d'] {'owner': 'root', 'create_parents': True, 'group': 'root'}
2016-10-31 18:52:16,201 - File['/etc/security/limits.d/hive.conf'] {'content': Template('hive.conf.j2'), 'owner': 'root', 'group': 'root', 'mode': 0644}
2016-10-31 18:52:16,202 - File['/usr/lib/ambari-agent/DBConnectionVerification.jar'] {'content': DownloadSource('http://dn1.tcsgegdc.com:8080/resources/DBConnectionVerification.jar'), 'mode': 0644}
2016-10-31 18:52:16,202 - Downloading the file from http://dn1.tcsgegdc.com:8080/resources/DBConnectionVerification.jar

Command failed after 1 tries
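
The install fails at the last step of the log: the agent's DownloadSource could not fetch DBConnectionVerification.jar from the Ambari server's /resources endpoint, and urllib2 raised URLError, which indicates the agent host could not open an HTTP connection to dn1.tcsgegdc.com:8080. A minimal sketch to reproduce the fetch from the failing agent host (Python 2, mirroring the urllib2 call path in the traceback; the URL is taken from the log, while the 10-second timeout is an assumed value):

    # Connectivity check for the Ambari server /resources endpoint (Python 2).
    # Run on the agent host where the Hive client install failed.
    import urllib2

    url = "http://dn1.tcsgegdc.com:8080/resources/DBConnectionVerification.jar"
    try:
        response = urllib2.urlopen(url, timeout=10)
        data = response.read()
        print("OK: HTTP %s, %d bytes fetched" % (response.getcode(), len(data)))
    except urllib2.URLError as e:
        # Same exception class as in the traceback above; the attached reason
        # usually names the underlying socket error (refused, timeout, DNS).
        print("FAILED: %s" % e)

Running curl -v http://dn1.tcsgegdc.com:8080/resources/DBConnectionVerification.jar from the same host is an equivalent check and prints the underlying connection error directly.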