stderr:
Traceback (most recent call last):
  File "/var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package/scripts/bigsql-head.py", line 2218, in <module>
    BigsqlHead().execute()
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 329, in execute
    method(env)
  File "/var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package/scripts/bigsql-head.py", line 57, in install
    self.install_primary(env)
  File "/var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package/scripts/bigsql-head.py", line 319, in install_primary
    self.install_packages(env)
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 708, in install_packages
    retry_count=agent_stack_retry_count)
  File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 166, in __init__
    self.env.run()
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 160, in run
    self.run_action(resource, action)
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 124, in run_action
    provider_action()
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/__init__.py", line 54, in action_install
    self.install_package(package_name, self.resource.use_repos, self.resource.skip_repos)
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/yumrpm.py", line 53, in install_package
    self.checked_call_with_retries(cmd, sudo=True, logoutput=self.get_logoutput())
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/__init__.py", line 86, in checked_call_with_retries
    return self._call_with_retries(cmd, is_checked=True, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/__init__.py", line 98, in _call_with_retries
    code, out = func(cmd, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 72, in inner
    result = function(command, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 102, in checked_call
    tries=tries, try_sleep=try_sleep, timeout_kill_strategy=timeout_kill_strategy)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 150, in _call_wrapper
    result = _call(command, **kwargs_copy)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 303, in _call
    raise ExecutionFailed(err_msg, code, out, err)
resource_management.core.exceptions.ExecutionFailed: Execution of '/usr/bin/yum -d 0 -e 0 -y install db2luw_5_0_1_0' returned 1. Error downloading packages:
  db2luw_5_0_1_0-11.1.9.0-s170813.x86_64: [Errno 256] No more mirrors to try.
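The traceback boils down to a single problem: yum could not download db2luw_5_0_1_0 from the IBM-Big_SQL-5_0_1_0 repository ("[Errno 256] No more mirrors to try"), so Ambari's install step failed. As a first check, something like the following can be run on the failing host; this is a minimal diagnostic sketch, assuming the node is supposed to have direct HTTP access to the IBM repo (the baseurl appears in the stdout log below, and repodata/repomd.xml is the standard yum metadata file every repo serves):

    # Verify the Big SQL repo metadata is actually reachable from this node
    curl -I http://ibm-open-platform.ibm.com/repos/BigSQL/rhel/7/x86_64/5.0.1.0/repodata/repomd.xml

    # Re-run the failing install by hand with a higher debug level (-d) to see which URL yum is choking on
    sudo /usr/bin/yum -d 2 -y install db2luw_5_0_1_0

If curl cannot fetch repomd.xml, the problem is network/repo availability rather than anything in the Ambari scripts.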
stdout:
2017-10-31 17:33:12,576 - Stack Feature Version Info: Cluster Stack=2.6, Cluster Current Version=2.6.2.0-205, Command Stack=None, Command Version=2.6.2.0-205 -> 2.6.2.0-205
2017-10-31 17:33:12,584 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
User Group mapping (user_group) is missing in the hostLevelParams
2017-10-31 17:33:12,585 - Group['hadoop'] {}
2017-10-31 17:33:12,586 - Group['users'] {}
2017-10-31 17:33:12,586 - Group['knox'] {}
2017-10-31 17:33:12,586 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,587 - call['/var/lib/ambari-agent/tmp/changeUid.sh hive'] {}
2017-10-31 17:33:12,594 - call returned (0, '1009')
2017-10-31 17:33:12,595 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1009}
2017-10-31 17:33:12,596 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,596 - call['/var/lib/ambari-agent/tmp/changeUid.sh zookeeper'] {}
2017-10-31 17:33:12,603 - call returned (0, '1002')
2017-10-31 17:33:12,604 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1002}
2017-10-31 17:33:12,605 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,607 - call['/var/lib/ambari-agent/tmp/changeUid.sh infra-solr'] {}
2017-10-31 17:33:12,613 - call returned (0, '1001')
2017-10-31 17:33:12,614 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1001}
2017-10-31 17:33:12,616 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,617 - call['/var/lib/ambari-agent/tmp/changeUid.sh ams'] {}
2017-10-31 17:33:12,623 - call returned (0, '1003')
2017-10-31 17:33:12,624 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1003}
2017-10-31 17:33:12,625 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,625 - call['/var/lib/ambari-agent/tmp/changeUid.sh tez'] {}
2017-10-31 17:33:12,632 - call returned (0, '1010')
2017-10-31 17:33:12,632 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users'], 'uid': 1010}
2017-10-31 17:33:12,633 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,634 - call['/var/lib/ambari-agent/tmp/changeUid.sh dsmuser'] {}
2017-10-31 17:33:12,641 - call returned (0, '1015')
2017-10-31 17:33:12,641 - User['dsmuser'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1015}
2017-10-31 17:33:12,642 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users'], 'uid': None}
2017-10-31 17:33:12,643 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,644 - call['/var/lib/ambari-agent/tmp/changeUid.sh hdfs'] {}
2017-10-31 17:33:12,650 - call returned (0, '1005')
2017-10-31 17:33:12,651 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1005}
2017-10-31 17:33:12,652 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,653 - call['/var/lib/ambari-agent/tmp/changeUid.sh sqoop'] {}
2017-10-31 17:33:12,659 - call returned (0, '1011')
2017-10-31 17:33:12,660 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1011}
2017-10-31 17:33:12,661 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,662 - call['/var/lib/ambari-agent/tmp/changeUid.sh hadoop'] {}
2017-10-31 17:33:12,668 - call returned (0, '1016')
2017-10-31 17:33:12,668 - User['hadoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1016}
2017-10-31 17:33:12,669 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,670 - call['/var/lib/ambari-agent/tmp/changeUid.sh yarn'] {}
2017-10-31 17:33:12,676 - call returned (0, '1006')
2017-10-31 17:33:12,676 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1006}
2017-10-31 17:33:12,677 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,678 - call['/var/lib/ambari-agent/tmp/changeUid.sh bigsql'] {}
2017-10-31 17:33:12,684 - call returned (0, '2824')
2017-10-31 17:33:12,684 - User['bigsql'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 2824}
2017-10-31 17:33:12,686 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,686 - call['/var/lib/ambari-agent/tmp/changeUid.sh mapred'] {}
2017-10-31 17:33:12,692 - call returned (0, '1007')
2017-10-31 17:33:12,693 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1007}
2017-10-31 17:33:12,694 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,694 - call['/var/lib/ambari-agent/tmp/changeUid.sh hbase'] {}
2017-10-31 17:33:12,700 - call returned (0, '1012')
2017-10-31 17:33:12,701 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1012}
2017-10-31 17:33:12,702 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,702 - call['/var/lib/ambari-agent/tmp/changeUid.sh knox'] {}
2017-10-31 17:33:12,708 - call returned (0, '1018')
2017-10-31 17:33:12,708 - User['knox'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1018}
2017-10-31 17:33:12,709 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,710 - call['/var/lib/ambari-agent/tmp/changeUid.sh hcat'] {}
2017-10-31 17:33:12,716 - call returned (0, '1013')
2017-10-31 17:33:12,716 - User['hcat'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop'], 'uid': 1013}
2017-10-31 17:33:12,717 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,718 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2017-10-31 17:33:12,721 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 0'] due to not_if
2017-10-31 17:33:12,722 - Directory['/tmp/hbase-hbase'] {'owner': 'hbase', 'create_parents': True, 'mode': 0775, 'cd_access': 'a'}
2017-10-31 17:33:12,722 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,723 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-10-31 17:33:12,724 - call['/var/lib/ambari-agent/tmp/changeUid.sh hbase'] {}
2017-10-31 17:33:12,729 - call returned (0, '1012')
2017-10-31 17:33:12,730 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1012'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2017-10-31 17:33:12,733 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase 1012'] due to not_if
2017-10-31 17:33:12,734 - Group['hdfs'] {}
2017-10-31 17:33:12,734 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': [u'hadoop', u'hdfs']}
2017-10-31 17:33:12,734 - FS Type:
2017-10-31 17:33:12,734 - Directory['/etc/hadoop'] {'mode': 0755}
2017-10-31 17:33:12,750 - File['/usr/hdp/current/hadoop-client/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2017-10-31 17:33:12,751 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2017-10-31 17:33:12,764 - Initializing 3 repositories
2017-10-31 17:33:12,765 - Repository['HDP-2.6'] {'base_url': 'http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.6.2.0', 'action': ['create'], 'components': [u'HDP', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'HDP', 'mirror_list': None}
2017-10-31 17:33:12,770 - File['/etc/yum.repos.d/HDP.repo'] {'content': '[HDP-2.6]\nname=HDP-2.6\nbaseurl=http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.6.2.0\n\npath=/\nenabled=1\ngpgcheck=0'}
2017-10-31 17:33:12,771 - Repository['HDP-UTILS-1.1.0.21'] {'base_url': 'http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos7', 'action': ['create'], 'components': [u'HDP-UTILS', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'HDP-UTILS', 'mirror_list': None}
2017-10-31 17:33:12,773 - File['/etc/yum.repos.d/HDP-UTILS.repo'] {'content': '[HDP-UTILS-1.1.0.21]\nname=HDP-UTILS-1.1.0.21\nbaseurl=http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos7\n\npath=/\nenabled=1\ngpgcheck=0'}
2017-10-31 17:33:12,774 - Repository['IBM-Big_SQL-5_0_1_0'] {'base_url': 'http://ibm-open-platform.ibm.com/repos/BigSQL/rhel/7/x86_64/5.0.1.0', 'action': ['create'], 'components': [u'IBM-Big_SQL-5_0_1_0', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'IBM-Big_SQL-5_0_1_0', 'mirror_list': None}
2017-10-31 17:33:12,776 - File['/etc/yum.repos.d/IBM-Big_SQL-5_0_1_0.repo'] {'content': '[IBM-Big_SQL-5_0_1_0]\nname=IBM-Big_SQL-5_0_1_0\nbaseurl=http://ibm-open-platform.ibm.com/repos/BigSQL/rhel/7/x86_64/5.0.1.0\n\npath=/\nenabled=1\ngpgcheck=0'}
2017-10-31 17:33:12,777 - Package['unzip'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2017-10-31 17:33:12,840 - Skipping installation of existing package unzip
2017-10-31 17:33:12,840 - Package['curl'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2017-10-31 17:33:12,847 - Skipping installation of existing package curl
2017-10-31 17:33:12,847 - Package['hdp-select'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2017-10-31 17:33:12,854 - Skipping installation of existing package hdp-select
Running Ambari as: root
Tokens in hdfs data path: [1]
Validate REST. Cluster: HDP_cluster
Given bigsql db path: [/var/ibm/bigsql/database]
Filtered bigsql database cfg path: [/var/ibm/bigsql/database]
Filtered hdfs data path: [/hadoop/bigsql]
Current database path is: [/var/ibm/bigsql/database,/hadoop/bigsql]
2017-10-31 17:33:13,124 - Execute[' sudo getent group hadoop'] {}
2017-10-31 17:33:13,132 - Execute[' sudo useradd -m -g hadoop bigsql; exit 0'] {}
2017-10-31 17:33:13,140 - Execute[' sudo id bigsql'] {}
2017-10-31 17:33:13,148 - Execute[' sudo usermod -u 2824 bigsql; exit 0'] {}
2017-10-31 17:33:13,156 - Execute[' sudo usermod -a -G hadoop bigsql; exit 0'] {}
2017-10-31 17:33:13,165 - Execute[' sudo usermod -g hadoop bigsql; exit 0'] {}
2017-10-31 17:33:13,172 - Execute[' sudo su - bigsql sh -c "exit 0"; exit 0'] {}
2017-10-31 17:33:13,205 - Execute[' sudo mkhomedir_helper bigsql ; exit 0'] {}
Running Ambari as: root
Tokens in hdfs data path: [1]
2017-10-31 17:33:13,228 - Execute['sudo chown bigsql:hadoop /home/bigsql'] {}
2017-10-31 17:33:13,235 - Execute[' echo bigsql:[PROTECTED] | sudo /usr/sbin/chpasswd; exit 0'] {}
Going to update bigsql-head-env with properties: ('bigsql_active_primary', ':', u'acer.machine')
Running Ambari as: root
Tokens in hdfs data path: [1]
2017-10-31 17:33:13,874 - Execute['sudo chmod 755 /var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package; exit 0'] {}
2017-10-31 17:33:13,882 - Execute['sudo chmod 755 /var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package/scripts; exit 0'] {}
2017-10-31 17:33:13,890 - Execute['sudo chmod 755 /var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package/scripts/*; exit 0'] {}
2017-10-31 17:33:14,043 - Directory['/home/bigsql/hosts'] {'owner': 'bigsql', 'create_parents': True}
2017-10-31 17:33:14,043 - Execute['sudo chmod 755 /var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package/scripts/*'] {}
2017-10-31 17:33:14,057 - Execute['sudo /var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package/scripts/bigsql-check-mask.sh /var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package/scripts'] {}
2017-10-31 17:33:14,101 - File['/home/bigsql/hosts/configure-head.txt'] {'owner': 'bigsql'}
2017-10-31 17:33:14,761 - Execute['sudo cp /var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package/files/response.properties /tmp/response.properties'] {}
uid=2824(bigsql) gid=1000(hadoop) groups=1000(hadoop)
uid=2824(bigsql) gid=1000(hadoop) groups=1000(hadoop)
uid=2824(bigsql) gid=1000(hadoop) groups=1000(hadoop)
Nodes:
(0, u'acer.machine', 0, 'acer.machine') 2
(1, u'asus.machine', 0, 'asus.machine') 2
(2, u'toshiba.machine', 0, 'toshiba.machine') 2
Maximum number of retries: 90
Time between retries: 10
acer.machine Ready
asus.machine Ready
toshiba.machine Ready
Hosts installed successfully: 3
Hosts with install errors : 0
Hosts with unknown result : 0
Total number of hosts : 3
2017-10-31 17:33:15,967 - Execute['sudo /var/lib/ambari-agent/cache/extensions/IBM-Big_SQL/5.0.1.0/services/BIGSQL/package/scripts/bigsql-precheck.sh -M PRE_ADD_HOST -u bigsql -z "bigsql,2824,hadoop,1000" -s os -x hdfs -R -T -b hbase -i hive -p 32051 -Z root -d '/var/ibm/bigsql/database,/hadoop/bigsql' -m toshiba.machine -I acer.machine -N asus.machine -f 28051'] {}
Successfully set up SSH
2017-10-31 17:34:19,881 - Version 2.6.2.0-205 was provided as effective cluster version. Using package version 2_6_2_0_205
2017-10-31 17:34:19,882 - Package['bigsql-dist_5_0_1_0'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2017-10-31 17:34:19,979 - Skipping installation of existing package bigsql-dist_5_0_1_0
2017-10-31 17:34:19,980 - Version 2.6.2.0-205 was provided as effective cluster version. Using package version 2_6_2_0_205
2017-10-31 17:34:19,981 - Package['db2luw_5_0_1_0'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2017-10-31 17:34:19,990 - Installing package db2luw_5_0_1_0 ('/usr/bin/yum -d 0 -e 0 -y install db2luw_5_0_1_0')
2017-10-31 18:28:47,040 - Execution of '/usr/bin/yum -d 0 -e 0 -y install db2luw_5_0_1_0' returned 1. Error downloading packages:
  db2luw_5_0_1_0-11.1.9.0-s170813.x86_64: [Errno 256] No more mirrors to try.
2017-10-31 18:28:47,040 - Failed to install package db2luw_5_0_1_0. Executing '/usr/bin/yum clean metadata'
2017-10-31 18:28:47,234 - Retrying to install package db2luw_5_0_1_0 after 30 seconds
Command failed after 1 tries
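Note that the stdout log confirms the diagnosis: bigsql-dist_5_0_1_0 installed fine earlier from the same repository, and yum spent almost an hour (17:34 to 18:28) retrying db2luw_5_0_1_0 before giving up, which points at the download of that one large package failing rather than a misconfigured repo file. If the metadata check above succeeds but the install still fails, a hedged next step (a sketch assuming stale or partially downloaded yum metadata/packages, not IBM's official recovery procedure) is to drop the cached data for just the Big SQL repo, rebuild it, and retry:

    # Clean and rebuild yum's cache for the Big SQL repo only, then retry the install
    sudo yum clean all --disablerepo='*' --enablerepo='IBM-Big_SQL-5_0_1_0'
    sudo yum makecache --disablerepo='*' --enablerepo='IBM-Big_SQL-5_0_1_0'
    sudo yum -y install db2luw_5_0_1_0

If the cluster has no reliable route to ibm-open-platform.ibm.com (proxy or air-gapped setups, or repeated timeouts on a slow link), the alternative is to mirror the 5.0.1.0 repository locally and point the baseurl in /etc/yum.repos.d/IBM-Big_SQL-5_0_1_0.repo at that mirror before retrying the Ambari install.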