Created 12-19-2016 01:47 PM
Traceback (most recent call last):
  File "/var/lib/ambari-agent/cache/common-services/STORM/0.9.1/package/scripts/drpc_server.py", line 139, in <module>
    DrpcServer().execute()
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 280, in execute
    method(env)
  File "/var/lib/ambari-agent/cache/common-services/STORM/0.9.1/package/scripts/drpc_server.py", line 43, in install
    self.install_packages(env)
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 567, in install_packages
    retry_count=agent_stack_retry_count)
  File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 155, in __init__
    self.env.run()
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 160, in run
    self.run_action(resource, action)
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 124, in run_action
    provider_action()
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/__init__.py", line 54, in action_install
    self.install_package(package_name, self.resource.use_repos, self.resource.skip_repos)
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/yumrpm.py", line 49, in install_package
    self.checked_call_with_retries(cmd, sudo=True, logoutput=self.get_logoutput())
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/__init__.py", line 83, in checked_call_with_retries
    return self._call_with_retries(cmd, is_checked=True, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/__init__.py", line 91, in _call_with_retries
    code, out = func(cmd, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 71, in inner
    result = function(command, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 93, in checked_call
    tries=tries, try_sleep=try_sleep)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 141, in _call_wrapper
    result = _call(command, **kwargs_copy)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 294, in _call
    raise Fail(err_msg)
resource_management.core.exceptions.Fail: Execution of '/usr/bin/yum -d 0 -e 0 -y install storm_2_5_3_0_37' returned 1. Error: Nothing to do

stdout: /var/lib/ambari-agent/data/output-1062.txt

2016-12-19 15:30:51,397 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-12-19 15:30:51,398 - Group['livy'] {}
2016-12-19 15:30:51,399 - Group['spark'] {}
2016-12-19 15:30:51,399 - Group['zeppelin'] {}
2016-12-19 15:30:51,399 - Group['hadoop'] {}
2016-12-19 15:30:51,399 - Group['users'] {}
2016-12-19 15:30:51,400 - Group['knox'] {}
2016-12-19 15:30:51,400 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,400 - User['storm'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,401 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,401 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,402 - User['oozie'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-12-19 15:30:51,402 - User['atlas'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,403 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,403 - User['falcon'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-12-19 15:30:51,403 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-12-19 15:30:51,404 - User['zeppelin'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,404 - User['accumulo'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,405 - User['mahout'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,405 - User['livy'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,406 - User['spark'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,406 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-12-19 15:30:51,407 - User['flume'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,407 - User['kafka'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,407 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,408 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,408 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,409 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,409 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,410 - User['knox'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,410 - User['hcat'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-19 15:30:51,411 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-12-19 15:30:51,412 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2016-12-19 15:30:51,416 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] due to not_if
2016-12-19 15:30:51,417 - Directory['/tmp/hbase-hbase'] {'owner': 'hbase', 'create_parents': True, 'mode': 0775, 'cd_access': 'a'}
2016-12-19 15:30:51,418 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-12-19 15:30:51,419 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2016-12-19 15:30:51,423 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] due to not_if
2016-12-19 15:30:51,423 - Group['hdfs'] {}
2016-12-19 15:30:51,424 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': [u'hadoop', u'hdfs']}
2016-12-19 15:30:51,425 - FS Type:
2016-12-19 15:30:51,425 - Directory['/etc/hadoop'] {'mode': 0755}
2016-12-19 15:30:51,434 - File['/usr/hdp/current/hadoop-client/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2016-12-19 15:30:51,434 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2016-12-19 15:30:51,449 - Initializing 2 repositories
2016-12-19 15:30:51,449 - Repository['HDP-2.5'] {'base_url': 'http://localhost/HDP/HDP-2.5.0.0-centos7-rpm/HDP/centos7/', 'action': ['create'], 'components': [u'HDP', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'HDP', 'mirror_list': None}
2016-12-19 15:30:51,454 - File['/etc/yum.repos.d/HDP.repo'] {'content': '[HDP-2.5]\nname=HDP-2.5\nbaseurl=http://localhost/HDP/HDP-2.5.0.0-centos7-rpm/HDP/centos7/\n\npath=/\nenabled=1\ngpgcheck=0'}
2016-12-19 15:30:51,455 - Repository['HDP-UTILS-1.1.0.21'] {'base_url': 'http://localhost/HDP/HDP-2.5.0.0-centos7-rpm/HDP/centos7/', 'action': ['create'], 'components': [u'HDP-UTILS', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'HDP-UTILS', 'mirror_list': None}
2016-12-19 15:30:51,457 - File['/etc/yum.repos.d/HDP-UTILS.repo'] {'content': '[HDP-UTILS-1.1.0.21]\nname=HDP-UTILS-1.1.0.21\nbaseurl=http://localhost/HDP/HDP-2.5.0.0-centos7-rpm/HDP/centos7/\n\npath=/\nenabled=1\ngpgcheck=0'}
2016-12-19 15:30:51,457 - Package['unzip'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-12-19 15:30:51,518 - Skipping installation of existing package unzip
2016-12-19 15:30:51,519 - Package['curl'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-12-19 15:30:51,530 - Skipping installation of existing package curl
2016-12-19 15:30:51,530 - Package['hdp-select'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-12-19 15:30:51,541 - Skipping installation of existing package hdp-select
2016-12-19 15:30:51,727 - Package['storm_2_5_3_0_37'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-12-19 15:30:51,795 - Installing package storm_2_5_3_0_37 ('/usr/bin/yum -d 0 -e 0 -y install storm_2_5_3_0_37')
Created 12-19-2016 02:25 PM
It looks like yum is not able to find the repo, or the repo is not set up correctly. Could you please run a yum repolist command on the Ambari node to make sure the Ambari / HDP repositories are configured correctly? Your repo should point to HDP-2.5.3.0.
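A minimal check along these lines can confirm it (the repo ids shown are just what a correctly configured HDP 2.5.3.0 node typically has; yours may differ):

yum clean all
yum repolist enabled
# you would expect entries similar to:
#   HDP-2.5              HDP-2.5              <package count>
#   HDP-UTILS-1.1.0.21   HDP-UTILS-1.1.0.21   <package count>

If the HDP repo is missing here, or its baseurl in /etc/yum.repos.d/HDP.repo points at the wrong version, yum will report "Nothing to do" for storm_2_5_3_0_37.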
Created 12-19-2016 03:42 PM
Run the following command manually on the node:
yum install -y storm_2_5_3_0_37
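Before running the install, it may also be worth confirming that yum can actually see the package; a quick sketch (exact output will vary with your repos):

yum clean all
yum info storm_2_5_3_0_37      # should show the package coming from the HDP-2.5 repo
yum search storm_2_5_3_0       # lists any matching storm builds

One small note: if the word "install" is accidentally passed twice (yum install -y install storm_2_5_3_0_37), yum treats the second "install" as a package name and reports "No package install available", which is the extra noise visible in the paste below.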
Created 12-20-2016 06:48 AM
After setting up the repo to use the online repository:
[root@localhost xdm-setup]# yum install -y install storm_2_5_3_0_37
Loaded plugins: fastestmirror, langpacks
Loading mirror speeds from cached hostfile
 * base: ftp.is.co.za
 * epel: fedora.is.co.za
 * extras: ftp.is.co.za
 * rpmfusion-free-updates: mir01.syntis.net
 * updates: ftp.is.co.za
No package install available.
No package storm_2_5_3_0_37 available.
Error: Nothing to do
[root@localhost xdm-setup]#
Am I missing something?
Created 12-20-2016 07:40 AM
My bad: I ran it via Ambari first and then tried installing manually, and it seems Ambari overwrites the repo config when going through the wizard. So what I did was run Ambari until the error, change the repo file manually, and then restart at
"Install, Start and Test", and now it is installing. The manual yum install of storm_2_5_3_0_37 had started, but I stopped it in order to run it via Ambari. Let me see if it resumes now.
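That matches how Ambari behaves: it rewrites /etc/yum.repos.d/HDP.repo from the base URL registered for the stack, so a manual edit only survives until the next install command. To make the change stick, the base URL should be updated in Ambari itself. On Ambari 2.4-era builds this can usually be done in the UI (Admin > Stack and Versions > Versions > Manage Versions) or, as a rough sketch, via the REST API; the admin:admin credentials, the <ambari-server> host, and the redhat7 OS family below are assumptions to adjust for your cluster:

curl -u admin:admin -H "X-Requested-By: ambari" -X PUT \
  -d '{"Repositories": {"base_url": "http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.5.3.0", "verify_base_url": true}}' \
  http://<ambari-server>:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/redhat7/repositories/HDP-2.5

After that, re-running the failed installs from Ambari should write the corrected HDP.repo on every node.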
Created 12-20-2016 05:57 AM
Current repo: HDP-2.5.0.0-centos7-rpm.
Let me see if I can download a new repo. I have even tried the install while online and still hit the same problem.
Thank you for the response.
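One quick way to see what yum is actually being pointed at (the paths are the defaults Ambari writes, so adjust if yours differ; the baseurl placeholder is whatever the file contains):

cat /etc/yum.repos.d/HDP.repo
# then confirm that baseurl really serves repo metadata:
curl -s -o /dev/null -w "%{http_code}\n" <baseurl-from-the-file>/repodata/repomd.xml

A 200 means the repo metadata is reachable; a 404 usually means the baseurl points at the wrong directory level or the wrong HDP version.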
Created 12-20-2016 06:08 AM
@Christian van den Heever, could you please try with this repo:
http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.5.3.0/hdp.repo
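Assuming the node has direct internet access, dropping that file into yum's config could look roughly like this (the target filename is arbitrary):

wget -nv http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.5.3.0/hdp.repo -O /etc/yum.repos.d/hdp.repo
yum clean all
yum repolist enabled | grep -i hdp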
Created 12-20-2016 06:28 AM
It should work if I download http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.5.3.0/HDP-2.5.3.0-centos7-rpm.tar.gz, correct? Then set it up with createrepo and httpd as a local repo?
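Yes, that is the usual pattern for a local repo. A rough sketch, assuming httpd serves /var/www/html and the tarball is already downloaded (the directory layout inside the tarball may differ slightly, so adjust the baseurl to wherever repodata/ ends up):

mkdir -p /var/www/html/hdp
tar xzf HDP-2.5.3.0-centos7-rpm.tar.gz -C /var/www/html/hdp
systemctl enable --now httpd
# the extracted tree normally already contains repodata/, so createrepo is only
# needed if you add or change RPMs afterwards:
# createrepo /var/www/html/hdp/HDP/centos7

Then point the HDP base URL (in Ambari and therefore in /etc/yum.repos.d/HDP.repo) at http://<webserver>/hdp/HDP/centos7/ and verify with yum repolist.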
Created 12-20-2016 06:46 AM
I have used the online repo; please see the errors below:
Traceback (most recent call last):
  File "/var/lib/ambari-agent/cache/common-services/STORM/0.9.1/package/scripts/drpc_server.py", line 139, in <module>
    DrpcServer().execute()
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 280, in execute
    method(env)
  File "/var/lib/ambari-agent/cache/common-services/STORM/0.9.1/package/scripts/drpc_server.py", line 43, in install
    self.install_packages(env)
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 567, in install_packages
    retry_count=agent_stack_retry_count)
  File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 155, in __init__
    self.env.run()
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 160, in run
    self.run_action(resource, action)
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 124, in run_action
    provider_action()
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/__init__.py", line 54, in action_install
    self.install_package(package_name, self.resource.use_repos, self.resource.skip_repos)
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/yumrpm.py", line 49, in install_package
    self.checked_call_with_retries(cmd, sudo=True, logoutput=self.get_logoutput())
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/__init__.py", line 83, in checked_call_with_retries
    return self._call_with_retries(cmd, is_checked=True, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/package/__init__.py", line 91, in _call_with_retries
    code, out = func(cmd, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 71, in inner
    result = function(command, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 93, in checked_call
    tries=tries, try_sleep=try_sleep)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 141, in _call_wrapper
    result = _call(command, **kwargs_copy)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 294, in _call
    raise Fail(err_msg)
resource_management.core.exceptions.Fail: Execution of '/usr/bin/yum -d 0 -e 0 -y install storm_2_5_3_0_37' returned 1. Error: Nothing to do

2016-12-20 08:44:43,682 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-12-20 08:44:43,684 - Group['livy'] {}
2016-12-20 08:44:43,685 - Group['spark'] {}
2016-12-20 08:44:43,685 - Group['zeppelin'] {}
2016-12-20 08:44:43,685 - Group['hadoop'] {}
2016-12-20 08:44:43,685 - Group['users'] {}
2016-12-20 08:44:43,686 - Group['knox'] {}
2016-12-20 08:44:43,686 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,686 - User['storm'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,687 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,687 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,688 - User['oozie'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-12-20 08:44:43,688 - User['atlas'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,689 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,689 - User['falcon'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-12-20 08:44:43,690 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-12-20 08:44:43,690 - User['zeppelin'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,691 - User['accumulo'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,691 - User['mahout'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,692 - User['livy'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,692 - User['spark'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,693 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-12-20 08:44:43,693 - User['flume'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,694 - User['kafka'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,695 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,695 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,696 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,697 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,697 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,698 - User['knox'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,698 - User['hcat'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-12-20 08:44:43,699 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-12-20 08:44:43,700 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2016-12-20 08:44:43,704 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] due to not_if
2016-12-20 08:44:43,705 - Directory['/tmp/hbase-hbase'] {'owner': 'hbase', 'create_parents': True, 'mode': 0775, 'cd_access': 'a'}
2016-12-20 08:44:43,705 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-12-20 08:44:43,706 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2016-12-20 08:44:43,711 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] due to not_if
2016-12-20 08:44:43,711 - Group['hdfs'] {}
2016-12-20 08:44:43,711 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': [u'hadoop', u'hdfs']}
2016-12-20 08:44:43,712 - FS Type:
2016-12-20 08:44:43,712 - Directory['/etc/hadoop'] {'mode': 0755}
2016-12-20 08:44:43,723 - File['/usr/hdp/current/hadoop-client/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2016-12-20 08:44:43,725 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2016-12-20 08:44:43,740 - Initializing 2 repositories
2016-12-20 08:44:43,741 - Repository['HDP-2.5'] {'base_url': 'http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.5.3.0', 'action': ['create'], 'components': [u'HDP', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'HDP', 'mirror_list': None}
2016-12-20 08:44:43,748 - File['/etc/yum.repos.d/HDP.repo'] {'content': '[HDP-2.5]\nname=HDP-2.5\nbaseurl=http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.5.3.0\n\npath=/\nenabled=1\ngpgcheck=0'}
2016-12-20 08:44:43,749 - Repository['HDP-UTILS-1.1.0.21'] {'base_url': 'http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos7', 'action': ['create'], 'components': [u'HDP-UTILS', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'HDP-UTILS', 'mirror_list': None}
2016-12-20 08:44:43,752 - File['/etc/yum.repos.d/HDP-UTILS.repo'] {'content': '[HDP-UTILS-1.1.0.21]\nname=HDP-UTILS-1.1.0.21\nbaseurl=http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos7\n\npath=/\nenabled=1\ngpgcheck=0'}
2016-12-20 08:44:43,753 - Package['unzip'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-12-20 08:44:43,829 - Skipping installation of existing package unzip
2016-12-20 08:44:43,829 - Package['curl'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-12-20 08:44:43,842 - Skipping installation of existing package curl
2016-12-20 08:44:43,842 - Package['hdp-select'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-12-20 08:44:43,855 - Skipping installation of existing package hdp-select
2016-12-20 08:44:44,061 - Package['storm_2_5_3_0_37'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-12-20 08:44:44,126 - Installing package storm_2_5_3_0_37 ('/usr/bin/yum -d 0 -e 0 -y install storm_2_5_3_0_37')
Command failed after 1 tries
So basically I have the same error. Any ideas?
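When the error repeats like this, it can help to ask yum exactly which repos it is searching and whether any of them carries the package. A minimal check, assuming the repo id Ambari wrote is still HDP-2.5:

yum clean all
yum --disablerepo="*" --enablerepo="HDP-2.5" list available | grep -i storm_2_5_3_0

If nothing comes back even after the clean, the baseurl in /etc/yum.repos.d/HDP.repo is worth double-checking; if the package does show up, the earlier failure was most likely stale yum metadata or the interrupted manual run, and retrying from Ambari should succeed.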
Created 12-20-2016 07:43 AM
Please ignore.