stderr: /var/lib/ambari-agent/data/errors-392.txt

Traceback (most recent call last):
  File "/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py", line 211, in <module>
    HiveServer().execute()
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 280, in execute
    method(env)
  File "/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py", line 85, in start
    self.configure(env) # FOR SECURITY
  File "/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py", line 56, in configure
    hive(name='hiveserver2')
  File "/usr/lib/python2.6/site-packages/ambari_commons/os_family_impl.py", line 89, in thunk
    return fn(*args, **kwargs)
  File "/var/lib/ambari-agent/cache/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py", line 140, in hive
    copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/functions/copy_tarball.py", line 257, in copy_to_hdfs
    replace_existing_files=replace_existing_files,
  File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 155, in __init__
    self.env.run()
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 160, in run
    self.run_action(resource, action)
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 124, in run_action
    provider_action()
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 459, in action_create_on_execute
    self.action_delayed("create")
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 456, in action_delayed
    self.get_hdfs_resource_executor().action_delayed(action_name, self)
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 255, in action_delayed
    self._create_resource()
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 269, in _create_resource
    self._create_file(self.main_resource.resource.target, source=self.main_resource.resource.source, mode=self.mode)
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 322, in _create_file
    self.util.run_command(target, 'CREATE', method='PUT', overwrite=True, assertable_result=False, file_to_put=source, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 179, in run_command
    _, out, err = get_user_call_output(cmd, user=self.run_user, logoutput=self.logoutput, quiet=False)
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/functions/get_user_call_output.py", line 61, in get_user_call_output
    raise Fail(err_msg)
resource_management.core.exceptions.Fail: Execution of 'curl -sS -L -w '%{http_code}' -X PUT --data-binary @/usr/hdp/2.5.3.0-37/hadoop/mapreduce.tar.gz 'http://ip-172-31-17-61.us-west-2.compute.internal:50070/webhdfs/v1/hdp/apps/2.5.3.0-37/mapreduce/mapreduce.tar.gz?op=CREATE&user.name=hdfs&overwrite=True&permission=444' 1>/tmp/tmprliTpv 2>/tmp/tmpNAj1YJ' returned 52.
curl: (52) Empty reply from server
100
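The failing step is the WebHDFS upload of mapreduce.tar.gz: curl exit code 52 ("Empty reply from server") means the server accepted the TCP connection but closed it without sending any HTTP response. WebHDFS file creation is a documented two-step protocol: the client first PUTs to the NameNode with no body and receives a 307 Temporary Redirect whose Location header names a DataNode, then PUTs the data to that DataNode URL. Because Ambari runs curl with -L, both legs happen inside one command, so the log cannot show which side dropped the connection. A minimal diagnostic sketch, reusing the host and paths from the traceback above (run as the hdfs user; the Location URL in step 2 is whatever step 1 returns, not a value from this log):

# Step 1: metadata-only request to the NameNode; expect "307 Temporary Redirect"
# with a DataNode URL in the Location header. No file data is sent here.
curl -sS -i -X PUT 'http://ip-172-31-17-61.us-west-2.compute.internal:50070/webhdfs/v1/hdp/apps/2.5.3.0-37/mapreduce/mapreduce.tar.gz?op=CREATE&user.name=hdfs&overwrite=True&permission=444'

# Step 2: upload the tarball to the Location URL from step 1 (-T implies PUT;
# LOCATION_URL_FROM_STEP_1 is a placeholder, typically DataNode port 50075).
# If this leg gets the empty reply, the problem is the DataNode, not the NameNode.
curl -sS -i -T /usr/hdp/2.5.3.0-37/hadoop/mapreduce.tar.gz 'LOCATION_URL_FROM_STEP_1'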
stdout: /var/lib/ambari-agent/data/output-392.txt

2016-12-06 14:54:07,886 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.3.0-37
2016-12-06 14:54:07,887 - Checking if need to create versioned conf dir /etc/hadoop/2.5.3.0-37/0
2016-12-06 14:54:07,888 - call[('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.3.0-37', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2016-12-06 14:54:07,942 - call returned (1, '/etc/hadoop/2.5.3.0-37/0 exist already', '')
2016-12-06 14:54:07,943 - checked_call[('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.3.0-37', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2016-12-06 14:54:07,995 - checked_call returned (0, '')
2016-12-06 14:54:07,997 - Ensuring that hadoop has the correct symlink structure
2016-12-06 14:54:07,998 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-12-06 14:54:08,258 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.3.0-37
2016-12-06 14:54:08,258 - Checking if need to create versioned conf dir /etc/hadoop/2.5.3.0-37/0
2016-12-06 14:54:08,259 - call[('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.3.0-37', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2016-12-06 14:54:08,313 - call returned (1, '/etc/hadoop/2.5.3.0-37/0 exist already', '')
2016-12-06 14:54:08,314 - checked_call[('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.3.0-37', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2016-12-06 14:54:08,365 - checked_call returned (0, '')
2016-12-06 14:54:08,367 - Ensuring that hadoop has the correct symlink structure
2016-12-06 14:54:08,367 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-12-06 14:54:08,370 - Group['livy'] {}
2016-12-06 14:54:08,373 - Group['spark'] {}
2016-12-06 14:54:08,374 - Group['zeppelin'] {}
2016-12-06 14:54:08,374 - Group['hadoop'] {}
2016-12-06 14:54:08,375 - Group['users'] {}
2016-12-06 14:54:08,375 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,377 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,378 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,379 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,380 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['users']}
2016-12-06 14:54:08,381 - User['zeppelin'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,382 - User['livy'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,383 - User['spark'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,385 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['users']}
2016-12-06 14:54:08,386 - User['kafka'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,387 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,389 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,390 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,391 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,392 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,393 - User['hcat'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-12-06 14:54:08,394 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-12-06 14:54:08,399 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2016-12-06 14:54:08,416 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] due to not_if
2016-12-06 14:54:08,417 - Directory['/tmp/hbase-hbase'] {'owner': 'hbase', 'create_parents': True, 'mode': 0775, 'cd_access': 'a'}
2016-12-06 14:54:08,419 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-12-06 14:54:08,422 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2016-12-06 14:54:08,438 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] due to not_if
2016-12-06 14:54:08,439 - Group['hdfs'] {}
2016-12-06 14:54:08,440 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'hdfs']}
2016-12-06 14:54:08,441 - FS Type:
2016-12-06 14:54:08,442 - Directory['/etc/hadoop'] {'mode': 0755}
2016-12-06 14:54:08,475 - File['/usr/hdp/current/hadoop-client/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2016-12-06 14:54:08,476 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2016-12-06 14:54:08,504 - Execute[('setenforce', '0')] {'not_if': '(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)', 'sudo': True, 'only_if': 'test -f /selinux/enforce'}
2016-12-06 14:54:08,559 - Directory['/var/log/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'hadoop', 'mode': 0775, 'cd_access': 'a'}
2016-12-06 14:54:08,562 - Directory['/var/run/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'root', 'cd_access': 'a'}
2016-12-06 14:54:08,562 - Directory['/tmp/hadoop-hdfs'] {'owner': 'hdfs', 'create_parents': True, 'cd_access': 'a'}
2016-12-06 14:54:08,584 - File['/usr/hdp/current/hadoop-client/conf/commons-logging.properties'] {'content': Template('commons-logging.properties.j2'), 'owner': 'hdfs'}
2016-12-06 14:54:08,588 - File['/usr/hdp/current/hadoop-client/conf/health_check'] {'content': Template('health_check.j2'), 'owner': 'hdfs'}
2016-12-06 14:54:08,589 - File['/usr/hdp/current/hadoop-client/conf/log4j.properties'] {'content': ..., 'owner': 'hdfs', 'group': 'hadoop', 'mode': 0644}
2016-12-06 14:54:08,613 - File['/usr/hdp/current/hadoop-client/conf/hadoop-metrics2.properties'] {'content': Template('hadoop-metrics2.properties.j2'), 'owner': 'hdfs', 'group': 'hadoop'}
2016-12-06 14:54:08,615 - File['/usr/hdp/current/hadoop-client/conf/task-log4j.properties'] {'content': StaticFile('task-log4j.properties'), 'mode': 0755}
2016-12-06 14:54:08,617 - File['/usr/hdp/current/hadoop-client/conf/configuration.xsl'] {'owner': 'hdfs', 'group': 'hadoop'}
2016-12-06 14:54:08,625 - File['/etc/hadoop/conf/topology_mappings.data'] {'owner': 'hdfs', 'content': Template('topology_mappings.data.j2'), 'only_if': 'test -d /etc/hadoop/conf', 'group': 'hadoop'}
2016-12-06 14:54:08,640 - File['/etc/hadoop/conf/topology_script.py'] {'content': StaticFile('topology_script.py'), 'only_if': 'test -d /etc/hadoop/conf', 'mode': 0755}
2016-12-06 14:54:09,068 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.3.0-37
2016-12-06 14:54:09,069 - Checking if need to create versioned conf dir /etc/hadoop/2.5.3.0-37/0
2016-12-06 14:54:09,070 - call[('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.3.0-37', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2016-12-06 14:54:09,122 - call returned (1, '/etc/hadoop/2.5.3.0-37/0 exist already', '')
2016-12-06 14:54:09,123 - checked_call[('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.3.0-37', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2016-12-06 14:54:09,177 - checked_call returned (0, '')
2016-12-06 14:54:09,179 - Ensuring that hadoop has the correct symlink structure
2016-12-06 14:54:09,180 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-12-06 14:54:09,193 - call['ambari-python-wrap /usr/bin/hdp-select status hive-server2'] {'timeout': 20}
2016-12-06 14:54:09,241 - call returned (0, 'hive-server2 - 2.5.3.0-37')
2016-12-06 14:54:09,244 - Stack Feature Version Info: stack_version=2.5, version=2.5.3.0-37, current_cluster_version=2.5.3.0-37 -> 2.5.3.0-37
2016-12-06 14:54:09,274 - HdfsResource['/user/hcat'] {'security_enabled': False, 'hadoop_bin_dir': '/usr/hdp/current/hadoop-client/bin', 'keytab': [EMPTY], 'dfs_type': '', 'default_fs': 'hdfs://ip-172-31-17-61.us-west-2.compute.internal:8020', 'hdfs_resource_ignore_file': '/var/lib/ambari-agent/data/.hdfs_resource_ignore', 'hdfs_site': ..., 'kinit_path_local': '/usr/bin/kinit', 'principal_name': 'missing_principal', 'user': 'hdfs', 'owner': 'hcat', 'hadoop_conf_dir': '/usr/hdp/current/hadoop-client/conf', 'type': 'directory', 'action': ['create_on_execute'], 'immutable_paths': [u'/apps/hive/warehouse', u'/mr-history/done', u'/app-logs', u'/tmp'], 'mode': 0755}
2016-12-06 14:54:09,282 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X GET '"'"'http://ip-172-31-17-61.us-west-2.compute.internal:50070/webhdfs/v1/user/hcat?op=GETFILESTATUS&user.name=hdfs'"'"' 1>/tmp/tmpULlRb8 2>/tmp/tmpX9npMx''] {'logoutput': None, 'quiet': False}
2016-12-06 14:54:09,377 - call returned (0, '')
2016-12-06 14:54:09,380 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X PUT '"'"'http://ip-172-31-17-61.us-west-2.compute.internal:50070/webhdfs/v1/user/hcat?op=MKDIRS&user.name=hdfs'"'"' 1>/tmp/tmpSMOG25 2>/tmp/tmpoq0z6k''] {'logoutput': None, 'quiet': False}
2016-12-06 14:54:09,474 - call returned (0, '')
2016-12-06 14:54:09,477 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X PUT '"'"'http://ip-172-31-17-61.us-west-2.compute.internal:50070/webhdfs/v1/user/hcat?op=SETPERMISSION&user.name=hdfs&permission=755'"'"' 1>/tmp/tmpQ_YHwb 2>/tmp/tmpktQCAR''] {'logoutput': None, 'quiet': False}
2016-12-06 14:54:09,568 - call returned (0, '')
2016-12-06 14:54:09,571 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X PUT '"'"'http://ip-172-31-17-61.us-west-2.compute.internal:50070/webhdfs/v1/user/hcat?op=SETOWNER&user.name=hdfs&owner=hcat&group='"'"' 1>/tmp/tmpbgR9Uk 2>/tmp/tmp4HT_4z''] {'logoutput': None, 'quiet': False}
2016-12-06 14:54:09,665 - call returned (0, '')
2016-12-06 14:54:09,668 - Called copy_to_hdfs tarball: mapreduce
2016-12-06 14:54:09,668 - Default version is 2.5.3.0-37
2016-12-06 14:54:09,668 - Source file: /usr/hdp/2.5.3.0-37/hadoop/mapreduce.tar.gz , Dest file in HDFS: /hdp/apps/2.5.3.0-37/mapreduce/mapreduce.tar.gz
2016-12-06 14:54:09,670 - HdfsResource['/hdp/apps/2.5.3.0-37/mapreduce'] {'security_enabled': False, 'hadoop_bin_dir': '/usr/hdp/current/hadoop-client/bin', 'keytab': [EMPTY], 'dfs_type': '', 'default_fs': 'hdfs://ip-172-31-17-61.us-west-2.compute.internal:8020', 'hdfs_resource_ignore_file': '/var/lib/ambari-agent/data/.hdfs_resource_ignore', 'hdfs_site': ..., 'kinit_path_local': '/usr/bin/kinit', 'principal_name': 'missing_principal', 'user': 'hdfs', 'owner': 'hdfs', 'hadoop_conf_dir': '/usr/hdp/current/hadoop-client/conf', 'type': 'directory', 'action': ['create_on_execute'], 'immutable_paths': [u'/apps/hive/warehouse', u'/mr-history/done', u'/app-logs', u'/tmp'], 'mode': 0555}
2016-12-06 14:54:09,672 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X GET '"'"'http://ip-172-31-17-61.us-west-2.compute.internal:50070/webhdfs/v1/hdp/apps/2.5.3.0-37/mapreduce?op=GETFILESTATUS&user.name=hdfs'"'"' 1>/tmp/tmprPSYDu 2>/tmp/tmpOLGvP8''] {'logoutput': None, 'quiet': False}
2016-12-06 14:54:09,761 - call returned (0, '')
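Up to this point every WebHDFS call the agent has made (GETFILESTATUS, MKDIRS, SETPERMISSION, SETOWNER) returned cleanly, so the NameNode's HTTP endpoint itself is responsive; only the data-bearing CREATE in the remainder of the log fails. Since that upload is redirected to a DataNode, it is worth confirming that at least one DataNode is live and registered. A quick check, assuming the HDFS client on this host points at the same cluster:

# Summarize live/dead DataNodes as seen by the NameNode.
sudo -u hdfs hdfs dfsadmin -report | head -n 20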
2016-12-06 14:54:09,764 - HdfsResource['/hdp/apps/2.5.3.0-37/mapreduce/mapreduce.tar.gz'] {'security_enabled': False, 'hadoop_bin_dir': '/usr/hdp/current/hadoop-client/bin', 'keytab': [EMPTY], 'source': '/usr/hdp/2.5.3.0-37/hadoop/mapreduce.tar.gz', 'dfs_type': '', 'default_fs': 'hdfs://ip-172-31-17-61.us-west-2.compute.internal:8020', 'replace_existing_files': False, 'hdfs_resource_ignore_file': '/var/lib/ambari-agent/data/.hdfs_resource_ignore', 'hdfs_site': ..., 'kinit_path_local': '/usr/bin/kinit', 'principal_name': 'missing_principal', 'user': 'hdfs', 'owner': 'hdfs', 'group': 'hadoop', 'hadoop_conf_dir': '/usr/hdp/current/hadoop-client/conf', 'type': 'file', 'action': ['create_on_execute'], 'immutable_paths': [u'/apps/hive/warehouse', u'/mr-history/done', u'/app-logs', u'/tmp'], 'mode': 0444}
2016-12-06 14:54:09,766 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X GET '"'"'http://ip-172-31-17-61.us-west-2.compute.internal:50070/webhdfs/v1/hdp/apps/2.5.3.0-37/mapreduce/mapreduce.tar.gz?op=GETFILESTATUS&user.name=hdfs'"'"' 1>/tmp/tmpx7AKum 2>/tmp/tmph95s46''] {'logoutput': None, 'quiet': False}
2016-12-06 14:54:09,856 - call returned (0, '')
2016-12-06 14:54:09,859 - Creating new file /hdp/apps/2.5.3.0-37/mapreduce/mapreduce.tar.gz in DFS
2016-12-06 14:54:09,861 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X PUT --data-binary @/usr/hdp/2.5.3.0-37/hadoop/mapreduce.tar.gz '"'"'http://ip-172-31-17-61.us-west-2.compute.internal:50070/webhdfs/v1/hdp/apps/2.5.3.0-37/mapreduce/mapreduce.tar.gz?op=CREATE&user.name=hdfs&overwrite=True&permission=444'"'"' 1>/tmp/tmprliTpv 2>/tmp/tmpNAj1YJ''] {'logoutput': None, 'quiet': False}
2016-12-06 14:55:40,384 - call returned (52, '')

Command failed after 1 tries
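Note the timestamps on the final call: the PUT was issued at 14:54:09 and only returned at 14:55:40, so the connection stayed open for roughly 90 seconds before being dropped, which points at the large tarball upload rather than at request formatting. As a workaround sketch to unblock the HiveServer2 start, the same file can be placed with the regular HDFS client over RPC, bypassing the WebHDFS redirect entirely (owner, group, and modes taken from the HdfsResource entries in the log above):

# Create the target directory and upload the tarball over HDFS RPC.
sudo -u hdfs hdfs dfs -mkdir -p /hdp/apps/2.5.3.0-37/mapreduce
sudo -u hdfs hdfs dfs -put /usr/hdp/2.5.3.0-37/hadoop/mapreduce.tar.gz /hdp/apps/2.5.3.0-37/mapreduce/

# Match the permissions and ownership Ambari would have set (dir 0555, file 0444).
sudo -u hdfs hdfs dfs -chmod 555 /hdp/apps/2.5.3.0-37/mapreduce
sudo -u hdfs hdfs dfs -chmod 444 /hdp/apps/2.5.3.0-37/mapreduce/mapreduce.tar.gz
sudo -u hdfs hdfs dfs -chown -R hdfs:hadoop /hdp/apps/2.5.3.0-37/mapreduce

Then retry the HiveServer2 start from Ambari; since the log shows 'replace_existing_files': False and a GETFILESTATUS check before the upload, copy_to_hdfs should skip the transfer once the file is already in place.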