2017-01-10 13:07:45,820 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.0.0-1245
2017-01-10 13:07:45,824 - Checking if need to create versioned conf dir /etc/hadoop/2.5.0.0-1245/0
2017-01-10 13:07:45,827 - call[('ambari-python-wrap', u'/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2017-01-10 13:07:45,902 - call returned (1, '/etc/hadoop/2.5.0.0-1245/0 exist already', '')
2017-01-10 13:07:45,905 - checked_call[('ambari-python-wrap', u'/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2017-01-10 13:07:45,976 - checked_call returned (0, '')
2017-01-10 13:07:45,979 - Ensuring that hadoop has the correct symlink structure
2017-01-10 13:07:45,980 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2017-01-10 13:07:46,292 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.0.0-1245
2017-01-10 13:07:46,296 - Checking if need to create versioned conf dir /etc/hadoop/2.5.0.0-1245/0
2017-01-10 13:07:46,300 - call[('ambari-python-wrap', u'/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2017-01-10 13:07:46,371 - call returned (1, '/etc/hadoop/2.5.0.0-1245/0 exist already', '')
2017-01-10 13:07:46,373 - checked_call[('ambari-python-wrap', u'/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2017-01-10 13:07:46,443 - checked_call returned (0, '')
2017-01-10 13:07:46,446 - Ensuring that hadoop has the correct symlink structure
2017-01-10 13:07:46,446 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2017-01-10 13:07:46,450 - Group['livy'] {}
2017-01-10 13:07:46,454 - Group['spark'] {}
2017-01-10 13:07:46,454 - Group['hadoop'] {}
2017-01-10 13:07:46,454 - Group['users'] {}
2017-01-10 13:07:46,455 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,458 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,459 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,460 - User['atlas'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,461 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,462 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2017-01-10 13:07:46,463 - User['livy'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,464 - User['spark'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,465 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2017-01-10 13:07:46,466 - User['kafka'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,467 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,468 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
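The conf-select calls above only switch symlinks; nothing is regenerated because /etc/hadoop/2.5.0.0-1245/0 already exists. A quick way to confirm the resulting layout on the host (paths are taken from the log; the exact symlink targets are an assumption based on the usual HDP 2.5 directory scheme):

    # where does the active conf dir actually point?
    ls -ld /etc/hadoop/conf /usr/hdp/current/hadoop-client/conf
    # the versioned conf dir that conf-select reported as already existing
    ls -l /etc/hadoop/2.5.0.0-1245/0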
2017-01-10 13:07:46,469 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,470 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,471 - User['hcat'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2017-01-10 13:07:46,472 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-01-10 13:07:46,475 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2017-01-10 13:07:46,504 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] due to not_if
2017-01-10 13:07:46,506 - Directory['/tmp/hbase-hbase'] {'owner': 'hbase', 'create_parents': True, 'mode': 0775, 'cd_access': 'a'}
2017-01-10 13:07:46,510 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2017-01-10 13:07:46,512 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2017-01-10 13:07:46,540 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] due to not_if
2017-01-10 13:07:46,543 - Group['hdfs'] {}
2017-01-10 13:07:46,544 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': [u'hadoop', u'hdfs']}
2017-01-10 13:07:46,546 - FS Type:
2017-01-10 13:07:46,546 - Directory['/etc/hadoop'] {'mode': 0755}
2017-01-10 13:07:46,575 - File['/usr/hdp/current/hadoop-client/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2017-01-10 13:07:46,576 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
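Both changeUid.sh executions were skipped because of their not_if guards: the guard succeeds when the account's uid is already above 1000, so the script never runs. The condition can be checked by hand; the user names and the 1000 threshold are taken directly from the guard expressions in the log:

    # guard from the log: (test $(id -u ambari-qa) -gt 1000) || (false)
    id -u ambari-qa
    id -u hbase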
2017-01-10 13:07:46,632 - Execute[('setenforce', '0')] {'not_if': '(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)', 'sudo': True, 'only_if': 'test -f /selinux/enforce'}
2017-01-10 13:07:46,669 - Skipping Execute[('setenforce', '0')] due to not_if
2017-01-10 13:07:46,671 - Directory['/var/log/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'hadoop', 'mode': 0775, 'cd_access': 'a'}
2017-01-10 13:07:46,678 - Directory['/var/run/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'root', 'cd_access': 'a'}
2017-01-10 13:07:46,678 - Directory['/tmp/hadoop-hdfs'] {'owner': 'hdfs', 'create_parents': True, 'cd_access': 'a'}
2017-01-10 13:07:46,688 - File['/usr/hdp/current/hadoop-client/conf/commons-logging.properties'] {'content': Template('commons-logging.properties.j2'), 'owner': 'hdfs'}
2017-01-10 13:07:46,692 - File['/usr/hdp/current/hadoop-client/conf/health_check'] {'content': Template('health_check.j2'), 'owner': 'hdfs'}
2017-01-10 13:07:46,693 - File['/usr/hdp/current/hadoop-client/conf/log4j.properties'] {'content': ..., 'owner': 'hdfs', 'group': 'hadoop', 'mode': 0644}
2017-01-10 13:07:46,715 - File['/usr/hdp/current/hadoop-client/conf/hadoop-metrics2.properties'] {'content': Template('hadoop-metrics2.properties.j2'), 'owner': 'hdfs', 'group': 'hadoop'}
2017-01-10 13:07:46,716 - File['/usr/hdp/current/hadoop-client/conf/task-log4j.properties'] {'content': StaticFile('task-log4j.properties'), 'mode': 0755}
2017-01-10 13:07:46,717 - File['/usr/hdp/current/hadoop-client/conf/configuration.xsl'] {'owner': 'hdfs', 'group': 'hadoop'}
2017-01-10 13:07:46,725 - File['/etc/hadoop/conf/topology_mappings.data'] {'owner': 'hdfs', 'content': Template('topology_mappings.data.j2'), 'only_if': 'test -d /etc/hadoop/conf', 'group': 'hadoop'}
2017-01-10 13:07:46,749 - File['/etc/hadoop/conf/topology_script.py'] {'content': StaticFile('topology_script.py'), 'only_if': 'test -d /etc/hadoop/conf', 'mode': 0755}
2017-01-10 13:07:47,206 - Stack Feature Version Info: stack_version=2.5, version=2.5.0.0-1245, current_cluster_version=2.5.0.0-1245 -> 2.5.0.0-1245
2017-01-10 13:07:47,218 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.0.0-1245
2017-01-10 13:07:47,221 - Checking if need to create versioned conf dir /etc/hadoop/2.5.0.0-1245/0
2017-01-10 13:07:47,225 - call[('ambari-python-wrap', u'/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2017-01-10 13:07:47,298 - call returned (1, '/etc/hadoop/2.5.0.0-1245/0 exist already', '')
2017-01-10 13:07:47,301 - checked_call[('ambari-python-wrap', u'/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2017-01-10 13:07:47,370 - checked_call returned (0, '')
2017-01-10 13:07:47,373 - Ensuring that hadoop has the correct symlink structure
2017-01-10 13:07:47,374 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2017-01-10 13:07:47,402 - Directory['/etc/atlas/conf'] {'owner': 'atlas', 'group': 'hadoop', 'create_parents': True, 'mode': 0755, 'cd_access': 'a'}
2017-01-10 13:07:47,411 - Directory['/var/run/atlas'] {'owner': 'atlas', 'group': 'hadoop', 'create_parents': True, 'mode': 0755, 'cd_access': 'a'}
2017-01-10 13:07:47,412 - Directory['/etc/atlas/conf/solr'] {'group': 'hadoop', 'cd_access': 'a', 'create_parents': True, 'mode': 0755, 'owner': 'atlas', 'recursive_ownership': True}
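Several resources in this run carry not_if / only_if guards (the changeUid.sh and setenforce steps above, the topology files, and one of the Solr config uploads further down). In shell terms the agent behaves roughly like the sketch below; this illustrates the semantics using guards copied from the log, not Ambari's actual implementation:

    # only_if: the resource runs only when the guard command succeeds
    if test -d /etc/hadoop/conf; then
        echo "only_if passed -> topology_mappings.data and topology_script.py get written"
    fi
    # not_if: the resource is skipped when the guard command succeeds
    # (guard copied from the setenforce entry above)
    if ! which getenforce || { which getenforce && getenforce | grep -q Disabled; }; then
        echo "not_if passed -> Execute[('setenforce', '0')] is skipped"
    fi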
2017-01-10 13:07:47,413 - Directory['/logs/atlas'] {'owner': 'atlas', 'group': 'hadoop', 'create_parents': True, 'mode': 0755, 'cd_access': 'a'}
2017-01-10 13:07:47,414 - Directory['/usr/hdp/current/atlas-server/data'] {'owner': 'atlas', 'group': 'hadoop', 'create_parents': True, 'mode': 0644, 'cd_access': 'a'}
2017-01-10 13:07:47,415 - Changing permission for /usr/hdp/current/atlas-server/data from 755 to 644
2017-01-10 13:07:47,415 - Directory['/usr/hdp/current/atlas-server/server/webapp'] {'owner': 'atlas', 'group': 'hadoop', 'create_parents': True, 'mode': 0644, 'cd_access': 'a'}
2017-01-10 13:07:47,415 - Changing permission for /usr/hdp/current/atlas-server/server/webapp from 755 to 644
2017-01-10 13:07:47,416 - File['/usr/hdp/current/atlas-server/server/webapp/atlas.war'] {'content': StaticFile('/usr/hdp/current/atlas-server/server/webapp/atlas.war')}
2017-01-10 13:07:48,546 - File['/etc/atlas/conf/atlas-log4j.xml'] {'content': InlineTemplate(...), 'owner': 'atlas', 'group': 'hadoop', 'mode': 0644}
2017-01-10 13:07:48,559 - File['/etc/atlas/conf/atlas-env.sh'] {'content': InlineTemplate(...), 'owner': 'atlas', 'group': 'hadoop', 'mode': 0755}
2017-01-10 13:07:48,561 - Execute[('chown', u'atlas:hadoop', '/etc/atlas/conf/policy-store.txt')] {'sudo': True}
2017-01-10 13:07:48,601 - Execute[('chmod', '644', '/etc/atlas/conf/policy-store.txt')] {'sudo': True}
2017-01-10 13:07:48,641 - Execute[('chown', u'atlas:hadoop', '/etc/atlas/conf/users-credentials.properties')] {'sudo': True}
2017-01-10 13:07:48,678 - Execute[('chmod', '644', '/etc/atlas/conf/users-credentials.properties')] {'sudo': True}
2017-01-10 13:07:48,733 - File['/etc/atlas/conf/solr/solrconfig.xml'] {'content': InlineTemplate(...), 'owner': 'atlas', 'group': 'hadoop', 'mode': 0644}
2017-01-10 13:07:48,737 - PropertiesFile['/etc/atlas/conf/atlas-application.properties'] {'owner': 'atlas', 'group': 'hadoop', 'mode': 0644, 'properties': ...}
2017-01-10 13:07:48,746 - Generating properties file: /etc/atlas/conf/atlas-application.properties
2017-01-10 13:07:48,746 - File['/etc/atlas/conf/atlas-application.properties'] {'owner': 'atlas', 'content': InlineTemplate(...), 'group': 'hadoop', 'mode': 0644}
2017-01-10 13:07:48,803 - Writing File['/etc/atlas/conf/atlas-application.properties'] because contents don't match
2017-01-10 13:07:48,804 - Directory['/var/log/ambari-infra-solr-client'] {'create_parents': True, 'mode': 0755, 'cd_access': 'a'}
2017-01-10 13:07:48,805 - Directory['/usr/lib/ambari-infra-solr-client'] {'recursive_ownership': True, 'create_parents': True, 'mode': 0755, 'cd_access': 'a'}
2017-01-10 13:07:48,806 - File['/usr/lib/ambari-infra-solr-client/solrCloudCli.sh'] {'content': StaticFile('/usr/lib/ambari-infra-solr-client/solrCloudCli.sh'), 'mode': 0755}
2017-01-10 13:07:48,812 - File['/usr/lib/ambari-infra-solr-client/log4j.properties'] {'content': InlineTemplate(...), 'mode': 0644}
2017-01-10 13:07:48,813 - File['/var/log/ambari-infra-solr-client/solr-client.log'] {'content': '', 'mode': 0664}
2017-01-10 13:07:48,814 - Writing File['/var/log/ambari-infra-solr-client/solr-client.log'] because contents don't match
2017-01-10 13:07:48,814 - Execute['ambari-sudo.sh JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.el7_2.x86_64/jre /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string hdpmaster.devnet.local:2181,hdpmetastore.devnet.local:2181,hdpmanager.devnet.local:2181 --znode /infra-solr --check-znode --retry 5 --interval 10'] {}
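The solrCloudCli.sh check above, and the download/upload/create-collection calls that follow, should write their detailed output to /var/log/ambari-infra-solr-client/solr-client.log (the file the agent just re-created empty; that it is the client's log destination is an assumption based on the log4j.properties written alongside it). If one of these Solr steps hangs or fails, that file usually has more detail than this output:

    # inspect the Solr cloud client's own log after the checks/uploads run
    tail -n 50 /var/log/ambari-infra-solr-client/solr-client.log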
2017-01-10 13:07:49,860 - Execute['ambari-sudo.sh JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.el7_2.x86_64/jre /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string hdpmaster.devnet.local:2181,hdpmetastore.devnet.local:2181,hdpmanager.devnet.local:2181/infra-solr --download-config --config-dir /var/lib/ambari-agent/tmp/solr_config_atlas_configs_0.048270421225 --config-set atlas_configs --retry 30 --interval 5'] {'only_if': 'ambari-sudo.sh JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.el7_2.x86_64/jre /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string hdpmaster.devnet.local:2181,hdpmetastore.devnet.local:2181,hdpmanager.devnet.local:2181/infra-solr --check-config --config-set atlas_configs --retry 30 --interval 5'}
2017-01-10 13:07:52,835 - File['/var/lib/ambari-agent/tmp/solr_config_atlas_configs_0.048270421225/solrconfig.xml'] {'content': InlineTemplate(...), 'only_if': 'test -d /var/lib/ambari-agent/tmp/solr_config_atlas_configs_0.048270421225'}
2017-01-10 13:07:52,926 - Execute['ambari-sudo.sh JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.el7_2.x86_64/jre /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string hdpmaster.devnet.local:2181,hdpmetastore.devnet.local:2181,hdpmanager.devnet.local:2181/infra-solr --upload-config --config-dir /var/lib/ambari-agent/tmp/solr_config_atlas_configs_0.048270421225 --config-set atlas_configs --retry 30 --interval 5'] {'only_if': 'test -d /var/lib/ambari-agent/tmp/solr_config_atlas_configs_0.048270421225'}
2017-01-10 13:07:54,119 - Execute['ambari-sudo.sh JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.el7_2.x86_64/jre /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string hdpmaster.devnet.local:2181,hdpmetastore.devnet.local:2181,hdpmanager.devnet.local:2181/infra-solr --upload-config --config-dir /etc/atlas/conf/solr --config-set atlas_configs --retry 30 --interval 5'] {'not_if': 'test -d /var/lib/ambari-agent/tmp/solr_config_atlas_configs_0.048270421225'}
2017-01-10 13:07:54,142 - Skipping Execute['ambari-sudo.sh JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.el7_2.x86_64/jre /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string hdpmaster.devnet.local:2181,hdpmetastore.devnet.local:2181,hdpmanager.devnet.local:2181/infra-solr --upload-config --config-dir /etc/atlas/conf/solr --config-set atlas_configs --retry 30 --interval 5'] due to not_if
2017-01-10 13:07:54,145 - Directory['/var/lib/ambari-agent/tmp/solr_config_atlas_configs_0.048270421225'] {'action': ['delete'], 'create_parents': True}
2017-01-10 13:07:54,145 - Removing directory Directory['/var/lib/ambari-agent/tmp/solr_config_atlas_configs_0.048270421225'] and all its content
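At this point the atlas_configs config set has been refreshed in ZooKeeper under the /infra-solr znode, and the three Atlas collections are created from it in the calls below. If a create-collection step complains about a missing config set, it can be inspected directly in ZooKeeper; the znode and the ZooKeeper host come from the log, while the zkCli.sh location and the <znode>/configs layout are assumptions based on the usual HDP client path and Solr's standard config-set storage:

    # list the config sets Solr sees under the /infra-solr chroot
    /usr/hdp/current/zookeeper-client/bin/zkCli.sh -server hdpmaster.devnet.local:2181 ls /infra-solr/configs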
2017-01-10 13:07:54,148 - Execute['ambari-sudo.sh JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.el7_2.x86_64/jre /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string hdpmaster.devnet.local:2181,hdpmetastore.devnet.local:2181,hdpmanager.devnet.local:2181/infra-solr --create-collection --collection vertex_index --config-set atlas_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10 --no-sharding'] {}
2017-01-10 13:07:55,684 - Execute['ambari-sudo.sh JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.el7_2.x86_64/jre /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string hdpmaster.devnet.local:2181,hdpmetastore.devnet.local:2181,hdpmanager.devnet.local:2181/infra-solr --create-collection --collection edge_index --config-set atlas_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10 --no-sharding'] {}
2017-01-10 13:07:57,188 - Execute['ambari-sudo.sh JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.111-1.b15.el7_2.x86_64/jre /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string hdpmaster.devnet.local:2181,hdpmetastore.devnet.local:2181,hdpmanager.devnet.local:2181/infra-solr --create-collection --collection fulltext_index --config-set atlas_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10 --no-sharding'] {}
2017-01-10 13:07:58,724 - File['/var/lib/ambari-agent/tmp/atlas_hbase_setup.rb'] {'content': Template('atlas_hbase_setup.rb.j2'), 'owner': 'hbase', 'group': 'hadoop'}
2017-01-10 13:07:58,727 - Stack Feature Version Info: stack_version=2.5, version=2.5.0.0-1245, current_cluster_version=2.5.0.0-1245 -> 2.5.0.0-1245
2017-01-10 13:07:58,733 - Atlas plugin is enabled, configuring Atlas plugin.
2017-01-10 13:07:58,733 - Ranger admin not installed
2017-01-10 13:07:58,734 - Stack Feature Version Info: stack_version=2.5, version=2.5.0.0-1245, current_cluster_version=2.5.0.0-1245 -> 2.5.0.0-1245
2017-01-10 13:07:58,741 - Execute['source /etc/atlas/conf/atlas-env.sh ; /usr/hdp/current/atlas-server/bin/atlas_start.py'] {'not_if': 'ls /var/run/atlas/atlas.pid >/dev/null 2>&1 && ps -p `cat /var/run/atlas/atlas.pid` >/dev/null 2>&1', 'user': 'atlas'}
2017-01-10 13:07:59,047 - Execute['find /logs/atlas -maxdepth 1 -type f -name '*' -exec echo '==> {} <==' \; -exec tail -n 40 {} \;'] {'logoutput': True, 'ignore_failures': True, 'user': 'atlas'}
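The start itself is guarded by the pid-file check in the not_if above, and the final command only tails whatever files sit under /logs/atlas. To confirm Atlas actually stayed up after atlas_start.py returned, something along these lines can be run on the host; the pid file and log directory come from this output, while the 21000 port and the application.log file name are Atlas defaults and therefore assumptions that may differ in atlas-application.properties:

    # is the process recorded in the pid file still alive?
    ps -p "$(cat /var/run/atlas/atlas.pid)"
    # does the admin API answer on the default port?
    curl -s http://localhost:21000/api/atlas/admin/version
    # the server's own log usually has the real error if startup failed
    tail -n 100 /logs/atlas/application.log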