stderr:
2016-11-29 00:19:22,494 - Solr is running, it cannot be started again

stdout:
2016-11-29 00:19:07,687 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.0.0-1245
2016-11-29 00:19:07,687 - Checking if need to create versioned conf dir /etc/hadoop/2.5.0.0-1245/0
2016-11-29 00:19:07,688 - call[('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2016-11-29 00:19:07,719 - call returned (1, '/etc/hadoop/2.5.0.0-1245/0 exist already', '')
2016-11-29 00:19:07,720 - checked_call[('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2016-11-29 00:19:07,796 - checked_call returned (0, '')
2016-11-29 00:19:07,797 - Ensuring that hadoop has the correct symlink structure
2016-11-29 00:19:07,797 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-11-29 00:19:08,064 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.0.0-1245
2016-11-29 00:19:08,065 - Checking if need to create versioned conf dir /etc/hadoop/2.5.0.0-1245/0
2016-11-29 00:19:08,066 - call[('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2016-11-29 00:19:08,145 - call returned (1, '/etc/hadoop/2.5.0.0-1245/0 exist already', '')
2016-11-29 00:19:08,145 - checked_call[('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2016-11-29 00:19:08,204 - checked_call returned (0, '')
2016-11-29 00:19:08,205 - Ensuring that hadoop has the correct symlink structure
2016-11-29 00:19:08,205 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-11-29 00:19:08,207 - Group['livy'] {}
2016-11-29 00:19:08,209 - Group['spark'] {}
2016-11-29 00:19:08,209 - Group['solr'] {}
2016-11-29 00:19:08,210 - Group['ranger'] {}
2016-11-29 00:19:08,210 - Group['zeppelin'] {}
2016-11-29 00:19:08,210 - Group['hadoop'] {}
2016-11-29 00:19:08,210 - Group['nifi'] {}
2016-11-29 00:19:08,211 - Group['users'] {}
2016-11-29 00:19:08,211 - Group['knox'] {}
2016-11-29 00:19:08,211 - User['hive'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,219 - User['infra-solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,220 - User['atlas'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,221 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,222 - User['falcon'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['users']}
2016-11-29 00:19:08,223 - User['ranger'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['ranger']}
2016-11-29 00:19:08,224 - User['nifi'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,225 - User['spark'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,225 - User['solr'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,229 - User['flume'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,230 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,230 - User['hcat'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,231 - User['storm'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,231 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,232 - User['oozie'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['users']}
2016-11-29 00:19:08,232 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['users']}
2016-11-29 00:19:08,233 - User['zeppelin'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,233 - User['livy'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,234 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['users']}
2016-11-29 00:19:08,234 - User['kafka'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,235 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,235 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,236 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,237 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,238 - User['knox'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': ['hadoop']}
2016-11-29 00:19:08,239 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-11-29 00:19:08,270 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2016-11-29 00:19:08,295 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] due to not_if
2016-11-29 00:19:08,295 - Directory['/tmp/hbase-hbase'] {'owner': 'hbase', 'create_parents': True, 'mode': 0775, 'cd_access': 'a'}
2016-11-29 00:19:08,296 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-11-29 00:19:08,297 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2016-11-29 00:19:08,315 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] due to not_if
2016-11-29 00:19:08,315 - Group['hdfs'] {}
2016-11-29 00:19:08,316 - User['hdfs'] {'fetch_nonlocal_groups': True, 'groups': ['hadoop', 'hdfs']}
2016-11-29 00:19:08,317 - FS Type:
2016-11-29 00:19:08,317 - Directory['/etc/hadoop'] {'mode': 0755}
2016-11-29 00:19:08,340 - File['/usr/hdp/current/hadoop-client/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2016-11-29 00:19:08,341 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 01777}
2016-11-29 00:19:08,364 - Execute[('setenforce', '0')] {'not_if': '(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)', 'sudo': True, 'only_if': 'test -f /selinux/enforce'}
2016-11-29 00:19:08,389 - Skipping Execute[('setenforce', '0')] due to not_if
2016-11-29 00:19:08,389 - Directory['/var/log/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'hadoop', 'mode': 0775, 'cd_access': 'a'}
2016-11-29 00:19:08,391 - Directory['/var/run/hadoop'] {'owner': 'root', 'create_parents': True, 'group': 'root', 'cd_access': 'a'}
2016-11-29 00:19:08,391 - Directory['/tmp/hadoop-hdfs'] {'owner': 'hdfs', 'create_parents': True, 'cd_access': 'a'}
2016-11-29 00:19:08,397 - File['/usr/hdp/current/hadoop-client/conf/commons-logging.properties'] {'content': Template('commons-logging.properties.j2'), 'owner': 'hdfs'}
2016-11-29 00:19:08,400 - File['/usr/hdp/current/hadoop-client/conf/health_check'] {'content': Template('health_check.j2'), 'owner': 'hdfs'}
2016-11-29 00:19:08,402 - File['/usr/hdp/current/hadoop-client/conf/log4j.properties'] {'content': ..., 'owner': 'hdfs', 'group': 'hadoop', 'mode': 0644}
2016-11-29 00:19:08,420 - File['/usr/hdp/current/hadoop-client/conf/hadoop-metrics2.properties'] {'content': Template('hadoop-metrics2.properties.j2'), 'owner': 'hdfs', 'group': 'hadoop'}
2016-11-29 00:19:08,420 - File['/usr/hdp/current/hadoop-client/conf/task-log4j.properties'] {'content': StaticFile('task-log4j.properties'), 'mode': 0755}
2016-11-29 00:19:08,422 - File['/usr/hdp/current/hadoop-client/conf/configuration.xsl'] {'owner': 'hdfs', 'group': 'hadoop'}
2016-11-29 00:19:08,427 - File['/etc/hadoop/conf/topology_mappings.data'] {'owner': 'hdfs', 'content': Template('topology_mappings.data.j2'), 'only_if': 'test -d /etc/hadoop/conf', 'group': 'hadoop'}
2016-11-29 00:19:08,438 - File['/etc/hadoop/conf/topology_script.py'] {'content': StaticFile('topology_script.py'), 'only_if': 'test -d /etc/hadoop/conf', 'mode': 0755}
2016-11-29 00:19:08,673 - The hadoop conf dir /usr/hdp/current/hadoop-client/conf exists, will call conf-select on it for version 2.5.0.0-1245
2016-11-29 00:19:08,673 - Checking if need to create versioned conf dir /etc/hadoop/2.5.0.0-1245/0
2016-11-29 00:19:08,674 - call[('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False, 'stderr': -1}
2016-11-29 00:19:08,701 - call returned (1, '/etc/hadoop/2.5.0.0-1245/0 exist already', '')
2016-11-29 00:19:08,701 - checked_call[('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.5.0.0-1245', '--conf-version', '0')] {'logoutput': False, 'sudo': True, 'quiet': False}
2016-11-29 00:19:08,730 - checked_call returned (0, '')
2016-11-29 00:19:08,731 - Ensuring that hadoop has the correct symlink structure
2016-11-29 00:19:08,731 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-11-29 00:19:08,732 - Directory['/opt/lucidworks-hdpsearch/solr'] {'owner': 'solr', 'create_parents': True, 'group': 'solr', 'mode': 0755, 'cd_access': 'a'}
2016-11-29 00:19:08,743 - Directory['/var/log/solr'] {'owner': 'solr', 'create_parents': True, 'group': 'solr', 'mode': 0755, 'cd_access': 'a'}
2016-11-29 00:19:08,743 - Directory['/var/log/service_solr'] {'owner': 'solr', 'create_parents': True, 'group': 'solr', 'mode': 0755, 'cd_access': 'a'}
2016-11-29 00:19:08,744 - Directory['/var/run/solr'] {'owner': 'solr', 'create_parents': True, 'group': 'solr', 'mode': 0755, 'cd_access': 'a'}
2016-11-29 00:19:08,744 - Directory['/etc/solr/conf'] {'owner': 'solr', 'create_parents': True, 'group': 'solr', 'mode': 0755, 'cd_access': 'a'}
2016-11-29 00:19:08,744 - Directory['/etc/solr/data_dir'] {'owner': 'solr', 'group': 'solr', 'create_parents': True, 'mode': 0755, 'cd_access': 'a'}
2016-11-29 00:19:08,745 - Execute[('chmod', '-R', '777', '/opt/lucidworks-hdpsearch/solr/server/solr-webapp')] {'sudo': True}
2016-11-29 00:19:08,793 - File['/opt/lucidworks-hdpsearch/solr/bin/solr.in.sh'] {'owner': 'solr', 'content': InlineTemplate(...)}
2016-11-29 00:19:08,795 - File['/etc/solr/conf/log4j.properties'] {'owner': 'solr', 'content': InlineTemplate(...)}
2016-11-29 00:19:08,804 - File['/etc/solr/data_dir/solr.xml'] {'owner': 'solr', 'content': Template('solr.xml.j2')}
2016-11-29 00:19:08,805 - HdfsResource['/user/solr'] {'security_enabled': False, 'hadoop_bin_dir': '/usr/hdp/current/hadoop-client/bin', 'keytab': [EMPTY], 'dfs_type': '', 'default_fs': 'hdfs://sandbox.hortonworks.com:8020', 'hdfs_resource_ignore_file': '/var/lib/ambari-agent/data/.hdfs_resource_ignore', 'hdfs_site': ..., 'kinit_path_local': 'kinit', 'principal_name': [EMPTY], 'user': 'hdfs', 'owner': 'solr', 'hadoop_conf_dir': '/usr/hdp/current/hadoop-client/conf', 'type': 'directory', 'action': ['create_on_execute'], 'immutable_paths': [u'/apps/hive/warehouse', u'/apps/falcon', u'/mr-history/done', u'/app-logs', u'/tmp']}
2016-11-29 00:19:08,807 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X GET '"'"'http://sandbox.hortonworks.com:50070/webhdfs/v1/user/solr?op=GETFILESTATUS&user.name=hdfs'"'"' 1>/tmp/tmpROgE51 2>/tmp/tmp95X26l''] {'logoutput': None, 'quiet': False}
2016-11-29 00:19:08,876 - call returned (0, '')
2016-11-29 00:19:08,876 - call['export JAVA_HOME=/usr/lib/jvm/java; /opt/lucidworks-hdpsearch/solr/server/scripts/cloud-scripts/zkcli.sh -zkhost sandbox.hortonworks.com:2181 -cmd get /solr/clusterstate.json'] {'timeout': 60}
2016-11-29 00:19:09,386 - call returned (0, '{}')
2016-11-29 00:19:09,387 - ZK node /solr/clusterstate.json already exists, skipping ...
2016-11-29 00:19:09,387 - HdfsResource['/solr'] {'security_enabled': False, 'hadoop_bin_dir': '/usr/hdp/current/hadoop-client/bin', 'keytab': [EMPTY], 'dfs_type': '', 'default_fs': 'hdfs://sandbox.hortonworks.com:8020', 'hdfs_resource_ignore_file': '/var/lib/ambari-agent/data/.hdfs_resource_ignore', 'hdfs_site': ..., 'kinit_path_local': 'kinit', 'principal_name': [EMPTY], 'user': 'hdfs', 'owner': 'solr', 'hadoop_conf_dir': '/usr/hdp/current/hadoop-client/conf', 'type': 'directory', 'action': ['create_on_execute'], 'immutable_paths': [u'/apps/hive/warehouse', u'/apps/falcon', u'/mr-history/done', u'/app-logs', u'/tmp']}
2016-11-29 00:19:09,388 - call['ambari-sudo.sh su hdfs -l -s /bin/bash -c 'curl -sS -L -w '"'"'%{http_code}'"'"' -X GET '"'"'http://sandbox.hortonworks.com:50070/webhdfs/v1/solr?op=GETFILESTATUS&user.name=hdfs'"'"' 1>/tmp/tmp5SUGyK 2>/tmp/tmpm8FwRG''] {'logoutput': None, 'quiet': False}
2016-11-29 00:19:09,557 - call returned (0, '')
2016-11-29 00:19:09,557 - call['export JAVA_HOME=/usr/lib/jvm/java; /opt/lucidworks-hdpsearch/solr/server/scripts/cloud-scripts/zkcli.sh -zkhost sandbox.hortonworks.com:2181 -cmd get /solr/clusterprops.json'] {'timeout': 60}
2016-11-29 00:19:09,975 - call returned (1, 'Exception in thread "main" org.apache.zookeeper.KeeperException$NoNodeException: KeeperErrorCode = NoNode for /solr/clusterprops.json\n\tat org.apache.zookeeper.KeeperException.create(KeeperException.java:111)\n\tat org.apache.zookeeper.KeeperException.create(KeeperException.java:51)\n\tat org.apache.zookeeper.ZooKeeper.getData(ZooKeeper.java:1155)\n\tat org.apache.solr.common.cloud.SolrZkClient$7.execute(SolrZkClient.java:345)\n\tat org.apache.solr.common.cloud.SolrZkClient$7.execute(SolrZkClient.java:342)\n\tat org.apache.solr.common.cloud.ZkCmdExecutor.retryOperation(ZkCmdExecutor.java:60)\n\tat org.apache.solr.common.cloud.SolrZkClient.getData(SolrZkClient.java:342)\n\tat org.apache.solr.cloud.ZkCLI.main(ZkCLI.java:296)')
2016-11-29 00:19:09,975 - call['export JAVA_HOME=/usr/lib/jvm/java; /opt/lucidworks-hdpsearch/solr/server/scripts/cloud-scripts/zkcli.sh -zkhost sandbox.hortonworks.com:2181 -cmd get /solr/security.json'] {'timeout': 60}
2016-11-29 00:19:10,341 - call returned (1, 'Exception in thread "main" org.apache.zookeeper.KeeperException$NoNodeException: KeeperErrorCode = NoNode for /solr/security.json\n\tat org.apache.zookeeper.KeeperException.create(KeeperException.java:111)\n\tat org.apache.zookeeper.KeeperException.create(KeeperException.java:51)\n\tat org.apache.zookeeper.ZooKeeper.getData(ZooKeeper.java:1155)\n\tat org.apache.solr.common.cloud.SolrZkClient$7.execute(SolrZkClient.java:345)\n\tat org.apache.solr.common.cloud.SolrZkClient$7.execute(SolrZkClient.java:342)\n\tat org.apache.solr.common.cloud.ZkCmdExecutor.retryOperation(ZkCmdExecutor.java:60)\n\tat org.apache.solr.common.cloud.SolrZkClient.getData(SolrZkClient.java:342)\n\tat org.apache.solr.cloud.ZkCLI.main(ZkCLI.java:296)')
2016-11-29 00:19:10,341 - call['hadoop --config /usr/hdp/current/hadoop-client/conf dfs -ls /solr'] {}
2016-11-29 00:19:12,990 - call returned (0, 'DEPRECATED: Use of this script to execute hdfs command is deprecated.\nInstead use the hdfs command for it.\n\nFound 2 items\ndrwxr-xr-x - solr hdfs 0 2016-11-26 00:07 /solr/collection1\ndrwxr-xr-x - solr hdfs 0 2016-11-26 00:18 /solr/tweets')
2016-11-29 00:19:12,990 - call['hadoop --config /usr/hdp/current/hadoop-client/conf dfs -ls /solr/collection1'] {}
2016-11-29 00:19:16,276 - call returned (0, 'DEPRECATED: Use of this script to execute hdfs command is deprecated.\nInstead use the hdfs command for it.\n\nFound 2 items\ndrwxr-xr-x - solr hdfs 0 2016-11-26 00:07 /solr/collection1/core_node1\ndrwxr-xr-x - solr hdfs 0 2016-11-26 00:07 /solr/collection1/core_node2')
2016-11-29 00:19:16,278 - call['hadoop --config /usr/hdp/current/hadoop-client/conf dfs -ls /solr/tweets'] {}
2016-11-29 00:19:19,184 - call returned (0, 'DEPRECATED: Use of this script to execute hdfs command is deprecated.\nInstead use the hdfs command for it.\n\nFound 1 items\ndrwxr-xr-x - solr hdfs 0 2016-11-26 00:18 /solr/tweets/core_node1')
2016-11-29 00:19:19,185 - Execute['hadoop --config /usr/hdp/current/hadoop-client/conf dfs -rm -f /solr/collection1/core_node1/data/index/write.lock /solr/collection1/core_node2/data/index/write.lock /solr/tweets/core_node1/data/index/write.lock '] {'user': 'hdfs'}
2016-11-29 00:19:22,366 - call['netstat -lnt | awk -v v1=8983 '$6 == "LISTEN" && $4 ~ ":"+v1''] {'timeout': 60}
2016-11-29 00:19:22,391 - call returned (0, '')
2016-11-29 00:19:22,391 - Solr port validation output:
2016-11-29 00:19:22,392 - call['/opt/lucidworks-hdpsearch/solr/bin/solr status'] {'timeout': 60}
2016-11-29 00:19:22,494 - call returned (1, 'Found 1 Solr nodes: \n\nSolr process 9567 from /var/run/solr/solr-8983.pid not found.')
2016-11-29 00:19:22,494 - Solr status output: Found 1 Solr nodes: Solr process 9567 from /var/run/solr/solr-8983.pid not found.
2016-11-29 00:19:22,494 - Solr is running, it cannot be started again

Command failed after 1 tries
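
The tail of the log is the interesting part: the port check finds nothing listening on 8983, and /opt/lucidworks-hdpsearch/solr/bin/solr status reports that process 9567 from /var/run/solr/solr-8983.pid no longer exists, yet the start script still concludes "Solr is running, it cannot be started again" and aborts. That pattern points at a stale PID file left over from an unclean shutdown being mistaken for a live instance. A minimal manual check-and-cleanup sketch, using only the paths and commands that appear in the log above (the pid filename and the commented-out start flags are assumptions and may differ on other hosts or solr.in.sh setups):

    # Confirm nothing actually listens on the Solr port (same check the script ran)
    netstat -lnt | awk '$6 == "LISTEN" && $4 ~ ":8983"'

    # solr status reads /var/run/solr/solr-8983.pid; if that PID is gone, the file is stale
    /opt/lucidworks-hdpsearch/solr/bin/solr status
    cat /var/run/solr/solr-8983.pid
    ps -p "$(cat /var/run/solr/solr-8983.pid)" || echo "pid file is stale"

    # Remove the stale pid file so the start logic no longer believes Solr is up,
    # then retry the start from Ambari (or by hand as the solr user)
    sudo rm -f /var/run/solr/solr-8983.pid
    # sudo -u solr /opt/lucidworks-hdpsearch/solr/bin/solr start -cloud -p 8983   # assumes ZK settings come from solr.in.sh

The write.lock cleanup earlier in the log already removed the HDFS index locks, so with the stale pid file gone the start should be able to proceed; if it still fails, the Solr logs under /var/log/solr are the next place to look.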