[10/Mar/2021 05:44:28 +0000] 29129 MainThread redactor INFO Started launcher: /opt/cloudera/cm-agent/service/csd/csd.sh start_history_server [10/Mar/2021 05:44:28 +0000] 29129 MainThread redactor INFO Re-exec watcher: /opt/cloudera/cm-agent/bin/cm proc_watcher 29138 [10/Mar/2021 05:44:28 +0000] 29139 MainThread redactor INFO Re-exec redactor: /opt/cloudera/cm-agent/bin/cm redactor --fds 3 5 [10/Mar/2021 05:44:29 +0000] 29139 MainThread redactor INFO Started redactor Wed Mar 10 05:44:29 EST 2021 + locate_java_home + locate_java_home_no_verify + JAVA11_HOME_CANDIDATES=('/usr/java/jdk-11' '/usr/lib/jvm/jdk-11' '/usr/lib/jvm/java-11-oracle') + local JAVA11_HOME_CANDIDATES + OPENJAVA11_HOME_CANDIDATES=('/usr/lib/jvm/java-11' '/usr/java/jdk-11' '/usr/lib/jvm/jdk-11' '/usr/lib64/jvm/jdk-11') + local OPENJAVA11_HOME_CANDIDATES + JAVA8_HOME_CANDIDATES=('/usr/java/jdk1.8' '/usr/java/jre1.8' '/usr/lib/jvm/j2sdk1.8-oracle' '/usr/lib/jvm/j2sdk1.8-oracle/jre' '/usr/lib/jvm/java-8-oracle') + local JAVA8_HOME_CANDIDATES + OPENJAVA8_HOME_CANDIDATES=('/usr/lib/jvm/java-1.8.0-openjdk' '/usr/lib/jvm/java-8-openjdk' '/usr/lib64/jvm/java-1.8.0-openjdk' '/usr/lib64/jvm/java-8-openjdk') + local OPENJAVA8_HOME_CANDIDATES + MISCJAVA_HOME_CANDIDATES=('/Library/Java/Home' '/usr/java/default' '/usr/lib/jvm/default-java' '/usr/lib/jvm/java-openjdk' '/usr/lib/jvm/jre-openjdk') + local MISCJAVA_HOME_CANDIDATES + case ${BIGTOP_JAVA_MAJOR} in + JAVA_HOME_CANDIDATES=(${JAVA11_HOME_CANDIDATES[@]} ${OPENJAVA11_HOME_CANDIDATES[@]} ${JAVA8_HOME_CANDIDATES[@]} ${OPENJAVA8_HOME_CANDIDATES[@]} ${MISCJAVA_HOME_CANDIDATES[@]}) + '[' -n '' ']' + '[' -z '' ']' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/java/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/java-11-oracle*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/java-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/java/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib64/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd /usr/java/jdk1.8.0_232-cloudera + for candidate in '`ls -rvd ${candidate_regex}* 2>/dev/null`' + '[' -e /usr/java/jdk1.8.0_232-cloudera/bin/java ']' + export JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + break 2 + verify_java_home + '[' -z /usr/java/jdk1.8.0_232-cloudera ']' + echo JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + '[' -n '-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' ']' + get_gc_args + JAVA8_GC_TUNING_ARGS=' ' + set_basic_gc_tuning_args_based_on_java_version + get_java_major_version JAVA_MAJOR + '[' -z /usr/java/jdk1.8.0_232-cloudera/bin/java ']' ++ /usr/java/jdk1.8.0_232-cloudera/bin/java -version + local 'VERSION_STRING=openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode)' + local 'RE_JAVA=[java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+' + [[ openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit 
Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode) =~ [java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+ ]] + eval JAVA_MAJOR=8 ++ JAVA_MAJOR=8 + BASIC_GC_TUNING_ARGS= + case $JAVA_MAJOR in + BASIC_GC_TUNING_ARGS=' ' + CSD_GC_ARGS=' ' + CSD_JAVA_OPTS+=' ' ++ replace_pid -XX:+HeapDumpOnOutOfMemoryError '-XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof' -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ++ echo -XX:+HeapDumpOnOutOfMemoryError '-XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof' -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ++ sed 's#{{PID}}#29138#g' + export 'CSD_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29138.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' + CSD_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29138.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' + echo 'Using -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29138.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh as CSD_JAVA_OPTS' + source_parcel_environment + '[' '!' -z /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh ']' + OLD_IFS=' ' + IFS=: + SCRIPT_ARRAY=($SCM_DEFINES_SCRIPTS) + DIRNAME_ARRAY=($PARCEL_DIRNAMES) + IFS=' ' + COUNT=1 ++ seq 1 1 + for i in '`seq 1 $COUNT`' + SCRIPT=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh + PARCEL_DIRNAME=CDH-7.1.4-1.cdh7.1.4.p0.6300266 + . 
/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh ++ CDH_DIRNAME=CDH-7.1.4-1.cdh7.1.4.p0.6300266 ++ export CDH_HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ CDH_HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export CDH_MR1_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-0.20-mapreduce ++ CDH_MR1_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-0.20-mapreduce ++ export CDH_HDFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-hdfs ++ CDH_HDFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-hdfs ++ export CDH_OZONE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ++ CDH_OZONE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ++ export CDH_HTTPFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-httpfs ++ CDH_HTTPFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-httpfs ++ export CDH_MR2_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-mapreduce ++ CDH_MR2_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-mapreduce ++ export CDH_YARN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-yarn ++ CDH_YARN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-yarn ++ export CDH_HBASE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase ++ CDH_HBASE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase ++ export CDH_HBASE_FILESYSTEM_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_filesystem ++ CDH_HBASE_FILESYSTEM_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_filesystem ++ export CDH_HBASE_CONNECTORS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_connectors ++ CDH_HBASE_CONNECTORS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_connectors ++ export CDH_ZOOKEEPER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zookeeper ++ CDH_ZOOKEEPER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zookeeper ++ export CDH_ZEPPELIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zeppelin ++ CDH_ZEPPELIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zeppelin ++ export CDH_HIVE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive ++ CDH_HIVE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive ++ export CDH_HUE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hue ++ CDH_HUE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hue ++ export CDH_OOZIE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/oozie ++ CDH_OOZIE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/oozie ++ export CDH_HUE_PLUGINS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ CDH_HUE_PLUGINS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export CDH_HCAT_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive-hcatalog ++ CDH_HCAT_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive-hcatalog ++ export CDH_SENTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/sentry ++ CDH_SENTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/sentry ++ export JSVC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/bigtop-utils ++ 
JSVC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/bigtop-utils ++ export CDH_HADOOP_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/bin/hadoop ++ CDH_HADOOP_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/bin/hadoop ++ export CDH_IMPALA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/impala ++ CDH_IMPALA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/impala ++ export CDH_SOLR_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/solr ++ CDH_SOLR_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/solr ++ export CDH_HBASE_INDEXER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase-solr ++ CDH_HBASE_INDEXER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase-solr ++ export SEARCH_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/search ++ SEARCH_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/search ++ export CDH_SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ CDH_SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export WEBHCAT_DEFAULT_XML=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/etc/hive-webhcat/conf.dist/webhcat-default.xml ++ WEBHCAT_DEFAULT_XML=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/etc/hive-webhcat/conf.dist/webhcat-default.xml ++ export CDH_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-kms ++ CDH_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-kms ++ export CDH_PARQUET_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/parquet ++ CDH_PARQUET_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/parquet ++ export CDH_AVRO_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/avro ++ CDH_AVRO_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/avro ++ export CDH_KAFKA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kafka ++ CDH_KAFKA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kafka ++ export CDH_SCHEMA_REGISTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/schemaregistry ++ CDH_SCHEMA_REGISTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/schemaregistry ++ export CDH_STREAMS_MESSAGING_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager ++ CDH_STREAMS_MESSAGING_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager ++ export CDH_STREAMS_MESSAGING_MANAGER_UI_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager_ui ++ CDH_STREAMS_MESSAGING_MANAGER_UI_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager_ui ++ export CDH_STREAMS_REPLICATION_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_replication_manager ++ CDH_STREAMS_REPLICATION_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_replication_manager ++ export CDH_CRUISE_CONTROL_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/cruise_control ++ CDH_CRUISE_CONTROL_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/cruise_control ++ export CDH_KNOX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/knox ++ CDH_KNOX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/knox ++ export CDH_KUDU_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kudu ++ 
CDH_KUDU_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kudu ++ export CDH_RANGER_ADMIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-admin ++ CDH_RANGER_ADMIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-admin ++ export CDH_RANGER_TAGSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-tagsync ++ CDH_RANGER_TAGSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-tagsync ++ export CDH_RANGER_USERSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-usersync ++ CDH_RANGER_USERSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-usersync ++ export CDH_RANGER_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kms ++ CDH_RANGER_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kms ++ export CDH_RANGER_RAZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-raz ++ CDH_RANGER_RAZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-raz ++ export CDH_RANGER_RMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-rms ++ CDH_RANGER_RMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-rms ++ export CDH_ATLAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/atlas ++ CDH_ATLAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/atlas ++ export CDH_TEZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/tez ++ CDH_TEZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/tez ++ export CDH_PHOENIX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/phoenix ++ CDH_PHOENIX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/phoenix ++ export DAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/data_analytics_studio ++ DAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/data_analytics_studio ++ export QUEUEMANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/queuemanager ++ QUEUEMANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/queuemanager ++ export CDH_RANGER_HBASE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hbase-plugin ++ CDH_RANGER_HBASE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hbase-plugin ++ export CDH_RANGER_HIVE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hive-plugin ++ CDH_RANGER_HIVE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hive-plugin ++ export CDH_RANGER_ATLAS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-atlas-plugin ++ CDH_RANGER_ATLAS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-atlas-plugin ++ export CDH_RANGER_SOLR_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-solr-plugin ++ CDH_RANGER_SOLR_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-solr-plugin ++ export CDH_RANGER_HDFS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hdfs-plugin ++ CDH_RANGER_HDFS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hdfs-plugin ++ export CDH_RANGER_KNOX_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-knox-plugin ++ CDH_RANGER_KNOX_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-knox-plugin ++ export 
CDH_RANGER_YARN_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-yarn-plugin ++ CDH_RANGER_YARN_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-yarn-plugin ++ export CDH_RANGER_OZONE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-ozone-plugin ++ CDH_RANGER_OZONE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-ozone-plugin ++ export CDH_RANGER_KAFKA_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kafka-plugin ++ CDH_RANGER_KAFKA_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kafka-plugin + echo 'Using /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER as conf dir' + echo 'Using scripts/control.sh as process script' + replace_conf_dir + echo CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER + echo CMF_CONF_DIR= + EXCLUDE_CMF_FILES=('cloudera-config.sh' 'hue.sh' 'impala.sh' 'sqoop.sh' 'supervisor.conf' 'config.zip' 'proc.json' '*.log' '*.keytab' '*jceks' '*bcfks' 'supervisor_status') ++ printf '! -name %s ' cloudera-config.sh hue.sh impala.sh sqoop.sh supervisor.conf config.zip proc.json '*.log' spark_on_yarn.keytab '*jceks' '*bcfks' supervisor_status + find /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER -type f '!' -path '/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/logs/*' '!' -name cloudera-config.sh '!' -name hue.sh '!' -name impala.sh '!' -name sqoop.sh '!' -name supervisor.conf '!' -name config.zip '!' -name proc.json '!' -name '*.log' '!' -name spark_on_yarn.keytab '!' -name '*jceks' '!' -name '*bcfks' '!' -name supervisor_status -exec perl -pi -e 's#\{\{CMF_CONF_DIR}}#/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER#g' '{}' ';' + make_scripts_executable + find /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER -regex '.*\.\(py\|sh\)$' -exec chmod u+x '{}' ';' + get_java_major_version JAVA_MAJOR_VERSION + '[' -z /usr/java/jdk1.8.0_232-cloudera/bin/java ']' ++ /usr/java/jdk1.8.0_232-cloudera/bin/java -version + local 'VERSION_STRING=openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode)' + local 'RE_JAVA=[java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+' + [[ openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode) =~ [java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+ ]] + eval JAVA_MAJOR_VERSION=8 ++ JAVA_MAJOR_VERSION=8 + export JAVA_MAJOR_VERSION + RUN_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER + '[' '' == true ']' + chmod u+x /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/scripts/control.sh + export COMMON_SCRIPT=/opt/cloudera/cm-agent/service/common/cloudera-config.sh + COMMON_SCRIPT=/opt/cloudera/cm-agent/service/common/cloudera-config.sh + exec /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/scripts/control.sh start_history_server ++ log 'Running Spark CSD control script...' 
+++ date ++ timestamp='Wed Mar 10 05:44:30 EST 2021' ++ '[' -z ']' ++ echo 'Wed Mar 10 05:44:30 EST 2021: Running Spark CSD control script...' ++ echo 'Wed Mar 10 05:44:30 EST 2021: Running Spark CSD control script...' Wed Mar 10 05:44:30 EST 2021: Running Spark CSD control script... ++ log 'Detected CDH_VERSION of [7]' +++ date ++ timestamp='Wed Mar 10 05:44:30 EST 2021' ++ '[' -z ']' ++ echo 'Wed Mar 10 05:44:30 EST 2021: Detected CDH_VERSION of [7]' ++ echo 'Wed Mar 10 05:44:30 EST 2021: Detected CDH_VERSION of [7]' Wed Mar 10 05:44:30 EST 2021: Detected CDH_VERSION of [7] ++ export BIGTOP_DEFAULTS_DIR= ++ BIGTOP_DEFAULTS_DIR= +++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop +++ '[' -n ']' +++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export HDFS_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs ++ HDFS_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs ++ export HADOOP_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ HADOOP_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ HBASE_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/hbase-conf ++ DEFAULT_SPARK_HOME=/usr/lib/spark +++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark +++ '[' -n ']' +++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export SPARK_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ++ SPARK_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ++ '[' '!' 
-d /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ']' ++ export SPARK_ENV=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ SPARK_ENV=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ export SPARK_DEFAULTS=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-defaults.conf ++ SPARK_DEFAULTS=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-defaults.conf ++ export 'SPARK_DAEMON_JAVA_OPTS= -Djava.net.preferIPv4Stack=true' ++ SPARK_DAEMON_JAVA_OPTS=' -Djava.net.preferIPv4Stack=true' +++ m_readlink /opt/cloudera/parcels +++ '[' -n ']' +++ echo /opt/cloudera/parcels ++ export PARCELS_ROOT=/opt/cloudera/parcels ++ PARCELS_ROOT=/opt/cloudera/parcels + case $1 in + start_history_server + log 'Starting Spark History Server' ++ date + timestamp='Wed Mar 10 05:44:30 EST 2021' + '[' -z ']' + echo 'Wed Mar 10 05:44:30 EST 2021: Starting Spark History Server' + echo 'Wed Mar 10 05:44:30 EST 2021: Starting Spark History Server' Wed Mar 10 05:44:30 EST 2021: Starting Spark History Server + local CONF_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf ++ get_default_fs /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ get_hadoop_conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf fs.defaultFS ++ local conf=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ local key=fs.defaultFS ++ /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs --config /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf getconf -confKey fs.defaultFS + local DEFAULT_FS=hdfs://ces1pub.pbm.ihost.com:8020 ++ prepend_protocol /user/spark/applicationHistory hdfs://ces1pub.pbm.ihost.com:8020 ++ local url=/user/spark/applicationHistory ++ local proto=hdfs://ces1pub.pbm.ihost.com:8020 ++ [[ /user/spark/applicationHistory =~ [:alnum:]*:.* ]] ++ echo hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory + local LOG_DIR=hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory ++ dirname /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + local CLASSPATH_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt + local CLASSPATH_FILE_TMP=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + '[' '!' 
-f /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.orig ']' + cp -p /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.orig + cp -p /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.orig /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + echo spark.history.fs.logDirectory=hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory + '[' '' '!=' '' ']' + local FILTERS_KEY=spark.ui.filters ++ read_property spark.ui.filters /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf ++ local key=spark.ui.filters ++ local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf +++ grep '^spark.ui.filters=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf +++ tail -n 1 +++ sed 's/^spark.ui.filters=\(.*\)/\1/' ++ echo + local FILTERS= + '[' true = true ']' ++ add_to_list '' org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ local list= ++ local item=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ '[' -n '' ']' ++ list=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ echo org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + FILTERS=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + '[' false = true ']' + '[' -n org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ']' + replace_spark_conf spark.ui.filters org.apache.spark.deploy.yarn.YarnProxyRedirectFilter /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local key=spark.ui.filters + local value=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local temp=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chown --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chmod --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + grep -v '^spark.ui.filters=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + '[' -n org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ']' + echo spark.ui.filters=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + mv /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp 
/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + set +x + '[' false '!=' true ']' + replace_spark_conf spark.history.store.path '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local key=spark.history.store.path + local value= + local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local temp=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chown --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chmod --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + grep -v '^spark.history.store.path=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + '[' -n '' ']' + mv /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + [[ -d /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ]] + add_to_classpath /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp '/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-*.jar' + local CLASSPATH_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + local 'CLASSPATH=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-*.jar' + IFS=: + read -a CLASSPATH_ENTRIES + for pattern in '"${CLASSPATH_ENTRIES[@]}"' + for entry in '$pattern' ++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ '[' -n ']' ++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + entry=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ basename /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + name=hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + '[' -f /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ']' + is_blacklisted /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ basename 
/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + local JAR=hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + [[ -f /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jetty.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jersey.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jackson.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jackson.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*slf4j.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*servlet.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*-tests.jar ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^junit-.* ]] + return 1 + grep -q '/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar$' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + '[' -s /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp ']' + cat /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + sort + uniq + rm -f /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + ARGS=("org.apache.spark.deploy.history.HistoryServer" "--properties-file" "$CONF_FILE") + run_spark_class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + ARGS=($@) + local ARGS + ARGS+=($ADDITIONAL_ARGS) + prepare_spark_env + replace '\{\{HADOOP_HOME}}' /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{HADOOP_HOME}}#/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{SPARK_HOME}}' /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{SPARK_HOME}}#/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{SPARK_EXTRA_LIB_PATH}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{SPARK_EXTRA_LIB_PATH}}##g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{PYTHON_PATH}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{PYTHON_PATH}}##g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{CDH_PYTHON}}' '' 
/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh
+ perl -pi -e 's#\{\{CDH_PYTHON}}##g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh
++ basename /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf
+ local HADOOP_CONF_DIR_NAME=yarn-conf
+ replace '\{\{HADOOP_CONF_DIR_NAME}}' yarn-conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh
+ perl -pi -e 's#\{\{HADOOP_CONF_DIR_NAME}}#yarn-conf#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh
+ export 'SPARK_DAEMON_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29138.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh -Djava.net.preferIPv4Stack=true'
+ SPARK_DAEMON_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29138.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh -Djava.net.preferIPv4Stack=true'
+ export 'SPARK_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29138.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh '
+ SPARK_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29138.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh '
+ cmd='/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf'
+ echo 'Running [/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf]'
+ exec /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf
Exception in thread "main" java.lang.reflect.InvocationTargetException
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.spark.deploy.history.HistoryServer$.main(HistoryServer.scala:294)
        at org.apache.spark.deploy.history.HistoryServer.main(HistoryServer.scala)
Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=spark, access=EXECUTE, inode="/user":mapred:supergroup:drwxrwx---
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:315)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:242)
        at org.apache.hadoop.hdfs.server.namenode.GPFSPermissionChecker.checkPermission(GPFSPermissionChecker.java:90)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:604)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1858)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1876)
        at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePathV0(GPFSDirectory.java:665)
        at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePath(GPFSDirectory.java:507)
        at org.apache.hadoop.hdfs.server.namenode.GPFSNamesystemV0.getFileInfo(GPFSNamesystemV0.java:1961)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1143)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:939)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
        at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1688)
        at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1744)
        at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1741)
        at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
        at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1756)
        at org.apache.spark.deploy.history.FsHistoryProvider.org$apache$spark$deploy$history$FsHistoryProvider$$startPolling(FsHistoryProvider.scala:258)
        at org.apache.spark.deploy.history.FsHistoryProvider.initialize(FsHistoryProvider.scala:212)
        at org.apache.spark.deploy.history.FsHistoryProvider.<init>(FsHistoryProvider.scala:208)
        at org.apache.spark.deploy.history.FsHistoryProvider.<init>(FsHistoryProvider.scala:87)
        ... 6 more
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=spark, access=EXECUTE, inode="/user":mapred:supergroup:drwxrwx---
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:315)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:242)
        at org.apache.hadoop.hdfs.server.namenode.GPFSPermissionChecker.checkPermission(GPFSPermissionChecker.java:90)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:604)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1858)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1876)
        at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePathV0(GPFSDirectory.java:665)
        at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePath(GPFSDirectory.java:507)
        at org.apache.hadoop.hdfs.server.namenode.GPFSNamesystemV0.getFileInfo(GPFSNamesystemV0.java:1961)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1143)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:939)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
        at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1562)
        at org.apache.hadoop.ipc.Client.call(Client.java:1508)
        at org.apache.hadoop.ipc.Client.call(Client.java:1405)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
        at com.sun.proxy.$Proxy9.getFileInfo(Unknown Source)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:957)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:431)
        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
at com.sun.proxy.$Proxy10.getFileInfo(Unknown Source) at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1686) ... 14 more [10/Mar/2021 05:44:36 +0000] 29422 MainThread redactor INFO Started launcher: /opt/cloudera/cm-agent/service/csd/csd.sh start_history_server [10/Mar/2021 05:44:36 +0000] 29422 MainThread redactor INFO Re-exec watcher: /opt/cloudera/cm-agent/bin/cm proc_watcher 29430 [10/Mar/2021 05:44:36 +0000] 29431 MainThread redactor INFO Re-exec redactor: /opt/cloudera/cm-agent/bin/cm redactor --fds 3 5 [10/Mar/2021 05:44:37 +0000] 29431 MainThread redactor INFO Started redactor Wed Mar 10 05:44:37 EST 2021 + locate_java_home + locate_java_home_no_verify + JAVA11_HOME_CANDIDATES=('/usr/java/jdk-11' '/usr/lib/jvm/jdk-11' '/usr/lib/jvm/java-11-oracle') + local JAVA11_HOME_CANDIDATES + OPENJAVA11_HOME_CANDIDATES=('/usr/lib/jvm/java-11' '/usr/java/jdk-11' '/usr/lib/jvm/jdk-11' '/usr/lib64/jvm/jdk-11') + local OPENJAVA11_HOME_CANDIDATES + JAVA8_HOME_CANDIDATES=('/usr/java/jdk1.8' '/usr/java/jre1.8' '/usr/lib/jvm/j2sdk1.8-oracle' '/usr/lib/jvm/j2sdk1.8-oracle/jre' '/usr/lib/jvm/java-8-oracle') + local JAVA8_HOME_CANDIDATES + OPENJAVA8_HOME_CANDIDATES=('/usr/lib/jvm/java-1.8.0-openjdk' '/usr/lib/jvm/java-8-openjdk' '/usr/lib64/jvm/java-1.8.0-openjdk' '/usr/lib64/jvm/java-8-openjdk') + local OPENJAVA8_HOME_CANDIDATES + MISCJAVA_HOME_CANDIDATES=('/Library/Java/Home' '/usr/java/default' '/usr/lib/jvm/default-java' '/usr/lib/jvm/java-openjdk' '/usr/lib/jvm/jre-openjdk') + local MISCJAVA_HOME_CANDIDATES + case ${BIGTOP_JAVA_MAJOR} in + JAVA_HOME_CANDIDATES=(${JAVA11_HOME_CANDIDATES[@]} ${OPENJAVA11_HOME_CANDIDATES[@]} ${JAVA8_HOME_CANDIDATES[@]} ${OPENJAVA8_HOME_CANDIDATES[@]} ${MISCJAVA_HOME_CANDIDATES[@]}) + '[' -n '' ']' + '[' -z '' ']' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/java/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/java-11-oracle*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/java-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/java/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib64/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd /usr/java/jdk1.8.0_232-cloudera + for candidate in '`ls -rvd ${candidate_regex}* 2>/dev/null`' + '[' -e /usr/java/jdk1.8.0_232-cloudera/bin/java ']' + export JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + break 2 + verify_java_home + '[' -z /usr/java/jdk1.8.0_232-cloudera ']' + echo JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + '[' -n '-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' ']' + get_gc_args + JAVA8_GC_TUNING_ARGS=' ' + set_basic_gc_tuning_args_based_on_java_version + get_java_major_version JAVA_MAJOR + '[' -z /usr/java/jdk1.8.0_232-cloudera/bin/java ']' ++ /usr/java/jdk1.8.0_232-cloudera/bin/java -version + local 'VERSION_STRING=openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode)' + local 
'RE_JAVA=[java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+' + [[ openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode) =~ [java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+ ]] + eval JAVA_MAJOR=8 ++ JAVA_MAJOR=8 + BASIC_GC_TUNING_ARGS= + case $JAVA_MAJOR in + BASIC_GC_TUNING_ARGS=' ' + CSD_GC_ARGS=' ' + CSD_JAVA_OPTS+=' ' ++ replace_pid -XX:+HeapDumpOnOutOfMemoryError '-XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof' -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ++ echo -XX:+HeapDumpOnOutOfMemoryError '-XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof' -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ++ sed 's#{{PID}}#29430#g' + export 'CSD_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29430.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' + CSD_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29430.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' + echo 'Using -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29430.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh as CSD_JAVA_OPTS' + source_parcel_environment + '[' '!' -z /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh ']' + OLD_IFS=' ' + IFS=: + SCRIPT_ARRAY=($SCM_DEFINES_SCRIPTS) + DIRNAME_ARRAY=($PARCEL_DIRNAMES) + IFS=' ' + COUNT=1 ++ seq 1 1 + for i in '`seq 1 $COUNT`' + SCRIPT=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh + PARCEL_DIRNAME=CDH-7.1.4-1.cdh7.1.4.p0.6300266 + . 
/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh ++ CDH_DIRNAME=CDH-7.1.4-1.cdh7.1.4.p0.6300266 ++ export CDH_HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ CDH_HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export CDH_MR1_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-0.20-mapreduce ++ CDH_MR1_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-0.20-mapreduce ++ export CDH_HDFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-hdfs ++ CDH_HDFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-hdfs ++ export CDH_OZONE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ++ CDH_OZONE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ++ export CDH_HTTPFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-httpfs ++ CDH_HTTPFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-httpfs ++ export CDH_MR2_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-mapreduce ++ CDH_MR2_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-mapreduce ++ export CDH_YARN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-yarn ++ CDH_YARN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-yarn ++ export CDH_HBASE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase ++ CDH_HBASE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase ++ export CDH_HBASE_FILESYSTEM_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_filesystem ++ CDH_HBASE_FILESYSTEM_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_filesystem ++ export CDH_HBASE_CONNECTORS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_connectors ++ CDH_HBASE_CONNECTORS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_connectors ++ export CDH_ZOOKEEPER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zookeeper ++ CDH_ZOOKEEPER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zookeeper ++ export CDH_ZEPPELIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zeppelin ++ CDH_ZEPPELIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zeppelin ++ export CDH_HIVE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive ++ CDH_HIVE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive ++ export CDH_HUE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hue ++ CDH_HUE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hue ++ export CDH_OOZIE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/oozie ++ CDH_OOZIE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/oozie ++ export CDH_HUE_PLUGINS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ CDH_HUE_PLUGINS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export CDH_HCAT_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive-hcatalog ++ CDH_HCAT_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive-hcatalog ++ export CDH_SENTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/sentry ++ CDH_SENTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/sentry ++ export JSVC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/bigtop-utils ++ 
JSVC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/bigtop-utils ++ export CDH_HADOOP_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/bin/hadoop ++ CDH_HADOOP_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/bin/hadoop ++ export CDH_IMPALA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/impala ++ CDH_IMPALA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/impala ++ export CDH_SOLR_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/solr ++ CDH_SOLR_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/solr ++ export CDH_HBASE_INDEXER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase-solr ++ CDH_HBASE_INDEXER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase-solr ++ export SEARCH_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/search ++ SEARCH_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/search ++ export CDH_SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ CDH_SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export WEBHCAT_DEFAULT_XML=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/etc/hive-webhcat/conf.dist/webhcat-default.xml ++ WEBHCAT_DEFAULT_XML=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/etc/hive-webhcat/conf.dist/webhcat-default.xml ++ export CDH_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-kms ++ CDH_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-kms ++ export CDH_PARQUET_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/parquet ++ CDH_PARQUET_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/parquet ++ export CDH_AVRO_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/avro ++ CDH_AVRO_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/avro ++ export CDH_KAFKA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kafka ++ CDH_KAFKA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kafka ++ export CDH_SCHEMA_REGISTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/schemaregistry ++ CDH_SCHEMA_REGISTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/schemaregistry ++ export CDH_STREAMS_MESSAGING_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager ++ CDH_STREAMS_MESSAGING_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager ++ export CDH_STREAMS_MESSAGING_MANAGER_UI_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager_ui ++ CDH_STREAMS_MESSAGING_MANAGER_UI_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager_ui ++ export CDH_STREAMS_REPLICATION_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_replication_manager ++ CDH_STREAMS_REPLICATION_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_replication_manager ++ export CDH_CRUISE_CONTROL_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/cruise_control ++ CDH_CRUISE_CONTROL_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/cruise_control ++ export CDH_KNOX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/knox ++ CDH_KNOX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/knox ++ export CDH_KUDU_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kudu ++ 
CDH_KUDU_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kudu ++ export CDH_RANGER_ADMIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-admin ++ CDH_RANGER_ADMIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-admin ++ export CDH_RANGER_TAGSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-tagsync ++ CDH_RANGER_TAGSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-tagsync ++ export CDH_RANGER_USERSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-usersync ++ CDH_RANGER_USERSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-usersync ++ export CDH_RANGER_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kms ++ CDH_RANGER_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kms ++ export CDH_RANGER_RAZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-raz ++ CDH_RANGER_RAZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-raz ++ export CDH_RANGER_RMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-rms ++ CDH_RANGER_RMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-rms ++ export CDH_ATLAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/atlas ++ CDH_ATLAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/atlas ++ export CDH_TEZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/tez ++ CDH_TEZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/tez ++ export CDH_PHOENIX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/phoenix ++ CDH_PHOENIX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/phoenix ++ export DAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/data_analytics_studio ++ DAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/data_analytics_studio ++ export QUEUEMANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/queuemanager ++ QUEUEMANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/queuemanager ++ export CDH_RANGER_HBASE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hbase-plugin ++ CDH_RANGER_HBASE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hbase-plugin ++ export CDH_RANGER_HIVE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hive-plugin ++ CDH_RANGER_HIVE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hive-plugin ++ export CDH_RANGER_ATLAS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-atlas-plugin ++ CDH_RANGER_ATLAS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-atlas-plugin ++ export CDH_RANGER_SOLR_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-solr-plugin ++ CDH_RANGER_SOLR_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-solr-plugin ++ export CDH_RANGER_HDFS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hdfs-plugin ++ CDH_RANGER_HDFS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hdfs-plugin ++ export CDH_RANGER_KNOX_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-knox-plugin ++ CDH_RANGER_KNOX_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-knox-plugin ++ export 
CDH_RANGER_YARN_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-yarn-plugin ++ CDH_RANGER_YARN_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-yarn-plugin ++ export CDH_RANGER_OZONE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-ozone-plugin ++ CDH_RANGER_OZONE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-ozone-plugin ++ export CDH_RANGER_KAFKA_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kafka-plugin ++ CDH_RANGER_KAFKA_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kafka-plugin + echo 'Using /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER as conf dir' + echo 'Using scripts/control.sh as process script' + replace_conf_dir + echo CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER + echo CMF_CONF_DIR= + EXCLUDE_CMF_FILES=('cloudera-config.sh' 'hue.sh' 'impala.sh' 'sqoop.sh' 'supervisor.conf' 'config.zip' 'proc.json' '*.log' '*.keytab' '*jceks' '*bcfks' 'supervisor_status') ++ printf '! -name %s ' cloudera-config.sh hue.sh impala.sh sqoop.sh supervisor.conf config.zip proc.json '*.log' spark_on_yarn.keytab '*jceks' '*bcfks' supervisor_status + find /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER -type f '!' -path '/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/logs/*' '!' -name cloudera-config.sh '!' -name hue.sh '!' -name impala.sh '!' -name sqoop.sh '!' -name supervisor.conf '!' -name config.zip '!' -name proc.json '!' -name '*.log' '!' -name spark_on_yarn.keytab '!' -name '*jceks' '!' -name '*bcfks' '!' -name supervisor_status -exec perl -pi -e 's#\{\{CMF_CONF_DIR}}#/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER#g' '{}' ';' + make_scripts_executable + find /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER -regex '.*\.\(py\|sh\)$' -exec chmod u+x '{}' ';' + get_java_major_version JAVA_MAJOR_VERSION + '[' -z /usr/java/jdk1.8.0_232-cloudera/bin/java ']' ++ /usr/java/jdk1.8.0_232-cloudera/bin/java -version + local 'VERSION_STRING=openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode)' + local 'RE_JAVA=[java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+' + [[ openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode) =~ [java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+ ]] + eval JAVA_MAJOR_VERSION=8 ++ JAVA_MAJOR_VERSION=8 + export JAVA_MAJOR_VERSION + RUN_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER + '[' '' == true ']' + chmod u+x /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/scripts/control.sh + export COMMON_SCRIPT=/opt/cloudera/cm-agent/service/common/cloudera-config.sh + COMMON_SCRIPT=/opt/cloudera/cm-agent/service/common/cloudera-config.sh + exec /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/scripts/control.sh start_history_server ++ log 'Running Spark CSD control script...' 
+++ date ++ timestamp='Wed Mar 10 05:44:37 EST 2021' ++ '[' -z ']' ++ echo 'Wed Mar 10 05:44:37 EST 2021: Running Spark CSD control script...' ++ echo 'Wed Mar 10 05:44:37 EST 2021: Running Spark CSD control script...' Wed Mar 10 05:44:37 EST 2021: Running Spark CSD control script... ++ log 'Detected CDH_VERSION of [7]' +++ date ++ timestamp='Wed Mar 10 05:44:37 EST 2021' ++ '[' -z ']' ++ echo 'Wed Mar 10 05:44:37 EST 2021: Detected CDH_VERSION of [7]' ++ echo 'Wed Mar 10 05:44:37 EST 2021: Detected CDH_VERSION of [7]' Wed Mar 10 05:44:37 EST 2021: Detected CDH_VERSION of [7] ++ export BIGTOP_DEFAULTS_DIR= ++ BIGTOP_DEFAULTS_DIR= +++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop +++ '[' -n ']' +++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export HDFS_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs ++ HDFS_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs ++ export HADOOP_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ HADOOP_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ HBASE_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/hbase-conf ++ DEFAULT_SPARK_HOME=/usr/lib/spark +++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark +++ '[' -n ']' +++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export SPARK_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ++ SPARK_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ++ '[' '!' 
-d /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ']' ++ export SPARK_ENV=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ SPARK_ENV=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ export SPARK_DEFAULTS=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-defaults.conf ++ SPARK_DEFAULTS=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-defaults.conf ++ export 'SPARK_DAEMON_JAVA_OPTS= -Djava.net.preferIPv4Stack=true' ++ SPARK_DAEMON_JAVA_OPTS=' -Djava.net.preferIPv4Stack=true' +++ m_readlink /opt/cloudera/parcels +++ '[' -n ']' +++ echo /opt/cloudera/parcels ++ export PARCELS_ROOT=/opt/cloudera/parcels ++ PARCELS_ROOT=/opt/cloudera/parcels + case $1 in + start_history_server + log 'Starting Spark History Server' ++ date + timestamp='Wed Mar 10 05:44:37 EST 2021' + '[' -z ']' + echo 'Wed Mar 10 05:44:37 EST 2021: Starting Spark History Server' + echo 'Wed Mar 10 05:44:37 EST 2021: Starting Spark History Server' Wed Mar 10 05:44:37 EST 2021: Starting Spark History Server + local CONF_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf ++ get_default_fs /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ get_hadoop_conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf fs.defaultFS ++ local conf=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ local key=fs.defaultFS ++ /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs --config /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf getconf -confKey fs.defaultFS + local DEFAULT_FS=hdfs://ces1pub.pbm.ihost.com:8020 ++ prepend_protocol /user/spark/applicationHistory hdfs://ces1pub.pbm.ihost.com:8020 ++ local url=/user/spark/applicationHistory ++ local proto=hdfs://ces1pub.pbm.ihost.com:8020 ++ [[ /user/spark/applicationHistory =~ [:alnum:]*:.* ]] ++ echo hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory + local LOG_DIR=hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory ++ dirname /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + local CLASSPATH_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt + local CLASSPATH_FILE_TMP=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + '[' '!' 
-f /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.orig ']' + cp -p /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.orig /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + echo spark.history.fs.logDirectory=hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory + '[' '' '!=' '' ']' + local FILTERS_KEY=spark.ui.filters ++ read_property spark.ui.filters /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf ++ local key=spark.ui.filters ++ local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf +++ grep '^spark.ui.filters=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf +++ tail -n 1 +++ sed 's/^spark.ui.filters=\(.*\)/\1/' ++ echo + local FILTERS= + '[' true = true ']' ++ add_to_list '' org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ local list= ++ local item=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ '[' -n '' ']' ++ list=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ echo org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + FILTERS=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + '[' false = true ']' + '[' -n org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ']' + replace_spark_conf spark.ui.filters org.apache.spark.deploy.yarn.YarnProxyRedirectFilter /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local key=spark.ui.filters + local value=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local temp=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chown --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chmod --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + grep -v '^spark.ui.filters=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + '[' -n org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ']' + echo spark.ui.filters=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + mv /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + set +x + '[' false '!=' true ']' + replace_spark_conf spark.history.store.path '' 
/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local key=spark.history.store.path + local value= + local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local temp=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chown --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chmod --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + grep -v '^spark.history.store.path=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + '[' -n '' ']' + mv /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + [[ -d /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ]] + add_to_classpath /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp '/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-*.jar' + local CLASSPATH_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + local 'CLASSPATH=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-*.jar' + IFS=: + read -a CLASSPATH_ENTRIES + for pattern in '"${CLASSPATH_ENTRIES[@]}"' + for entry in '$pattern' ++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ '[' -n ']' ++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + entry=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ basename /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + name=hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + '[' -f /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ']' + is_blacklisted /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ basename /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + local JAR=hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + [[ -f 
/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jetty.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jersey.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jackson.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jackson.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*slf4j.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*servlet.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*-tests.jar ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^junit-.* ]] + return 1 + grep -q '/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar$' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + '[' -s /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp ']' + cat /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + sort + uniq + rm -f /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + ARGS=("org.apache.spark.deploy.history.HistoryServer" "--properties-file" "$CONF_FILE") + run_spark_class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + ARGS=($@) + local ARGS + ARGS+=($ADDITIONAL_ARGS) + prepare_spark_env + replace '\{\{HADOOP_HOME}}' /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{HADOOP_HOME}}#/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{SPARK_HOME}}' /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{SPARK_HOME}}#/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{SPARK_EXTRA_LIB_PATH}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{SPARK_EXTRA_LIB_PATH}}##g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{PYTHON_PATH}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{PYTHON_PATH}}##g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{CDH_PYTHON}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{CDH_PYTHON}}##g' 
/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ basename /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf + local HADOOP_CONF_DIR_NAME=yarn-conf + replace '\{\{HADOOP_CONF_DIR_NAME}}' yarn-conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{HADOOP_CONF_DIR_NAME}}#yarn-conf#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + export 'SPARK_DAEMON_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29430.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh -Djava.net.preferIPv4Stack=true' + SPARK_DAEMON_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29430.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh -Djava.net.preferIPv4Stack=true' + export 'SPARK_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29430.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ' + SPARK_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29430.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ' + cmd='/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf' + echo 'Running [/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf]' + exec /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf Exception in thread "main" java.lang.reflect.InvocationTargetException at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) at java.lang.reflect.Constructor.newInstance(Constructor.java:423) at org.apache.spark.deploy.history.HistoryServer$.main(HistoryServer.scala:294) at org.apache.spark.deploy.history.HistoryServer.main(HistoryServer.scala) Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=spark, access=EXECUTE, inode="/user":mapred:supergroup:drwxrwx--- at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:315) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:242) at 
org.apache.hadoop.hdfs.server.namenode.GPFSPermissionChecker.checkPermission(GPFSPermissionChecker.java:90) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:604) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1858) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1876) at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePathV0(GPFSDirectory.java:665) at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePath(GPFSDirectory.java:507) at org.apache.hadoop.hdfs.server.namenode.GPFSNamesystemV0.getFileInfo(GPFSNamesystemV0.java:1961) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1143) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:939) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678) at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) at java.lang.reflect.Constructor.newInstance(Constructor.java:423) at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1688) at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1744) at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1741) at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1756) at org.apache.spark.deploy.history.FsHistoryProvider.org$apache$spark$deploy$history$FsHistoryProvider$$startPolling(FsHistoryProvider.scala:258) at org.apache.spark.deploy.history.FsHistoryProvider.initialize(FsHistoryProvider.scala:212) at org.apache.spark.deploy.history.FsHistoryProvider.<init>(FsHistoryProvider.scala:208) at org.apache.spark.deploy.history.FsHistoryProvider.<init>(FsHistoryProvider.scala:87) ... 
6 more Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=spark, access=EXECUTE, inode="/user":mapred:supergroup:drwxrwx--- at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:315) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:242) at org.apache.hadoop.hdfs.server.namenode.GPFSPermissionChecker.checkPermission(GPFSPermissionChecker.java:90) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:604) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1858) at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1876) at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePathV0(GPFSDirectory.java:665) at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePath(GPFSDirectory.java:507) at org.apache.hadoop.hdfs.server.namenode.GPFSNamesystemV0.getFileInfo(GPFSNamesystemV0.java:1961) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1143) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:939) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1562) at org.apache.hadoop.ipc.Client.call(Client.java:1508) at org.apache.hadoop.ipc.Client.call(Client.java:1405) at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233) at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118) at com.sun.proxy.$Proxy9.getFileInfo(Unknown Source) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:957) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:431) at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166) at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158) at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362) 
at com.sun.proxy.$Proxy10.getFileInfo(Unknown Source) at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1686) ... 14 more [10/Mar/2021 05:44:45 +0000] 29718 MainThread redactor INFO Started launcher: /opt/cloudera/cm-agent/service/csd/csd.sh start_history_server [10/Mar/2021 05:44:45 +0000] 29718 MainThread redactor INFO Re-exec watcher: /opt/cloudera/cm-agent/bin/cm proc_watcher 29727 [10/Mar/2021 05:44:45 +0000] 29728 MainThread redactor INFO Re-exec redactor: /opt/cloudera/cm-agent/bin/cm redactor --fds 3 5 [10/Mar/2021 05:44:46 +0000] 29728 MainThread redactor INFO Started redactor Wed Mar 10 05:44:46 EST 2021 + locate_java_home + locate_java_home_no_verify + JAVA11_HOME_CANDIDATES=('/usr/java/jdk-11' '/usr/lib/jvm/jdk-11' '/usr/lib/jvm/java-11-oracle') + local JAVA11_HOME_CANDIDATES + OPENJAVA11_HOME_CANDIDATES=('/usr/lib/jvm/java-11' '/usr/java/jdk-11' '/usr/lib/jvm/jdk-11' '/usr/lib64/jvm/jdk-11') + local OPENJAVA11_HOME_CANDIDATES + JAVA8_HOME_CANDIDATES=('/usr/java/jdk1.8' '/usr/java/jre1.8' '/usr/lib/jvm/j2sdk1.8-oracle' '/usr/lib/jvm/j2sdk1.8-oracle/jre' '/usr/lib/jvm/java-8-oracle') + local JAVA8_HOME_CANDIDATES + OPENJAVA8_HOME_CANDIDATES=('/usr/lib/jvm/java-1.8.0-openjdk' '/usr/lib/jvm/java-8-openjdk' '/usr/lib64/jvm/java-1.8.0-openjdk' '/usr/lib64/jvm/java-8-openjdk') + local OPENJAVA8_HOME_CANDIDATES + MISCJAVA_HOME_CANDIDATES=('/Library/Java/Home' '/usr/java/default' '/usr/lib/jvm/default-java' '/usr/lib/jvm/java-openjdk' '/usr/lib/jvm/jre-openjdk') + local MISCJAVA_HOME_CANDIDATES + case ${BIGTOP_JAVA_MAJOR} in + JAVA_HOME_CANDIDATES=(${JAVA11_HOME_CANDIDATES[@]} ${OPENJAVA11_HOME_CANDIDATES[@]} ${JAVA8_HOME_CANDIDATES[@]} ${OPENJAVA8_HOME_CANDIDATES[@]} ${MISCJAVA_HOME_CANDIDATES[@]}) + '[' -n '' ']' + '[' -z '' ']' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/java/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/java-11-oracle*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/java-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/java/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib64/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd /usr/java/jdk1.8.0_232-cloudera + for candidate in '`ls -rvd ${candidate_regex}* 2>/dev/null`' + '[' -e /usr/java/jdk1.8.0_232-cloudera/bin/java ']' + export JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + break 2 + verify_java_home + '[' -z /usr/java/jdk1.8.0_232-cloudera ']' + echo JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + '[' -n '-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' ']' + get_gc_args + JAVA8_GC_TUNING_ARGS=' ' + set_basic_gc_tuning_args_based_on_java_version + get_java_major_version JAVA_MAJOR + '[' -z /usr/java/jdk1.8.0_232-cloudera/bin/java ']' ++ /usr/java/jdk1.8.0_232-cloudera/bin/java -version + local 'VERSION_STRING=openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode)' + local 
'RE_JAVA=[java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+' + [[ openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode) =~ [java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+ ]] + eval JAVA_MAJOR=8 ++ JAVA_MAJOR=8 + BASIC_GC_TUNING_ARGS= + case $JAVA_MAJOR in + BASIC_GC_TUNING_ARGS=' ' + CSD_GC_ARGS=' ' + CSD_JAVA_OPTS+=' ' ++ replace_pid -XX:+HeapDumpOnOutOfMemoryError '-XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof' -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ++ echo -XX:+HeapDumpOnOutOfMemoryError '-XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof' -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ++ sed 's#{{PID}}#29727#g' + export 'CSD_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29727.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' + CSD_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29727.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' + echo 'Using -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29727.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh as CSD_JAVA_OPTS' + source_parcel_environment + '[' '!' -z /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh ']' + OLD_IFS=' ' + IFS=: + SCRIPT_ARRAY=($SCM_DEFINES_SCRIPTS) + DIRNAME_ARRAY=($PARCEL_DIRNAMES) + IFS=' ' + COUNT=1 ++ seq 1 1 + for i in '`seq 1 $COUNT`' + SCRIPT=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh + PARCEL_DIRNAME=CDH-7.1.4-1.cdh7.1.4.p0.6300266 + . 
/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh ++ CDH_DIRNAME=CDH-7.1.4-1.cdh7.1.4.p0.6300266 ++ export CDH_HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ CDH_HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export CDH_MR1_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-0.20-mapreduce ++ CDH_MR1_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-0.20-mapreduce ++ export CDH_HDFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-hdfs ++ CDH_HDFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-hdfs ++ export CDH_OZONE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ++ CDH_OZONE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ++ export CDH_HTTPFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-httpfs ++ CDH_HTTPFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-httpfs ++ export CDH_MR2_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-mapreduce ++ CDH_MR2_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-mapreduce ++ export CDH_YARN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-yarn ++ CDH_YARN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-yarn ++ export CDH_HBASE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase ++ CDH_HBASE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase ++ export CDH_HBASE_FILESYSTEM_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_filesystem ++ CDH_HBASE_FILESYSTEM_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_filesystem ++ export CDH_HBASE_CONNECTORS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_connectors ++ CDH_HBASE_CONNECTORS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_connectors ++ export CDH_ZOOKEEPER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zookeeper ++ CDH_ZOOKEEPER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zookeeper ++ export CDH_ZEPPELIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zeppelin ++ CDH_ZEPPELIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zeppelin ++ export CDH_HIVE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive ++ CDH_HIVE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive ++ export CDH_HUE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hue ++ CDH_HUE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hue ++ export CDH_OOZIE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/oozie ++ CDH_OOZIE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/oozie ++ export CDH_HUE_PLUGINS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ CDH_HUE_PLUGINS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export CDH_HCAT_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive-hcatalog ++ CDH_HCAT_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive-hcatalog ++ export CDH_SENTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/sentry ++ CDH_SENTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/sentry ++ export JSVC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/bigtop-utils ++ 
JSVC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/bigtop-utils ++ export CDH_HADOOP_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/bin/hadoop ++ CDH_HADOOP_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/bin/hadoop ++ export CDH_IMPALA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/impala ++ CDH_IMPALA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/impala ++ export CDH_SOLR_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/solr ++ CDH_SOLR_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/solr ++ export CDH_HBASE_INDEXER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase-solr ++ CDH_HBASE_INDEXER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase-solr ++ export SEARCH_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/search ++ SEARCH_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/search ++ export CDH_SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ CDH_SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export WEBHCAT_DEFAULT_XML=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/etc/hive-webhcat/conf.dist/webhcat-default.xml ++ WEBHCAT_DEFAULT_XML=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/etc/hive-webhcat/conf.dist/webhcat-default.xml ++ export CDH_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-kms ++ CDH_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-kms ++ export CDH_PARQUET_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/parquet ++ CDH_PARQUET_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/parquet ++ export CDH_AVRO_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/avro ++ CDH_AVRO_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/avro ++ export CDH_KAFKA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kafka ++ CDH_KAFKA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kafka ++ export CDH_SCHEMA_REGISTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/schemaregistry ++ CDH_SCHEMA_REGISTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/schemaregistry ++ export CDH_STREAMS_MESSAGING_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager ++ CDH_STREAMS_MESSAGING_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager ++ export CDH_STREAMS_MESSAGING_MANAGER_UI_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager_ui ++ CDH_STREAMS_MESSAGING_MANAGER_UI_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager_ui ++ export CDH_STREAMS_REPLICATION_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_replication_manager ++ CDH_STREAMS_REPLICATION_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_replication_manager ++ export CDH_CRUISE_CONTROL_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/cruise_control ++ CDH_CRUISE_CONTROL_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/cruise_control ++ export CDH_KNOX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/knox ++ CDH_KNOX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/knox ++ export CDH_KUDU_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kudu ++ 
CDH_KUDU_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kudu ++ export CDH_RANGER_ADMIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-admin ++ CDH_RANGER_ADMIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-admin ++ export CDH_RANGER_TAGSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-tagsync ++ CDH_RANGER_TAGSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-tagsync ++ export CDH_RANGER_USERSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-usersync ++ CDH_RANGER_USERSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-usersync ++ export CDH_RANGER_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kms ++ CDH_RANGER_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kms ++ export CDH_RANGER_RAZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-raz ++ CDH_RANGER_RAZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-raz ++ export CDH_RANGER_RMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-rms ++ CDH_RANGER_RMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-rms ++ export CDH_ATLAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/atlas ++ CDH_ATLAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/atlas ++ export CDH_TEZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/tez ++ CDH_TEZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/tez ++ export CDH_PHOENIX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/phoenix ++ CDH_PHOENIX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/phoenix ++ export DAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/data_analytics_studio ++ DAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/data_analytics_studio ++ export QUEUEMANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/queuemanager ++ QUEUEMANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/queuemanager ++ export CDH_RANGER_HBASE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hbase-plugin ++ CDH_RANGER_HBASE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hbase-plugin ++ export CDH_RANGER_HIVE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hive-plugin ++ CDH_RANGER_HIVE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hive-plugin ++ export CDH_RANGER_ATLAS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-atlas-plugin ++ CDH_RANGER_ATLAS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-atlas-plugin ++ export CDH_RANGER_SOLR_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-solr-plugin ++ CDH_RANGER_SOLR_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-solr-plugin ++ export CDH_RANGER_HDFS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hdfs-plugin ++ CDH_RANGER_HDFS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hdfs-plugin ++ export CDH_RANGER_KNOX_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-knox-plugin ++ CDH_RANGER_KNOX_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-knox-plugin ++ export 
CDH_RANGER_YARN_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-yarn-plugin ++ CDH_RANGER_YARN_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-yarn-plugin ++ export CDH_RANGER_OZONE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-ozone-plugin ++ CDH_RANGER_OZONE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-ozone-plugin ++ export CDH_RANGER_KAFKA_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kafka-plugin ++ CDH_RANGER_KAFKA_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kafka-plugin + echo 'Using /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER as conf dir' + echo 'Using scripts/control.sh as process script' + replace_conf_dir + echo CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER + echo CMF_CONF_DIR= + EXCLUDE_CMF_FILES=('cloudera-config.sh' 'hue.sh' 'impala.sh' 'sqoop.sh' 'supervisor.conf' 'config.zip' 'proc.json' '*.log' '*.keytab' '*jceks' '*bcfks' 'supervisor_status') ++ printf '! -name %s ' cloudera-config.sh hue.sh impala.sh sqoop.sh supervisor.conf config.zip proc.json '*.log' spark_on_yarn.keytab '*jceks' '*bcfks' supervisor_status + find /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER -type f '!' -path '/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/logs/*' '!' -name cloudera-config.sh '!' -name hue.sh '!' -name impala.sh '!' -name sqoop.sh '!' -name supervisor.conf '!' -name config.zip '!' -name proc.json '!' -name '*.log' '!' -name spark_on_yarn.keytab '!' -name '*jceks' '!' -name '*bcfks' '!' -name supervisor_status -exec perl -pi -e 's#\{\{CMF_CONF_DIR}}#/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER#g' '{}' ';' + make_scripts_executable + find /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER -regex '.*\.\(py\|sh\)$' -exec chmod u+x '{}' ';' + get_java_major_version JAVA_MAJOR_VERSION + '[' -z /usr/java/jdk1.8.0_232-cloudera/bin/java ']' ++ /usr/java/jdk1.8.0_232-cloudera/bin/java -version + local 'VERSION_STRING=openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode)' + local 'RE_JAVA=[java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+' + [[ openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode) =~ [java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+ ]] + eval JAVA_MAJOR_VERSION=8 ++ JAVA_MAJOR_VERSION=8 + export JAVA_MAJOR_VERSION + RUN_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER + '[' '' == true ']' + chmod u+x /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/scripts/control.sh + export COMMON_SCRIPT=/opt/cloudera/cm-agent/service/common/cloudera-config.sh + COMMON_SCRIPT=/opt/cloudera/cm-agent/service/common/cloudera-config.sh + exec /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/scripts/control.sh start_history_server ++ log 'Running Spark CSD control script...' 
+++ date ++ timestamp='Wed Mar 10 05:44:46 EST 2021' ++ '[' -z ']' ++ echo 'Wed Mar 10 05:44:46 EST 2021: Running Spark CSD control script...' ++ echo 'Wed Mar 10 05:44:46 EST 2021: Running Spark CSD control script...' Wed Mar 10 05:44:46 EST 2021: Running Spark CSD control script... ++ log 'Detected CDH_VERSION of [7]' +++ date ++ timestamp='Wed Mar 10 05:44:46 EST 2021' ++ '[' -z ']' ++ echo 'Wed Mar 10 05:44:46 EST 2021: Detected CDH_VERSION of [7]' ++ echo 'Wed Mar 10 05:44:46 EST 2021: Detected CDH_VERSION of [7]' Wed Mar 10 05:44:46 EST 2021: Detected CDH_VERSION of [7] ++ export BIGTOP_DEFAULTS_DIR= ++ BIGTOP_DEFAULTS_DIR= +++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop +++ '[' -n ']' +++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export HDFS_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs ++ HDFS_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs ++ export HADOOP_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ HADOOP_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ HBASE_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/hbase-conf ++ DEFAULT_SPARK_HOME=/usr/lib/spark +++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark +++ '[' -n ']' +++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export SPARK_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ++ SPARK_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ++ '[' '!' 
-d /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ']' ++ export SPARK_ENV=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ SPARK_ENV=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ export SPARK_DEFAULTS=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-defaults.conf ++ SPARK_DEFAULTS=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-defaults.conf ++ export 'SPARK_DAEMON_JAVA_OPTS= -Djava.net.preferIPv4Stack=true' ++ SPARK_DAEMON_JAVA_OPTS=' -Djava.net.preferIPv4Stack=true' +++ m_readlink /opt/cloudera/parcels +++ '[' -n ']' +++ echo /opt/cloudera/parcels ++ export PARCELS_ROOT=/opt/cloudera/parcels ++ PARCELS_ROOT=/opt/cloudera/parcels + case $1 in + start_history_server + log 'Starting Spark History Server' ++ date + timestamp='Wed Mar 10 05:44:46 EST 2021' + '[' -z ']' + echo 'Wed Mar 10 05:44:46 EST 2021: Starting Spark History Server' + echo 'Wed Mar 10 05:44:46 EST 2021: Starting Spark History Server' Wed Mar 10 05:44:46 EST 2021: Starting Spark History Server + local CONF_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf ++ get_default_fs /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ get_hadoop_conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf fs.defaultFS ++ local conf=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ local key=fs.defaultFS ++ /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs --config /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf getconf -confKey fs.defaultFS + local DEFAULT_FS=hdfs://ces1pub.pbm.ihost.com:8020 ++ prepend_protocol /user/spark/applicationHistory hdfs://ces1pub.pbm.ihost.com:8020 ++ local url=/user/spark/applicationHistory ++ local proto=hdfs://ces1pub.pbm.ihost.com:8020 ++ [[ /user/spark/applicationHistory =~ [:alnum:]*:.* ]] ++ echo hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory + local LOG_DIR=hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory ++ dirname /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + local CLASSPATH_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt + local CLASSPATH_FILE_TMP=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + '[' '!' 
-f /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.orig ']' + cp -p /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.orig /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + echo spark.history.fs.logDirectory=hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory + '[' '' '!=' '' ']' + local FILTERS_KEY=spark.ui.filters ++ read_property spark.ui.filters /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf ++ local key=spark.ui.filters ++ local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf +++ grep '^spark.ui.filters=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf +++ tail -n 1 +++ sed 's/^spark.ui.filters=\(.*\)/\1/' ++ echo + local FILTERS= + '[' true = true ']' ++ add_to_list '' org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ local list= ++ local item=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ '[' -n '' ']' ++ list=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ echo org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + FILTERS=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + '[' false = true ']' + '[' -n org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ']' + replace_spark_conf spark.ui.filters org.apache.spark.deploy.yarn.YarnProxyRedirectFilter /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local key=spark.ui.filters + local value=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local temp=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chown --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chmod --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + grep -v '^spark.ui.filters=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + '[' -n org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ']' + echo spark.ui.filters=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + mv /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + set +x + '[' false '!=' true ']' + replace_spark_conf spark.history.store.path '' 
/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local key=spark.history.store.path + local value= + local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local temp=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chown --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chmod --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + grep -v '^spark.history.store.path=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + '[' -n '' ']' + mv /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + [[ -d /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ]] + add_to_classpath /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp '/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-*.jar' + local CLASSPATH_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + local 'CLASSPATH=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-*.jar' + IFS=: + read -a CLASSPATH_ENTRIES + for pattern in '"${CLASSPATH_ENTRIES[@]}"' + for entry in '$pattern' ++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ '[' -n ']' ++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + entry=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ basename /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + name=hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + '[' -f /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ']' + is_blacklisted /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ basename /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + local JAR=hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + [[ -f 
/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jetty.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jersey.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jackson.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jackson.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*slf4j.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*servlet.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*-tests.jar ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^junit-.* ]] + return 1 + grep -q '/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar$' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + '[' -s /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp ']' + cat /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + sort + uniq + rm -f /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + ARGS=("org.apache.spark.deploy.history.HistoryServer" "--properties-file" "$CONF_FILE") + run_spark_class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + ARGS=($@) + local ARGS + ARGS+=($ADDITIONAL_ARGS) + prepare_spark_env + replace '\{\{HADOOP_HOME}}' /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{HADOOP_HOME}}#/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{SPARK_HOME}}' /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{SPARK_HOME}}#/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{SPARK_EXTRA_LIB_PATH}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{SPARK_EXTRA_LIB_PATH}}##g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{PYTHON_PATH}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{PYTHON_PATH}}##g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{CDH_PYTHON}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{CDH_PYTHON}}##g' 
/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ basename /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf + local HADOOP_CONF_DIR_NAME=yarn-conf + replace '\{\{HADOOP_CONF_DIR_NAME}}' yarn-conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{HADOOP_CONF_DIR_NAME}}#yarn-conf#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + export 'SPARK_DAEMON_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29727.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh -Djava.net.preferIPv4Stack=true' + SPARK_DAEMON_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29727.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh -Djava.net.preferIPv4Stack=true' + export 'SPARK_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29727.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ' + SPARK_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid29727.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ' + cmd='/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf' + echo 'Running [/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf]'
+ exec /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf
Exception in thread "main" java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.spark.deploy.history.HistoryServer$.main(HistoryServer.scala:294)
    at org.apache.spark.deploy.history.HistoryServer.main(HistoryServer.scala)
Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=spark, access=EXECUTE, inode="/user":mapred:supergroup:drwxrwx---
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:315)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:242)
    at org.apache.hadoop.hdfs.server.namenode.GPFSPermissionChecker.checkPermission(GPFSPermissionChecker.java:90)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:604)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1858)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1876)
    at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePathV0(GPFSDirectory.java:665)
    at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePath(GPFSDirectory.java:507)
    at org.apache.hadoop.hdfs.server.namenode.GPFSNamesystemV0.getFileInfo(GPFSNamesystemV0.java:1961)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1143)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:939)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
    at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1688)
    at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1744)
    at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1741)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1756)
    at org.apache.spark.deploy.history.FsHistoryProvider.org$apache$spark$deploy$history$FsHistoryProvider$$startPolling(FsHistoryProvider.scala:258)
    at org.apache.spark.deploy.history.FsHistoryProvider.initialize(FsHistoryProvider.scala:212)
    at org.apache.spark.deploy.history.FsHistoryProvider.<init>(FsHistoryProvider.scala:208)
    at org.apache.spark.deploy.history.FsHistoryProvider.<init>(FsHistoryProvider.scala:87)
    ... 6 more
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=spark, access=EXECUTE, inode="/user":mapred:supergroup:drwxrwx---
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:315)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:242)
    at org.apache.hadoop.hdfs.server.namenode.GPFSPermissionChecker.checkPermission(GPFSPermissionChecker.java:90)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:604)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1858)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1876)
    at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePathV0(GPFSDirectory.java:665)
    at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePath(GPFSDirectory.java:507)
    at org.apache.hadoop.hdfs.server.namenode.GPFSNamesystemV0.getFileInfo(GPFSNamesystemV0.java:1961)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1143)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:939)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1562)
    at org.apache.hadoop.ipc.Client.call(Client.java:1508)
    at org.apache.hadoop.ipc.Client.call(Client.java:1405)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
    at com.sun.proxy.$Proxy9.getFileInfo(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:957)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:431)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
    at com.sun.proxy.$Proxy10.getFileInfo(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1686)
    ... 14 more
[10/Mar/2021 05:44:55 +0000] 30034 MainThread redactor INFO Started launcher: /opt/cloudera/cm-agent/service/csd/csd.sh start_history_server
[10/Mar/2021 05:44:55 +0000] 30034 MainThread redactor INFO Re-exec watcher: /opt/cloudera/cm-agent/bin/cm proc_watcher 30042
[10/Mar/2021 05:44:55 +0000] 30043 MainThread redactor INFO Re-exec redactor: /opt/cloudera/cm-agent/bin/cm redactor --fds 3 5
[10/Mar/2021 05:44:56 +0000] 30043 MainThread redactor INFO Started redactor
Wed Mar 10 05:44:56 EST 2021 + locate_java_home + locate_java_home_no_verify + JAVA11_HOME_CANDIDATES=('/usr/java/jdk-11' '/usr/lib/jvm/jdk-11' '/usr/lib/jvm/java-11-oracle') + local JAVA11_HOME_CANDIDATES + OPENJAVA11_HOME_CANDIDATES=('/usr/lib/jvm/java-11' '/usr/java/jdk-11' '/usr/lib/jvm/jdk-11' '/usr/lib64/jvm/jdk-11') + local OPENJAVA11_HOME_CANDIDATES + JAVA8_HOME_CANDIDATES=('/usr/java/jdk1.8' '/usr/java/jre1.8' '/usr/lib/jvm/j2sdk1.8-oracle' '/usr/lib/jvm/j2sdk1.8-oracle/jre' '/usr/lib/jvm/java-8-oracle') + local JAVA8_HOME_CANDIDATES + OPENJAVA8_HOME_CANDIDATES=('/usr/lib/jvm/java-1.8.0-openjdk' '/usr/lib/jvm/java-8-openjdk' '/usr/lib64/jvm/java-1.8.0-openjdk' '/usr/lib64/jvm/java-8-openjdk') + local OPENJAVA8_HOME_CANDIDATES + MISCJAVA_HOME_CANDIDATES=('/Library/Java/Home' '/usr/java/default' '/usr/lib/jvm/default-java' '/usr/lib/jvm/java-openjdk' '/usr/lib/jvm/jre-openjdk') + local MISCJAVA_HOME_CANDIDATES + case ${BIGTOP_JAVA_MAJOR} in + JAVA_HOME_CANDIDATES=(${JAVA11_HOME_CANDIDATES[@]} ${OPENJAVA11_HOME_CANDIDATES[@]} ${JAVA8_HOME_CANDIDATES[@]} ${OPENJAVA8_HOME_CANDIDATES[@]} ${MISCJAVA_HOME_CANDIDATES[@]}) + '[' -n '' ']' + '[' -z '' ']' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/java/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/java-11-oracle*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/java-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/java/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd '/usr/lib64/jvm/jdk-11*' + for candidate_regex in '${JAVA_HOME_CANDIDATES[@]}' ++ ls -rvd /usr/java/jdk1.8.0_232-cloudera + for candidate in '`ls -rvd ${candidate_regex}* 2>/dev/null`' + '[' -e /usr/java/jdk1.8.0_232-cloudera/bin/java ']' + export JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + break 2 + verify_java_home + '[' -z /usr/java/jdk1.8.0_232-cloudera ']' + echo JAVA_HOME=/usr/java/jdk1.8.0_232-cloudera + '[' -n '-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' ']' + get_gc_args + JAVA8_GC_TUNING_ARGS=' ' + set_basic_gc_tuning_args_based_on_java_version + get_java_major_version JAVA_MAJOR + '[' -z /usr/java/jdk1.8.0_232-cloudera/bin/java ']' ++ /usr/java/jdk1.8.0_232-cloudera/bin/java -version + local 'VERSION_STRING=openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode)' + local 
'RE_JAVA=[java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+' + [[ openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode) =~ [java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+ ]] + eval JAVA_MAJOR=8 ++ JAVA_MAJOR=8 + BASIC_GC_TUNING_ARGS= + case $JAVA_MAJOR in + BASIC_GC_TUNING_ARGS=' ' + CSD_GC_ARGS=' ' + CSD_JAVA_OPTS+=' ' ++ replace_pid -XX:+HeapDumpOnOutOfMemoryError '-XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof' -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ++ echo -XX:+HeapDumpOnOutOfMemoryError '-XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid{{PID}}.hprof' -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ++ sed 's#{{PID}}#30042#g' + export 'CSD_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid30042.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' + CSD_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid30042.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh' + echo 'Using -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid30042.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh as CSD_JAVA_OPTS' + source_parcel_environment + '[' '!' -z /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh ']' + OLD_IFS=' ' + IFS=: + SCRIPT_ARRAY=($SCM_DEFINES_SCRIPTS) + DIRNAME_ARRAY=($PARCEL_DIRNAMES) + IFS=' ' + COUNT=1 ++ seq 1 1 + for i in '`seq 1 $COUNT`' + SCRIPT=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh + PARCEL_DIRNAME=CDH-7.1.4-1.cdh7.1.4.p0.6300266 + . 
/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/meta/cdh_env.sh ++ CDH_DIRNAME=CDH-7.1.4-1.cdh7.1.4.p0.6300266 ++ export CDH_HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ CDH_HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export CDH_MR1_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-0.20-mapreduce ++ CDH_MR1_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-0.20-mapreduce ++ export CDH_HDFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-hdfs ++ CDH_HDFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-hdfs ++ export CDH_OZONE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ++ CDH_OZONE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ++ export CDH_HTTPFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-httpfs ++ CDH_HTTPFS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-httpfs ++ export CDH_MR2_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-mapreduce ++ CDH_MR2_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-mapreduce ++ export CDH_YARN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-yarn ++ CDH_YARN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-yarn ++ export CDH_HBASE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase ++ CDH_HBASE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase ++ export CDH_HBASE_FILESYSTEM_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_filesystem ++ CDH_HBASE_FILESYSTEM_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_filesystem ++ export CDH_HBASE_CONNECTORS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_connectors ++ CDH_HBASE_CONNECTORS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase_connectors ++ export CDH_ZOOKEEPER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zookeeper ++ CDH_ZOOKEEPER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zookeeper ++ export CDH_ZEPPELIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zeppelin ++ CDH_ZEPPELIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/zeppelin ++ export CDH_HIVE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive ++ CDH_HIVE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive ++ export CDH_HUE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hue ++ CDH_HUE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hue ++ export CDH_OOZIE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/oozie ++ CDH_OOZIE_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/oozie ++ export CDH_HUE_PLUGINS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ CDH_HUE_PLUGINS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export CDH_HCAT_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive-hcatalog ++ CDH_HCAT_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hive-hcatalog ++ export CDH_SENTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/sentry ++ CDH_SENTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/sentry ++ export JSVC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/bigtop-utils ++ 
JSVC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/bigtop-utils ++ export CDH_HADOOP_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/bin/hadoop ++ CDH_HADOOP_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/bin/hadoop ++ export CDH_IMPALA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/impala ++ CDH_IMPALA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/impala ++ export CDH_SOLR_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/solr ++ CDH_SOLR_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/solr ++ export CDH_HBASE_INDEXER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase-solr ++ CDH_HBASE_INDEXER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hbase-solr ++ export SEARCH_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/search ++ SEARCH_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/search ++ export CDH_SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ CDH_SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export WEBHCAT_DEFAULT_XML=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/etc/hive-webhcat/conf.dist/webhcat-default.xml ++ WEBHCAT_DEFAULT_XML=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/etc/hive-webhcat/conf.dist/webhcat-default.xml ++ export CDH_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-kms ++ CDH_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-kms ++ export CDH_PARQUET_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/parquet ++ CDH_PARQUET_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/parquet ++ export CDH_AVRO_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/avro ++ CDH_AVRO_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/avro ++ export CDH_KAFKA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kafka ++ CDH_KAFKA_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kafka ++ export CDH_SCHEMA_REGISTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/schemaregistry ++ CDH_SCHEMA_REGISTRY_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/schemaregistry ++ export CDH_STREAMS_MESSAGING_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager ++ CDH_STREAMS_MESSAGING_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager ++ export CDH_STREAMS_MESSAGING_MANAGER_UI_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager_ui ++ CDH_STREAMS_MESSAGING_MANAGER_UI_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_messaging_manager_ui ++ export CDH_STREAMS_REPLICATION_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_replication_manager ++ CDH_STREAMS_REPLICATION_MANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/streams_replication_manager ++ export CDH_CRUISE_CONTROL_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/cruise_control ++ CDH_CRUISE_CONTROL_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/cruise_control ++ export CDH_KNOX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/knox ++ CDH_KNOX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/knox ++ export CDH_KUDU_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kudu ++ 
CDH_KUDU_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/kudu ++ export CDH_RANGER_ADMIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-admin ++ CDH_RANGER_ADMIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-admin ++ export CDH_RANGER_TAGSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-tagsync ++ CDH_RANGER_TAGSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-tagsync ++ export CDH_RANGER_USERSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-usersync ++ CDH_RANGER_USERSYNC_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-usersync ++ export CDH_RANGER_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kms ++ CDH_RANGER_KMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kms ++ export CDH_RANGER_RAZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-raz ++ CDH_RANGER_RAZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-raz ++ export CDH_RANGER_RMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-rms ++ CDH_RANGER_RMS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-rms ++ export CDH_ATLAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/atlas ++ CDH_ATLAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/atlas ++ export CDH_TEZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/tez ++ CDH_TEZ_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/tez ++ export CDH_PHOENIX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/phoenix ++ CDH_PHOENIX_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/phoenix ++ export DAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/data_analytics_studio ++ DAS_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/data_analytics_studio ++ export QUEUEMANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/queuemanager ++ QUEUEMANAGER_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/queuemanager ++ export CDH_RANGER_HBASE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hbase-plugin ++ CDH_RANGER_HBASE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hbase-plugin ++ export CDH_RANGER_HIVE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hive-plugin ++ CDH_RANGER_HIVE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hive-plugin ++ export CDH_RANGER_ATLAS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-atlas-plugin ++ CDH_RANGER_ATLAS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-atlas-plugin ++ export CDH_RANGER_SOLR_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-solr-plugin ++ CDH_RANGER_SOLR_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-solr-plugin ++ export CDH_RANGER_HDFS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hdfs-plugin ++ CDH_RANGER_HDFS_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-hdfs-plugin ++ export CDH_RANGER_KNOX_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-knox-plugin ++ CDH_RANGER_KNOX_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-knox-plugin ++ export 
CDH_RANGER_YARN_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-yarn-plugin ++ CDH_RANGER_YARN_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-yarn-plugin ++ export CDH_RANGER_OZONE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-ozone-plugin ++ CDH_RANGER_OZONE_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-ozone-plugin ++ export CDH_RANGER_KAFKA_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kafka-plugin ++ CDH_RANGER_KAFKA_PLUGIN_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/ranger-kafka-plugin + echo 'Using /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER as conf dir' + echo 'Using scripts/control.sh as process script' + replace_conf_dir + echo CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER + echo CMF_CONF_DIR= + EXCLUDE_CMF_FILES=('cloudera-config.sh' 'hue.sh' 'impala.sh' 'sqoop.sh' 'supervisor.conf' 'config.zip' 'proc.json' '*.log' '*.keytab' '*jceks' '*bcfks' 'supervisor_status') ++ printf '! -name %s ' cloudera-config.sh hue.sh impala.sh sqoop.sh supervisor.conf config.zip proc.json '*.log' spark_on_yarn.keytab '*jceks' '*bcfks' supervisor_status + find /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER -type f '!' -path '/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/logs/*' '!' -name cloudera-config.sh '!' -name hue.sh '!' -name impala.sh '!' -name sqoop.sh '!' -name supervisor.conf '!' -name config.zip '!' -name proc.json '!' -name '*.log' '!' -name spark_on_yarn.keytab '!' -name '*jceks' '!' -name '*bcfks' '!' -name supervisor_status -exec perl -pi -e 's#\{\{CMF_CONF_DIR}}#/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER#g' '{}' ';' + make_scripts_executable + find /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER -regex '.*\.\(py\|sh\)$' -exec chmod u+x '{}' ';' + get_java_major_version JAVA_MAJOR_VERSION + '[' -z /usr/java/jdk1.8.0_232-cloudera/bin/java ']' ++ /usr/java/jdk1.8.0_232-cloudera/bin/java -version + local 'VERSION_STRING=openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode)' + local 'RE_JAVA=[java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+' + [[ openjdk version "1.8.0_232" OpenJDK Runtime Environment (AdoptOpenJDK)(build 1.8.0_232-b09) OpenJDK 64-Bit Server VM (AdoptOpenJDK)(build 25.232-b09, mixed mode) =~ [java|openjdk][[:space:]]version[[:space:]]\"1\.([0-9][0-9]*)\.?+ ]] + eval JAVA_MAJOR_VERSION=8 ++ JAVA_MAJOR_VERSION=8 + export JAVA_MAJOR_VERSION + RUN_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER + '[' '' == true ']' + chmod u+x /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/scripts/control.sh + export COMMON_SCRIPT=/opt/cloudera/cm-agent/service/common/cloudera-config.sh + COMMON_SCRIPT=/opt/cloudera/cm-agent/service/common/cloudera-config.sh + exec /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/scripts/control.sh start_history_server ++ log 'Running Spark CSD control script...' 
+++ date ++ timestamp='Wed Mar 10 05:44:56 EST 2021' ++ '[' -z ']' ++ echo 'Wed Mar 10 05:44:56 EST 2021: Running Spark CSD control script...' ++ echo 'Wed Mar 10 05:44:56 EST 2021: Running Spark CSD control script...' Wed Mar 10 05:44:56 EST 2021: Running Spark CSD control script... ++ log 'Detected CDH_VERSION of [7]' +++ date ++ timestamp='Wed Mar 10 05:44:56 EST 2021' ++ '[' -z ']' ++ echo 'Wed Mar 10 05:44:56 EST 2021: Detected CDH_VERSION of [7]' ++ echo 'Wed Mar 10 05:44:56 EST 2021: Detected CDH_VERSION of [7]' Wed Mar 10 05:44:56 EST 2021: Detected CDH_VERSION of [7] ++ export BIGTOP_DEFAULTS_DIR= ++ BIGTOP_DEFAULTS_DIR= +++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop +++ '[' -n ']' +++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ HADOOP_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop ++ export HDFS_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs ++ HDFS_BIN=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs ++ export HADOOP_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ HADOOP_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ HBASE_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/hbase-conf ++ DEFAULT_SPARK_HOME=/usr/lib/spark +++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark +++ '[' -n ']' +++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ SPARK_HOME=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark ++ export SPARK_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ++ SPARK_CONF_DIR=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ++ '[' '!' 
-d /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ']' ++ export SPARK_ENV=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ SPARK_ENV=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ export SPARK_DEFAULTS=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-defaults.conf ++ SPARK_DEFAULTS=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-defaults.conf ++ export 'SPARK_DAEMON_JAVA_OPTS= -Djava.net.preferIPv4Stack=true' ++ SPARK_DAEMON_JAVA_OPTS=' -Djava.net.preferIPv4Stack=true' +++ m_readlink /opt/cloudera/parcels +++ '[' -n ']' +++ echo /opt/cloudera/parcels ++ export PARCELS_ROOT=/opt/cloudera/parcels ++ PARCELS_ROOT=/opt/cloudera/parcels + case $1 in + start_history_server + log 'Starting Spark History Server' ++ date + timestamp='Wed Mar 10 05:44:56 EST 2021' + '[' -z ']' + echo 'Wed Mar 10 05:44:56 EST 2021: Starting Spark History Server' + echo 'Wed Mar 10 05:44:56 EST 2021: Starting Spark History Server' Wed Mar 10 05:44:56 EST 2021: Starting Spark History Server + local CONF_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf ++ get_default_fs /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ get_hadoop_conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf fs.defaultFS ++ local conf=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ++ local key=fs.defaultFS ++ /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop/../../bin/hdfs --config /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf getconf -confKey fs.defaultFS + local DEFAULT_FS=hdfs://ces1pub.pbm.ihost.com:8020 ++ prepend_protocol /user/spark/applicationHistory hdfs://ces1pub.pbm.ihost.com:8020 ++ local url=/user/spark/applicationHistory ++ local proto=hdfs://ces1pub.pbm.ihost.com:8020 ++ [[ /user/spark/applicationHistory =~ [:alnum:]*:.* ]] ++ echo hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory + local LOG_DIR=hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory ++ dirname /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + local CLASSPATH_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt + local CLASSPATH_FILE_TMP=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + '[' '!' 
-f /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.orig ']' + cp -p /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.orig /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + echo spark.history.fs.logDirectory=hdfs://ces1pub.pbm.ihost.com:8020/user/spark/applicationHistory + '[' '' '!=' '' ']' + local FILTERS_KEY=spark.ui.filters ++ read_property spark.ui.filters /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf ++ local key=spark.ui.filters ++ local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf +++ grep '^spark.ui.filters=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf +++ tail -n 1 +++ sed 's/^spark.ui.filters=\(.*\)/\1/' ++ echo + local FILTERS= + '[' true = true ']' ++ add_to_list '' org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ local list= ++ local item=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ '[' -n '' ']' ++ list=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ++ echo org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + FILTERS=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + '[' false = true ']' + '[' -n org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ']' + replace_spark_conf spark.ui.filters org.apache.spark.deploy.yarn.YarnProxyRedirectFilter /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local key=spark.ui.filters + local value=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local temp=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chown --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chmod --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + grep -v '^spark.ui.filters=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + '[' -n org.apache.spark.deploy.yarn.YarnProxyRedirectFilter ']' + echo spark.ui.filters=org.apache.spark.deploy.yarn.YarnProxyRedirectFilter + mv /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + set +x + '[' false '!=' true ']' + replace_spark_conf spark.history.store.path '' 
/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local key=spark.history.store.path + local value= + local file=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + local temp=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + touch /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chown --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + chmod --reference=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp + grep -v '^spark.history.store.path=' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + '[' -n '' ']' + mv /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf.tmp /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + [[ -d /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone ]] + add_to_classpath /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp '/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-*.jar' + local CLASSPATH_FILE=/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + local 'CLASSPATH=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-*.jar' + IFS=: + read -a CLASSPATH_ENTRIES + for pattern in '"${CLASSPATH_ENTRIES[@]}"' + for entry in '$pattern' ++ m_readlink /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ '[' -n ']' ++ echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + entry=/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ basename /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + name=hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + '[' -f /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ']' + is_blacklisted /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ++ basename /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + local JAR=hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + [[ -f 
/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jetty.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jersey.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jackson.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^jackson.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*slf4j.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*servlet.* ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ .*-tests.jar ]] + [[ hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar =~ ^junit-.* ]] + return 1 + grep -q '/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar$' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + echo /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop-ozone/share/ozone/lib/hadoop-ozone-filesystem-hadoop3-0.5.0.7.1.4.0-203.jar + '[' -s /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp ']' + cat /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + sort + uniq + rm -f /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/classpath.txt.tmp + ARGS=("org.apache.spark.deploy.history.HistoryServer" "--properties-file" "$CONF_FILE") + run_spark_class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf + ARGS=($@) + local ARGS + ARGS+=($ADDITIONAL_ARGS) + prepare_spark_env + replace '\{\{HADOOP_HOME}}' /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{HADOOP_HOME}}#/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/hadoop#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{SPARK_HOME}}' /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{SPARK_HOME}}#/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{SPARK_EXTRA_LIB_PATH}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{SPARK_EXTRA_LIB_PATH}}##g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{PYTHON_PATH}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{PYTHON_PATH}}##g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + replace '\{\{CDH_PYTHON}}' '' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{CDH_PYTHON}}##g' 
/var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh ++ basename /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf + local HADOOP_CONF_DIR_NAME=yarn-conf + replace '\{\{HADOOP_CONF_DIR_NAME}}' yarn-conf /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + perl -pi -e 's#\{\{HADOOP_CONF_DIR_NAME}}#yarn-conf#g' /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh + export 'SPARK_DAEMON_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid30042.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh -Djava.net.preferIPv4Stack=true' + SPARK_DAEMON_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid30042.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh -Djava.net.preferIPv4Stack=true' + export 'SPARK_JAVA_OPTS=-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid30042.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ' + SPARK_JAVA_OPTS='-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp/spark_on_yarn_spar40365358-SPARK_YARN_HISTORY_SERVER-42af0f75a56c8c9b8b467a684_pid30042.hprof -XX:OnOutOfMemoryError=/opt/cloudera/cm-agent/service/common/killparent.sh ' + cmd='/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf' + echo 'Running [/opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf]' + exec /opt/cloudera/parcels/CDH-7.1.4-1.cdh7.1.4.p0.6300266/lib/spark/bin/spark-class org.apache.spark.deploy.history.HistoryServer --properties-file /var/run/cloudera-scm-agent/process/1546336559-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-history-server.conf Exception in thread "main" java.lang.reflect.InvocationTargetException at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) at java.lang.reflect.Constructor.newInstance(Constructor.java:423) at org.apache.spark.deploy.history.HistoryServer$.main(HistoryServer.scala:294) at org.apache.spark.deploy.history.HistoryServer.main(HistoryServer.scala) Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=spark, access=EXECUTE, inode="/user":mapred:supergroup:drwxrwx--- at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:315) at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:242) at 
Exception in thread "main" java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.spark.deploy.history.HistoryServer$.main(HistoryServer.scala:294)
    at org.apache.spark.deploy.history.HistoryServer.main(HistoryServer.scala)
Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=spark, access=EXECUTE, inode="/user":mapred:supergroup:drwxrwx---
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:315)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:242)
    at org.apache.hadoop.hdfs.server.namenode.GPFSPermissionChecker.checkPermission(GPFSPermissionChecker.java:90)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:604)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1858)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1876)
    at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePathV0(GPFSDirectory.java:665)
    at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePath(GPFSDirectory.java:507)
    at org.apache.hadoop.hdfs.server.namenode.GPFSNamesystemV0.getFileInfo(GPFSNamesystemV0.java:1961)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1143)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:939)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
    at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
    at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1688)
    at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1744)
    at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1741)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1756)
    at org.apache.spark.deploy.history.FsHistoryProvider.org$apache$spark$deploy$history$FsHistoryProvider$$startPolling(FsHistoryProvider.scala:258)
    at org.apache.spark.deploy.history.FsHistoryProvider.initialize(FsHistoryProvider.scala:212)
    at org.apache.spark.deploy.history.FsHistoryProvider.<init>(FsHistoryProvider.scala:208)
    at org.apache.spark.deploy.history.FsHistoryProvider.<init>(FsHistoryProvider.scala:87)
    ... 6 more
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=spark, access=EXECUTE, inode="/user":mapred:supergroup:drwxrwx---
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:399)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:315)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:242)
    at org.apache.hadoop.hdfs.server.namenode.GPFSPermissionChecker.checkPermission(GPFSPermissionChecker.java:90)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:193)
    at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:604)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1858)
    at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:1876)
    at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePathV0(GPFSDirectory.java:665)
    at org.apache.hadoop.hdfs.server.namenode.GPFSDirectory.resolvePath(GPFSDirectory.java:507)
    at org.apache.hadoop.hdfs.server.namenode.GPFSNamesystemV0.getFileInfo(GPFSNamesystemV0.java:1961)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1143)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:939)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1562)
    at org.apache.hadoop.ipc.Client.call(Client.java:1508)
    at org.apache.hadoop.ipc.Client.call(Client.java:1405)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
    at com.sun.proxy.$Proxy9.getFileInfo(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:957)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:431)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
    at com.sun.proxy.$Proxy10.getFileInfo(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1686)
    ... 14 more
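What actually stops the start-up is the final AccessControlException: the history server runs as user spark, FsHistoryProvider.initialize() stats its event-log directory (which, per the trace, sits under /user), and the traverse (EXECUTE) check fails because /user is owned by mapred:supergroup with mode drwxrwx---, leaving no execute bit for "other". One hedged way to confirm and unblock this from a shell; the commands are standard hdfs CLI, but running them via sudo -u hdfs and the ACL option are assumptions that depend on the cluster's security setup:

# confirm the permissions reported in the exception
hdfs dfs -ls -d /user

# option A: give everyone traverse (execute-only) access to /user
sudo -u hdfs hdfs dfs -chmod o+x /user

# option B: grant traverse to the spark user alone via an HDFS ACL
# (requires dfs.namenode.acls.enabled=true on the NameNode)
sudo -u hdfs hdfs dfs -setfacl -m user:spark:--x /user

Either change lets the spark user traverse /user again; alternatively, spark.history.fs.logDirectory can be pointed at a directory the spark user can already reach.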