17/11/08 15:13:25 INFO zlib.ZlibFactory: Successfully loaded & initialized native-zlib library
17/11/08 15:13:25 INFO compress.CodecPool: Got brand-new decompressor [.deflate]

Container: container_e08_1510129660245_0004_01_000001 on clustername_45454_1510132635164
LogAggregationType: AGGREGATED
==========================================================================================

LogType:directory.info
Log Upload Time:Wed Nov 08 14:47:15 +0530 2017
LogLength:5937
Log Contents:
ls -l:
total 32
-rw-r--r--. 1 yarn hadoop 69 Nov 8 14:47 container_tokens
-rwx------. 1 yarn hadoop 653 Nov 8 14:47 default_container_executor_session.sh
-rwx------. 1 yarn hadoop 707 Nov 8 14:47 default_container_executor.sh
lrwxrwxrwx. 1 yarn hadoop 70 Nov 8 14:47 dma-collect.properties -> /hadoop/yarn/local/usercache/root/filecache/104/dma-collect.properties
-rwx------. 1 yarn hadoop 6109 Nov 8 14:47 launch_container.sh
lrwxrwxrwx. 1 yarn hadoop 85 Nov 8 14:47 __spark_conf__ -> /hadoop/yarn/local/usercache/root/filecache/105/__spark_conf__2207740214486653748.zip
lrwxrwxrwx. 1 yarn hadoop 54 Nov 8 14:47 __spark__.jar -> /hadoop/yarn/local/filecache/13/spark-hdp-assembly.jar
drwx--x---. 2 yarn hadoop 4096 Nov 8 14:47 tmp
find -L . -maxdepth 5 -ls:
8390584 4 drwx--x--- 3 yarn hadoop 4096 Nov 8 14:47 .
8390590 4 -rw-r--r-- 1 yarn hadoop 56 Nov 8 14:47 ./.launch_container.sh.crc
8390592 4 -rw-r--r-- 1 yarn hadoop 16 Nov 8 14:47 ./.default_container_executor_session.sh.crc
8390593 4 -rwx------ 1 yarn hadoop 707 Nov 8 14:47 ./default_container_executor.sh
8390542 4 drwx------ 2 yarn hadoop 4096 Nov 8 14:47 ./__spark_conf__
8390553 4 -r-x------ 1 yarn hadoop 2250 Nov 8 14:47 ./__spark_conf__/yarn-env.cmd
8390562 4 -r-x------ 1 yarn hadoop 1020 Nov 8 14:47 ./__spark_conf__/commons-logging.properties
8390573 8 -r-x------ 1 yarn hadoop 4956 Nov 8 14:47 ./__spark_conf__/metrics.properties
8390564 4 -r-x------ 1 yarn hadoop 2490 Nov 8 14:47 ./__spark_conf__/hadoop-metrics.properties
8390554 4 -r-x------ 1 yarn hadoop 884 Nov 8 14:47 ./__spark_conf__/ssl-client.xml
8390552 4 -r-x------ 1 yarn hadoop 1631 Nov 8 14:47 ./__spark_conf__/kms-log4j.properties
8390558 4 -r-x------ 1 yarn hadoop 758 Nov 8 14:47 ./__spark_conf__/mapred-site.xml.template
8390580 32 -r-x------ 1 yarn hadoop 29535 Nov 8 14:47 ./__spark_conf__/__spark_conf__.properties
8390572 4 -r-x------ 1 yarn hadoop 74 Nov 8 14:47 ./__spark_conf__/topology_mappings.data
8390560 8 -r-x------ 1 yarn hadoop 5309 Nov 8 14:47 ./__spark_conf__/yarn-env.sh
8390571 4 -r-x------ 1 yarn hadoop 15 Nov 8 14:47 ./__spark_conf__/slaves
8390544 4 -r-x------ 1 yarn hadoop 949 Nov 8 14:47 ./__spark_conf__/log4j.properties
8390546 4 -r-x------ 1 yarn hadoop 900 Nov 8 14:47 ./__spark_conf__/hadoop-metrics2.properties
8390557 4 -r-x------ 1 yarn hadoop 2358 Nov 8 14:47 ./__spark_conf__/topology_script.py
8390565 8 -r-x------ 1 yarn hadoop 4221 Nov 8 14:47 ./__spark_conf__/task-log4j.properties
8390549 4 -r-x------ 1 yarn hadoop 3169 Nov 8 14:47 ./__spark_conf__/core-site.xml
8390548 20 -r-x------ 1 yarn hadoop 17488 Nov 8 14:47 ./__spark_conf__/yarn-site.xml
8390556 4 -r-x------ 1 yarn hadoop 3518 Nov 8 14:47 ./__spark_conf__/kms-acls.xml
8390563 4 -r-x------ 1 yarn hadoop 1054 Nov 8 14:47 ./__spark_conf__/container-executor.cfg
8390550 4 -r-x------ 1 yarn hadoop 1 Nov 8 14:47 ./__spark_conf__/dfs.exclude
8390579 8 -r-x------ 1 yarn hadoop 4113 Nov 8 14:47 ./__spark_conf__/mapred-queues.xml.template
8390569 4 -r-x------ 1 yarn hadoop 1527 Nov 8 14:47 ./__spark_conf__/kms-env.sh
8390575 4 -r-x------ 1 yarn hadoop 1000 Nov 8 14:47 ./__spark_conf__/ssl-server.xml
8390555 4 -r-x------ 1 yarn hadoop 2136 Nov 8 14:47 ./__spark_conf__/capacity-scheduler.xml
8390545 8 -r-x------ 1 yarn hadoop 6869 Nov 8 14:47 ./__spark_conf__/mapred-site.xml
8390551 0 -r-x------ 1 yarn hadoop 0 Nov 8 14:47 ./__spark_conf__/yarn.exclude
8390574 4 -r-x------ 1 yarn hadoop 951 Nov 8 14:47 ./__spark_conf__/mapred-env.cmd
8390547 4 -r-x------ 1 yarn hadoop 3979 Nov 8 14:47 ./__spark_conf__/hadoop-env.cmd
8390566 4 -r-x------ 1 yarn hadoop 856 Nov 8 14:47 ./__spark_conf__/mapred-env.sh
8390570 4 -r-x------ 1 yarn hadoop 1308 Nov 8 14:47 ./__spark_conf__/hadoop-policy.xml
8390576 4 -r-x------ 1 yarn hadoop 2697 Nov 8 14:47 ./__spark_conf__/ssl-server.xml.example
8390567 4 -r-x------ 1 yarn hadoop 1602 Nov 8 14:47 ./__spark_conf__/health_check
8390568 4 -r-x------ 1 yarn hadoop 2316 Nov 8 14:47 ./__spark_conf__/ssl-client.xml.example
8390578 4 -r-x------ 1 yarn hadoop 945 Nov 8 14:47 ./__spark_conf__/taskcontroller.cfg
8390559 4 -r-x------ 1 yarn hadoop 1335 Nov 8 14:47 ./__spark_conf__/configuration.xsl
8390543 8 -r-x------ 1 yarn hadoop 6326 Nov 8 14:47 ./__spark_conf__/hadoop-env.sh
8390561 8 -r-x------ 1 yarn hadoop 6752 Nov 8 14:47 ./__spark_conf__/hdfs-site.xml
8390577 8 -r-x------ 1 yarn hadoop 5511 Nov 8 14:47 ./__spark_conf__/kms-site.xml
8390587 4 -rw-r--r-- 1 yarn hadoop 69 Nov 8 14:47 ./container_tokens
8390497 4 -r-x------ 1 yarn hadoop 1890 Nov 8 14:47 ./dma-collect.properties
8390591 4 -rwx------ 1 yarn hadoop 653 Nov 8 14:47 ./default_container_executor_session.sh
8390589 8 -rwx------ 1 yarn hadoop 6109 Nov 8 14:47 ./launch_container.sh
8390586 4 drwx--x--- 2 yarn hadoop 4096 Nov 8 14:47 ./tmp
8390594 4 -rw-r--r-- 1 yarn hadoop 16 Nov 8 14:47 ./.default_container_executor.sh.crc
6949365 186420 -r-xr-xr-x 1 yarn hadoop 190891225 Oct 12 17:30 ./__spark__.jar
8390588 4 -rw-r--r-- 1 yarn hadoop 12 Nov 8 14:47 ./.container_tokens.crc
broken symlinks(find -L . -maxdepth 5 -type l -ls):
End of LogType:directory.info

LogType:launch_container.sh
Log Upload Time:Wed Nov 08 14:47:15 +0530 2017
LogLength:6109
Log Contents:
#!/bin/bash
export SPARK_YARN_STAGING_DIR=".sparkStaging/application_1510129660245_0004"
export PATH="/usr/sbin:/sbin:/usr/lib/ambari-server/*:/sbin:/usr/sbin:/bin:/usr/bin:/var/lib/ambari-agent"
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/usr/hdp/current/hadoop-client/conf"}
export MAX_APP_ATTEMPTS="2"
export JAVA_HOME=${JAVA_HOME:-"/usr/java/jre1.8.0_131"}
export SPARK_YARN_CACHE_FILES="hdfs://clustername:8020/hdp/apps/2.6.1.0-129/spark/spark-hdp-assembly.jar#__spark__.jar,hdfs://clustername:8020/user/root/.sparkStaging/application_1510129660245_0004/dma-collect.properties#dma-collect.properties"
export LANG="en_US.UTF-8"
export APP_SUBMIT_TIME_ENV="1510132629142"
export NM_HOST="clustername"
export SPARK_YARN_CACHE_FILES_FILE_SIZES="190891225,1890"
export SPARK_YARN_CACHE_ARCHIVES_TIME_STAMPS="1510132629111"
export LOGNAME="root"
export JVM_PID="$$"
export PWD="/hadoop/yarn/local/usercache/root/appcache/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001"
export LOCAL_DIRS="/hadoop/yarn/local/usercache/root/appcache/application_1510129660245_0004,/hadoop/yarn/local/usercache/root/appcache/application_1510129660245_0004"
export APPLICATION_WEB_PROXY_BASE="/proxy/application_1510129660245_0004"
export NM_HTTP_PORT="8042"
export SPARK_DIST_CLASSPATH="/home/analytics/sparkServer/lib"
export LOG_DIRS="/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001,/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= "
export NM_PORT="45454"
export SPARK_YARN_CACHE_FILES_TIME_STAMPS="1507722357508,1510132629075"
export USER="root"
export HADOOP_YARN_HOME=${HADOOP_YARN_HOME:-"/usr/hdp/current/hadoop-yarn-nodemanager"}
export CLASSPATH="$PWD:$PWD/__spark_conf__:$PWD/__spark__.jar:/etc/hadoop/conf:/usr/hdp/current/hadoop-client/*:/usr/hdp/current/hadoop-client/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.6.1.0-129/hadoop/lib/hadoop-lzo-0.6.0.2.6.1.0-129.jar:/etc/hadoop/conf/secure:/home/analytics/sparkServer/lib"
export SPARK_YARN_CACHE_ARCHIVES="hdfs://clustername:8020/user/root/.sparkStaging/application_1510129660245_0004/__spark_conf__2207740214486653748.zip#__spark_conf__"
export SPARK_YARN_CACHE_ARCHIVES_FILE_SIZES="137793"
export SPARK_YARN_MODE="true"
export SPARK_YARN_CACHE_FILES_VISIBILITIES="PUBLIC,PRIVATE"
export HADOOP_TOKEN_FILE_LOCATION="/hadoop/yarn/local/usercache/root/appcache/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/container_tokens"
export NM_AUX_SERVICE_spark_shuffle=""
export SPARK_USER="root"
export LOCAL_USER_DIRS="/hadoop/yarn/local/usercache/root/,/hadoop/yarn/local/usercache/root/"
export HADOOP_HOME="/usr/hdp/2.6.1.0-129/hadoop"
export SPARK_YARN_CACHE_ARCHIVES_VISIBILITIES="PRIVATE"
export HOME="/home/"
export NM_AUX_SERVICE_spark2_shuffle=""
export CONTAINER_ID="container_e08_1510129660245_0004_01_000001"
export MALLOC_ARENA_MAX="4"
ln -sf "/hadoop/yarn/local/usercache/root/filecache/104/dma-collect.properties" "dma-collect.properties"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
 exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/filecache/13/spark-hdp-assembly.jar" "__spark__.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
 exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/root/filecache/105/__spark_conf__2207740214486653748.zip" "__spark_conf__"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
 exit $hadoop_shell_errorcode
fi
# Creating copy of launch script
cp "launch_container.sh" "/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/launch_container.sh"
chmod 640 "/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/directory.info"
ls -l 1>>"/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/directory.info"
find -L . -maxdepth 5 -ls 1>>"/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/directory.info"
exec /bin/bash -c "$JAVA_HOME/bin/java -server -Xmx512m -Djava.io.tmpdir=$PWD/tmp '-Dhdp.verion=2.6.1.0-129' -Dhdp.version=2.6.1.0-129 -Dspark.yarn.app.container.log.dir=/hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001 org.apache.spark.deploy.yarn.ExecutorLauncher --arg '107.110.7.59:53643' --executor-memory 2048m --executor-cores 4 --properties-file $PWD/__spark_conf__/__spark_conf__.properties 1> /hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/stdout 2> /hadoop/yarn/log/application_1510129660245_0004/container_e08_1510129660245_0004_01_000001/stderr"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
 exit $hadoop_shell_errorcode
fi
End of LogType:launch_container.sh
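Note that the launch script falls back to JAVA_HOME=/usr/java/jre1.8.0_131, a bare Oracle JRE. On Java 8, the JCE bootstrap (javax.crypto.JceSecurity.setupJurisdictionPolicies, the frame visible in the stderr below) requires both jurisdiction policy jars under <java-home>/lib/security, and fails with "Cannot locate policy or framework files!" if either is absent. A minimal check to run on the NodeManager host; JRE_HOME here is an assumption taken from the JAVA_HOME fallback above:

```bash
#!/bin/bash
# Sketch: verify the JCE jurisdiction policy jars exist in the JRE the
# container defaults to (path assumed from launch_container.sh above).
JRE_HOME=/usr/java/jre1.8.0_131
for jar in local_policy.jar US_export_policy.jar; do
  if [ -f "$JRE_HOME/lib/security/$jar" ]; then
    echo "found   $JRE_HOME/lib/security/$jar"
  else
    # A missing jar here reproduces "Cannot locate policy or framework files!"
    echo "MISSING $JRE_HOME/lib/security/$jar"
  fi
done
```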
LogType:stderr
Log Upload Time:Wed Nov 08 14:47:15 +0530 2017
LogLength:4352
Log Contents:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/hadoop/yarn/local/filecache/13/spark-hdp-assembly.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.6.1.0-129/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
17/11/08 14:47:10 INFO ApplicationMaster: Registered signal handlers for [TERM, HUP, INT]
17/11/08 14:47:10 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
17/11/08 14:47:10 INFO ApplicationMaster: ApplicationAttemptId: appattempt_1510129660245_0004_000001
17/11/08 14:47:11 WARN DomainSocketFactory: The short-circuit local reads feature cannot be used because libhadoop cannot be loaded.
17/11/08 14:47:11 INFO SecurityManager: Changing view acls to: yarn,root
17/11/08 14:47:11 INFO SecurityManager: Changing modify acls to: yarn,root
17/11/08 14:47:11 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(yarn, root); users with modify permissions: Set(yarn, root)
Exception in thread "main" java.lang.ExceptionInInitializerError
	at javax.crypto.JceSecurityManager.<clinit>(JceSecurityManager.java:65)
	at javax.crypto.Cipher.getConfiguredPermission(Cipher.java:2587)
	at javax.crypto.Cipher.getMaxAllowedKeyLength(Cipher.java:2611)
	at sun.security.ssl.CipherSuite$BulkCipher.isUnlimited(Unknown Source)
	at sun.security.ssl.CipherSuite$BulkCipher.<init>(Unknown Source)
	at sun.security.ssl.CipherSuite.<clinit>(Unknown Source)
	at sun.security.ssl.SSLContextImpl.getApplicableCipherSuiteList(Unknown Source)
	at sun.security.ssl.SSLContextImpl.access$100(Unknown Source)
	at sun.security.ssl.SSLContextImpl$AbstractTLSContext.<clinit>(Unknown Source)
	at java.lang.Class.forName0(Native Method)
	at java.lang.Class.forName(Unknown Source)
	at java.security.Provider$Service.getImplClass(Unknown Source)
	at java.security.Provider$Service.newInstance(Unknown Source)
	at sun.security.jca.GetInstance.getInstance(Unknown Source)
	at sun.security.jca.GetInstance.getInstance(Unknown Source)
	at javax.net.ssl.SSLContext.getInstance(Unknown Source)
	at javax.net.ssl.SSLContext.getDefault(Unknown Source)
	at org.apache.spark.SSLOptions.liftedTree1$1(SSLOptions.scala:123)
	at org.apache.spark.SSLOptions.<init>(SSLOptions.scala:115)
	at org.apache.spark.SSLOptions$.parse(SSLOptions.scala:200)
	at org.apache.spark.SecurityManager.<init>(SecurityManager.scala:245)
	at org.apache.spark.deploy.yarn.ApplicationMaster.run(ApplicationMaster.scala:190)
	at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$main$1.apply$mcV$sp(ApplicationMaster.scala:674)
	at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:68)
	at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:67)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Unknown Source)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
	at org.apache.spark.deploy.SparkHadoopUtil.runAsSparkUser(SparkHadoopUtil.scala:67)
	at org.apache.spark.deploy.yarn.ApplicationMaster$.main(ApplicationMaster.scala:672)
	at org.apache.spark.deploy.yarn.ExecutorLauncher$.main(ApplicationMaster.scala:699)
	at org.apache.spark.deploy.yarn.ExecutorLauncher.main(ApplicationMaster.scala)
Caused by: java.lang.SecurityException: Can not initialize cryptographic mechanism
	at javax.crypto.JceSecurity.<clinit>(JceSecurity.java:88)
	... 32 more
Caused by: java.lang.SecurityException: Cannot locate policy or framework files!
	at javax.crypto.JceSecurity.setupJurisdictionPolicies(JceSecurity.java:255)
	at javax.crypto.JceSecurity.access$000(JceSecurity.java:48)
	at javax.crypto.JceSecurity$1.run(JceSecurity.java:80)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.crypto.JceSecurity.<clinit>(JceSecurity.java:77)
	... 32 more
17/11/08 14:47:11 INFO ApplicationMaster: Final app status: UNDEFINED, exitCode: 0, (reason: Shutdown hook called before final status was reported.)
17/11/08 14:47:11 INFO ShutdownHookManager: Shutdown hook called
End of LogType:stderr

LogType:stdout
Log Upload Time:Wed Nov 08 14:47:15 +0530 2017
LogLength:0
Log Contents:
End of LogType:stdout
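The attempt dies inside SecurityManager construction before the ApplicationMaster ever registers, so the crash should be reproducible outside YARN with the same JRE. A quick sketch, assuming the JRE ships Nashorn's jjs (Oracle JRE 8 does); it forces the same Cipher.getMaxAllowedKeyLength call that sits at the top of the stack trace:

```bash
# Sketch: trigger the JCE initialization directly under the container's JRE.
# A healthy install prints a key length (128, or 2147483647 with unlimited
# policy); a broken one fails with the same ExceptionInInitializerError.
echo 'print(javax.crypto.Cipher.getMaxAllowedKeyLength("AES"))' \
  | /usr/java/jre1.8.0_131/bin/jjs -
```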
Container: container_e08_1510129660245_0004_02_000001 on clustername_45454_1510132635164
LogAggregationType: AGGREGATED
==========================================================================================

LogType:directory.info
Log Upload Time:Wed Nov 08 14:47:15 +0530 2017
LogLength:5937
Log Contents:
[identical to the first container's directory.info above, apart from a few inode numbers]
End of LogType:directory.info

LogType:launch_container.sh
Log Upload Time:Wed Nov 08 14:47:15 +0530 2017
LogLength:6109
Log Contents:
[identical to the first container's launch_container.sh above, with container_e08_1510129660245_0004_02_000001 in place of container_e08_1510129660245_0004_01_000001 throughout]
End of LogType:launch_container.sh
LogType:stderr
Log Upload Time:Wed Nov 08 14:47:15 +0530 2017
LogLength:4626
Log Contents:
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/hadoop/yarn/local/filecache/13/spark-hdp-assembly.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.6.1.0-129/hadoop/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
17/11/08 14:47:12 INFO ApplicationMaster: Registered signal handlers for [TERM, HUP, INT]
17/11/08 14:47:12 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
17/11/08 14:47:12 INFO ApplicationMaster: ApplicationAttemptId: appattempt_1510129660245_0004_000002
17/11/08 14:47:13 WARN DomainSocketFactory: The short-circuit local reads feature cannot be used because libhadoop cannot be loaded.
17/11/08 14:47:13 INFO SecurityManager: Changing view acls to: yarn,root
17/11/08 14:47:13 INFO SecurityManager: Changing modify acls to: yarn,root
17/11/08 14:47:13 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(yarn, root); users with modify permissions: Set(yarn, root)
Exception in thread "main" java.lang.ExceptionInInitializerError
	at javax.crypto.JceSecurityManager.<clinit>(JceSecurityManager.java:65)
	at javax.crypto.Cipher.getConfiguredPermission(Cipher.java:2587)
	at javax.crypto.Cipher.getMaxAllowedKeyLength(Cipher.java:2611)
	at sun.security.ssl.CipherSuite$BulkCipher.isUnlimited(Unknown Source)
	at sun.security.ssl.CipherSuite$BulkCipher.<init>(Unknown Source)
	at sun.security.ssl.CipherSuite.<clinit>(Unknown Source)
	at sun.security.ssl.SSLContextImpl.getApplicableCipherSuiteList(Unknown Source)
	at sun.security.ssl.SSLContextImpl.access$100(Unknown Source)
	at sun.security.ssl.SSLContextImpl$AbstractTLSContext.<clinit>(Unknown Source)
	at java.lang.Class.forName0(Native Method)
	at java.lang.Class.forName(Unknown Source)
	at java.security.Provider$Service.getImplClass(Unknown Source)
	at java.security.Provider$Service.newInstance(Unknown Source)
	at sun.security.jca.GetInstance.getInstance(Unknown Source)
	at sun.security.jca.GetInstance.getInstance(Unknown Source)
	at javax.net.ssl.SSLContext.getInstance(Unknown Source)
	at javax.net.ssl.SSLContext.getDefault(Unknown Source)
	at org.apache.spark.SSLOptions.liftedTree1$1(SSLOptions.scala:123)
	at org.apache.spark.SSLOptions.<init>(SSLOptions.scala:115)
	at org.apache.spark.SSLOptions$.parse(SSLOptions.scala:200)
	at org.apache.spark.SecurityManager.<init>(SecurityManager.scala:245)
	at org.apache.spark.deploy.yarn.ApplicationMaster.run(ApplicationMaster.scala:190)
	at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$main$1.apply$mcV$sp(ApplicationMaster.scala:674)
	at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:68)
	at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:67)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Unknown Source)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
	at org.apache.spark.deploy.SparkHadoopUtil.runAsSparkUser(SparkHadoopUtil.scala:67)
	at org.apache.spark.deploy.yarn.ApplicationMaster$.main(ApplicationMaster.scala:672)
	at org.apache.spark.deploy.yarn.ExecutorLauncher$.main(ApplicationMaster.scala:699)
	at org.apache.spark.deploy.yarn.ExecutorLauncher.main(ApplicationMaster.scala)
Caused by: java.lang.SecurityException: Can not initialize cryptographic mechanism
	at javax.crypto.JceSecurity.<clinit>(JceSecurity.java:88)
	... 32 more
Caused by: java.lang.SecurityException: Cannot locate policy or framework files!
	at javax.crypto.JceSecurity.setupJurisdictionPolicies(JceSecurity.java:255)
	at javax.crypto.JceSecurity.access$000(JceSecurity.java:48)
	at javax.crypto.JceSecurity$1.run(JceSecurity.java:80)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.crypto.JceSecurity.<clinit>(JceSecurity.java:77)
	... 32 more
17/11/08 14:47:13 INFO ApplicationMaster: Final app status: UNDEFINED, exitCode: 0, (reason: Shutdown hook called before final status was reported.)
17/11/08 14:47:13 INFO ApplicationMaster: Unregistering ApplicationMaster with UNDEFINED (diag message: Shutdown hook called before final status was reported.)
17/11/08 14:47:13 INFO ApplicationMaster: Deleting staging directory .sparkStaging/application_1510129660245_0004
17/11/08 14:47:13 INFO ShutdownHookManager: Shutdown hook called
End of LogType:stderr

LogType:stdout
Log Upload Time:Wed Nov 08 14:47:15 +0530 2017
LogLength:0
Log Contents:
End of LogType:stdout
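MAX_APP_ATTEMPTS="2" in the launch script explains why the failure appears twice: YARN retried the ApplicationMaster once, and attempt 000002 died identically. To regenerate this dump or cut straight to the root cause, the standard log-aggregation CLI works; the application id is taken from the container names above:

```bash
# Fetch the aggregated logs for both attempts and isolate the fatal JCE frame.
yarn logs -applicationId application_1510129660245_0004 > app_0004.log
grep -n -m1 -A3 'Cannot locate policy or framework files' app_0004.log
```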