The following are my error logs: the container's directory listing (directory.info), the generated launch_container.sh, and the container's stderr/stdout.
ls -l:
-rw-r--r--. 1 yarn hadoop 105 4月 13 11:29 container_tokens
-rwx------. 1 yarn hadoop 662 4月 13 11:29 default_container_executor_session.sh
-rwx------. 1 yarn hadoop 716 4月 13 11:29 default_container_executor.sh
lrwxrwxrwx. 1 yarn hadoop 73 4月 13 11:29 guava-11.0.2.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/169/guava-11.0.2.jar
lrwxrwxrwx. 1 yarn hadoop 87 4月 13 11:29 hadoop-4636969314433354462.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/165/hadoop-4636969314433354462.jar
lrwxrwxrwx. 1 yarn hadoop 80 4月 13 11:29 hadoop-common-2.7.3.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/166/hadoop-common-2.7.3.jar
lrwxrwxrwx. 1 yarn hadoop 95 4月 13 11:29 hadoop-mapreduce-client-core-2.7.3.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/176/hadoop-mapreduce-client-core-2.7.3.jar
lrwxrwxrwx. 1 yarn hadoop 79 4月 13 11:29 hbase-client-1.1.2.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/174/hbase-client-1.1.2.jar
lrwxrwxrwx. 1 yarn hadoop 79 4月 13 11:29 hbase-common-1.1.2.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/164/hbase-common-1.1.2.jar
lrwxrwxrwx. 1 yarn hadoop 86 4月 13 11:29 hbase-hadoop-compat-1.1.2.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/177/hbase-hadoop-compat-1.1.2.jar
lrwxrwxrwx. 1 yarn hadoop 81 4月 13 11:29 hbase-protocol-1.1.2.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/170/hbase-protocol-1.1.2.jar
lrwxrwxrwx. 1 yarn hadoop 79 4月 13 11:29 hbase-server-1.1.2.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/175/hbase-server-1.1.2.jar
lrwxrwxrwx. 1 yarn hadoop 89 4月 13 11:29 htrace-core-3.1.0-incubating.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/171/htrace-core-3.1.0-incubating.jar
drwxr-xr-x. 2 yarn hadoop 48 4月 13 11:29 jobSubmitDir
lrwxrwxrwx. 1 yarn hadoop 103 4月 13 11:29 job.xml -> /hadoop/yarn/local/usercache/Administrator/appcache/application_1491987967977_0010/filecache/12/job.xml
-rwx------. 1 yarn hadoop 8403 4月 13 11:29 launch_container.sh
lrwxrwxrwx. 1 yarn hadoop 79 4月 13 11:29 metrics-core-2.2.0.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/172/metrics-core-2.2.0.jar
lrwxrwxrwx. 1 yarn hadoop 83 4月 13 11:29 netty-all-4.0.23.Final.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/167/netty-all-4.0.23.Final.jar
lrwxrwxrwx. 1 yarn hadoop 80 4月 13 11:29 protobuf-java-2.5.0.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/168/protobuf-java-2.5.0.jar
drwx--x---. 2 yarn hadoop 6 4月 13 11:29 tmp
lrwxrwxrwx. 1 yarn hadoop 76 4月 13 11:29 zookeeper-3.4.6.jar -> /hadoop/yarn/local/usercache/Administrator/filecache/173/zookeeper-3.4.6.jar
find -L . -maxdepth 5 -ls:
2226325 4 drwx--x--- 4 yarn hadoop 4096 4月 13 11:29 .
2151885286 0 drwx--x--- 2 yarn hadoop 6 4月 13 11:29 ./tmp
2226326 4 -rw-r--r-- 1 yarn hadoop 105 4月 13 11:29 ./container_tokens
2226327 4 -rw-r--r-- 1 yarn hadoop 12 4月 13 11:29 ./.container_tokens.crc
2226328 12 -rwx------ 1 yarn hadoop 8403 4月 13 11:29 ./launch_container.sh
2226329 4 -rw-r--r-- 1 yarn hadoop 76 4月 13 11:29 ./.launch_container.sh.crc
2226330 4 -rwx------ 1 yarn hadoop 662 4月 13 11:29 ./default_container_executor_session.sh
2226331 4 -rw-r--r-- 1 yarn hadoop 16 4月 13 11:29 ./.default_container_executor_session.sh.crc
2226332 4 -rwx------ 1 yarn hadoop 716 4月 13 11:29 ./default_container_executor.sh
2226333 4 -rw-r--r-- 1 yarn hadoop 16 4月 13 11:29 ./.default_container_executor.sh.crc
1207925872 4104 -r-x------ 1 yarn hadoop 4201685 4月 13 11:29 ./hbase-protocol-1.1.2.jar
3221226664 568 -r-x------ 1 yarn hadoop 580435 4月 13 11:29 ./hadoop-4636969314433354462.jar
1207925863 520 -r-x------ 1 yarn hadoop 530078 4月 13 11:29 ./hbase-common-1.1.2.jar
1207925869 524 -r-x------ 1 yarn hadoop 533455 4月 13 11:29 ./protobuf-java-2.5.0.jar
3222227754 0 drwxr-xr-x 2 yarn hadoop 48 4月 13 11:29 ./jobSubmitDir
3222227746 4 -r-x------ 1 yarn hadoop 50 4月 13 11:29 ./jobSubmitDir/job.splitmetainfo
1207925884 4 -r-x------ 1 yarn hadoop 161 4月 13 11:29 ./jobSubmitDir/job.split
3222227734 1444 -r-x------ 1 yarn hadoop 1475955 4月 13 11:29 ./htrace-core-3.1.0-incubating.jar
1207925878 4092 -r-x------ 1 yarn hadoop 4187084 4月 13 11:29 ./hbase-client-1.1.2.jar
3222227728 1740 -r-x------ 1 yarn hadoop 1779991 4月 13 11:29 ./netty-all-4.0.23.Final.jar
1207925875 84 -r-x------ 1 yarn hadoop 82123 4月 13 11:29 ./metrics-core-2.2.0.jar
1207925881 1524 -r-x------ 1 yarn hadoop 1556539 4月 13 11:29 ./hadoop-mapreduce-client-core-2.7.3.jar
3222227731 1612 -r-x------ 1 yarn hadoop 1648200 4月 13 11:29 ./guava-11.0.2.jar
1207925887 124 -r-x------ 1 yarn hadoop 123700 4月 13 11:29 ./job.xml
1207925866 3400 -r-x------ 1 yarn hadoop 3479293 4月 13 11:29 ./hadoop-common-2.7.3.jar
3222227743 36 -r-x------ 1 yarn hadoop 35944 4月 13 11:29 ./hbase-hadoop-compat-1.1.2.jar
3222227740 3888 -r-x------ 1 yarn hadoop 3978343 4月 13 11:29 ./hbase-server-1.1.2.jar
3222227737 776 -r-x------ 1 yarn hadoop 792964 4月 13 11:29 ./zookeeper-3.4.6.jar
broken symlinks(find -L . -maxdepth 5 -type l -ls):
launch_container.sh:
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/usr/hdp/current/hadoop-client/conf"}
export MAX_APP_ATTEMPTS="2"
export JAVA_HOME=${JAVA_HOME:-"/usr/jdk64/jdk1.8.0_77"}
export APP_SUBMIT_TIME_ENV="1492054154700"
export NM_HOST="hdp-slave2"
export HADOOP_CLASSPATH="$PWD:job.jar/job.jar:job.jar/classes/:job.jar/lib/*:$PWD/*:null"
export LD_LIBRARY_PATH="$PWD"
export LOGNAME="Administrator"
export JVM_PID="$$"
export PWD="/hadoop/yarn/local/usercache/Administrator/appcache/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001"
export LOCAL_DIRS="/hadoop/yarn/local/usercache/Administrator/appcache/application_1491987967977_0010"
export APPLICATION_WEB_PROXY_BASE="/proxy/application_1491987967977_0010"
export SHELL="/bin/bash"
export NM_HTTP_PORT="8042"
export LOG_DIRS="/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001"
export NM_AUX_SERVICE_mapreduce_shuffle="AAA0+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
"
export NM_PORT="45454"
export USER="Administrator"
export HADOOP_YARN_HOME=${HADOOP_YARN_HOME:-"/usr/hdp/current/hadoop-yarn-nodemanager"}
export CLASSPATH="$PWD:$HADOOP_CONF_DIR:/usr/hdp/current/hadoop-client/*:/usr/hdp/current/hadoop-client/lib/*:/usr/hdp/current/hadoop-hdfs-client/*:/usr/hdp/current/hadoop-hdfs-client/lib/*:/usr/hdp/current/hadoop-yarn-client/*:/usr/hdp/current/hadoop-yarn-client/lib/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/etc/hadoop/conf/secure:/usr/hdp/2.5.3.0-37/hadoop/mapreduce.tar.gz:job.jar/job.jar:job.jar/classes/:job.jar/lib/*:$PWD/*"
export HADOOP_TOKEN_FILE_LOCATION="/hadoop/yarn/local/usercache/Administrator/appcache/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/container_tokens"
export NM_AUX_SERVICE_spark_shuffle=""
export LOCAL_USER_DIRS="/hadoop/yarn/local/usercache/Administrator/"
export HOME="/home/"
export NM_AUX_SERVICE_spark2_shuffle=""
export CONTAINER_ID="container_e09_1491987967977_0010_02_000001"
export MALLOC_ARENA_MAX="4"
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/170/hbase-protocol-1.1.2.jar" "hbase-protocol-1.1.2.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/165/hadoop-4636969314433354462.jar" "hadoop-4636969314433354462.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/164/hbase-common-1.1.2.jar" "hbase-common-1.1.2.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/168/protobuf-java-2.5.0.jar" "protobuf-java-2.5.0.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
mkdir -p jobSubmitDir
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/appcache/application_1491987967977_0010/filecache/11/job.splitmetainfo" "jobSubmitDir/job.splitmetainfo"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/171/htrace-core-3.1.0-incubating.jar" "htrace-core-3.1.0-incubating.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/174/hbase-client-1.1.2.jar" "hbase-client-1.1.2.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/167/netty-all-4.0.23.Final.jar" "netty-all-4.0.23.Final.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/172/metrics-core-2.2.0.jar" "metrics-core-2.2.0.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/176/hadoop-mapreduce-client-core-2.7.3.jar" "hadoop-mapreduce-client-core-2.7.3.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/169/guava-11.0.2.jar" "guava-11.0.2.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/appcache/application_1491987967977_0010/filecache/12/job.xml" "job.xml"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/166/hadoop-common-2.7.3.jar" "hadoop-common-2.7.3.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/177/hbase-hadoop-compat-1.1.2.jar" "hbase-hadoop-compat-1.1.2.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/175/hbase-server-1.1.2.jar" "hbase-server-1.1.2.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
mkdir -p jobSubmitDir
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/appcache/application_1491987967977_0010/filecache/10/job.split" "jobSubmitDir/job.split"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
ln -sf "/hadoop/yarn/local/usercache/Administrator/filecache/173/zookeeper-3.4.6.jar" "zookeeper-3.4.6.jar"
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
# Creating copy of launch script
cp "launch_container.sh" "/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/launch_container.sh"
chmod 640 "/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/launch_container.sh"
# Determining directory contents
echo "ls -l:" 1>"/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/directory.info"
ls -l 1>>"/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/directory.info"
echo "find -L . -maxdepth 5 -ls:" 1>>"/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/directory.info"
find -L . -maxdepth 5 -ls 1>>"/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/directory.info"
echo "broken symlinks(find -L . -maxdepth 5 -type l -ls):" 1>>"/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/directory.info"
find -L . -maxdepth 5 -type l -ls 1>>"/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/directory.info"
exec /bin/bash -c "$JAVA_HOME/bin/java -Djava.io.tmpdir=$PWD/tmp -Dlog4j.configuration=container-log4j.properties -Dyarn.app.container.log.dir=/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001 -Dyarn.app.container.log.filesize=0 -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog -Xmx1024m org.apache.hadoop.mapreduce.v2.app.MRAppMaster 1>/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/stdout 2>/hadoop/yarn/log/application_1491987967977_0010/container_e09_1491987967977_0010_02_000001/stderr "
hadoop_shell_errorcode=$?
if [ $hadoop_shell_errorcode -ne 0 ]
then
exit $hadoop_shell_errorcode
fi
Log Type: stderr
Log Upload Time: Thu Apr 13 11:29:18 +0800 2017
Log Length: 86
Error: Could not find or load main class org.apache.hadoop.mapreduce.v2.app.MRAppMaster
Log Type: stdout
Log Upload Time: Thu Apr 13 11:29:18 +0800 2017
Log Length: 0
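
For reference, a minimal sketch of how the complete aggregated logs for this attempt can be pulled with the YARN CLI (assuming log aggregation is enabled on this cluster); the application ID and owner are taken from the paths above:

# fetch all aggregated container logs for the failed application
yarn logs -applicationId application_1491987967977_0010 -appOwner Administrator

This should return the same stderr shown above along with any other container logs that were aggregated.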