
Oozie tasks in a kerberized cluster using a Hive action


When I try to run an Oozie task in a kerberized cluster, I get an error and can't connect to the Hive metastore.
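
For context, the usual cause of this symptom in a kerberized cluster is that the workflow's Hive action never obtains a Hive metastore delegation token. Oozie provides this through an HCat credential: the workflow declares a <credentials> block and the action references it with a cred attribute. Below is a minimal sketch of such a workflow, not the poster's actual definition; the metastore URI, the hive/_HOST@EXAMPLE.COM principal, and the script name are placeholders to adjust to your environment (the action name clear_hive_tbl is taken from the oozie.action.id in the log below):

<workflow-app xmlns="uri:oozie:workflow:0.5" name="clear-hive-tbl-wf">
    <credentials>
        <!-- Asks Oozie for a Hive metastore (HCat) delegation token -->
        <credential name="hcat_creds" type="hcat">
            <property>
                <name>hcat.metastore.uri</name>
                <!-- placeholder: your metastore's Thrift URI -->
                <value>thrift://metastore-host.example.com:9083</value>
            </property>
            <property>
                <name>hcat.metastore.principal</name>
                <!-- placeholder: your metastore's Kerberos principal -->
                <value>hive/_HOST@EXAMPLE.COM</value>
            </property>
        </credential>
    </credentials>
    <start to="clear_hive_tbl"/>
    <!-- The action must reference the credential, otherwise no token is fetched -->
    <action name="clear_hive_tbl" cred="hcat_creds">
        <hive xmlns="uri:oozie:hive-action:0.5">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <!-- placeholder: your Hive script -->
            <script>clear_tbl.q</script>
        </hive>
        <ok to="end"/>
        <error to="fail"/>
    </action>
    <kill name="fail">
        <message>Hive action failed: ${wf:errorMessage(wf:lastErrorNode())}</message>
    </kill>
    <end name="end"/>
</workflow-app>

This sketch also assumes the Oozie server has the hcat credential type enabled (oozie.credentials.credentialclasses containing hcat=org.apache.oozie.action.hadoop.HCatCredentials in oozie-site.xml), which is normally already configured on HDP.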

Java System Properties:
------------------------
#
#Wed Feb 07 14:47:22 MSK 2018
java.runtime.name=Java(TM) SE Runtime Environment
sun.boot.library.path=/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/lib/amd64
java.vm.version=25.141-b15
oozie.action.externalChildIDs=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/externalChildIDs
hadoop.root.logger=INFO,CLA
java.vm.vendor=Oracle Corporation
java.vendor.url=http\://java.oracle.com/
path.separator=\:
java.vm.name=Java HotSpot(TM) 64-Bit Server VM
file.encoding.pkg=sun.io
oozie.job.launch.time=1518004020550
user.country=US
sun.java.launcher=SUN_STANDARD
sun.os.patch.level=unknown
java.vm.specification.name=Java Virtual Machine Specification
user.dir=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002
oozie.action.newId=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/newId
java.runtime.version=1.8.0_141-b15
java.awt.graphicsenv=sun.awt.X11GraphicsEnvironment
java.endorsed.dirs=/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/lib/endorsed
os.arch=amd64
oozie.job.id=0000003-180207125715047-oozie-oozi-W
oozie.action.id=0000003-180207125715047-oozie-oozi-W@clear_hive_tbl
yarn.app.container.log.dir=/data/log/yarn/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002
java.io.tmpdir=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/tmp
line.separator=\n
oozie.action.output.properties=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/output.properties
java.vm.specification.vendor=Oracle Corporation
os.name=Linux
log4j.configuration=container-log4j.properties
sun.jnu.encoding=UTF-8
hdp.version=2.6.4.0-91
java.library.path=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002\:/usr/hdp/2.6.4.0-91/hadoop/lib/native\:/usr/hdp/2.6.4.0-91/hadoop/lib/native/Linux-amd64-64\:./mr-framework/hadoop/lib/native\:./mr-framework/hadoop/lib/native/Linux-amd64-64\:/usr/java/packages/lib/amd64\:/usr/lib64\:/lib64\:/lib\:/usr/lib
oozie.action.conf.xml=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/action.xml
java.specification.name=Java Platform API Specification
java.class.version=52.0
java.net.preferIPv4Stack=true
sun.management.compiler=HotSpot 64-Bit Tiered Compilers
os.version=3.10.0-693.1.1.el7.x86_64
oozie.action.error.properties=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/error.properties
hadoop.root.logfile=syslog
yarn.app.container.log.filesize=0
user.home=/home/admfkr
user.timezone=Europe/Moscow
java.awt.printerjob=sun.print.PSPrinterJob
file.encoding=UTF-8
java.specification.version=1.8
java.class.path=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-app-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-plugins-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-shuffle-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-common-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.7.3.2.6.4.0-91-tests.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/hamcrest-core-1.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/paranamer-2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/protobuf-java-2.5.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/commons-compress-1.4.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/jersey-core-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/guice-3.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/jackson-mapper-asl-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/ad
mfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/leveldbjni-all-1.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/hadoop-annotations-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/netty-3.6.2.Final.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/avro-1.7.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/commons-io-2.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/guice-servlet-3.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/xz-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/jersey-guice-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/jersey-server-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/snappy-java-1.0.4.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/javax.inject-1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/aopalliance-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/log4j-1.2.17.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/junit-4.11.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/asm-3.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/mapreduce/lib/jackson-core-asl-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/hadoop-nfs-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/azure-data-lake-store-sdk-2.1.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009
_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/hadoop-aws-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/hadoop-azure-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/hadoop-common-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/hadoop-azure-datalake-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/hadoop-common-2.7.3.2.6.4.0-91-tests.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/gson-2.2.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/xmlenc-0.52.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-configuration-1.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/hamcrest-core-1.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/curator-framework-2.7.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-codec-1.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jackson-annotations-2.2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/paranamer-2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/protobuf-java-2.5.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/aws-java-sdk-core-1.10.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/api-util-1.0.0-M20.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/httpcore-4.4.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jackson-databind-2.2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/con
tainer_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-compress-1.4.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jets3t-0.9.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/slf4j-api-1.7.10.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-cli-1.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jackson-xc-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/joda-time-2.9.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/nimbus-jose-jwt-3.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/java-xmlbuilder-0.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jaxb-api-2.2.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jersey-core-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-lang-2.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jsch-0.1.54.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/azure-storage-5.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/azure-keyvault-core-0.8.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/activation-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jackson-mapper-asl-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/hadoop-annotations-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/servlet-api-2.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/sha
re/hadoop/common/lib/netty-3.6.2.Final.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/avro-1.7.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/api-asn1-api-1.0.0-M20.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-io-2.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jsr305-3.0.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/guava-11.0.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-math3-3.1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-beanutils-core-1.8.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/zookeeper-3.4.6.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/aws-java-sdk-kms-1.10.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/xz-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/apacheds-i18n-2.0.0-M15.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/httpclient-4.5.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jersey-server-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jersey-json-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/snappy-java-1.0.4.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-beanutils-1.7.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-lang3-3.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar\:/data/1/hadoop/yarn/l
ocal/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/aws-java-sdk-s3-1.10.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-collections-3.2.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jetty-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jackson-jaxrs-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jetty-sslengine-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-digester-1.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/apacheds-kerberos-codec-2.0.0-M15.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-net-3.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/json-smart-1.1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jaxb-impl-2.2.3-1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jcip-annotations-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/curator-recipes-2.7.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/log4j-1.2.17.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jackson-core-2.2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/hadoop-auth-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/mockito-all-1.8.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jettison-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/commons-logging-1.1.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcac
he/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/junit-4.11.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jsp-api-2.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/stax-api-1.0-2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/asm-3.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jetty-util-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/jackson-core-asl-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/htrace-core-3.1.0-incubating.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/common/lib/curator-client-2.7.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-server-sharedcachemanager-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-api-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-client-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-server-applicationhistoryservice-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-server-resourcemanager-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-applications-unmanaged-am-launcher-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-registry-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-server-common-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-server-timeline-pluginstorage-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_15
18001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-server-nodemanager-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-common-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-server-web-proxy-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-server-tests-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/hadoop-yarn-applications-distributedshell-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/gson-2.2.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/xmlenc-0.52.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-configuration-1.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/curator-framework-2.7.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-codec-1.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jackson-annotations-2.2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/paranamer-2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/protobuf-java-2.5.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/api-util-1.0.0-M20.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jersey-client-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/httpcore-4.4.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jackson-databind-2.2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-compress-1.4.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_00
02/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jets3t-0.9.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-cli-1.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/metrics-core-3.0.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jackson-xc-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/nimbus-jose-jwt-3.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/java-xmlbuilder-0.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jaxb-api-2.2.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jersey-core-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/guice-3.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-lang-2.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jsch-0.1.54.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/azure-storage-5.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/azure-keyvault-core-0.8.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/activation-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jackson-mapper-asl-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/leveldbjni-all-1.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/objenesis-2.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/servlet-api-2.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/netty-3.6.2.Final.jar\:/data/1/hadoop/yar
n/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/zookeeper-3.4.6.2.6.4.0-91-tests.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/avro-1.7.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/api-asn1-api-1.0.0-M20.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-io-2.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/guice-servlet-3.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jsr305-3.0.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/guava-11.0.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/fst-2.24.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-math3-3.1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-beanutils-core-1.8.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/zookeeper-3.4.6.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/xz-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/apacheds-i18n-2.0.0-M15.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jersey-guice-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/httpclient-4.5.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jersey-server-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jersey-json-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/javassist-3.18.1-GA.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-
framework/hadoop/share/hadoop/yarn/lib/snappy-java-1.0.4.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-beanutils-1.7.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-lang3-3.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-collections-3.2.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/javax.inject-1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jetty-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jackson-jaxrs-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jetty-sslengine-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-digester-1.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/apacheds-kerberos-codec-2.0.0-M15.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/aopalliance-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-net-3.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/json-smart-1.1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jaxb-impl-2.2.3-1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jcip-annotations-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/curator-recipes-2.7.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/log4j-1.2.17.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jackson-core-2.2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jettison-1.1.jar\:/data/1/hadoop/yarn
/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/commons-logging-1.1.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jsp-api-2.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/stax-api-1.0-2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/asm-3.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jetty-util-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/jackson-core-asl-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/htrace-core-3.1.0-incubating.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/yarn/lib/curator-client-2.7.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/hadoop-hdfs-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/hadoop-hdfs-nfs-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/hadoop-hdfs-2.7.3.2.6.4.0-91-tests.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/xmlenc-0.52.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/xercesImpl-2.9.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/commons-codec-1.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/jackson-annotations-2.2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/protobuf-java-2.5.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/netty-all-4.0.52.Final.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/jackson-databind-2.2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/
container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/commons-cli-1.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/okhttp-2.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/jersey-core-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/commons-lang-2.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/jackson-mapper-asl-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/okio-1.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/leveldbjni-all-1.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/xml-apis-1.3.04.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/servlet-api-2.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/netty-3.6.2.Final.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/commons-io-2.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/jsr305-3.0.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/guava-11.0.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/jersey-server-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/jetty-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/log4j-1.2.17.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/jackson-core-2.2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/commons-logging-1.1.3.jar\:/data/1/hadoop/yarn/local/userc
ache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/asm-3.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/jetty-util-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/jackson-core-asl-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/hdfs/lib/htrace-core-3.1.0-incubating.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/gson-2.2.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/xmlenc-0.52.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-configuration-1.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hamcrest-core-1.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/curator-framework-2.7.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-codec-1.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/paranamer-2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/protobuf-java-2.5.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/api-util-1.0.0-M20.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/httpcore-4.4.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-compress-1.4.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jets3t-0.9.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-cli-1.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/metrics-core-3.0.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/m
r-framework/hadoop/share/hadoop/tools/lib/jackson-xc-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/nimbus-jose-jwt-3.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/okhttp-2.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/java-xmlbuilder-0.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jaxb-api-2.2.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jersey-core-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-lang-2.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jsch-0.1.54.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-distcp-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-sls-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/azure-keyvault-core-0.8.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/activation-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jackson-mapper-asl-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/okio-1.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/servlet-api-2.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/netty-3.6.2.Final.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/avro-1.7.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/api-asn1-api-1.0.0-M20.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-openstack-2.7.3.2.6.4.0-91.jar\:/data/
1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-io-2.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jsr305-3.0.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/guava-11.0.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-datajoin-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-math3-3.1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-beanutils-core-1.8.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/zookeeper-3.4.6.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-rumen-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/xz-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/apacheds-i18n-2.0.0-M15.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/httpclient-4.5.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jersey-server-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jersey-json-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/snappy-java-1.0.4.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-archives-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-beanutils-1.7.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-lang3-3.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-collections-3.2.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/app
cache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jetty-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-ant-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-streaming-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jackson-jaxrs-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jetty-sslengine-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-digester-1.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/apacheds-kerberos-codec-2.0.0-M15.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-net-3.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-gridmix-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-httpclient-3.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/json-smart-1.1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jaxb-impl-2.2.3-1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jcip-annotations-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/curator-recipes-2.7.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/log4j-1.2.17.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-auth-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/mockito-all-1.8.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jettison-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1
518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/commons-logging-1.1.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/junit-4.11.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jsp-api-2.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/stax-api-1.0-2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/asm-3.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jetty-util-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/hadoop-extras-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/jackson-core-asl-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/htrace-core-3.1.0-incubating.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mr-framework/hadoop/share/hadoop/tools/lib/curator-client-2.7.1.jar\:/usr/hdp/2.6.4.0-91/hadoop/lib/hadoop-lzo-0.6.0.2.6.4.0-91.jar\:/etc/hadoop/conf/secure\:job.jar/job.jar\:job.jar/classes/\:job.jar/lib/*\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/httpclient-4.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jackson-jaxrs-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-bridge-0.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/oozie-sharelib-oozie-4.2.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-shims-scheduler-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/guava-11.0.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/atlas-intg-0.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/javax.jdo-3.2.0-m3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-yarn-registry-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002
/kafka-clients-0.10.1.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/velocity-1.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-azure-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-yarn-server-common-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-collections4-4.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/azure-data-lake-store-sdk-2.1.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/javax.inject-1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/netty-3.6.2.Final.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/tez-dag-0.7.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-shims-common-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/antlr-2.7.7.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/HikariCP-2.5.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/geronimo-jaspic_1.0_spec-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/atlas-typesystem-0.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/datanucleus-core-4.1.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/stringtemplate-3.2.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/avro-1.7.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/fst-2.24.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jersey-client-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-httpclient-3.0.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jackson-databind-2.4.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jetty-6.1.14.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/json4s-native_2.11-3.2.11.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/servlet-api
-2.5-6.1.14.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jackson-annotations-2.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/snappy-java-1.0.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-yarn-server-resourcemanager-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/metrics-core-3.1.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/avatica-1.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jta-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/aws-java-sdk-s3-1.10.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/json-simple-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/oozie-sharelib-hive-4.2.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/atlas-client-common-0.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/apache-log4j-extras-1.2.17.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jackson-xc-1.9.13.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/mail-1.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jpam-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/datanucleus-api-jdo-4.2.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/eigenbase-properties-1.1.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/gson-2.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/tez-common-0.7.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/leveldbjni-all-1.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/atlas-client-v1-0.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/stax-api-1.0-2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jsr305-2.0.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/metrics-jvm-3.1.0.jar\:/data/1/hadoo
p/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jline-2.12.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/groovy-all-2.4.11.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/avatica-metrics-1.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/ant-launcher-1.9.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/atlas-common-0.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/scala-compiler-2.11.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/metrics-json-3.1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/zookeeper-3.4.6.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jersey-guice-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/scala-library-2.11.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/objenesis-2.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-shims-0.23-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/ant-1.9.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/zookeeper-3.4.6.2.6.4.0-91-tests.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-collections-3.2.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-lang3-3.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/atlas-notification-0.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-compress-1.4.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-yarn-server-web-proxy-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/joda-time-2.9.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/tez-yarn-timeline-history-0.7.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-yarn-common-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/antlr-
runtime-3.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-pool-1.5.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/ivy-2.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-shims-0.20S-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-service-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/asm-3.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/curator-framework-2.6.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/tez-mapreduce-0.7.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/parquet-hadoop-bundle-1.8.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-yarn-api-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/avro-mapred-1.8.0-hadoop2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/asm-tree-3.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/transaction-api-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-codec-1.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jersey-json-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/xz-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/tez-runtime-internals-0.7.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/geronimo-jta_1.1_spec-1.1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/ST4-4.0.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/guice-3.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/paranamer-2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/slf4j-api-1.6.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/pentaho-aggdesigner-algorithm-5.1.5-jhyde.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/tez-runtime-library-0.7.0.2.6.4.0-91.jar\:/data/
1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/guice-servlet-3.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-yarn-server-applicationhistoryservice-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jaxb-api-2.2.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/tez-api-0.7.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/javassist-3.18.1-GA.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/opencsv-2.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/geronimo-annotation_1.0_spec-1.1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-dbcp-1.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/oozie-hadoop-utils-hadoop-2-4.2.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/servlet-api-2.5.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/asm-commons-3.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/avro-1.8.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/aws-java-sdk-core-1.10.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/azure-keyvault-core-0.8.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-common-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/atlas-client-v2-0.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/httpcore-4.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/oro-2.0.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/calcite-linq4j-1.2.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/okhttp-2.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jdo-api-3.0.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-annotations-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/slf4j-log4j12-1.6.6.jar\:/d
ata/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-serde-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hdfs-model-0.8.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/azure-storage-5.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jettison-1.3.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/libfb303-0.9.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/calcite-core-1.2.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/log4j-1.2.17.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/scalap-2.11.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/kafka_2.11-0.10.1.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/datanucleus-rdbms-4.1.7.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/derby-10.10.1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/tez-yarn-timeline-history-with-acls-0.7.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/json4s-ast_2.11-3.2.11.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-aws-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jetty-util-6.1.26.hwx.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-yarn-server-timeline-pluginstorage-2.7.3.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/aopalliance-1.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/aws-java-sdk-kms-1.10.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-metastore-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-cli-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-exec-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001
613009_0002/container_e46_1518001613009_0002_01_000002/curator-client-2.6.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-io-2.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jackson-core-2.4.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/scala-reflect-2.11.8.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-compiler-2.7.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-contrib-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/stax-api-1.0.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-ant-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/libthrift-0.9.3.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/activation-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-lang-2.4.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jersey-core-1.9.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/protobuf-java-2.5.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-shims-1.2.1000.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/okio-1.4.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/json4s-core_2.11-3.2.11.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jetty-all-7.6.0.v20120127.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-cli-1.2.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/json-20090211.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/commons-logging-1.1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jersey-multipart-1.19.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/tez-yarn-timeline-history-with-fs-0.7.0.2.6.4.0-91.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/janino-2.7.6.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002
/bonecp-0.8.0.RELEASE.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/curator-recipes-2.5.0.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/jaxb-impl-2.2.3-1.jar\:/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hadoop-azure-datalake-2.7.3.2.6.4.0-91.jar
user.name=admfkr
java.vm.specification.version=1.8
sun.java.command=org.apache.hadoop.mapred.YarnChild 10.242.145.210 33794 attempt_1518001613009_0002_m_000000_0 50577534877698
java.home=/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre
sun.arch.data.model=64
user.language=en
java.specification.vendor=Oracle Corporation
awt.toolkit=sun.awt.X11.XToolkit
java.vm.info=mixed mode
java.version=1.8.0_141
java.ext.dirs=/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/lib/ext\:/usr/java/packages/lib/ext
sun.boot.class.path=/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/lib/resources.jar\:/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/lib/rt.jar\:/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/lib/sunrsasign.jar\:/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/lib/jsse.jar\:/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/lib/jce.jar\:/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/lib/charsets.jar\:/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/lib/jfr.jar\:/usr/lib/jvm/java-1.8.0-oracle-1.8.0.141-1jpp.1.el7_3.x86_64/jre/classes
java.vendor=Oracle Corporation
file.separator=/
oozie.launcher.job.id=job_1518001613009_0002
oozie.action.stats.properties=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/stats.properties
java.vendor.url.bug=http\://bugreport.sun.com/bugreport/
sun.io.unicode.encoding=UnicodeLittle
sun.cpu.endian=little
sun.cpu.isalist=
------------------------

=================================================================

>>> Invoking Main class now >>>


Oozie Hive action configuration
=================================================================

Using action configuration file /data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/action.xml
Setting [tez.application.tags] tag: oozie-c06b0d53a514ec89a3bd520d6f3fdff3
------------------------
Setting env property for mapreduce.job.credentials.binary to: /data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/container_tokens
------------------------
------------------------
Setting env property for tez.credentials.path to: /data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/container_tokens
------------------------
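The two lines above only point the job at the container_tokens file; the launcher does not by itself obtain a Hive metastore delegation token. In a kerberized cluster the workflow has to ask Oozie for one through a <credentials> section and reference it from the Hive action. A minimal sketch of what workflow.xml would look like with the standard hcat credential type — the thrift URI, principal and credential name below are placeholders I filled in, not values taken from this log:

<credentials>
    <credential name="hcat_creds" type="hcat">
        <property>
            <name>hcat.metastore.uri</name>
            <!-- placeholder: use the cluster's real hive.metastore.uris value -->
            <value>thrift://s-msk-d-hd-mn1.*.*:9083</value>
        </property>
        <property>
            <name>hcat.metastore.principal</name>
            <!-- placeholder: use the cluster's real hive.metastore.kerberos.principal -->
            <value>hive/_HOST@*.*</value>
        </property>
    </credential>
</credentials>
...
<action name="clear_hive_tbl" cred="hcat_creds">
    <hive xmlns="uri:oozie:hive-action:0.2">
        ...
    </hive>
</action>

This assumes the hcat credential class is enabled in oozie-site (oozie.credentials.credentialclasses, which Ambari normally configures). Without such a credential the launcher has no metastore token in container_tokens, and the thrift connection to the metastore fails, which would explain the "can't connect to hive metastore" behaviour described above.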

Hive Configuration Properties:
------------------------
mapreduce.jobtracker.address=s-msk-d-hd-mn1.*.*:8050
tez.runtime.sorter.class=PIPELINED
hive.exec.reducers.bytes.per.reducer=67108864
yarn.log-aggregation.file-formats=IndexedFormat,TFile
dfs.namenode.resource.check.interval=5000
ipc.server.tcpnodelay=true
hadoop.security.group.mapping.ldap.posix.attr.uid.name=uidNumber
mapreduce.jobhistory.client.thread-count=10
oozie.job.id=0000003-180207125715047-oozie-oozi-W
yarn.application.classpath=/etc/hadoop/conf,/usr/hdp/2.6.4.0-91/hadoop/*,/usr/hdp/2.6.4.0-91/hadoop/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*
yarn.admin.acl=yarn,dr.who
yarn.app.mapreduce.am.job.committer.cancel-timeout=60000
hadoop.config.dir=/etc/hadoop/conf
hive.exec.orc.default.compress=ZLIB
mapreduce.job.emit-timeline-data=false
dfs.journalnode.rpc-address=0.0.0.0:8485
yarn.resourcemanager.leveldb-state-store.path=${hadoop.tmp.dir}/yarn/system/rmstore
ipc.client.connection.maxidletime=30000
yarn.nodemanager.process-kill-wait.ms=2000
hive.smbjoin.cache.rows=10000
mapreduce.jobtracker.handler.count=10
io.map.index.interval=128
dfs.namenode.https-address=0.0.0.0:50470
dfs.mover.max-no-move-interval=60000
fs.s3n.multipart.uploads.enabled=false
mapreduce.task.profile.reduces=0-2
io.seqfile.sorter.recordlimit=1000000
hadoop.util.hash.type=murmur
mapreduce.tasktracker.tasks.sleeptimebeforesigkill=5000
hadoop.proxyuser.zeppelin.hosts=*
dfs.namenode.replication.min=1
fs.s3a.path.style.access=false
mapreduce.jobtracker.jobhistory.block.size=3145728
dfs.namenode.fs-limits.min-block-size=1048576
fs.AbstractFileSystem.file.impl=org.apache.hadoop.fs.local.LocalFs
net.topology.script.number.args=100
hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.ATSHook,org.apache.atlas.hive.hook.HiveHook
yarn.resourcemanager.container-tokens.master-key-rolling-interval-secs=86400
mapreduce.map.output.compress.codec=org.apache.hadoop.io.compress.DefaultCodec
yarn.nodemanager.windows-container.memory-limit.enabled=false
yarn.timeline-service.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD
mapreduce.input.fileinputformat.split.minsize=0
hadoop.security.group.mapping=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback
mapreduce.jobtracker.system.dir=${hadoop.tmp.dir}/mapred/system
hive.zookeeper.quorum=s-msk-d-hd-dn1.*.*:2181,s-msk-d-hd-dn2.*.*:2181,s-msk-d-hd-mn1.*.*:2181
mapreduce.job.end-notification.max.attempts=5
mapreduce.reduce.speculative=false
yarn.nodemanager.localizer.cache.cleanup.interval-ms=600000
hadoop.proxyuser.beacon.groups=*
yarn.timeline-service.leveldb-timeline-store.start-time-read-cache-size=10000
dfs.namenode.replication.interval=3
yarn.resourcemanager.admin.address=s-msk-d-hd-mn1.*.*:8141
mapreduce.job.maps=2
mapreduce.job.ubertask.enable=false
yarn.timeline-service.entity-group-fs-store.retain-seconds=604800
hadoop.proxyuser.knox.hosts=s-msk-d-bdata1.*.*
dfs.client.use.datanode.hostname=false
mapreduce.am.max-attempts=2
hive.prewarm.enabled=false
yarn.resourcemanager.zk-num-retries=1000
hadoop.proxyuser.knox.groups=users
s3.blocksize=67108864
dfs.datanode.data.dir=/data/1/hadoop/hdfs/data
mapreduce.reduce.shuffle.parallelcopies=30
adl.feature.ownerandgroup.enableupn=false
fs.s3.buffer.dir=${hadoop.tmp.dir}/s3
hadoop.registry.zk.retry.ceiling.ms=60000
hive.tez.max.partition.factor=2.0
dfs.datanode.data.dir.perm=750
tez.am.container.reuse.enabled=true
yarn.nodemanager.env-whitelist=JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,HADOOP_YARN_HOME,HADOOP_HOME,PATH,LANG,TZ
dfs.namenode.xattrs.enabled=true
dfs.datanode.bp-ready.timeout=20
dfs.datanode.transfer.socket.send.buffer.size=0
yarn.app.mapreduce.client.job.max-retries=30
yarn.nodemanager.linux-container-executor.cgroups.hierarchy=/hadoop-yarn
yarn.resourcemanager.recovery.enabled=true
yarn.app.mapreduce.am.container.log.backups=0
hadoop.proxyuser.yarn.hosts=s-msk-d-hd-mn1.*.*
yarn.nodemanager.disk-health-checker.interval-ms=120000
hive.default.fileformat=TextFile
tez.am.launch.cmd-opts=-XX:+PrintGCDetails -verbose:gc -XX:+PrintGCTimeStamps -XX:+UseNUMA -XX:+UseParallelGC
dfs.namenode.list.cache.directives.num.responses=100
hive.security.metastore.authorization.auth.reads=true
fs.s3a.max.total.tasks=5
mapreduce.shuffle.port=13562
mapreduce.reduce.maxattempts=4
yarn.resourcemanager.resource-tracker.client.thread-count=50
dfs.namenode.replication.considerLoad=true
yarn.resourcemanager.webapp.cross-origin.enabled=true
yarn.resourcemanager.bind-host=0.0.0.0
yarn.nodemanager.delete.thread-count=4
hive.compactor.worker.threads=0
yarn.nodemanager.admin-env=MALLOC_ARENA_MAX=$MALLOC_ARENA_MAX
yarn.resourcemanager.proxy-user-privileges.enabled=true
mapreduce.job.speculative.speculative-cap-total-tasks=0.01
ftp.replication=3
mapreduce.job.speculative.slowtaskthreshold=1.0
yarn.sharedcache.cleaner.initial-delay-mins=10
file.bytes-per-checksum=512
s3native.client-write-packet-size=65536
dfs.datanode.slow.io.warning.threshold.ms=300
io.seqfile.lazydecompress=true
mapreduce.task.skip.start.attempts=2
hadoop.security.dns.log-slow-lookups.threshold.ms=1000
dfs.namenode.reject-unresolved-dn-topology-mapping=false
yarn.timeline-service.entity-group-fs-store.leveldb-cache-read-cache-size=10485760
yarn.sharedcache.admin.address=0.0.0.0:8047
hive.tez.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat
mapreduce.jobtracker.taskcache.levels=2
mapreduce.job.jvm.numtasks=1
dfs.namenode.top.num.users=10
yarn.nodemanager.linux-container-executor.cgroups.mount=false
yarn.sharedcache.checksum.algo.impl=org.apache.hadoop.yarn.sharedcache.ChecksumSHA256Impl
hive.limit.optimize.enable=true
mapreduce.job.classloader=false
tez.am.resource.memory.mb=4096
yarn.log-aggregation-enable=true
yarn.resourcemanager.nodemanager.minimum.version=NONE
mapreduce.reduce.shuffle.fetch.retry.interval-ms=1000
hadoop.proxyuser.hue.groups=*
hadoop.security.kms.client.encrypted.key.cache.size=500
mapreduce.output.fileoutputformat.compress.type=BLOCK
hadoop.hdfs.configuration.version=1
dfs.client.failover.proxy.provider.DataLakeNNHA=org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider
yarn.timeline-service.entity-group-fs-store.done-dir=/ats/done/
yarn.nodemanager.log.retain-seconds=10800
yarn.nodemanager.local-cache.max-files-per-directory=8192
mapreduce.job.end-notification.retry.interval=1000
ha.failover-controller.new-active.rpc-timeout.ms=60000
dfs.client.read.shortcircuit=true
hadoop.ssl.hostname.verifier=DEFAULT
hive.merge.smallfiles.avgsize=16000000
s3native.blocksize=67108864
hive.server2.logging.operation.enabled=true
dfs.client.failover.sleep.base.millis=500
dfs.permissions.superusergroup=hdfs
hadoop.registry.zk.retry.times=5
hive.support.concurrency=false
dfs.client.socket.send.buffer.size=0
yarn.scheduler.capacity.ordering-policy.priority-utilization.underutilized-preemption.enabled=true
dfs.blockreport.initialDelay=120
hive.cli.print.header=false
yarn.scheduler.maximum-allocation-mb=16384
oozie.HadoopAccessorService.created=true
yarn.timeline-service.http-authentication.proxyuser.ambari-server-devdatalake.groups=*
mapreduce.task.io.sort.factor=100
dfs.namenode.http-address.DevDataLakeNNHA.nn2=s-msk-d-hd-dn2.*.*:50070
tez.grouping.max-size=1073741824
yarn.timeline-service.entity-group-fs-store.with-user-dir=false
dfs.client.failover.sleep.max.millis=15000
dfs.namenode.http-address.DevDataLakeNNHA.nn1=s-msk-d-hd-mn1.*.*:50070
fs.s3.sleepTimeSeconds=10
ha.health-monitor.rpc-timeout.ms=45000
hive.exec.dynamic.partition=true
yarn.nodemanager.linux-container-executor.nonsecure-mode.limit-users=true
fs.AbstractFileSystem.viewfs.impl=org.apache.hadoop.fs.viewfs.ViewFs
fs.ftp.host=0.0.0.0
fs.adl.oauth2.access.token.provider.type=ClientCredential
yarn.nodemanager.linux-container-executor.nonsecure-mode.local-user=nobody
yarn.resourcemanager.webapp.spnego-principal=HTTP/_HOST@*.*
hive.optimize.reducededuplication=true
yarn.timeline-service.entity-group-fs-store.active-dir=/ats/active/
fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
yarn.nodemanager.linux-container-executor.group=hadoop
dfs.namenode.fs-limits.max-blocks-per-file=1048576
mapreduce.tasktracker.http.threads=40
yarn.resourcemanager.am-rm-tokens.master-key-rolling-interval-secs=86400
hive.vectorized.execution.enabled=true
hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdConfOnlyAuthorizerFactory
hive.exec.orc.compression.strategy=SPEED
dfs.namenode.lifeline.handler.ratio=0.10
io.compression.codec.bzip2.library=system-native
mapreduce.map.skip.maxrecords=0
dfs.namenode.rpc-address.DevDataLakeNNHA.nn1=s-msk-d-hd-mn1.*.*:8020
dfs.namenode.rpc-address.DevDataLakeNNHA.nn2=s-msk-d-hd-dn2.*.*:8020
ipc.ping.interval=60000
mapreduce.jobhistory.loadedjobs.cache.size=5
mapreduce.job.credentials.binary=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/container_tokens
dfs.storage.policy.enabled=true
tez.am.am-rm.heartbeat.interval-ms.max=250
mapreduce.client.output.filter=FAILED
yarn.timeline-service.client.best-effort=false
atlas.rest.address=http://s-msk-d-hd-mn1.*.*:21000
mapreduce.jobtracker.persist.jobstatus.hours=1
dfs.datanode.block-pinning.enabled=false
mapreduce.job.speculative.retry-after-no-speculate=1000
yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled=false
s3native.stream-buffer-size=4096
tez.shuffle-vertex-manager.max-src-fraction=0.4
io.seqfile.local.dir=${hadoop.tmp.dir}/io/local
yarn.nodemanager.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD
dfs.encrypt.data.transfer.cipher.key.bitlength=128
hive.server2.transport.mode=binary
hive.server2.thrift.http.path=cliservice
yarn.app.mapreduce.am.log.level=INFO
dfs.datanode.sync.behind.writes=false
dfs.namenode.stale.datanode.interval=30000
mapreduce.task.io.sort.mb=1146
yarn.resourcemanager.zk-state-store.parent-path=/rmstore
fs.client.resolve.remote.symlinks=true
hadoop.ssl.enabled.protocols=TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2
mapreduce.reduce.cpu.vcores=1
yarn.client.failover-retries=0
mapreduce.jobhistory.address=s-msk-d-hd-mn1.*.*:10020
hadoop.ssl.enabled=false
mapreduce.admin.reduce.child.java.opts=-server -XX:NewRatio=8 -Djava.net.preferIPv4Stack=true -Dhdp.version=${hdp.version}
dfs.namenode.name.dir=/data/hadoop/hdfs/namenode
datanucleus.fixedDatastore=true
hive.server2.thrift.sasl.qop=auth
tez.tez-ui.history-url.base=http://s-msk-d-hd-mn1.*.*:8080/#/main/view/TEZ/tez_cluster_instance
hive.compactor.delta.num.threshold=10
dfs.block.access.token.enable=true
dfs.webhdfs.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD,TRACE
mapreduce.job.speculative.retry-after-speculate=15000
dfs.datanode.fileio.profiling.sampling.percentage=0
dfs.datanode.address=0.0.0.0:1019
hive.map.aggr.hash.percentmemory=0.5
ipc.client.connect.max.retries=50
yarn.nodemanager.container.stderr.tail.bytes=4096
dfs.short.circuit.shared.memory.watcher.interrupt.check.ms=60000
dfs.webhdfs.rest-csrf.custom-header=X-XSRF-HEADER
yarn.resourcemanager.ha.automatic-failover.embedded=true
dfs.datanode.handler.count=10
hive.mapred.reduce.tasks.speculative.execution=false
hadoop.proxyuser.oozie.groups=*
hive.vectorized.groupby.flush.percent=0.1
yarn.nodemanager.log-container-debug-info.enabled=true
mapreduce.task.profile.map.params=${mapreduce.task.profile.params}
yarn.resourcemanager.nodemanagers.heartbeat-interval-ms=1000
yarn.nodemanager.aux-services.spark2_shuffle.class=org.apache.spark.network.yarn.YarnShuffleService
dfs.namenode.block-placement-policy.default.prefer-local-node=true
dfs.namenode.resource.checked.volumes.minimum=1
yarn.resourcemanager.keytab=/etc/security/keytabs/rm.service.keytab
hive.exec.parallel=false
hive.exec.submitviachild=false
yarn.client.max-cached-nodemanagers-proxies=0
yarn.sharedcache.app-checker.class=org.apache.hadoop.yarn.server.sharedcachemanager.RemoteAppChecker
fs.trash.checkpoint.interval=0
dfs.journalnode.http-address=0.0.0.0:8480
tez.am.container.idle.release-timeout-min.millis=10000
yarn.app.mapreduce.am.staging-dir=/user
yarn.nm.liveness-monitor.expiry-interval-ms=600000
mapreduce.reduce.shuffle.merge.percent=0.66
hive.convert.join.bucket.mapjoin.tez=false
hive.execution.engine=tez
dfs.namenode.retrycache.heap.percent=0.03f
ipc.client.connect.timeout=20000
hive.tez.container.size=2048
yarn.nodemanager.local-dirs=/data/1/hadoop/yarn/local
dfs.balancer.block-move.timeout=0
yarn.nodemanager.recovery.enabled=true
hive.server2.use.SSL=false
hadoop.proxyuser.yarn.groups=*
s3.replication=3
yarn.resourcemanager.am.max-attempts=2
hive.optimize.null.scan=true
yarn.timeline-service.client.internal-timers-ttl-secs=420
hadoop.kerberos.min.seconds.before.relogin=60
yarn.node-labels.fs-store.root-dir=/system/yarn/node-labels
dfs.image.compress=false
dfs.datanode.available-space-volume-choosing-policy.balanced-space-preference-fraction=0.75f
yarn.nodemanager.aux-services.spark_shuffle.classpath=/usr/hdp/${hdp.version}/spark/aux/*
dfs.namenode.edit.log.autoroll.multiplier.threshold=2.0
hadoop.security.group.mapping.ldap.ssl=false
dfs.namenode.checkpoint.check.period=60
fs.defaultFS=hdfs://DevDataLakeNNHA
hive.enforce.sorting=true
hadoop.security.group.mapping.ldap.search.attr.group.name=cn
yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage=90
yarn.nodemanager.container-monitor.procfs-tree.smaps-based-rss.enabled=false
hive.map.aggr.hash.min.reduction=0.5
mapreduce.map.sort.spill.percent=0.7
yarn.log-aggregation.file-controller.TFile.class=org.apache.hadoop.yarn.logaggregation.filecontroller.tfile.LogAggregationTFileController
dfs.namenode.http-address=0.0.0.0:50070
hadoop.security.crypto.codec.classes.aes.ctr.nopadding=org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec,org.apache.hadoop.crypto.JceAesCtrCryptoCodec
hive.optimize.metadataonly=true
hadoop.security.groups.negative-cache.secs=30
hadoop.ssl.server.conf=ssl-server.xml
yarn.client.nodemanager-client-async.thread-pool-max-size=500
mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging
mapreduce.jobhistory.admin.address=0.0.0.0:10033
dfs.namenode.startup.delay.block.deletion.sec=3600
yarn.nodemanager.health-checker.interval-ms=135000
dfs.namenode.checkpoint.max-retries=3
ftp.client-write-packet-size=65536
hive.conf.restricted.list=hive.security.authenticator.manager,hive.security.authorization.manager,hive.users.in.admin.role
yarn.timeline-service.keytab=/etc/security/keytabs/yarn.service.keytab
hive.auto.convert.sortmerge.join.to.mapjoin=false
hive.fetch.task.aggr=false
hadoop.proxyuser.livy.hosts=*
mapreduce.reduce.shuffle.fetch.retry.enabled=1
yarn.app.mapreduce.task.container.log.backups=0
dfs.heartbeat.interval=3
tez.task.launch.cluster-default.cmd-opts=-server -Djava.net.preferIPv4Stack=true -Dhdp.version=${hdp.version}
ha.zookeeper.session-timeout.ms=5000
hive.exec.scratchdir=/datalake/tmp/hive
hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret
mapreduce.jobhistory.webapp.xfs-filter.xframe-options=SAMEORIGIN
yarn.nodemanager.log-aggregation.compression-type=gz
tez.task.generate.counters.per.io=true
tez.am.launch.env=LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64:./tezlib/lib/native:./tezlib/lib/native/Linux-amd64-64
yarn.resourcemanager.work-preserving-recovery.scheduling-wait-ms=10000
yarn.nodemanager.log-dirs=/data/log/yarn
yarn.resourcemanager.zk-address=s-msk-d-hd-dn1.*.*:2181,s-msk-d-hd-dn2.*.*:2181,s-msk-d-hd-mn1.*.*:2181
mapreduce.job.speculative.minimum-allowed-tasks=10
dfs.datanode.cache.revocation.timeout.ms=900000
mapreduce.jobhistory.recovery.store.class=org.apache.hadoop.mapreduce.v2.hs.HistoryServerLeveldbStateStoreService
mapreduce.task.combine.progress.records=10000
mapreduce.jobtracker.instrumentation=org.apache.hadoop.mapred.JobTrackerMetricsInst
yarn.nodemanager.kill-escape.launch-command-line=slider-agent,LLAP
hive.auto.convert.join.noconditionaltask=true
hive.stats.fetch.column.stats=true
yarn.nodemanager.address=0.0.0.0:45454
mapreduce.job.reduces=1
yarn.timeline-service.address=s-msk-d-hd-mn1.*.*:10200
hadoop.security.kms.client.encrypted.key.cache.expiry=43200000
yarn.resourcemanager.configuration.provider-class=org.apache.hadoop.yarn.LocalConfigurationProvider
yarn.sharedcache.enabled=false
hadoop.registry.zk.session.timeout.ms=60000
tfile.io.chunk.size=1048576
hive.compactor.initiator.on=false
ha.health-monitor.sleep-after-disconnect.ms=1000
mapreduce.tasktracker.reduce.tasks.maximum=2
fs.azure.user.agent.prefix=unknown
hive.metastore.authorization.storage.checks=false
dfs.datanode.directoryscan.threads=1
ssl.client.keystore.password=bigdata
dfs.datanode.directoryscan.interval=21600
hadoop.http.authentication.token.validity=36000
ha.failover-controller.graceful-fence.rpc-timeout.ms=5000
mapreduce.tasktracker.local.dir.minspacekill=0
mapreduce.jobhistory.cleaner.interval-ms=86400000
dfs.namenode.datanode.registration.ip-hostname-check=true
dfs.journalnode.kerberos.principal=jn/_HOST@*.*
mapreduce.jobtracker.http.address=0.0.0.0:50030
tez.cluster.additional.classpath.prefix=/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/etc/hadoop/conf/secure
dfs.namenode.backup.http-address=0.0.0.0:50105
mapreduce.tasktracker.outofband.heartbeat=false
hadoop.security.crypto.buffer.size=8192
mapreduce.reduce.shuffle.read.timeout=180000
mapreduce.reduce.skip.proc.count.autoincr=true
oozie.child.mapreduce.job.tags=oozie-c06b0d53a514ec89a3bd520d6f3fdff3
mapreduce.ifile.readahead.bytes=4194304
hadoop.registry.secure=true
yarn.timeline-service.generic-application-history.save-non-am-container-meta-info=false
dfs.namenode.safemode.min.datanodes=0
dfs.datanode.kerberos.principal=dn/_HOST@*.*
mapreduce.tasktracker.report.address=127.0.0.1:0
yarn.timeline-service.http-authentication.type=kerberos
dfs.webhdfs.socket.read-timeout=60s
dfs.webhdfs.enabled=true
yarn.dispatcher.drain-events.timeout=300000
hive.security.authorization.enabled=true
dfs.namenode.avoid.write.stale.datanode=true
yarn.log-aggregation.retain-seconds=2592000
dfs.namenode.kerberos.principal=nn/_HOST@*.*
mapreduce.job.complete.cancel.delegation.tokens=false
yarn.resourcemanager.fail-fast=${yarn.fail-fast}
fs.s3a.multiobjectdelete.enable=true
mapreduce.shuffle.connection-keep-alive.timeout=5
yarn.nodemanager.kill-escape.user=hive
hive.user.install.directory=/user/
tez.am.tez-ui.history-url.template=__HISTORY_URL_BASE__?viewPath=%2F%23%2Ftez-app%2F__APPLICATION_ID__
yarn.scheduler.minimum-allocation-vcores=1
hive.vectorized.groupby.checkinterval=4096
yarn.nodemanager.runtime.linux.docker.privileged-containers.allowed=false
yarn.timeline-service.client.max-retries=30
yarn.timeline-service.client.retry-interval-ms=1000
nfs.exports.allowed.hosts=* rw
mapreduce.shuffle.max.threads=0
dfs.client.mmap.cache.size=256
io.file.buffer.size=131072
yarn.timeline-service.bind-host=0.0.0.0
dfs.ha.zkfc.nn.http.timeout.ms=20000
yarn.nodemanager.container-metrics.unregister-delay-ms=60000
dfs.namenode.checkpoint.txns=1000000
ipc.client.connect.retry.interval=1000
dfs.journalnode.edits.dir=/hadoop/hdfs/journal
mapreduce.reduce.shuffle.connect.timeout=180000
yarn.resourcemanager.fs.state-store.uri= 
fs.AbstractFileSystem.adl.impl=org.apache.hadoop.fs.adl.Adl
hadoop.registry.zk.connection.timeout.ms=15000
dfs.cachereport.intervalMsec=10000
yarn.timeline-service.client.fd-flush-interval-secs=5
dfs.ha.tail-edits.rolledits.timeout=60
yarn.app.mapreduce.am.container.log.limit.kb=0
hive.server2.tez.initialize.default.sessions=false
atlas.hook.hive.minThreads=1
yarn.nodemanager.resourcemanager.minimum.version=NONE
hive.txn.max.open.batch=1000
hive.compactor.check.interval=300L
yarn.resourcemanager.address=s-msk-d-hd-mn1.*.*:8050
file.stream-buffer-size=4096
mapreduce.job.ubertask.maxreduces=1
yarn.resourcemanager.nodemanager-connect-retries=10
fs.azure.secure.mode=false
ipc.client.idlethreshold=8000
hadoop.security.group.mapping.ldap.search.group.hierarchy.levels=0
yarn.nodemanager.logaggregation.threadpool-size-max=100
ssl.client.keystore.location=/etc/security/clientKeys/keystore.jks
ftp.stream-buffer-size=4096
yarn.sharedcache.client-server.address=0.0.0.0:8045
dfs.client.failover.proxy.provider.DevDataLakeNNHA=org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider
dfs.client.failover.connection.retries.on.timeouts=0
tez.runtime.compress=true
hadoop.registry.client.auth=kerberos
dfs.namenode.replication.work.multiplier.per.iteration=2
hadoop.http.authentication.simple.anonymous.allowed=true
yarn.client.nodemanager-connect.retry-interval-ms=10000
yarn.timeline-service.webapp.rest-csrf.custom-header=X-XSRF-Header
yarn.nodemanager.linux-container-executor.resources-handler.class=org.apache.hadoop.yarn.server.nodemanager.util.DefaultLCEResourcesHandler
yarn.timeline-service.leveldb-timeline-store.read-cache-size=104857600
dfs.internal.nameservices=DevDataLakeNNHA
hadoop.security.authentication=kerberos
hive.optimize.constant.propagation=true
dfs.image.compression.codec=org.apache.hadoop.io.compress.DefaultCodec
yarn.resourcemanager.monitor.capacity.preemption.natural_termination_factor=1
mapreduce.task.files.preserve.failedtasks=false
yarn.timeline-service.http-authentication.proxyuser.ambari-server-devdatalake.hosts=s-msk-d-hd-mn1.*.*
dfs.client.read.shortcircuit.streams.cache.size=4096
dfs.ha.namenodes.DevDataLakeNNHA=nn1,nn2
yarn.log-aggregation.file-controller.IndexedFormat.class=org.apache.hadoop.yarn.logaggregation.filecontroller.ifile.LogAggregationIndexedFileController
file.replication=1
hadoop.registry.system.accounts=sasl:yarn,sasl:jhs,sasl:hdfs-devdatalake,sasl:rm,sasl:hive
mapreduce.jobhistory.joblist.cache.size=20000
hive.exec.pre.hooks=org.apache.hadoop.hive.ql.hooks.ATSHook
yarn.timeline-service.principal=yarn/_HOST@*.*
dfs.namenode.fs-limits.max-xattrs-per-inode=32
yarn.resourcemanager.work-preserving-recovery.enabled=true
dfs.image.transfer.timeout=60000
tez.am.container.reuse.rack-fallback.enabled=true
nfs.wtmax=1048576
io.compression.codecs=org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec
fs.s3a.multipart.purge=false
yarn.resourcemanager.webapp.ui-actions.enabled=true
fs.s3a.connection.establish.timeout=5000
dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}
hive.metastore.client.connect.retry.delay=5s
dfs.stream-buffer-size=4096
dfs.namenode.invalidate.work.pct.per.iteration=0.32f
fs.s3a.multipart.purge.age=86400
yarn.resourcemanager.scheduler.client.thread-count=50
ipc.maximum.data.length=67108864
yarn.resourcemanager.webapp.rest-csrf.enabled=false
tfile.fs.input.buffer.size=262144
hive.exec.reducers.max=1009
hive.fetch.task.conversion.threshold=1073741824
hadoop.http.authentication.type=simple
dfs.namenode.list.encryption.zones.num.responses=100
mapreduce.map.cpu.vcores=1
dfs.namenode.inode.attributes.provider.class=org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer
tez.history.logging.timeline-cache-plugin.old-num-dags-per-group=5
hive.auto.convert.join.noconditionaltask.size=572662306
ssl.client.keystore.type=jks
dfs.namenode.decommission.interval=30
fs.AbstractFileSystem.webhdfs.impl=org.apache.hadoop.fs.WebHdfs
ftp.bytes-per-checksum=512
dfs.user.home.dir.prefix=/user
hadoop.workaround.non.threadsafe.getpwuid=false
yarn.nodemanager.pmem-check-enabled=true
dfs.namenode.inotify.max.events.per.rpc=1000
mapreduce.task.profile.maps=0-2
mapreduce.shuffle.ssl.file.buffer.size=65536
dfs.namenode.https-address.DevDataLakeNNHA.nn1=s-msk-d-hd-mn1.*.*:50470
dfs.namenode.https-address.DevDataLakeNNHA.nn2=s-msk-d-hd-dn2.*.*:50470
dfs.datanode.transfer.socket.recv.buffer.size=0
yarn.timeline-service.webapp.https.address=s-msk-d-hd-mn1.*.*:8190
hive.enforce.bucketing=true
yarn.app.mapreduce.am.command-opts=-Xmx1638m -Dhdp.version=2.6.4.0-91 -Djava.io.tmpdir=./tmp
yarn.resourcemanager.amlauncher.thread-count=50
tez.grouping.min-size=16777216
yarn.sharedcache.nm.uploader.replication.factor=10
hive.exec.local.scratchdir=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hivetmp/scratchdir
hadoop.registry.zk.root=/registry
yarn.client.failover-proxy-provider=org.apache.hadoop.yarn.client.RequestHedgingRMFailoverProxyProvider
yarn.timeline-service.client.fd-retain-secs=300
hadoop.security.auth_to_local=RULE:[1:$1@$0](ambari-qa-devdatalake@*.*)s/.*/ambari-qa/
RULE:[1:$1@$0](druid-devdatalake@*.*)s/.*/druid/
RULE:[1:$1@$0](hbase-devdatalake@*.*)s/.*/hbase/
RULE:[1:$1@$0](hdfs-devdatalake@*.*)s/.*/hdfs/
RULE:[1:$1@$0](spark-devdatalake@*.*)s/.*/spark/
RULE:[1:$1@$0](superset-devdatalake@*.*)s/.*/superset/
RULE:[1:$1@$0](zeppelin-devdatalake@*.*)s/.*/zeppelin/
RULE:[1:$1@$0](.*@*.*)s/@.*//
RULE:[2:$1@$0](amshbase@*.*)s/.*/ams/
RULE:[2:$1@$0](amszk@*.*)s/.*/ams/
RULE:[2:$1@$0](atlas@*.*)s/.*/atlas/
RULE:[2:$1@$0](beacon@*.*)s/.*/beacon/
RULE:[2:$1@$0](dn@*.*)s/.*/hdfs/
RULE:[2:$1@$0](falcon@*.*)s/.*/falcon/
RULE:[2:$1@$0](hbase@*.*)s/.*/hbase/
RULE:[2:$1@$0](hive@*.*)s/.*/hive/
RULE:[2:$1@$0](jhs@*.*)s/.*/mapred/
RULE:[2:$1@$0](jn@*.*)s/.*/hdfs/
RULE:[2:$1@$0](knox@*.*)s/.*/knox/
RULE:[2:$1@$0](livy@*.*)s/.*/livy/
RULE:[2:$1@$0](nifi@*.*)s/.*/nifi/
RULE:[2:$1@$0](nm@*.*)s/.*/yarn/
RULE:[2:$1@$0](nn@*.*)s/.*/hdfs/
RULE:[2:$1@$0](oozie@*.*)s/.*/oozie/
RULE:[2:$1@$0](rangeradmin@*.*)s/.*/ranger/
RULE:[2:$1@$0](rangertagsync@*.*)s/.*/rangertagsync/
RULE:[2:$1@$0](rangerusersync@*.*)s/.*/rangerusersync/
RULE:[2:$1@$0](rm@*.*)s/.*/yarn/
RULE:[2:$1@$0](yarn@*.*)s/.*/yarn/
DEFAULT
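
The auth_to_local rules above are what map each Kerberos principal to a local short name (for example, hive/<host>@<realm> becomes hive). As a sanity check that the rules load and match as expected, they can be evaluated directly — a minimal Java sketch, assuming the cluster's core-site.xml is on the classpath and REALM stands in for the realm redacted as *.* above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.HadoopKerberosName;

public class AuthToLocalCheck {
    public static void main(String[] args) throws Exception {
        // Pick up hadoop.security.auth_to_local from core-site.xml
        Configuration conf = new Configuration();
        HadoopKerberosName.setConfiguration(conf);
        // With the real realm in place of the redacted *.*, the rule
        // RULE:[2:$1@$0](hive@...)s/.*/hive/ should print "hive"
        // for any two-component hive service principal
        System.out.println(new HadoopKerberosName("hive/somehost@REALM").getShortName());
    }
}
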
hadoop.http.cross-origin.max-age=1800
yarn.nodemanager.remote-app-log-dir-suffix=logs
mapreduce.jobhistory.principal=jhs/_HOST@*.*
nfs.mountd.port=4242
mapreduce.reduce.merge.inmem.threshold=1000
tez.lib.uris=/hdp/apps/${hdp.version}/tez/tez.tar.gz
mapreduce.jobtracker.jobhistory.lru.cache.size=5
hive.compactor.worker.timeout=86400L
dfs.namenode.num.checkpoints.retained=2
mapreduce.job.queuename=load_raw
mapreduce.jobhistory.max-age-ms=604800000
hive.server2.authentication.kerberos.principal=hive/_HOST@*.*
fs.azure.authorization=false
yarn.nodemanager.aux-services.spark_shuffle.class=org.apache.spark.network.yarn.YarnShuffleService
yarn.nodemanager.localizer.client.thread-count=5
yarn.sharedcache.uploader.server.thread-count=50
hive.enforce.sortmergebucketmapjoin=true
hive.tez.smb.number.waves=0.5
net.topology.script.file.name=/etc/hadoop/conf/topology_script.py
dfs.namenode.fslock.fair=false
dfs.blockreport.split.threshold=1000000
dfs.block.scanner.volume.bytes.per.second=1048576
dfs.datanode.balance.bandwidthPerSec=6250000
ha.zookeeper.quorum=s-msk-d-hd-dn1.*.*:2181,s-msk-d-hd-dn2.*.*:2181,s-msk-d-hd-mn1.*.*:2181
ipc.client.rpc-timeout.ms=0
yarn.resourcemanager.amlauncher.log.command=false
dfs.default.chunk.view.size=32768
hive.tez.auto.reducer.parallelism=true
mapreduce.jobhistory.datestring.cache.size=200000
mapreduce.task.profile.params=-agentlib:hprof=cpu=samples,heap=sites,force=n,thread=y,verbose=n,file=%s
hive.security.metastore.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider
dfs.namenode.handler.count=100
datanucleus.autoCreateSchema=false
dfs.image.transfer.bandwidthPerSec=0
mapreduce.jobtracker.expire.trackers.interval=600000
yarn.app.mapreduce.client.max-retries=3
fs.azure.local.sas.key.mode=false
yarn.node-labels.enabled=false
yarn.nodemanager.log.retain-second=604800
fs.s3a.threads.max=10
yarn.timeline-service.handler-thread-count=10
ipc.server.listen.queue.size=128
yarn.resourcemanager.connect.max-wait.ms=900000
hadoop.proxyuser.hcat.groups=*
mapreduce.job.max.split.locations=10
yarn.resourcemanager.scheduler.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler
dfs.blocksize=134217728
mapreduce.shuffle.connection-keep-alive.enable=false
fs.s3a.threads.keepalivetime=60
datanucleus.cache.level2.type=none
ha.failover-controller.cli-check.rpc-timeout.ms=20000
ha.zookeeper.acl=sasl:nn:rwcda
dfs.namenode.write.stale.datanode.ratio=1.0f
dfs.encrypt.data.transfer=false
dfs.datanode.shared.file.descriptor.paths=/dev/shm,/tmp
hive.exec.compress.intermediate=false
mapreduce.input.lineinputformat.linespermap=1
yarn.nodemanager.localizer.fetch.thread-count=4
yarn.resourcemanager.scheduler.address=s-msk-d-hd-mn1.*.*:8030
yarn.timeline-service.leveldb-timeline-store.start-time-write-cache-size=10000
hive.stats.autogather=true
hive.optimize.sort.dynamic.partition=false
yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes=org.apache.tez.dag.history.logging.ats.TimelineCachePluginImpl,org.apache.spark.deploy.history.yarn.plugin.SparkATSPlugin
dfs.client.read.shortcircuit.skip.checksum=false
mapreduce.shuffle.ssl.enabled=false
mapreduce.reduce.log.level=INFO
yarn.app.mapreduce.am.admin-command-opts=-Dhdp.version=${hdp.version}
yarn.resourcemanager.delegation-token.max-conf-size-bytes=12800
hadoop.registry.rm.enabled=true
hive.stats.dbclass=fs
yarn.nodemanager.log-aggregation.num-log-files-per-app=336
yarn.resourcemanager.system-metrics-publisher.dispatcher.pool-size=10
dfs.datanode.use.datanode.hostname=false
hive.metastore.cache.pinobjtypes=Table,Database,Type,FieldSchema,Order
yarn.resourcemanager.ha.enabled=false
dfs.web.authentication.kerberos.principal=HTTP/_HOST@*.*
fs.s3a.multipart.threshold=2147483647
mapreduce.reduce.shuffle.memory.limit.percent=0.25
hadoop.http.cross-origin.enabled=false
dfs.https.server.keystore.resource=ssl-server.xml
hive.cluster.delegation.token.store.zookeeper.connectString=s-msk-d-hd-dn1.*.*:2181,s-msk-d-hd-dn2.*.*:2181,s-msk-d-hd-mn1.*.*:2181
hive.tez.java.opts=-server -Djava.net.preferIPv4Stack=true -XX:NewRatio=8 -XX:+UseNUMA -XX:+UseParallelGC -XX:+PrintGCDetails -verbose:gc -XX:+PrintGCTimeStamps
mapreduce.jobtracker.taskscheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler
dfs.namenode.kerberos.internal.spnego.principal=HTTP/_HOST@*.*
yarn.resourcemanager.state-store.max-completed-applications=${yarn.resourcemanager.max-completed-applications}
hive.exec.submit.local.task.via.child=true
dfs.ha.namenodes.DataLakeNNHA=nn1,nn2
hive.cluster.delegation.token.store.zookeeper.znode=/hive/cluster/delegation
map.sort.class=org.apache.hadoop.util.QuickSort
dfs.datanode.dns.interface=default
hadoop.proxyuser.hcat.hosts=s-msk-d-hd-mn1.*.*
tez.am.container.reuse.non-local-fallback.enabled=false
mapreduce.reduce.shuffle.retry-delay.max.ms=60000
fs.s3a.buffer.dir=${hadoop.tmp.dir}/s3a
mapreduce.client.progressmonitor.pollinterval=1000
yarn.app.mapreduce.shuffle.log.limit.kb=0
dfs.datanode.max.locked.memory=0
oozie.action.id=0000003-180207125715047-oozie-oozi-W@clear_hive_tbl
dfs.namenode.retrycache.expirytime.millis=600000
dfs.datanode.scan.period.hours=504
dfs.ha.fencing.ssh.connect-timeout=30000
mapreduce.jobhistory.move.interval-ms=180000
dfs.client.block.write.replace-datanode-on-failure.best-effort=false
dfs.datanode.disk.check.min.gap=15m
yarn.nodemanager.log-aggregation.roll-monitoring-interval-seconds=3600
dfs.namenode.fs-limits.max-component-length=255
dfs.ha.fencing.methods=shell(/bin/true)
dfs.webhdfs.rest-csrf.enabled=false
dfs.datanode.ipc.address=0.0.0.0:8010
yarn.timeline-service.state-store-class=org.apache.hadoop.yarn.server.timeline.recovery.LeveldbTimelineStateStore
hadoop.security.crypto.cipher.suite=AES/CTR/NoPadding
dfs.namenode.path.based.cache.retry.interval.ms=30000
dfs.client.block.write.replace-datanode-on-failure.policy=DEFAULT
fs.s3a.fast.upload.active.blocks=4
dfs.ha.tail-edits.period=60
yarn.timeline-service.generic-application-history.max-applications=10000
hadoop.registry.jaas.context=Client
yarn.resourcemanager.hostname=s-msk-d-hd-mn1.*.*
hive.tez.min.partition.factor=0.25
dfs.client.retry.policy.enabled=false
hadoop.security.group.mapping.ldap.search.filter.group=(objectClass=group)
yarn.resourcemanager.principal=rm/_HOST@*.*
hadoop.shell.safely.delete.limit.num.files=100
hadoop.security.group.mapping.ldap.search.filter.user=(&(objectClass=user)(sAMAccountName={0}))
hive.metastore.kerberos.keytab.file=/etc/security/keytabs/hive.service.keytab
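
The keytab and principal entries above belong to the services themselves; the Oozie launcher container runs as the submitting user with no keytab on disk, so it can only authenticate with delegation tokens that Oozie obtains at submission time. For contrast, a service-style keytab login looks like this (a minimal sketch; the principal and keytab path are illustrative, not taken from this cluster):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class KeytabLogin {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        // Illustrative values: a launcher container has no such keytab,
        // which is why it must rely on tokens instead of a fresh login
        UserGroupInformation.loginUserFromKeytab(
                "hive/host.example.com@REALM",
                "/etc/security/keytabs/hive.service.keytab");
        System.out.println("Logged in as " + UserGroupInformation.getLoginUser());
    }
}
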
dfs.namenode.shared.edits.dir=qjournal://s-msk-d-hd-mn1.*.*:8485;s-msk-d-hd-dn1.*.*:8485;s-msk-d-hd-dn2.*.*:8485/DevDataLakeNNHA
dfs.namenode.edits.dir=${dfs.namenode.name.dir}
javax.jdo.option.ConnectionUserName=hive
yarn.client.failover-retries-on-socket-timeouts=0
dfs.namenode.decommission.max.concurrent.tracked.nodes=100
oozie.hive.script=drop_part.sql
ipc.server.log.slow.rpc=false
hive.optimize.bucketmapjoin.sortedmerge=false
mapreduce.jobhistory.recovery.store.leveldb.path=/hadoop/mapreduce/jhs
yarn.sharedcache.store.class=org.apache.hadoop.yarn.server.sharedcachemanager.store.InMemorySCMStore
dfs.support.append=true
yarn.nodemanager.windows-container.cpu-limit.enabled=false
yarn.nodemanager.vmem-pmem-ratio=2.1
dfs.namenode.checkpoint.period=21600
mapreduce.map.java.opts=-Xmx1638m
hive.auto.convert.sortmerge.join=true
dfs.ha.automatic-failover.enabled=true
yarn.resourcemanager.scheduler.monitor.policies=org.apache.hadoop.yarn.server.resourcemanager.monitor.capacity.ProportionalCapacityPreemptionPolicy
yarn.resourcemanager.leveldb-state-store.compaction-interval-secs=3600
yarn.app.mapreduce.am.containerlauncher.threadpool-initial-size=10
mapred.child.java.opts=-Xmx200m
mapreduce.jobtracker.retiredjobs.cache.size=1000
dfs.client.https.need-auth=false
yarn.timeline-service.entity-group-fs-store.cleaner-interval-seconds=3600
hive.merge.rcfile.block.level=true
tez.staging-dir=/data/tmp/${user.name}/staging
dfs.namenode.write-lock-reporting-threshold-ms=1000
fs.s3a.block.size=32M
fs.ftp.host.port=21
dfs.namenode.avoid.read.stale.datanode=true
mapred.job.name=
mapreduce.job.end-notification.retry.attempts=0
yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms=300000
yarn.ipc.rpc.class=org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC
yarn.timeline-service.http-authentication.kerberos.keytab=/etc/security/keytabs/spnego.service.keytab
ssl.client.truststore.location=/etc/security/clientKeys/all.jks
dfs.namenode.lease-recheck-interval-ms=2000
mapreduce.cluster.acls.enabled=false
tez.counters.max=10000
yarn.nodemanager.aux-services=mapreduce_shuffle,spark_shuffle,spark2_shuffle
mapreduce.job.ubertask.maxmaps=9
ssl.client.truststore.type=jks
dfs.hosts.exclude=/etc/hadoop/conf/dfs.exclude
hive.tez.log.level=INFO
yarn.nodemanager.runtime.linux.docker.default-container-network=host
yarn.nodemanager.container-manager.thread-count=20
hive.merge.mapfiles=true
mapreduce.app-submission.cross-platform=false
tez.am.log.level=INFO
mapreduce.job.reducer.preempt.delay.sec=0
s3native.bytes-per-checksum=512
dfs.namenode.path.based.cache.block.map.allocation.percent=0.25
yarn.timeline-service.entity-group-fs-store.cache-store-class=org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore
yarn.timeline-service.http-authentication.kerberos.principal=HTTP/_HOST@*.*
dfs.datanode.cache.revocation.polling.ms=500
mapreduce.reduce.markreset.buffer.percent=0.0
tez.runtime.io.sort.mb=540
dfs.namenode.lazypersist.file.scrub.interval.sec=300
yarn.nodemanager.webapp.xfs-filter.xframe-options=SAMEORIGIN
mapreduce.jobhistory.recovery.store.fs.uri=${hadoop.tmp.dir}/mapred/history/recoverystore
hadoop.registry.zk.retry.interval.ms=1000
yarn.nodemanager.keytab=/etc/security/keytabs/nm.service.keytab
nfs.dump.dir=/tmp/.hdfs-nfs
yarn.timeline-service.webapp.xfs-filter.xframe-options=SAMEORIGIN
mapreduce.job.user.name=admfkr
tez.runtime.pipelined.sorter.sort.threads=2
mapreduce.jobtracker.maxtasks.perjob=-1
yarn.resourcemanager.fs.state-store.retry-policy-spec=2000, 500
yarn.timeline-service.ttl-enable=true
dfs.datanode.cached-dfsused.check.interval.ms=600000
yarn.nodemanager.delete.debug-delay-sec=0
mapreduce.reduce.skip.maxgroups=0
hadoop.proxyuser.hdfs.groups=*
hive.compute.query.using.stats=true
fs.trash.interval=360
mapreduce.job.name=
hive.orc.splits.include.file.footer=false
mapreduce.jobtracker.heartbeats.in.second=100
hadoop.bin.path=/usr/bin/hadoop
mapreduce.jobtracker.persist.jobstatus.active=true
yarn.nodemanager.webapp.spnego-keytab-file=/etc/security/keytabs/spnego.service.keytab
mapreduce.jobhistory.done-dir=/mr-history/done
hadoop.security.instrumentation.requires.admin=false
nfs.rtmax=1048576
hive.server2.thrift.port=10000
yarn.resourcemanager.container.liveness-monitor.interval-ms=600000
dfs.namenode.backup.address=0.0.0.0:50100
dfs.namenode.max-lock-hold-to-release-lease-ms=25
dfs.datanode.readahead.bytes=4194304
mapreduce.jobhistory.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD
mapreduce.jobhistory.cleaner.enable=true
dfs.client.block.write.retries=3
hive.exec.max.created.files=100000
ha.failover-controller.graceful-fence.connection.retries=1
mapreduce.tasktracker.http.address=0.0.0.0:50060
yarn.timeline-service.entity-group-fs-store.group-id-plugin-classpath=/usr/hdp/2.6.4.0-91/spark/hdpLib/*
dfs.namenode.safemode.threshold-pct=0.99
hadoop.security.java.secure.random.algorithm=SHA1PRNG
dfs.datanode.dns.nameserver=default
mapreduce.cluster.temp.dir=${hadoop.tmp.dir}/mapred/temp
mapreduce.client.submit.file.replication=10
yarn.fail-fast=false
dfs.namenode.edits.journal-plugin.qjournal=org.apache.hadoop.hdfs.qjournal.client.QuorumJournalManager
dfs.client.write.exclude.nodes.cache.expiry.interval.millis=600000
dfs.nameservices=DevDataLakeNNHA,DataLakeNNHA
dfs.client.mmap.cache.timeout.ms=3600000
io.skip.checksum.errors=false
yarn.timeline-service.hostname=0.0.0.0
yarn.acl.enable=true
fs.s3a.fast.upload=false
file.blocksize=67108864
hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory
hive.metastore.warehouse.dir=/apps/hive/warehouse
tez.generate.debug.artifacts=false
fs.AbstractFileSystem.swebhdfs.impl=org.apache.hadoop.fs.SWebHdfs
hadoop.common.configuration.version=0.23.0
yarn.resourcemanager.client.thread-count=50
tez.runtime.convert.user-payload.to.history-text=false
dfs.datanode.drop.cache.behind.reads=false
hadoop.proxyuser.beacon.users=*
hadoop.proxyuser.hue.hosts=*
yarn.nodemanager.linux-container-executor.nonsecure-mode.user-pattern=^[_.A-Za-z0-9][-@_.A-Za-z0-9]{0,255}?[$]?$
hive.metastore.uris=thrift://s-msk-d-hd-mn1.*.*:9083
yarn.resourcemanager.zk-timeout-ms=10000
yarn.resourcemanager.max-completed-applications=10000
hive.querylog.location=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hivetmp/querylog
dfs.xframe.value=SAMEORIGIN
yarn.sharedcache.cleaner.period-mins=1440
hive.exec.orc.default.stripe.size=67108864
mapreduce.jobtracker.tasktracker.maxblacklists=4
mapreduce.job.end-notification.max.retry.interval=5000
dfs.datanode.keytab.file=/etc/security/keytabs/dn.service.keytab
yarn.node-labels.fs-store.retry-policy-spec=2000, 500
mapreduce.job.acl-view-job= 
yarn.app.mapreduce.am.job.task.listener.thread-count=30
dfs.namenode.edit.log.autoroll.check.interval.ms=300000
yarn.app.mapreduce.am.resource.cpu-vcores=1
hive.cluster.delegation.token.store.class=org.apache.hadoop.hive.thrift.ZooKeeperTokenStore
hive.zookeeper.client.port=2181
hadoop.security.group.mapping.ldap.search.attr.member=member
hadoop.ssl.client.conf=ssl-client.xml
hive.server2.tez.default.queues=default,llap
mapreduce.cluster.administrators= hadoop
yarn.sharedcache.root-dir=/sharedcache
dfs.journalnode.https-address=0.0.0.0:8481
hadoop.security.groups.cache.background.reload=false
mapreduce.reduce.shuffle.fetch.retry.timeout-ms=30000
mapreduce.tasktracker.instrumentation=org.apache.hadoop.mapred.TaskTrackerMetricsInst
dfs.namenode.max.objects=0
dfs.bytes-per-checksum=512
tez.shuffle-vertex-manager.min-src-fraction=0.2
dfs.datanode.max.transfer.threads=4096
dfs.cluster.administrators= hdfs
mapreduce.jobtracker.jobhistory.task.numberprogresssplits=12
dfs.block.access.key.update.interval=600
mapreduce.map.memory.mb=2048
dfs.datanode.hdfs-blocks-metadata.enabled=false
mapreduce.tasktracker.healthchecker.interval=60000
yarn.nodemanager.bind-host=0.0.0.0
dfs.image.transfer.chunksize=65536
mapreduce.tasktracker.taskmemorymanager.monitoringinterval=5000
tez.am.maxtaskfailures.per.node=10
dfs.client.https.keystore.resource=ssl-client.xml
yarn.resourcemanager.connect.retry-interval.ms=30000
yarn.timeline-service.webapp.address=s-msk-d-hd-mn1.*.*:8188
yarn.scheduler.minimum-allocation-mb=1024
yarn.sharedcache.cleaner.resource-sleep-ms=0
net.topology.impl=org.apache.hadoop.net.NetworkTopology
io.seqfile.compress.blocksize=1000000
fs.AbstractFileSystem.ftp.impl=org.apache.hadoop.fs.ftp.FtpFs
ha.failover-controller.active-standby-elector.zk.op.retries=120
dfs.namenode.checkpoint.edits.dir=${dfs.namenode.checkpoint.dir}
mapreduce.job.running.reduce.limit=0
dfs.namenode.heartbeat.recheck-interval=300000
dfs.namenode.safemode.extension=30000
hive.server2.table.type.mapping=CLASSIC
mapreduce.job.reduce.shuffle.consumer.plugin.class=org.apache.hadoop.mapreduce.task.reduce.Shuffle
yarn.nodemanager.vmem-check-enabled=false
mapreduce.jobtracker.persist.jobstatus.dir=/jobtracker/jobsInfo
dfs.namenode.delegation.key.update-interval=86400000
hadoop.rpc.protection=authentication
fs.permissions.umask-mode=022
hadoop.http.staticuser.user=yarn
hive.metastore.failure.retries=24
fs.s3a.connection.maximum=15
fs.s3a.paging.maximum=5000
hadoop.shell.missing.defaultFs.warning=false
hive.metastore.pre.event.listeners=org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener
hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab
yarn.log.server.web-service.url=http://s-msk-d-hd-mn1.*.*:8188/ws/v1/applicationhistory
mapreduce.tasktracker.dns.nameserver=default
mapreduce.jobtracker.webinterface.trusted=false
hive.metastore.sasl.enabled=true
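
With hive.metastore.sasl.enabled=true, every Thrift connection to the metastore must present a Kerberos ticket or a delegation token, which is exactly the step that fails here. To confirm the metastore itself accepts Kerberos clients, it can be called directly from a node where kinit has been run — a minimal sketch, assuming the HDP 2.6 Hive client jars; the URI and principal mirror the (redacted) values in this dump:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

public class MetastoreCheck {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // Domain redacted in the log above; substitute the real FQDN
        conf.setVar(HiveConf.ConfVars.METASTOREURIS,
                "thrift://s-msk-d-hd-mn1.REDACTED:9083");
        conf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
        conf.setVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL,
                "hive/_HOST@REALM");
        // Only succeeds when the caller holds a valid ticket or token
        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        System.out.println(client.getAllDatabases());
        client.close();
    }
}
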
dfs.client.block.write.replace-datanode-on-failure.enable=true
fs.azure.saskey.usecontainersaskeyforallaccess=true
mapreduce.job.classpath.files=hdfs://DevDataLakeNNHA/datalake/app/lib/avro/avro-180/avro-1.8.0.jar,hdfs://DevDataLakeNNHA/datalake/app/lib/avro/avro-180/avro-mapred-1.8.0-hadoop2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/fst-2.24.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-shims-0.23-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/libfb303-0.9.3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-client-v2-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-client-common-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-metastore-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/scala-compiler-2.11.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hdfs-model-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/paranamer-2.3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/asm-tree-3.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/stax-api-1.0-2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/ST4-4.0.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-server-applicationhistoryservice-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/geronimo-jaspic_1.0_spec-1.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/scala-reflect-2.11.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-typesystem-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/zookeeper-3.4.6.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jackson-core-2.4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-yarn-timeline-history-with-fs-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-yarn-timeline-history-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/libthrift-0.9.3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-contrib-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/derby-10.10.1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/geronimo-jta_1.1_spec-1.1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-exec-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/objenesis-2.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/eigenbase-properties-1.1.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/transaction-api-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/snappy-java-1.0.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/metrics-json-3.1.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jaxb-impl-2.2.3-1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/mail-1.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-collections-3.2.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/okio-1.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/metrics-core-3.1.2
.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-lang3-3.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/guice-3.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/azure-data-lake-store-sdk-2.1.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/javax.jdo-3.2.0-m3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/pentaho-aggdesigner-algorithm-5.1.5-jhyde.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-shims-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/datanucleus-core-4.1.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/activation-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/geronimo-annotation_1.0_spec-1.1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-ant-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/javassist-3.18.1-GA.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-lang-2.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-azure-datalake-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-common-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/curator-client-2.6.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/aws-java-sdk-kms-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-shims-0.20S-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/stringtemplate-3.2.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-yarn-timeline-history-with-acls-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-common-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/azure-storage-5.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/ant-1.9.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-shims-scheduler-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/bonecp-0.8.0.RELEASE.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jersey-core-1.9.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/httpcore-4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-intg-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/scalap-2.11.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/servlet-api-2.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-notification-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jersey-guice-1.9.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jersey-multipart-1.19.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/avatica-metrics-1.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-server-timeline-pluginstorage-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/guava-11.0.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/oro-2.0.8.jar,hdfs://DevDataLakeNNHA/user/oozie/sha
re/lib/lib_20180126151554/hive/parquet-hadoop-bundle-1.8.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jettison-1.3.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/metrics-jvm-3.1.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/calcite-core-1.2.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-pool-1.5.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/xz-1.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/apache-log4j-extras-1.2.17.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-api-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/httpclient-4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-api-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/velocity-1.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/HikariCP-2.5.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/janino-2.7.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jetty-all-7.6.0.v20120127.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jpam-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-aws-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/kafka-clients-0.10.1.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/stax-api-1.0.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/aws-java-sdk-core-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/okhttp-2.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/antlr-runtime-3.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/guice-servlet-3.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-compress-1.4.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-azure-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/leveldbjni-all-1.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/asm-3.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-server-web-proxy-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jdo-api-3.0.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jline-2.12.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-registry-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-runtime-internals-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-bridge-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-dag-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/ant-launcher-1.9.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/asm-commons-3.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-cli-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-shims-common-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/aws-java-sdk-s3-1.10.6.jar,hdfs://
DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/servlet-api-2.5-6.1.14.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/gson-2.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/protobuf-java-2.5.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/scala-library-2.11.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/slf4j-api-1.6.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/oozie-sharelib-hive-4.2.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/json4s-core_2.11-3.2.11.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/datanucleus-api-jdo-4.2.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/avatica-1.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/ivy-2.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-service-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/kafka_2.11-0.10.1.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/zookeeper-3.4.6.2.6.4.0-91-tests.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-compiler-2.7.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jta-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-dbcp-1.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jetty-util-6.1.26.hwx.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-cli-1.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-httpclient-3.0.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/json4s-ast_2.11-3.2.11.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/opencsv-2.3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/javax.inject-1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/datanucleus-rdbms-4.1.7.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-common-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jersey-client-1.9.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-io-2.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jackson-annotations-2.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/json-20090211.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-server-common-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jaxb-api-2.2.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jersey-json-1.9.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/json4s-native_2.11-3.2.11.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-common-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jsr305-2.0.3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/slf4j-log4j12-1.6.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/azure-keyvault-core-0.8.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-codec-1.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jackson-databind-2.4.4.jar,hd
fs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jetty-6.1.14.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/curator-recipes-2.5.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/groovy-all-2.4.11.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/log4j-1.2.17.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-serde-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-runtime-library-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jackson-jaxrs-1.9.13.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/aopalliance-1.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jackson-xc-1.9.13.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/avro-1.7.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/calcite-linq4j-1.2.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-server-resourcemanager-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/curator-framework-2.6.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/netty-3.6.2.Final.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-annotations-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/joda-time-2.9.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-mapreduce-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-client-v1-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/antlr-2.7.7.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-logging-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-collections4-4.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/aws-java-sdk-core-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/aws-java-sdk-kms-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/aws-java-sdk-s3-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/azure-data-lake-store-sdk-2.1.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/azure-keyvault-core-0.8.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/azure-storage-5.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/commons-lang3-3.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/guava-11.0.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/hadoop-aws-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/hadoop-azure-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/hadoop-azure-datalake-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/jackson-annotations-2.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/jackson-core-2.4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/jackson-databind-2.4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126
151554/oozie/joda-time-2.9.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/json-simple-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/okhttp-2.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/okio-1.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/oozie-hadoop-utils-hadoop-2-4.2.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/oozie-sharelib-oozie-4.2.0.2.6.4.0-91.jar
dfs.namenode.checkpoint.dir=/data/hadoop/hdfs/namesecondary
dfs.webhdfs.rest-csrf.browser-useragents-regex=^Mozilla.*,^Opera.*
dfs.namenode.top.windows.minutes=1,5,25
dfs.client.use.legacy.blockreader.local=false
mapreduce.job.maxtaskfailures.per.tracker=3
mapreduce.shuffle.max.connections=0
net.topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping
hive.merge.mapredfiles=false
yarn.client.application-client-protocol.poll-interval-ms=200
mapreduce.fileoutputcommitter.marksuccessfuljobs=true
yarn.nodemanager.localizer.address=${yarn.nodemanager.hostname}:8040
dfs.namenode.list.cache.pools.num.responses=100
nfs.file.dump.dir=/tmp/.hdfs-nfs
nfs.server.port=2049
hadoop.proxyuser.HTTP.hosts=s-msk-d-hd-dn1.*.*,s-msk-d-hd-dn2.*.*,s-msk-d-hd-mn1.*.*
fs.s3a.readahead.range=64K
ha.zookeeper.parent-znode=/hadoop-ha
hive.exec.max.dynamic.partitions=5000
yarn.sharedcache.admin.thread-count=1
yarn.nodemanager.resource.cpu-vcores=8
dfs.encrypt.data.transfer.cipher.suites=AES/CTR/NoPadding
mapreduce.jobhistory.http.policy=HTTP_ONLY
fs.s3a.attempts.maximum=20
yarn.log-aggregation.retain-check-interval-seconds=-1
yarn.nodemanager.log-aggregation.debug-enabled=false
fs.s3n.multipart.copy.block.size=5368709120
hive.metastore.execute.setugi=true
yarn.resourcemanager.zk-acl=sasl:rm:rwcda
tez.task.get-task.sleep.interval-ms.max=200
yarn.resourcemanager.webapp.spnego-keytab-file=/etc/security/keytabs/spnego.service.keytab
yarn.timeline-service.client.fd-clean-interval-secs=60
hadoop.ssl.keystores.factory.class=org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory
mapreduce.job.split.metainfo.maxsize=10000000
hadoop.security.random.device.file.path=/dev/urandom
hive.mapjoin.bucket.cache.size=10000
fs.s3.maxRetries=4
yarn.client.nodemanager-connect.max-wait-ms=60000
yarn.app.mapreduce.client-am.ipc.max-retries=3
yarn.nodemanager.linux-container-executor.cgroups.strict-resource-usage=false
tez.session.am.dag.submit.timeout.secs=600
dfs.replication.max=50
dfs.datanode.https.address=0.0.0.0:50475
ipc.client.kill.max=10
mapreduce.job.committer.setup.cleanup.needed=true
dfs.client.domain.socket.data.traffic=false
yarn.nodemanager.localizer.cache.target-size-mb=10240
yarn.resourcemanager.admin.client.thread-count=1
hadoop.security.group.mapping.ldap.connection.timeout.ms=60000
hive.optimize.bucketmapjoin=true
mapreduce.jobtracker.restart.recover=false
yarn.nodemanager.webapp.spnego-principal=HTTP/_HOST@*.*
hadoop.proxyuser.zeppelin.groups=*
yarn.timeline-service.store-class=org.apache.hadoop.yarn.server.timeline.EntityGroupFSTimelineStore
tez.task.max-events-per-heartbeat=500
hadoop.tmp.dir=/tmp/hadoop-${user.name}
yarn.timeline-service.ttl-ms=2678400000
hive.vectorized.groupby.maxentries=100000
hive.mapjoin.optimized.hashtable=true
mapreduce.map.speculative=false
yarn.timeline-service.recovery.enabled=true
hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator
yarn.nodemanager.recovery.dir=/data/log/yarn/nodemanager/recovery-state
mapreduce.job.counters.max=130
dfs.namenode.max.full.block.report.leases=6
dfs.namenode.max.extra.edits.segments.retained=10000
dfs.webhdfs.user.provider.user.pattern=^[A-Za-z_][A-Za-z0-9._-]*[$]?$
oozie.hive.params.size=3
hive.fetch.task.conversion=more
dfs.client.mmap.enabled=true
mapreduce.map.log.level=INFO
dfs.client.file-block-storage-locations.timeout.millis=1000
hadoop.fuse.timer.period=5
yarn.app.mapreduce.am.scheduler.heartbeat.interval-ms=1000
mapreduce.tasktracker.local.dir.minspacestart=0
hive.server2.authentication.spnego.principal=HTTP/_HOST@*.*
ha.health-monitor.check-interval.ms=1000
yarn.nodemanager.docker-container-executor.exec-name=/usr/bin/docker
yarn.resourcemanager.fs.state-store.retry-interval-ms=1000
mapreduce.output.fileoutputformat.compress=false
javax.jdo.option.ConnectionPassword=hive
io.native.lib.available=true
yarn.sharedcache.store.in-memory.staleness-period-mins=10080
mapreduce.application.framework.path=/hdp/apps/${hdp.version}/mapreduce/mapreduce.tar.gz#mr-framework
mapreduce.jobhistory.webapp.spnego-keytab-file=/etc/security/keytabs/spnego.service.keytab
hadoop.security.group.mapping.providers.combined=true
fs.AbstractFileSystem.har.impl=org.apache.hadoop.fs.HarFs
mapreduce.job.running.map.limit=0
yarn.nodemanager.webapp.address=${yarn.nodemanager.hostname}:8042
mapreduce.reduce.input.buffer.percent=0.0
tez.am.max.app.attempts=2
ambari.hive.db.schema.name=hive
mapreduce.job.cache.files=hdfs://DevDataLakeNNHA/datalake/app/lib/avro/avro-180/avro-1.8.0.jar,hdfs://DevDataLakeNNHA/datalake/app/lib/avro/avro-180/avro-mapred-1.8.0-hadoop2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/fst-2.24.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-shims-0.23-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/libfb303-0.9.3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-client-v2-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-client-common-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-metastore-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/scala-compiler-2.11.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hdfs-model-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/paranamer-2.3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/asm-tree-3.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/stax-api-1.0-2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/ST4-4.0.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-server-applicationhistoryservice-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/geronimo-jaspic_1.0_spec-1.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/scala-reflect-2.11.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-typesystem-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/zookeeper-3.4.6.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jackson-core-2.4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-yarn-timeline-history-with-fs-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-yarn-timeline-history-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/libthrift-0.9.3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-contrib-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/derby-10.10.1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/geronimo-jta_1.1_spec-1.1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-exec-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/objenesis-2.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/eigenbase-properties-1.1.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/transaction-api-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/snappy-java-1.0.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/metrics-json-3.1.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jaxb-impl-2.2.3-1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/mail-1.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-collections-3.2.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/okio-1.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/metrics-core-3.1.2.jar
,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-lang3-3.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/guice-3.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/azure-data-lake-store-sdk-2.1.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/javax.jdo-3.2.0-m3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/pentaho-aggdesigner-algorithm-5.1.5-jhyde.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-shims-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/datanucleus-core-4.1.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/activation-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/geronimo-annotation_1.0_spec-1.1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-ant-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/javassist-3.18.1-GA.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-lang-2.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-azure-datalake-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-common-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/curator-client-2.6.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/aws-java-sdk-kms-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-shims-0.20S-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/stringtemplate-3.2.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-yarn-timeline-history-with-acls-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-common-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/azure-storage-5.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/ant-1.9.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-shims-scheduler-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/bonecp-0.8.0.RELEASE.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jersey-core-1.9.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/httpcore-4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-intg-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/scalap-2.11.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/servlet-api-2.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-notification-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jersey-guice-1.9.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jersey-multipart-1.19.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/avatica-metrics-1.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-server-timeline-pluginstorage-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/guava-11.0.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/oro-2.0.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/l
ib/lib_20180126151554/hive/parquet-hadoop-bundle-1.8.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/apache-curator-2.6.0.pom#apache-curator-2.6.0.pom,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jettison-1.3.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/metrics-jvm-3.1.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/calcite-core-1.2.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-pool-1.5.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/xz-1.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/apache-log4j-extras-1.2.17.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-api-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/httpclient-4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-api-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/velocity-1.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/HikariCP-2.5.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/janino-2.7.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jetty-all-7.6.0.v20120127.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jpam-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-aws-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/kafka-clients-0.10.1.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/stax-api-1.0.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/aws-java-sdk-core-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/okhttp-2.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/antlr-runtime-3.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/guice-servlet-3.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-compress-1.4.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-azure-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/leveldbjni-all-1.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/asm-3.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-server-web-proxy-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jdo-api-3.0.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jline-2.12.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-registry-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-runtime-internals-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-bridge-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-dag-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/ant-launcher-1.9.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/asm-commons-3.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-cli-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-shims-common-1.2.1000.2.6
.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/aws-java-sdk-s3-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/servlet-api-2.5-6.1.14.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/gson-2.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/protobuf-java-2.5.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/scala-library-2.11.8.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/slf4j-api-1.6.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/oozie-sharelib-hive-4.2.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/json4s-core_2.11-3.2.11.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/datanucleus-api-jdo-4.2.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/avatica-1.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/ivy-2.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-service-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/kafka_2.11-0.10.1.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/zookeeper-3.4.6.2.6.4.0-91-tests.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-compiler-2.7.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jta-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-dbcp-1.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jetty-util-6.1.26.hwx.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-cli-1.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-httpclient-3.0.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-application.properties#atlas-application.properties,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/json4s-ast_2.11-3.2.11.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/opencsv-2.3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/javax.inject-1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/datanucleus-rdbms-4.1.7.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-common-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jersey-client-1.9.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-io-2.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jackson-annotations-2.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/json-20090211.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-server-common-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jaxb-api-2.2.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jersey-json-1.9.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/json4s-native_2.11-3.2.11.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-common-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jsr305-2.0.3.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/slf4j-log4j12-1.6.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/l
ib_20180126151554/hive/azure-keyvault-core-0.8.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-codec-1.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jackson-databind-2.4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jetty-6.1.14.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/curator-recipes-2.5.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/groovy-all-2.4.11.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/log4j-1.2.17.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hive-serde-1.2.1000.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-runtime-library-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jackson-jaxrs-1.9.13.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/aopalliance-1.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/jackson-xc-1.9.13.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/avro-1.7.5.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/calcite-linq4j-1.2.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-yarn-server-resourcemanager-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/curator-framework-2.6.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/netty-3.6.2.Final.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/hadoop-annotations-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/joda-time-2.9.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/tez-mapreduce-0.7.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/atlas-client-v1-0.8.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/antlr-2.7.7.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-logging-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/hive/commons-collections4-4.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/aws-java-sdk-core-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/aws-java-sdk-kms-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/aws-java-sdk-s3-1.10.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/azure-data-lake-store-sdk-2.1.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/azure-keyvault-core-0.8.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/azure-storage-5.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/commons-lang3-3.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/guava-11.0.2.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/hadoop-aws-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/hadoop-azure-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/hadoop-azure-datalake-2.7.3.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/jackson-annotations-2.4.0.jar,hdfs
://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/jackson-core-2.4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/jackson-databind-2.4.4.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/joda-time-2.9.6.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/json-simple-1.1.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/okhttp-2.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/okio-1.4.0.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/oozie-hadoop-utils-hadoop-2-4.2.0.2.6.4.0-91.jar,hdfs://DevDataLakeNNHA/user/oozie/share/lib/lib_20180126151554/oozie/oozie-sharelib-oozie-4.2.0.2.6.4.0-91.jar
dfs.client.slow.io.warning.threshold.ms=30000
fs.s3a.multipart.size=100M
yarn.app.mapreduce.am.job.committer.commit-window=10000
hive.server2.tez.sessions.per.default.queue=1
yarn.timeline-service.webapp.rest-csrf.enabled=true
hive.server2.thrift.http.port=10001
hive.server2.logging.operation.log.location=/data/tmp/hive/operation_logs
javax.jdo.option.ConnectionURL=jdbc:postgresql://s-msk-d-hd-mn1.*.*:5432/hive
mapreduce.ifile.readahead=true
s3native.replication=3
hive.prewarm.numcontainers=3
yarn.timeline-service.entity-group-fs-store.summary-store=org.apache.hadoop.yarn.server.timeline.RollingLevelDBTimelineStore
s3.stream-buffer-size=4096
tez.queue.name=queries
mapreduce.output.fileoutputformat.compress.codec=org.apache.hadoop.io.compress.DefaultCodec
fs.s3a.socket.recv.buffer=8192
dfs.datanode.fsdatasetcache.max.threads.per.volume=4
hadoop.proxyuser.ambari-server-devdatalake.groups=*
yarn.sharedcache.store.in-memory.initial-delay-mins=10
mapreduce.jobhistory.webapp.address=s-msk-d-hd-mn1.*.*:19888
fs.adl.impl=org.apache.hadoop.fs.adl.AdlFileSystem
mapreduce.task.userlog.limit.kb=0
dfs.namenode.rpc-address.DataLakeNNHA.nn1=s-msk-p-hd-nn1.*.*:8020
hive.txn.timeout=300
dfs.namenode.rpc-address.DataLakeNNHA.nn2=s-msk-p-hd-nn2.*.*:8020
hive.stats.fetch.partition.stats=true
fs.s3a.connection.ssl.enabled=true
yarn.sharedcache.webapp.address=0.0.0.0:8788
hadoop.fuse.connection.timeout=300
hive.metastore.server.max.threads=100000
ipc.server.max.connections=0
hive.zookeeper.namespace=hive_zookeeper_namespace
yarn.app.mapreduce.am.resource.mb=2048
hadoop.security.groups.cache.secs=300
yarn.nodemanager.container-monitor.interval-ms=3000
dfs.datanode.peer.stats.enabled=false
s3.client-write-packet-size=65536
dfs.replication=2
mapreduce.shuffle.transfer.buffer.size=131072
dfs.namenode.audit.log.async=true
hive.server2.allow.user.substitution=true
hadoop.security.group.mapping.ldap.directory.search.timeout=10000
dfs.datanode.available-space-volume-choosing-policy.balanced-space-threshold=10737418240
dfs.datanode.disk.check.timeout=10m
yarn.app.mapreduce.client-am.ipc.max-retries-on-timeouts=3
hive.tez.dynamic.partition.pruning=true
tez.runtime.shuffle.memory.limit.percent=0.25
hive.exec.max.dynamic.partitions.pernode=2000
mapreduce.tasktracker.taskcontroller=org.apache.hadoop.mapred.DefaultTaskController
mapreduce.tasktracker.indexcache.mb=10
yarn.scheduler.maximum-allocation-vcores=1
yarn.nodemanager.sleep-delay-before-sigkill.ms=250
hive.compactor.abortedtxn.threshold=1000
tez.task.launch.cmd-opts=-XX:+PrintGCDetails -verbose:gc -XX:+PrintGCTimeStamps -XX:+UseNUMA -XX:+UseParallelGC
hive.map.aggr=true
mapreduce.job.acl-modify-job= 
fs.automatic.close=true
fs.azure.sas.expiry.period=90d
hadoop.security.groups.cache.background.reload.threads=3
hive.auto.convert.join=true
hive.server2.support.dynamic.service.discovery=true
mapreduce.input.fileinputformat.list-status.num-threads=1
hadoop.security.group.mapping.ldap.posix.attr.gid.name=gidNumber
dfs.namenode.acls.enabled=false
dfs.client.short.circuit.replica.stale.threshold.ms=1800000
dfs.journalnode.keytab.file=/etc/security/keytabs/jn.service.keytab
fs.s3.block.size=67108864
dfs.namenode.resource.du.reserved=104857600
mapreduce.jobhistory.intermediate-done-dir=/mr-history/tmp
yarn.nodemanager.recovery.compaction-interval-secs=3600
dfs.namenode.edits.noeditlogchannelflush=false
dfs.web.authentication.kerberos.keytab=/etc/security/keytabs/spnego.service.keytab
mapreduce.reduce.shuffle.input.buffer.percent=0.7
yarn.http.policy=HTTP_ONLY
mapreduce.map.maxattempts=4
dfs.namenode.audit.loggers=default
hive.metastore.kerberos.principal=hive/_HOST@*.*
io.serializations=org.apache.hadoop.io.serializer.WritableSerialization
hadoop.security.groups.cache.warn.after.ms=5000
yarn.nodemanager.webapp.rest-csrf.custom-header=X-XSRF-Header
yarn.node-labels.fs-store.impl.class=org.apache.hadoop.yarn.nodelabels.FileSystemNodeLabelsStore
hadoop.http.cross-origin.allowed-methods=GET,POST,HEAD
mapreduce.jobhistory.webapp.rest-csrf.enabled=false
dfs.http.policy=HTTP_ONLY
dfs.client.file-block-storage-locations.num-threads=10
yarn.nodemanager.container.stderr.pattern={*stderr*,*STDERR*}
mapreduce.cluster.local.dir=${hadoop.tmp.dir}/mapred/local
mapreduce.jobhistory.webapp.spnego-principal=HTTP/_HOST@*.*
hadoop.kerberos.kinit.command=kinit
dfs.namenode.secondary.https-address=0.0.0.0:50091
dfs.namenode.metrics.logger.period.seconds=600
dfs.block.access.token.lifetime=600
dfs.namenode.delegation.token.max-lifetime=604800000
dfs.datanode.drop.cache.behind.writes=false
mapreduce.jobhistory.bind-host=0.0.0.0
mapreduce.local.clientfactory.class.name=org.apache.hadoop.mapred.LocalClientFactory
hive.merge.orcfile.stripe.level=true
hive.exec.compress.output=false
dfs.namenode.num.extra.edits.retained=1000000
ipc.client.connect.max.retries.on.timeouts=45
hadoop.proxyuser.hive.hosts=s-msk-d-hd-mn1.*.*
hadoop.proxyuser.beacon.hosts=*
tez.task.launch.env=LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64:./tezlib/lib/native:./tezlib/lib/native/Linux-amd64-64
yarn.nodemanager.aux-services.spark2_shuffle.classpath=/usr/hdp/${hdp.version}/spark2/aux/*
fs.s3n.block.size=67108864
mapreduce.job.map.output.collector.class=org.apache.hadoop.mapred.MapTask$MapOutputBuffer
fs.s3a.fast.upload.buffer=disk
ha.health-monitor.connect-retry-interval.ms=1000
mapreduce.tasktracker.map.tasks.maximum=2
tez.grouping.split-waves=1.7
hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.ATSHook
hadoop.proxyuser.falcon.hosts=*
ssl.client.truststore.reload.interval=10000
dfs.client.datanode-restart.timeout=30
io.mapfile.bloom.size=1048576
hadoop.security.kms.client.authentication.retry-count=1
dfs.client-write-packet-size=65536
fs.swift.impl=org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem
yarn.resourcemanager.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD
yarn.app.mapreduce.shuffle.log.backups=0
hive.metastore.client.socket.timeout=1800s
ftp.blocksize=67108864
tez.counters.max.groups=3000
yarn.log.server.url=http://s-msk-d-hd-mn1.*.*:19888/jobhistory/logs
dfs.namenode.kerberos.principal.pattern=*
hive.vectorized.execution.reduce.enabled=false
javax.jdo.option.ConnectionDriverName=org.postgresql.Driver
hive.compactor.delta.pct.threshold=0.1f
yarn.resourcemanager.scheduler.monitor.enable=true
dfs.webhdfs.socket.connect-timeout=60s
nfs.allow.insecure.ports=true
dfs.namenode.keytab.file=/etc/security/keytabs/nn.service.keytab
yarn.sharedcache.nm.uploader.thread-count=20
yarn.app.mapreduce.client.job.retry-interval=2000
hive.merge.tezfiles=false
hadoop.security.authorization=true
hive.optimize.index.filter=true
yarn.timeline-service.version=1.5
yarn.am.liveness-monitor.expiry-interval-ms=600000
fs.har.impl.disable.cache=true
atlas.hook.hive.maxThreads=1
hive.tez.dynamic.partition.pruning.max.event.size=1048576
hive.cbo.enable=true
hadoop.proxyuser.hdfs.hosts=*
hive.exec.mode.local.auto=false
hive.optimize.reducededuplication.min.reducer=4
mapreduce.job.reduce.slowstart.completedmaps=0.05
yarn.timeline-service.leveldb-timeline-store.path=/data/hadoop/yarn/timeline
hive.server2.max.start.attempts=5
hive.exec.dynamic.partition.mode=strict
mapreduce.jobhistory.minicluster.fixed.ports=false
hive.server2.thrift.max.worker.threads=500
mapreduce.application.classpath=$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/etc/hadoop/conf/secure
yarn.resourcemanager.ha.automatic-failover.enabled=true
mapreduce.reduce.java.opts=-Xmx3276m
mapreduce.job.userlog.retain.hours=24
dfs.namenode.accesstime.precision=0
tez.am.container.idle.release-timeout-max.millis=20000
yarn.resourcemanager.store.class=org.apache.hadoop.yarn.server.resourcemanager.recovery.ZKRMStateStore
io.mapfile.bloom.error.rate=0.005
yarn.nodemanager.webapp.rest-csrf.enabled=false
yarn.timeline-service.leveldb-state-store.path=/data/hadoop/yarn/timeline
hadoop.proxyuser.hive.groups=*
mapreduce.job.end-notification.url=http://s-msk-d-hd-mn1.*.*:11000/oozie/callback?id=0000003-180207125715047-oozie-oozi-W@clear_hive_tbl&status=$jobStatus
dfs.namenode.support.allow.format=true
dfs.content-summary.limit=5000
yarn.nodemanager.container-executor.class=org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor
yarn.resourcemanager.nodes.exclude-path=/etc/hadoop/conf/yarn.exclude
dfs.datanode.outliers.report.interval=1800000
hadoop.security.kms.client.encrypted.key.cache.low-watermark=0.3f
dfs.namenode.top.enabled=true
yarn.app.mapreduce.shuffle.log.separate=true
hadoop.user.group.static.mapping.overrides=dr.who=;
tez.runtime.optimize.local.fetch=true
mapreduce.jobhistory.webapp.rest-csrf.custom-header=X-XSRF-Header
yarn.webapp.xfs-filter.enabled=true
dfs.https.port=50470
dfs.client.cached.conn.retry=3
yarn.resourcemanager.monitor.capacity.preemption.total_preemption_per_round=0.14
hadoop.proxyuser.livy.groups=*
datanucleus.plugin.pluginRegistryBundleCheck=LOG
dfs.namenode.path.based.cache.refresh.interval.ms=30000
tez.runtime.shuffle.fetch.buffer.percent=0.6
dfs.namenode.fs-limits.max-directory-items=1048576
tez.use.cluster.hadoop-libs=false
yarn.resourcemanager.zk-retry-interval-ms=1000
dfs.ha.log-roll.period=120
hive.exec.parallel.thread.number=8
yarn.nodemanager.runtime.linux.docker.capabilities=CHOWN,DAC_OVERRIDE,FSETID,FOWNER,MKNOD,NET_RAW,SETGID,SETUID,SETFCAP,SETPCAP,NET_BIND_SERVICE,SYS_CHROOT,KILL,AUDIT_WRITE
ipc.client.fallback-to-simple-auth-allowed=false
yarn.nodemanager.remote-app-log-dir=/app-logs
mapreduce.tasktracker.healthchecker.script.timeout=600000
hive.security.metastore.authenticator.manager=org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator
yarn.timeline-service.entity-group-fs-store.scan-interval-seconds=15
dfs.xframe.enabled=true
hive.default.fileformat.managed=TextFile
yarn.nodemanager.resource.percentage-physical-cpu-limit=80
mapreduce.job.tags=oozie-c06b0d53a514ec89a3bd520d6f3fdff3
dfs.namenode.fs-limits.max-xattr-size=16384
dfs.datanode.http.address=0.0.0.0:1022
dfs.namenode.blocks.per.postponedblocks.rescan=10000
hadoop.jetty.logs.serve.aliases=true
hadoop.proxyuser.ambari-server-devdatalake.hosts=s-msk-d-hd-mn1.*.*
dfs.webhdfs.ugi.expire.after.access=600000
mapreduce.jobhistory.admin.acl=*
mapreduce.job.reducer.unconditional-preempt.delay.sec=300
yarn.app.mapreduce.am.hard-kill-timeout-ms=10000
yarn.resourcemanager.display.per-user-apps=false
hive.limit.pushdown.memory.usage=0.04
yarn.resourcemanager.webapp.address=s-msk-d-hd-mn1.*.*:8088
mapreduce.jobhistory.recovery.enable=true
yarn.sharedcache.store.in-memory.check-period-mins=720
tez.am.container.reuse.locality.delay-allocation-millis=250
hive.tez.dynamic.partition.pruning.max.data.size=104857600
fs.df.interval=60000
yarn.timeline-service.enabled=true
oozie.hive.params.0=rawTbl=raw_gdwh_midas_rurcicur.rci
oozie.hive.params.1=partName=dlk_cob_date
mapreduce.task.profile=false
hadoop.http.cross-origin.allowed-headers=X-Requested-With,Content-Type,Accept,Origin
yarn.nodemanager.hostname=0.0.0.0
mapreduce.admin.map.child.java.opts=-server -XX:NewRatio=8 -Djava.net.preferIPv4Stack=true -Dhdp.version=${hdp.version}
mapreduce.job.token.tracking.ids.enabled=false
tez.task.am.heartbeat.counter.interval-ms.max=4000
fs.azure.authorization.caching.enable=true
dfs.client.mmap.retry.timeout.ms=300000
yarn.resourcemanager.webapp.rest-csrf.custom-header=X-XSRF-Header
mapreduce.jobhistory.move.thread-count=3
hive.server2.authentication.spnego.keytab=/etc/security/keytabs/spnego.service.keytab
dfs.permissions.enabled=true
fs.AbstractFileSystem.hdfs.impl=org.apache.hadoop.fs.Hdfs
hadoop.http.filter.initializers=org.apache.hadoop.http.lib.StaticUserWebFilter,org.apache.hadoop.security.HttpCrossOriginFilterInitializer
yarn.timeline-service.http-authentication.simple.anonymous.allowed=true
oozie.hive.params.2=partition=2017-10-30
tez.task.resource.memory.mb=2048
yarn.nodemanager.runtime.linux.docker.allowed-container-networks=host,none,bridge
yarn.sharedcache.client-server.thread-count=50
yarn.resourcemanager.resource-tracker.address=s-msk-d-hd-mn1.*.*:8025
mapreduce.jobhistory.jobname.limit=50
dfs.domain.socket.path=/var/lib/hadoop-hdfs/dn_socket
dfs.namenode.decommission.blocks.per.interval=500000
rpc.metrics.quantile.enable=false
dfs.namenode.read-lock-reporting-threshold-ms=5000
mapreduce.task.timeout=300000
yarn.nodemanager.resource.memory-mb=24576
dfs.datanode.failed.volumes.tolerated=0
yarn.nodemanager.disk-health-checker.min-healthy-disks=0.25
tez.session.client.timeout.secs=-1
mapreduce.framework.name=yarn
mapreduce.fileoutputcommitter.algorithm.version=1
mapreduce.map.skip.proc.count.autoincr=true
hive.orc.compute.splits.num.threads=10
yarn.resourcemanager.system-metrics-publisher.enabled=true
yarn.sharedcache.nested-level=3
fs.s3a.connection.timeout=200000
hadoop.security.dns.log-slow-lookups.enabled=false
hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager
file.client-write-packet-size=65536
tez.runtime.unordered.output.buffer.size-mb=153
ipc.client.ping=true
yarn.timeline-service.generic-application-history.store-class=org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore
hadoop.proxyuser.oozie.hosts=s-msk-d-hd-mn1.*.*
yarn.resourcemanager.delayed.delegation-token.removal-interval-ms=30000
dfs.client.failover.max.attempts=15
dfs.balancer.max-no-move-interval=60000
yarn.nodemanager.webapp.cross-origin.enabled=false
dfs.client.read.shortcircuit.streams.cache.expiry.ms=300000
tez.credentials.path=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/container_tokens
yarn.nodemanager.health-checker.script.timeout-ms=60000
dfs.journalnode.kerberos.internal.spnego.principal=HTTP/_HOST@*.*
yarn.resourcemanager.fs.state-store.num-retries=0
hadoop.ssl.require.client.cert=false
mapreduce.jobhistory.keytab=/etc/security/keytabs/jhs.service.keytab
hadoop.security.uid.cache.secs=14400
yarn.resourcemanager.ha.automatic-failover.zk-base-path=/yarn-leader-election
hadoop.proxyuser.falcon.groups=*
mapreduce.tasktracker.dns.interface=default
yarn.nodemanager.principal=nm/_HOST@*.*
hive.server2.authentication=KERBEROS
mapreduce.job.speculative.speculative-cap-running-tasks=0.1
hive.map.aggr.hash.force.flush.memory.threshold=0.9
dfs.datanode.block.id.layout.upgrade.threads=12
dfs.client.context=default
dfs.namenode.delegation.token.renew-interval=86400000
yarn.timeline-service.entity-group-fs-store.app-cache-size=10
fs.AbstractFileSystem.s3a.impl=org.apache.hadoop.fs.s3a.S3A
hive.exec.orc.encoding.strategy=SPEED
dfs.blockreport.intervalMsec=21600000
hive.metastore.connect.retries=24
io.map.index.skip=0
mapreduce.job.hdfs-servers=${fs.defaultFS}
mapreduce.map.output.compress=false
hadoop.security.kms.client.encrypted.key.cache.num.refill.threads=2
fs.s3n.multipart.uploads.block.size=67108864
mapreduce.task.merge.progress.records=10000
yarn.nodemanager.aux-services.mapreduce_shuffle.class=org.apache.hadoop.mapred.ShuffleHandler
tfile.fs.output.buffer.size=262144
dfs.client.failover.connection.retries=0
fs.du.interval=600000
dfs.namenode.top.window.num.buckets=10
yarn.sharedcache.uploader.server.address=0.0.0.0:8046
hive.server2.authentication.kerberos.keytab=/etc/security/keytabs/hive.service.keytab
hive.tez.cpu.vcores=-1
ssl.client.truststore.password=bigdata
fs.s3a.socket.send.buffer=8192
hadoop.registry.zk.quorum=s-msk-d-hd-dn1.*.*:2181,s-msk-d-hd-dn2.*.*:2181,s-msk-d-hd-mn1.*.*:2181
hadoop.http.cross-origin.allowed-origins=*
dfs.namenode.enable.retrycache=true
dfs.datanode.du.reserved=26830438400
hadoop.registry.system.acls=sasl:yarn@, sasl:mapred@, sasl:hdfs@
hive.merge.size.per.task=256000000
yarn.resourcemanager.webapp.xfs-filter.xframe-options=SAMEORIGIN
mapreduce.admin.user.env=LD_LIBRARY_PATH=/usr/hdp/2.6.4.0-91/hadoop/lib/native:/usr/hdp/2.6.4.0-91/hadoop/lib/native/Linux-amd64-64:./mr-framework/hadoop/lib/native:./mr-framework/hadoop/lib/native/Linux-amd64-64
mapreduce.task.profile.reduce.params=${mapreduce.task.profile.params}
tez.am.launch.cluster-default.cmd-opts=-server -Djava.net.preferIPv4Stack=true -Dhdp.version=${hdp.version}
mapreduce.reduce.memory.mb=4096
hadoop.http.authentication.kerberos.principal=HTTP/_HOST@LOCALHOST
yarn.nodemanager.disk-health-checker.min-free-space-per-disk-mb=1000
oozie.action.rootlogger.log.level=INFO
hadoop.security.sensitive-config-keys=
    secret$
    password$
    ssl.keystore.pass$
    fs.s3.*[Ss]ecret.?[Kk]ey
    fs.s3a.*.server-side-encryption.key
    fs.azure.account.key.*
    credential$
    oauth.*token$
    hadoop.security.sensitive-config-keys
  
mapreduce.client.completion.pollinterval=5000
dfs.namenode.name.dir.restore=true
dfs.namenode.full.block.report.lease.length.ms=300000
tez.application.tags=oozie-c06b0d53a514ec89a3bd520d6f3fdff3
dfs.namenode.secondary.http-address=0.0.0.0:50090
hadoop.security.group.mapping.ldap.read.timeout.ms=60000
s3.bytes-per-checksum=512
tez.history.logging.service.class=org.apache.tez.dag.history.logging.ats.ATSV15HistoryLoggingService
yarn.resourcemanager.max-log-aggregation-diagnostics-in-memory=10
hive.server2.zookeeper.namespace=hiveserver2
hive.server2.enable.doAs=false
yarn.resourcemanager.webapp.https.address=s-msk-d-hd-mn1.*.*:8090
yarn.authorization-provider=org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer
tez.runtime.compress.codec=org.apache.hadoop.io.compress.SnappyCodec
hadoop.proxyuser.HTTP.groups=users
------------------------

Current (local) dir = /data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002
------------------------
  tmp
  launch_container.sh
  container_tokens
  httpclient-4.4.jar
  jackson-jaxrs-1.9.13.jar
  hive-bridge-0.8.0.2.6.4.0-91.jar
  oozie-sharelib-oozie-4.2.0.2.6.4.0-91.jar
  hive-shims-scheduler-1.2.1000.2.6.4.0-91.jar
  guava-11.0.2.jar
  atlas-intg-0.8.0.2.6.4.0-91.jar
  javax.jdo-3.2.0-m3.jar
  hadoop-yarn-registry-2.7.3.2.6.4.0-91.jar
  kafka-clients-0.10.1.2.6.4.0-91.jar
  velocity-1.5.jar
  hadoop-azure-2.7.3.2.6.4.0-91.jar
  hadoop-yarn-server-common-2.7.3.2.6.4.0-91.jar
  commons-collections4-4.1.jar
  azure-data-lake-store-sdk-2.1.4.jar
  javax.inject-1.jar
  netty-3.6.2.Final.jar
  tez-dag-0.7.0.2.6.4.0-91.jar
  hive-shims-common-1.2.1000.2.6.4.0-91.jar
  antlr-2.7.7.jar
  HikariCP-2.5.1.jar
  geronimo-jaspic_1.0_spec-1.0.jar
  atlas-typesystem-0.8.0.2.6.4.0-91.jar
  datanucleus-core-4.1.6.jar
  stringtemplate-3.2.1.jar
  avro-1.7.5.jar
  fst-2.24.jar
  jersey-client-1.9.jar
  commons-httpclient-3.0.1.jar
  atlas-application.properties
  jackson-databind-2.4.4.jar
  jetty-6.1.14.jar
  json4s-native_2.11-3.2.11.jar
  servlet-api-2.5-6.1.14.jar
  jackson-annotations-2.4.0.jar
  snappy-java-1.0.5.jar
  hadoop-yarn-server-resourcemanager-2.7.3.2.6.4.0-91.jar
  metrics-core-3.1.2.jar
  avatica-1.8.0.2.6.4.0-91.jar
  jta-1.1.jar
  aws-java-sdk-s3-1.10.6.jar
  json-simple-1.1.jar
  job.xml
  oozie-sharelib-hive-4.2.0.2.6.4.0-91.jar
  atlas-client-common-0.8.0.2.6.4.0-91.jar
  apache-log4j-extras-1.2.17.jar
  jackson-xc-1.9.13.jar
  mail-1.4.jar
  jpam-1.1.jar
  datanucleus-api-jdo-4.2.1.jar
  eigenbase-properties-1.1.5.jar
  mr-framework
  gson-2.5.jar
  tez-common-0.7.0.2.6.4.0-91.jar
  leveldbjni-all-1.8.jar
  atlas-client-v1-0.8.0.2.6.4.0-91.jar
  stax-api-1.0-2.jar
  jsr305-2.0.3.jar
  metrics-jvm-3.1.0.jar
  jline-2.12.jar
  groovy-all-2.4.11.jar
  avatica-metrics-1.8.0.2.6.4.0-91.jar
  ant-launcher-1.9.1.jar
  atlas-common-0.8.0.2.6.4.0-91.jar
  scala-compiler-2.11.8.jar
  metrics-json-3.1.0.jar
  zookeeper-3.4.6.2.6.4.0-91.jar
  jersey-guice-1.9.jar
  scala-library-2.11.8.jar
  objenesis-2.1.jar
  hive-shims-0.23-1.2.1000.2.6.4.0-91.jar
  ant-1.9.1.jar
  zookeeper-3.4.6.2.6.4.0-91-tests.jar
  commons-collections-3.2.2.jar
  commons-lang3-3.4.jar
  atlas-notification-0.8.0.2.6.4.0-91.jar
  commons-compress-1.4.1.jar
  hadoop-yarn-server-web-proxy-2.7.3.2.6.4.0-91.jar
  joda-time-2.9.6.jar
  tez-yarn-timeline-history-0.7.0.2.6.4.0-91.jar
  hadoop-yarn-common-2.7.3.2.6.4.0-91.jar
  antlr-runtime-3.4.jar
  commons-pool-1.5.4.jar
  ivy-2.4.0.jar
  hive-shims-0.20S-1.2.1000.2.6.4.0-91.jar
  hive-service-1.2.1000.2.6.4.0-91.jar
  asm-3.1.jar
  curator-framework-2.6.0.jar
  tez-mapreduce-0.7.0.2.6.4.0-91.jar
  parquet-hadoop-bundle-1.8.1.jar
  hadoop-yarn-api-2.7.3.2.6.4.0-91.jar
  avro-mapred-1.8.0-hadoop2.jar
  asm-tree-3.1.jar
  transaction-api-1.1.jar
  commons-codec-1.4.jar
  jersey-json-1.9.jar
  xz-1.0.jar
  tez-runtime-internals-0.7.0.2.6.4.0-91.jar
  geronimo-jta_1.1_spec-1.1.1.jar
  ST4-4.0.4.jar
  guice-3.0.jar
  paranamer-2.3.jar
  slf4j-api-1.6.6.jar
  pentaho-aggdesigner-algorithm-5.1.5-jhyde.jar
  tez-runtime-library-0.7.0.2.6.4.0-91.jar
  guice-servlet-3.0.jar
  hadoop-yarn-server-applicationhistoryservice-2.7.3.2.6.4.0-91.jar
  jaxb-api-2.2.2.jar
  tez-api-0.7.0.2.6.4.0-91.jar
  javassist-3.18.1-GA.jar
  opencsv-2.3.jar
  geronimo-annotation_1.0_spec-1.1.1.jar
  commons-dbcp-1.4.jar
  oozie-hadoop-utils-hadoop-2-4.2.0.2.6.4.0-91.jar
  servlet-api-2.5.jar
  asm-commons-3.1.jar
  avro-1.8.0.jar
  aws-java-sdk-core-1.10.6.jar
  azure-keyvault-core-0.8.0.jar
  hive-common-1.2.1000.2.6.4.0-91.jar
  atlas-client-v2-0.8.0.2.6.4.0-91.jar
  httpcore-4.4.jar
  oro-2.0.8.jar
  calcite-linq4j-1.2.0.2.6.4.0-91.jar
  okhttp-2.4.0.jar
  jdo-api-3.0.1.jar
  apache-curator-2.6.0.pom
  hadoop-annotations-2.7.3.2.6.4.0-91.jar
  slf4j-log4j12-1.6.6.jar
  hive-serde-1.2.1000.2.6.4.0-91.jar
  hdfs-model-0.8.0.2.6.4.0-91.jar
  azure-storage-5.4.0.jar
  jettison-1.3.4.jar
  libfb303-0.9.3.jar
  calcite-core-1.2.0.2.6.4.0-91.jar
  log4j-1.2.17.jar
  dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar
  scalap-2.11.8.jar
  kafka_2.11-0.10.1.2.6.4.0-91.jar
  datanucleus-rdbms-4.1.7.jar
  derby-10.10.1.1.jar
  tez-yarn-timeline-history-with-acls-0.7.0.2.6.4.0-91.jar
  json4s-ast_2.11-3.2.11.jar
  hadoop-aws-2.7.3.2.6.4.0-91.jar
  jetty-util-6.1.26.hwx.jar
  hadoop-yarn-server-timeline-pluginstorage-2.7.3.2.6.4.0-91.jar
  aopalliance-1.0.jar
  aws-java-sdk-kms-1.10.6.jar
  hive-metastore-1.2.1000.2.6.4.0-91.jar
  hive-cli-1.2.1000.2.6.4.0-91.jar
  hive-exec-1.2.1000.2.6.4.0-91.jar
  curator-client-2.6.0.jar
  commons-io-2.4.jar
  jackson-core-2.4.4.jar
  drop_part.sql
  scala-reflect-2.11.8.jar
  commons-compiler-2.7.6.jar
  hive-contrib-1.2.1000.2.6.4.0-91.jar
  stax-api-1.0.1.jar
  hive-ant-1.2.1000.2.6.4.0-91.jar
  libthrift-0.9.3.jar
  activation-1.1.jar
  commons-lang-2.4.jar
  jersey-core-1.9.jar
  protobuf-java-2.5.0.jar
  hive-shims-1.2.1000.2.6.4.0-91.jar
  okio-1.4.0.jar
  json4s-core_2.11-3.2.11.jar
  jetty-all-7.6.0.v20120127.jar
  commons-cli-1.2.jar
  json-20090211.jar
  commons-logging-1.1.jar
  jersey-multipart-1.19.jar
  tez-yarn-timeline-history-with-fs-0.7.0.2.6.4.0-91.jar
  janino-2.7.6.jar
  bonecp-0.8.0.RELEASE.jar
  curator-recipes-2.5.0.jar
  jaxb-impl-2.2.3-1.jar
  hadoop-azure-datalake-2.7.3.2.6.4.0-91.jar
  .job.xml.crc
  action.xml
  .action.xml.crc
  propagation-conf.xml
  hive-site.xml
  hive-log4j.properties
  hive-exec-log4j.properties
------------------------


Script [drop_part.sql] content: 
------------------------
ALTER TABLE ${rawTbl} DROP IF EXISTS PARTITION(${partName}="${partition}") PURGE;

------------------------

Parameters:
------------------------
  rawTbl=raw_gdwh_midas_rurcicur.rci
  partName=dlk_cob_date
  partition=2017-10-30
------------------------
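
With those parameters substituted into drop_part.sql, the statement Hive is being asked to run resolves to the following (a manual expansion for readability, not part of the log output):

  ALTER TABLE raw_gdwh_midas_rurcicur.rci DROP IF EXISTS PARTITION(dlk_cob_date="2017-10-30") PURGE;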

Hive command arguments :
             --hiveconf
             hive.log4j.file=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-log4j.properties
             --hiveconf
             hive.exec.log4j.file=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-exec-log4j.properties
             --hiveconf
             hive.log.trace.id=oozie:0000003-180207125715047-oozie-oozi-W
             -f
             drop_part.sql
             --hivevar
             rawTbl=raw_gdwh_midas_rurcicur.rci
             --hivevar
             partName=dlk_cob_date
             --hivevar
             partition=2017-10-30
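
Assembled onto one line, the launcher invocation above is equivalent to the following hive CLI call (reconstructed from the listed arguments in their original order; the container paths are copied verbatim):

  hive --hiveconf hive.log4j.file=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-log4j.properties \
       --hiveconf hive.exec.log4j.file=/data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-exec-log4j.properties \
       --hiveconf hive.log.trace.id=oozie:0000003-180207125715047-oozie-oozi-W \
       -f drop_part.sql \
       --hivevar rawTbl=raw_gdwh_midas_rurcicur.rci \
       --hivevar partName=dlk_cob_date \
       --hivevar partition=2017-10-30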

Fetching child yarn jobs
tag id : oozie-c06b0d53a514ec89a3bd520d6f3fdff3
Child yarn jobs are found - 
=================================================================

>>> Invoking Hive command line now >>>

3521 [main] -5p SessionState  - Logging initialized using configuration in /data/1/hadoop/yarn/local/usercache/admfkr/appcache/application_1518001613009_0002/container_e46_1518001613009_0002_01_000002/hive-log4j.properties
3742 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
3802 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
3808 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
3809 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
[... the identical "Trying to connect" / "SASL negotiation failure" / GSSException stack trace / "Failed to connect" / "Waiting 5 seconds" sequence repeats at 8809, 13813, 18817 and 23821 ms ...]
28825 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
28828 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
28829 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
28829 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
Heart beat
33829 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
33829 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
33835 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
33836 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
33836 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
33836 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
33836 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
38836 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
38836 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
38838 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
38839 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
38839 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
38839 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
38839 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
43839 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
43839 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
43842 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
43843 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
43843 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
43843 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
43843 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
48843 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
48843 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
48845 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
48846 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
48846 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
48846 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
48846 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
53846 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
53846 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
53849 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
53850 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
53850 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
53850 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
53850 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
58850 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
58850 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
58852 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
58854 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
58854 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
58854 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
58854 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
Heart beat
63854 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
63854 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
63860 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
63861 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
63861 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
63861 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
63861 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
68861 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
68861 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
68863 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
68864 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
68864 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
68864 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
68864 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
73865 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
73865 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
73867 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
73868 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
73868 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
73868 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
73868 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
78868 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
78868 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
78872 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 45 more
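From this trace it looks like the Hive CLI started by the Oozie launcher has no Kerberos TGT of its own, so the GSSAPI/SASL handshake with the metastore never gets off the ground. As far as I understand, on a secure cluster the workflow has to request HCat delegation tokens for the action through a <credentials> block and reference it from the action's cred attribute. A rough sketch of what I mean is below — the workflow name, action name, credential name and realm are placeholders, and the URI/principal must match the values in hive-site.xml:

<workflow-app xmlns="uri:oozie:workflow:0.5" name="my-wf">
    <credentials>
        <!-- sketch: "hive_cred" and MY.REALM are placeholders;
             hcat.metastore.uri / hcat.metastore.principal must match hive-site.xml -->
        <credential name="hive_cred" type="hcat">
            <property>
                <name>hcat.metastore.uri</name>
                <value>thrift://s-msk-d-hd-mn1.*.*:9083</value>
            </property>
            <property>
                <name>hcat.metastore.principal</name>
                <value>hive/_HOST@MY.REALM</value>
            </property>
        </credential>
    </credentials>
    <start to="hive-node"/>
    <action name="hive-node" cred="hive_cred">
        <hive xmlns="uri:oozie:hive-action:0.2">
            <!-- job-tracker, name-node, script, parameters, etc. -->
        </hive>
        <ok to="end"/>
        <error to="fail"/>
    </action>
    <kill name="fail"><message>Hive action failed</message></kill>
    <end name="end"/>
</workflow-app>

Is this the right direction, or is something else required? The rest of the log just repeats the same failure: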
78873 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
78873 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
83873 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
83875 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    ... (remainder of the stack trace is identical to the one above) ...
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    ... 45 more
83876 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
83876 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
[the identical connect attempt, SASL negotiation failure and 5-second retry repeat at timestamps 88876, 93880, 98889, 103892, 108895 and 113898; duplicated log lines and "Heart beat" markers omitted]
123924 [main] -5p hive.ql.metadata.Hive  - Failed to access metastore. This class should not accessed in runtime.
java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1566)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    ... 28 more
Caused by: MetaException(message:Could not connect to meta store using any of the URIs provided. Most recent failure: org.apache.thrift.transport.TTransportException: GSS initiate failed
    at org.apache.thrift.transport.TSaslTransport.sendAndThrowMessage(TSaslTransport.java:232)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:316)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3799)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:221)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:213)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:534)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    ... 33 more
123925 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
123931 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
123932 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
123932 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
128936 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
128936 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
128938 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
128939 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
128939 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
128939 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
128939 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
133939 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
133939 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
133941 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
133942 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
133942 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
133942 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
133942 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
138942 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
138942 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
138945 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
138946 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
138946 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
138946 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
138946 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
143946 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
143946 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
143948 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
143949 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
143949 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
143949 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
143949 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
148949 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
148949 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
148951 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
148952 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
148952 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
148952 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
148952 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
Heart beat
153952 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
153952 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
153959 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
153960 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
153960 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
153960 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
153960 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
158960 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
158960 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
158963 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
158963 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
158963 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
158964 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
158964 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
163964 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
163964 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
163966 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
163967 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
163967 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
163967 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
163967 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
168968 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
168968 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
168971 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
168972 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
168972 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
168972 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
168972 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
173972 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
173972 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
173974 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
173975 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
173975 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
178975 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
178977 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
178977 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
178977 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
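[Editor's note, for context on what the log above is saying: every 5 seconds the Hive CLI, launched from the Oozie launcher mapper (see the HiveMain / LauncherMapper frames), retries the Thrift connection to the metastore, and each SASL/GSSAPI negotiation fails because the YARN container holds no Kerberos TGT. In a kerberized cluster the launcher is expected to authenticate with a metastore delegation token that Oozie obtains at submission time, so this pattern usually points at a workflow that never requests HCat credentials. A minimal sketch of the relevant workflow.xml pieces, assuming the hcat credential class is registered in oozie-site.xml (oozie.credentials.credentialclasses contains hcat=org.apache.oozie.action.hadoop.HCatCredentials) and using a hypothetical realm and script name:

    <workflow-app xmlns="uri:oozie:workflow:0.5" name="hive-wf">
        <credentials>
            <!-- "hive_cred" is an arbitrary name; type "hcat" selects Oozie's HCatCredentials provider -->
            <credential name="hive_cred" type="hcat">
                <property>
                    <name>hcat.metastore.uri</name>
                    <value>thrift://s-msk-d-hd-mn1.*.*:9083</value>   <!-- the URI the log keeps retrying -->
                </property>
                <property>
                    <name>hcat.metastore.principal</name>
                    <value>hive/_HOST@EXAMPLE.COM</value>   <!-- hypothetical realm; copy hive.metastore.kerberos.principal from hive-site.xml -->
                </property>
            </credential>
        </credentials>
        <start to="clear_hive_tbl"/>
        <!-- cred= is what attaches the metastore delegation token to this action's launcher -->
        <action name="clear_hive_tbl" cred="hive_cred">
            <hive xmlns="uri:oozie:hive-action:0.2">
                <job-tracker>${jobTracker}</job-tracker>
                <name-node>${nameNode}</name-node>
                <script>clear_hive_tbl.q</script>   <!-- hypothetical script name -->
            </hive>
            <ok to="end"/>
            <error to="fail"/>
        </action>
        <kill name="fail"><message>Hive action failed</message></kill>
        <end name="end"/>
    </workflow-app>

Without the cred attribute the container has nothing to present during the GSS handshake, which is exactly the "Failed to find any Kerberos tgt" seen above; with it, Oozie fetches the token from the metastore before launching the job. This is a sketch of the usual setup, not a confirmed diagnosis of this particular cluster.]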
[... The same retry block ("Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083", "SASL negotiation failure", the GSSException "No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)" stack trace, "Failed to connect to the MetaStore Server...", "Waiting 5 seconds before next connection attempt.") repeats verbatim, with every line logged twice and an occasional "Heart beat", for ten more attempts at 5-second intervals (timestamps 183978 through 229016). The last captured attempt follows. ...]
234016 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
234018 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
234019 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
234019 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
234019 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
234019 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
239019 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
239019 [main] -5p hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
239021 [main] -5p org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
    ... 42 more
239021 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
239021 [main] -5p hive.metastore  - Failed to connect to the MetaStore Server...
239021 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
239021 [main] -5p hive.metastore  - Waiting 5 seconds before next connection attempt.
Heart beat

<<< Invocation of Hive command completed <<<

No child hadoop job is executed.

<<< Invocation of Main class completed <<<

Failing Oozie Launcher, Main class [org.apache.oozie.action.hadoop.HiveMain], main() threw exception, java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:569)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1566)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    ... 19 more
Caused by: java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    ... 25 more
Caused by: MetaException(message:Could not connect to meta store using any of the URIs provided. Most recent failure: org.apache.thrift.transport.TTransportException: GSS initiate failed
    at org.apache.thrift.transport.TSaslTransport.sendAndThrowMessage(TSaslTransport.java:232)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:316)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:487)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:110)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3528)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3560)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:550)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
    at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:336)
    at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:313)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:69)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:534)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:76)
    ... 30 more

Oozie Launcher failed, finishing Hadoop job gracefully

Oozie Launcher, uploading action data to HDFS sequence file: hdfs://DevDataLakeNNHA/user/admfkr/oozie-oozi/0000003-180207125715047-oozie-oozi-W/clear_hive_tbl--hive/action-data.seq
244116 [main] -5p org.apache.hadoop.io.compress.zlib.ZlibFactory  - Successfully loaded & initialized native-zlib library
244117 [main] -5p org.apache.hadoop.io.compress.CodecPool  - Got brand-new compressor [.deflate]
Successfully reset security manager from org.apache.oozie.action.hadoop.LauncherSecurityManager@736048ed to null

Oozie Launcher ends

244162 [main] -5p org.apache.hadoop.mapred.Task  - Task:attempt_1518001613009_0002_m_000000_0 is done. And is in the process of committing
244234 [main] -5p org.apache.hadoop.mapred.Task  - Task 'attempt_1518001613009_0002_m_000000_0' done.



1 ACCEPTED SOLUTION

avatar
Rising Star

Having two different credentials in the workflow was wrong. Only the hive2 credential was needed, not the hcat one.

The article at the link below helped:

https://community.hortonworks.com/articles/75107/oozie-hive-2-action-in-a-kerberized-cluster.html
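
For reference, a minimal sketch of what the credentials block looks like with only the hive2 credential, per that article (the host name and Kerberos realm below are placeholders):

<credentials>
    <credential name="hive2" type="hive2">
        <property>
            <name>hive2.jdbc.url</name>
            <value>jdbc:hive2://your-hs2-host:10000/default</value>
        </property>
        <property>
            <name>hive2.server.principal</name>
            <value>hive/_HOST@YOUR.REALM</value>
        </property>
    </credential>
</credentials>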

2 REPLIES

avatar
Rising Star

I have added a <credentials> block to Oozie's workflow.xml.

But now I get a new error:

6741 [main] INFO  org.apache.hive.hcatalog.common.HiveClientCache  - Initializing cache: eviction-timeout=120 initial-capacity=50 maximum-capacity=50
6783 [main] INFO  hive.metastore  - Trying to connect to metastore with URI thrift://s-msk-d-hd-mn1.*.*:9083
6939 [main] WARN  hive.metastore  - set_ugi() not successful, Likely cause: new client talking to old server. Continuing without it.
org.apache.thrift.transport.TTransportException
    at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
    at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
    at org.apache.thrift.protocol.TBinaryProtocol.readStringBody(TBinaryProtocol.java:380)
    at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:230)
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_set_ugi(ThriftHiveMetastore.java:3802)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.set_ugi(ThriftHiveMetastore.java:3788)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:503)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:282)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:188)
    at org.apache.hive.hcatalog.common.HiveClientCache$CacheableHiveMetaStoreClient.<init>(HiveClientCache.java:406)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1564)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:92)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:138)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:124)
    at org.apache.hive.hcatalog.common.HiveClientCache$5.call(HiveClientCache.java:295)
    at org.apache.hive.hcatalog.common.HiveClientCache$5.call(HiveClientCache.java:291)
    at com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4767)
    at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3568)
    at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2350)
    at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2313)
    at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2228)
    at com.google.common.cache.LocalCache.get(LocalCache.java:3965)
    at com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4764)
    at org.apache.hive.hcatalog.common.HiveClientCache.getOrCreate(HiveClientCache.java:291)
    at org.apache.hive.hcatalog.common.HiveClientCache.get(HiveClientCache.java:266)
    at org.apache.hive.hcatalog.common.HCatUtil.getHiveMetastoreClient(HCatUtil.java:558)
    at org.apache.hive.hcatalog.mapreduce.InitializeInput.getInputJobInfo(InitializeInput.java:104)
    at org.apache.hive.hcatalog.mapreduce.InitializeInput.setInput(InitializeInput.java:88)
    at org.apache.hive.hcatalog.mapreduce.HCatInputFormat.setInput(HCatInputFormat.java:95)
    at org.apache.hive.hcatalog.mapreduce.HCatInputFormat.setInput(HCatInputFormat.java:51)
    at org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities.configureHCat(SqoopHCatUtilities.java:349)
    at org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities.configureImportOutputFormat(SqoopHCatUtilities.java:848)
    at org.apache.sqoop.mapreduce.ImportJobBase.configureOutputFormat(ImportJobBase.java:102)
    at org.apache.sqoop.mapreduce.ImportJobBase.runImport(ImportJobBase.java:263)
    at org.apache.sqoop.manager.SqlManager.importQuery(SqlManager.java:748)
    at org.apache.sqoop.manager.OracleManager.importQuery(OracleManager.java:454)
    at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:509)
    at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:615)
    at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)
    at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:225)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
    at org.apache.sqoop.Sqoop.main(Sqoop.java:243)
    at org.apache.oozie.action.hadoop.SqoopMain.runSqoopJob(SqoopMain.java:197)
    at org.apache.oozie.action.hadoop.SqoopMain.run(SqoopMain.java:179)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.SqoopMain.main(SqoopMain.java:48)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
6946 [main] INFO  hive.metastore  - Connected to metastore.
7126 [main] ERROR hive.log  - Got exception: org.apache.thrift.transport.TTransportException null
org.apache.thrift.transport.TTransportException
    at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
    at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
    at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:429)
    at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:318)
    at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:219)
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_databases(ThriftHiveMetastore.java:746)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_databases(ThriftHiveMetastore.java:733)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getDatabases(HiveMetaStoreClient.java:1116)
    at org.apache.hive.hcatalog.common.HiveClientCache$CacheableHiveMetaStoreClient.isOpen(HiveClientCache.java:469)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:174)
    at com.sun.proxy.$Proxy19.isOpen(Unknown Source)
    at org.apache.hive.hcatalog.common.HiveClientCache.get(HiveClientCache.java:269)
    at org.apache.hive.hcatalog.common.HCatUtil.getHiveMetastoreClient(HCatUtil.java:558)
    at org.apache.hive.hcatalog.mapreduce.InitializeInput.getInputJobInfo(InitializeInput.java:104)
    at org.apache.hive.hcatalog.mapreduce.InitializeInput.setInput(InitializeInput.java:88)
    at org.apache.hive.hcatalog.mapreduce.HCatInputFormat.setInput(HCatInputFormat.java:95)
    at org.apache.hive.hcatalog.mapreduce.HCatInputFormat.setInput(HCatInputFormat.java:51)
    at org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities.configureHCat(SqoopHCatUtilities.java:349)
    at org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities.configureImportOutputFormat(SqoopHCatUtilities.java:848)
    at org.apache.sqoop.mapreduce.ImportJobBase.configureOutputFormat(ImportJobBase.java:102)
    at org.apache.sqoop.mapreduce.ImportJobBase.runImport(ImportJobBase.java:263)
    at org.apache.sqoop.manager.SqlManager.importQuery(SqlManager.java:748)
    at org.apache.sqoop.manager.OracleManager.importQuery(OracleManager.java:454)
    at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:509)
    at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:615)
    at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)
    at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:225)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
    at org.apache.sqoop.Sqoop.main(Sqoop.java:243)
    at org.apache.oozie.action.hadoop.SqoopMain.runSqoopJob(SqoopMain.java:197)
    at org.apache.oozie.action.hadoop.SqoopMain.run(SqoopMain.java:179)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.SqoopMain.main(SqoopMain.java:48)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)
7126 [main] ERROR hive.log  - Converting exception to MetaException
7134 [main] WARN  hive.metastore  - Evicted client has non-zero user count: 1
7134 [main] WARN  hive.metastore  - Non-zero user count preventing client tear down: users=1 expired=true
7139 [main] WARN  org.apache.thrift.transport.TIOStreamTransport  - Error closing output stream.
java.net.SocketException: Socket closed
    at java.net.SocketOutputStream.socketWrite(SocketOutputStream.java:118)
    at java.net.SocketOutputStream.write(SocketOutputStream.java:155)
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:140)
    at java.io.FilterOutputStream.close(FilterOutputStream.java:158)
    at org.apache.thrift.transport.TIOStreamTransport.close(TIOStreamTransport.java:110)
    at org.apache.thrift.transport.TSocket.close(TSocket.java:235)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.close(HiveMetaStoreClient.java:567)
    at org.apache.hive.hcatalog.common.HiveClientCache$CacheableHiveMetaStoreClient.tearDown(HiveClientCache.java:508)
    at org.apache.hive.hcatalog.common.HiveClientCache$CacheableHiveMetaStoreClient.tearDownIfUnused(HiveClientCache.java:498)
    at org.apache.hive.hcatalog.common.HiveClientCache$CacheableHiveMetaStoreClient.close(HiveClientCache.java:483)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:174)
    at com.sun.proxy.$Proxy19.close(Unknown Source)
    at org.apache.hive.hcatalog.common.HiveClientCache.get(HiveClientCache.java:272)
    at org.apache.hive.hcatalog.common.HCatUtil.getHiveMetastoreClient(HCatUtil.java:558)
    at org.apache.hive.hcatalog.mapreduce.InitializeInput.getInputJobInfo(InitializeInput.java:104)
    at org.apache.hive.hcatalog.mapreduce.InitializeInput.setInput(InitializeInput.java:88)
    at org.apache.hive.hcatalog.mapreduce.HCatInputFormat.setInput(HCatInputFormat.java:95)
    at org.apache.hive.hcatalog.mapreduce.HCatInputFormat.setInput(HCatInputFormat.java:51)
    at org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities.configureHCat(SqoopHCatUtilities.java:349)
    at org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities.configureImportOutputFormat(SqoopHCatUtilities.java:848)
    at org.apache.sqoop.mapreduce.ImportJobBase.configureOutputFormat(ImportJobBase.java:102)
    at org.apache.sqoop.mapreduce.ImportJobBase.runImport(ImportJobBase.java:263)
    at org.apache.sqoop.manager.SqlManager.importQuery(SqlManager.java:748)
    at org.apache.sqoop.manager.OracleManager.importQuery(OracleManager.java:454)
    at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:509)
    at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:615)
    at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)
    at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:225)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
    at org.apache.sqoop.Sqoop.main(Sqoop.java:243)
    at org.apache.oozie.action.hadoop.SqoopMain.runSqoopJob(SqoopMain.java:197)
    at org.apache.oozie.action.hadoop.SqoopMain.run(SqoopMain.java:179)
    at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:58)
    at org.apache.oozie.action.hadoop.SqoopMain.main(SqoopMain.java:48)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:240)
    at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:170)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:164)

workflow:

<workflow-app xmlns="uri:oozie:workflow:0.5" name="TestFormat:RCI DATE:${loadFolder}">

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <job-xml>${hiveConfFilePath}/hive-site.xml</job-xml>
        <configuration>
            <property>
                <name>mapred.job.queue.name</name>
                <value>${queueName}</value>
            </property>
        </configuration>
    </global> 
    
    <credentials>
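        <!-- Note: two credentials (hive2 and hcat) are defined here, and only
             the hcat one is attached to the rci_orc action below. As the
             accepted answer points out, only the hive2 credential was needed. -->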
        <credential name="hive2" type="hive2">
            <property>
                <name>hive2.jdbc.url</name>
                <value>jdbc:hive2://s-msk-d-hd-mn1.*.*:10000/default;</value>
            </property>
            <property>
                <name>hive2.server.principal</name>
                <value>hive/_HOST@*.*</value>
            </property>
        </credential>
        <credential name='hcat' type='hcat'>
            <property>
               <name>hcat.metastore.uri</name>
               <value>thrift://s-msk-d-hd-mn1.*.*:9083</value>
            </property>
            <property>
               <name>hcat.metastore.principal</name>
               <value>hive/_HOST@*.*</value>
            </property>
        </credential>
    </credentials>
    
    <start to="rci_orc"/>
    
    <action name="rci_avro">
        <sqoop xmlns="uri:oozie:sqoop-action:0.4">
            <arg>import</arg>
            <arg>--options-file</arg>
            <arg>${opfFile}</arg>
            <arg>--query</arg> 
            <arg>${select}</arg>
            <arg>--m</arg> 
            <arg>${defaultNumMap}</arg>
            <arg>--compress</arg>
            <arg>--compression-codec</arg>
            <arg>org.apache.hadoop.io.compress.SnappyCodec</arg> 
            <arg>--as-avrodatafile</arg>
            <arg>--target-dir</arg> 
            <arg>"/tmp/ruaetqg/test_formats/rci.avro"</arg>
            <arg>--null-string</arg>
            <arg>'\\N'</arg>
            <arg>--null-non-string</arg>
            <arg>'\\N'</arg>
            <arg>--direct</arg>
            <arg>--delete-target-dir</arg>
            <file>${opfFilePath}/${opfFile}#${opfFile}</file> 
            <file>${jdbcFilePath}/${jdbcFile}#${jdbcFile}</file> 
       </sqoop>
        <ok to="rci_parquet"/>
        <error to="kill"/>
    </action>
    
    <action name="rci_parquet">
        <sqoop xmlns="uri:oozie:sqoop-action:0.4">
            <arg>import</arg>
            <arg>--options-file</arg>
            <arg>${opfFile}</arg>
            <arg>--query</arg> 
            <arg>${select}</arg>
            <arg>--m</arg> 
            <arg>${defaultNumMap}</arg>
            <arg>--compress</arg>
            <arg>--compression-codec</arg>
            <arg>org.apache.hadoop.io.compress.SnappyCodec</arg> 
            <arg>--as-parquetfile</arg>
            <arg>--target-dir</arg> 
            <arg>"/tmp/ruaetqg/test_formats/rci.parquet"</arg>
            <arg>--null-string</arg>
            <arg>'\\N'</arg>
            <arg>--null-non-string</arg>
            <arg>'\\N'</arg>
            <arg>--direct</arg>
            <arg>--delete-target-dir</arg>
            <file>${opfFilePath}/${opfFile}#${opfFile}</file> 
            <file>${jdbcFilePath}/${jdbcFile}#${jdbcFile}</file> 
        </sqoop>
        <ok to="rci_orc"/>
        <error to="kill"/>
    </action>
    
    <action name="rci_orc" cred='hcat'>
        <sqoop xmlns="uri:oozie:sqoop-action:0.4">
            <arg>import</arg>
            <arg>--options-file</arg>
            <arg>${opfFile}</arg>
            <arg>--query</arg> 
            <arg>${select}</arg>
            <arg>--num-mappers</arg> 
            <arg>${defaultNumMap}</arg>
            <arg>--compress</arg>
            <arg>--compression-codec</arg>
            <arg>org.apache.hadoop.io.compress.SnappyCodec</arg>
            <arg>--null-string</arg>
            <arg>'\\N'</arg>
            <arg>--null-non-string</arg>
            <arg>'\\N'</arg>
            <arg>--hcatalog-database</arg>
            <arg>test_formats</arg>
            <arg>--hcatalog-table</arg>
            <arg>rci_orc</arg>
            <arg>--skip-dist-cache</arg>
            <file>${opfFilePath}/${opfFile}#${opfFile}</file> 
            <file>${jdbcFilePath}/${jdbcFile}#${jdbcFile}</file> 
        </sqoop>
        <ok to="end"/>
        <error to="kill"/>
    </action>
    
    <kill name="kill">
        <message>${wf:errorMessage(wf:lastErrorNode())}</message>
    </kill>

    <end name="end"/>
</workflow-app>

avatar
Rising Star

Having two different credentials in the workflow was wrong. Only the hive2 credential was needed, not the hcat one.

The article at the link below helped:

https://community.hortonworks.com/articles/75107/oozie-hive-2-action-in-a-kerberized-cluster.html
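
As a sketch, the single hive2 credential is then referenced from the action's cred attribute; the action name below matches the failing workflow node (clear_hive_tbl), while the JDBC URL and script name are placeholders:

<action name="clear_hive_tbl" cred="hive2">
    <hive2 xmlns="uri:oozie:hive2-action:0.1">
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <jdbc-url>jdbc:hive2://your-hs2-host:10000/default</jdbc-url>
        <script>clear_tbl.hql</script>
    </hive2>
    <ok to="end"/>
    <error to="kill"/>
</action>

With the hive2 credential type, Oozie fetches a HiveServer2 delegation token before launching the action, so the launcher container no longer needs its own Kerberos TGT to reach the metastore.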