
Oozie-submitted Spark job fails with "Mechanism level: Failed to find any Kerberos tgt"


New Contributor
2019-06-11 15:59:06,314 [pool-21-thread-13] ERROR org.apache.thrift.transport.TSaslTransport  - SASL negotiation failure
 javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
     at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
     at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
     at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
     at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
     at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
     at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
     at java.security.AccessController.doPrivileged(Native Method)
     at javax.security.auth.Subject.doAs(Subject.java:422)
     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
     at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
     at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:426)
     at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:242)
     at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
     at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
     at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
     at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
     at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3021)
     at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3040)
     at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
     at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
     at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
     at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
     at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:186)
     at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:120)
     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
     at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
     at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
     at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:274)
     at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:390)
     at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:287)
     at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
     at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
     at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
     at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
     at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
     at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
     at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
     at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:114)
     at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:102)
     at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:138)
     at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:133)
     at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anonfun$2.apply(HiveSessionStateBuilder.scala:54)
     at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anonfun$2.apply(HiveSessionStateBuilder.scala:54)
     at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:91)
     at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:91)
     at org.apache.spark.sql.catalyst.catalog.SessionCatalog.createDatabase(SessionCatalog.scala:203)
     at org.apache.spark.sql.execution.command.CreateDatabaseCommand.run(ddl.scala:70)
     at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
     at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
     at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
     at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190)
     at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190)
     at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3259)
     at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
     at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3258)
     at org.apache.spark.sql.Dataset.<init>(Dataset.scala:190)
     at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:75)
     at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
     at com.wumii.analysis.data.sync.InitDataJob$.initPartition(InitDataJob.scala:40)
     at com.wumii.analysis.data.sync.InitDataJob$.doInitCrement(InitDataJob.scala:121)
     at com.wumii.analysis.data.sync.InitDataJob$$anonfun$initIncrement$1$$anon$1.call(InitDataJob.scala:92)
     at com.wumii.analysis.data.sync.InitDataJob$$anonfun$initIncrement$1$$anon$1.call(InitDataJob.scala:89)
     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
     at java.lang.Thread.run(Thread.java:748)
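
The trace shows Spark's HiveClientImpl failing the SASL/GSSAPI handshake when it opens a Thrift connection to the Hive metastore: GSSAPI finds no Kerberos TGT in the credential cache of the process running the action. As a first sanity check (a sketch, not a fix: hive.service.keytab and the WUMII.NET realm are taken from the workflow below, and <metastore-host> is a placeholder for the actual host), the shipped keytab can be tested on a cluster node:

    # List the principals and key timestamps stored in the keytab
    klist -kt hive.service.keytab
    # Try to obtain a TGT with it; failure here means the keytab is stale or the principal is wrong
    kinit -kt hive.service.keytab hive/<metastore-host>@WUMII.NET
    # Confirm the ticket cache now holds a TGT
    klist

If kinit succeeds, the keytab itself is fine, and the problem is more likely that the credential or principal never reaches the Spark action (see the note after the workflow).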



workflow.xml

<workflow-app name="Init-Data-Increment-Job" xmlns="uri:oozie:workflow:0.5">
    <credentials>
        <credential name="hcatauth" type="hcat">
            <property>
                <name>hcat.metastore.uri</name>
                <value>thrift://ip:9083</value>
            </property>
            <property>
                <name>hcat.metastore.principal</name>
                <value>hive/_HOST@WUMII.NET</value>
            </property>
        </credential>
    </credentials>
    <start to="spark-a61f"/>
    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
    <action name="spark-a61f">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Init-Data-Increment</name>
            <class>com.wumii.analysis.data.sync.InitDataJob</class>
            <jar>data-inbound-1.0-SNAPSHOT.jar</jar>
            <spark-opts>--master yarn --driver-memory 16g --executor-memory 8g --queue online --keytab hive.service.keytab</spark-opts>
            <arg>2</arg>
            <file>/user/oozie/job/data-inbound/data-inbound-1.0-SNAPSHOT.jar#data-inbound-1.0-SNAPSHOT.jar</file>
            <file>/user/oozie/job/data-inbound/platform-common-1.0-SNAPSHOT.jar#platform-common-1.0-SNAPSHOT.jar</file>
            <file>/user/oozie/job/data-inbound/hive.service.keytab#hive.service.keytab</file>
        </spark>
        <ok to="End"/>
        <error to="Kill"/>
    </action>
    <end name="End"/>
</workflow-app>
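
One detail worth noting in the workflow above: the <credentials> block is declared but never attached to the action. In Oozie, an action only receives a credential's delegation tokens when it references that credential by name through the cred attribute, so a likely fix (a minimal sketch reusing the names already defined above) is:

    <action name="spark-a61f" cred="hcatauth">
        <spark xmlns="uri:oozie:spark-action:0.2">
            ...
        </spark>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

Separately, spark-submit normally expects --principal together with --keytab; a --keytab in <spark-opts> with no matching --principal gives Spark no identity to log in as, which can produce the same GSSException.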



1 Reply

Re: Oozie-submitted Spark job fails with "Mechanism level: Failed to find any Kerberos tgt"

Explorer

Were you able to resolve this error?