<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Oozie shell action - Spark2-submit pyspark job having hive queries. in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Oozie-shell-action-Spark2-submit-pyspark-job-having-hive/m-p/81366#M84504</link>
    <description>&lt;P&gt;The issue was that Spark couldn't read hive-site.xml, so providing the properties directly in the SparkSession builder fixed it.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;from pyspark.sql import SparkSession

spark_session = (SparkSession
                 .builder
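                 # the settings below mirror the cluster's hive-site.xml
                 # (the ZooKeeper quorum and Kerberos principals are redacted in this post)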
                 .config("hive.metastore.uris", "thrift://server.country.cloud.company.com:9083")
                 .config("hive.metastore.client.socket.timeout", "300")
                 .config("hive.metastore.warehouse.dir", "/user/hive/warehouse")
                 .config("hive.warehouse.subdir.inherit.perms", "true")
                 .config("hive.execution.engine", "mr")
                 .config("hive.metastore.execute.setugi", "true")
                 .config("hive.support.concurrency", "true")
                 .config("hive.zookeeper.quorum", "&amp;lt;&amp;lt;REDACTED&amp;gt;&amp;gt;")
                 .config("hive.zookeeper.client.port", "2181")
                 .config("hive.zookeeper.namespace", "hive_zookeeper_namespace_hive")
                 .config("hive.cluster.delegation.token.store.class", "org.apache.hadoop.hive.thrift.MemoryTokenStore")
                 .config("hive.server2.enable.doAs", "false")
                 .config("hive.metastore.sasl.enabled", "true")
                 .config("hive.server2.authentication", "kerberos")
                 .config("hive.metastore.kerberos.principal", "&amp;lt;&amp;lt;REDACTED&amp;gt;&amp;gt;")
                 .config("hive.server2.authentication.kerberos.principal", "&amp;lt;&amp;lt;REDACTED&amp;gt;&amp;gt;")
                 .config("hive.server2.use.SSL", "true")
                 .config("hive.exec.dynamic.partition", "true")
                 .config("hive.exec.dynamic.partition.mode", "nonstrict")
                 .enableHiveSupport()
                 .appName('app_name')
                 .getOrCreate())&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
    <pubDate>Mon, 22 Oct 2018 21:51:13 GMT</pubDate>
    <dc:creator>Amirdha</dc:creator>
    <dc:date>2018-10-22T21:51:13Z</dc:date>
    <item>
      <title>Oozie shell action - Spark2-submit pyspark job having hive queries.</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Oozie-shell-action-Spark2-submit-pyspark-job-having-hive/m-p/81362#M84503</link>
      <description>&lt;P&gt;Environment:&amp;nbsp;Cloudera Enterprise 5.10.2.&amp;nbsp;&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;We have a Python Spark job that is submitted with spark2-submit from the edge node and works as expected. The job queries Hive tables and runs in both client and cluster mode.&lt;/P&gt;
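&lt;P&gt;For illustration, a stripped-down sketch of that kind of job (the database, table, and app names below are placeholders, not from our actual job):&lt;/P&gt;
&lt;PRE&gt;from pyspark.sql import SparkSession

# Build a Hive-enabled session; from the edge node this typically picks up
# hive-site.xml via the client configuration.
spark = (SparkSession
         .builder
         .appName('example_app')
         .enableHiveSupport()
         .getOrCreate())

# Query a Hive table through the metastore.
spark.sql("SELECT COUNT(*) FROM example_db.example_table").show()

spark.stop()&lt;/PRE&gt;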
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;However, when it is submitted via a shell action from the Oozie workflow, it fails to connect to the metastore: it opens a connection and then loses it. Any lead on how to fix this would be helpful.&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;Log excerpt from the failed Oozie run:&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;DIV&gt;18/10/22 10:52:23 INFO hive.metastore: Trying to connect to metastore with URI thrift://correcturl.country.cloud.company.com:9083&lt;/DIV&gt;
&lt;DIV&gt;18/10/22 10:52:23 INFO hive.metastore: Opened a connection to metastore, current connections: 1&lt;/DIV&gt;
&lt;DIV&gt;18/10/22 10:52:23 WARN hive.metastore:&lt;SPAN&gt;&amp;nbsp;&lt;/SPAN&gt;&lt;STRONG&gt;set_ugi() not successful, Likely cause: new client talking to old server. Continuing without it.&lt;/STRONG&gt;&lt;/DIV&gt;
&lt;DIV&gt;&lt;STRONG&gt;org.apache.thrift.transport.TTransportException&lt;/STRONG&gt;&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.protocol.TBinaryProtocol.readStringBody(TBinaryProtocol.java:380)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:230)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:69)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_set_ugi(ThriftHiveMetastore.java:3741)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.set_ugi(ThriftHiveMetastore.java:3727)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:437)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.&amp;lt;init&amp;gt;(HiveMetaStoreClient.java:235)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.&amp;lt;init&amp;gt;(SessionHiveMetaStoreClient.java:74)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.lang.reflect.Constructor.newInstance(Constructor.java:526)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1490)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.&amp;lt;init&amp;gt;(RetryingMetaStoreClient.java:67)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:82)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2935)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2954)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3179)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:210)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:197)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.&amp;lt;init&amp;gt;(Hive.java:307)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:268)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:243)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.lang.reflect.Method.invoke(Method.java:606)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider$$anonfun$obtainCredentials$1.apply$mcV$sp(HiveCredentialProvider.scala:91)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider$$anonfun$obtainCredentials$1.apply(HiveCredentialProvider.scala:90)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider$$anonfun$obtainCredentials$1.apply(HiveCredentialProvider.scala:90)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider$$anon$1.run(HiveCredentialProvider.scala:123)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.security.AccessController.doPrivileged(Native Method)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at javax.security.auth.Subject.doAs(Subject.java:415)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1907)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider.doAsRealUser(HiveCredentialProvider.scala:122)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider.obtainCredentials(HiveCredentialProvider.scala:90)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager$$anonfun$obtainCredentials$2.apply(ConfigurableCredentialManager.scala:80)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager$$anonfun$obtainCredentials$2.apply(ConfigurableCredentialManager.scala:78)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.Iterator$class.foreach(Iterator.scala:893)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:206)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager.obtainCredentials(ConfigurableCredentialManager.scala:78)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:398)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:874)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:171)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:171)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:509)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.api.java.JavaSparkContext.&amp;lt;init&amp;gt;(JavaSparkContext.scala:58)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.lang.reflect.Constructor.newInstance(Constructor.java:526)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:247)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.Gateway.invoke(Gateway.java:236)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.commands.ConstructorCommand.invokeConstructor(ConstructorCommand.java:80)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.commands.ConstructorCommand.execute(ConstructorCommand.java:69)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.GatewayConnection.run(GatewayConnection.java:214)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.lang.Thread.run(Thread.java:745)&lt;/DIV&gt;
&lt;DIV&gt;18/10/22 10:52:23 INFO hive.metastore: Connected to metastore.&lt;/DIV&gt;
&lt;DIV&gt;18/10/22 10:52:23 WARN metastore.RetryingMetaStoreClient: MetaStoreClient lost connection. Attempting to reconnect.&lt;/DIV&gt;
&lt;DIV&gt;org.apache.thrift.transport.TTransportException&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:429)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:318)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:219)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:69)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_all_functions(ThriftHiveMetastore.java:3339)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_all_functions(ThriftHiveMetastore.java:3327)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getAllFunctions(HiveMetaStoreClient.java:2060)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.lang.reflect.Method.invoke(Method.java:606)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:105)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at com.sun.proxy.$Proxy18.getAllFunctions(Unknown Source)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.lang.reflect.Method.invoke(Method.java:606)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:1998)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at com.sun.proxy.$Proxy18.getAllFunctions(Unknown Source)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3179)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:210)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:197)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.&amp;lt;init&amp;gt;(Hive.java:307)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:268)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:243)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.lang.reflect.Method.invoke(Method.java:606)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider$$anonfun$obtainCredentials$1.apply$mcV$sp(HiveCredentialProvider.scala:91)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider$$anonfun$obtainCredentials$1.apply(HiveCredentialProvider.scala:90)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider$$anonfun$obtainCredentials$1.apply(HiveCredentialProvider.scala:90)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider$$anon$1.run(HiveCredentialProvider.scala:123)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.security.AccessController.doPrivileged(Native Method)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at javax.security.auth.Subject.doAs(Subject.java:415)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1907)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider.doAsRealUser(HiveCredentialProvider.scala:122)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.HiveCredentialProvider.obtainCredentials(HiveCredentialProvider.scala:90)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager$$anonfun$obtainCredentials$2.apply(ConfigurableCredentialManager.scala:80)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager$$anonfun$obtainCredentials$2.apply(ConfigurableCredentialManager.scala:78)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.Iterator$class.foreach(Iterator.scala:893)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:206)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager.obtainCredentials(ConfigurableCredentialManager.scala:78)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:398)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:874)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:171)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:171)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:509)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at org.apache.spark.api.java.JavaSparkContext.&amp;lt;init&amp;gt;(JavaSparkContext.scala:58)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.lang.reflect.Constructor.newInstance(Constructor.java:526)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:247)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.Gateway.invoke(Gateway.java:236)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.commands.ConstructorCommand.invokeConstructor(ConstructorCommand.java:80)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.commands.ConstructorCommand.execute(ConstructorCommand.java:69)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at py4j.GatewayConnection.run(GatewayConnection.java:214)&lt;/DIV&gt;
&lt;DIV&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; at java.lang.Thread.run(Thread.java:745)&lt;/DIV&gt;
&lt;DIV&gt;18/10/22 10:52:24 INFO hive.metastore: Closed a connection to metastore, current connections: 0&lt;/DIV&gt;
&lt;DIV&gt;18/10/22 10:52:24 INFO hive.metastore: Trying to connect to metastore with URI thrift://correcturl.country.cloud.company.com:9083&lt;/DIV&gt;
&lt;DIV&gt;18/10/22 10:52:24 INFO hive.metastore: Opened a connection to metastore, current connections: 1&lt;/DIV&gt;
&lt;DIV&gt;18/10/22 10:52:24 WARN hive.metastore: set_ugi() not successful, Likely cause: new client talking to old server. Continuing without it.&lt;/DIV&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Fri, 16 Sep 2022 13:49:25 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Oozie-shell-action-Spark2-submit-pyspark-job-having-hive/m-p/81362#M84503</guid>
      <dc:creator>Amirdha</dc:creator>
      <dc:date>2022-09-16T13:49:25Z</dc:date>
    </item>
    <item>
      <title>Re: Oozie shell action - Spark2-submit pyspark job having hive queries.</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Oozie-shell-action-Spark2-submit-pyspark-job-having-hive/m-p/81366#M84504</link>
      <description>&lt;P&gt;The issue was that Spark couldn't read hive-site.xml, so providing the properties directly in the SparkSession builder fixed it.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;from pyspark.sql import SparkSession

spark_session = (SparkSession
                 .builder
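                 # the settings below mirror the cluster's hive-site.xml
                 # (the ZooKeeper quorum and Kerberos principals are redacted in this post)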
                 .config("hive.metastore.uris", "thrift://server.country.cloud.company.com:9083")
                 .config("hive.metastore.client.socket.timeout", "300")
                 .config("hive.metastore.warehouse.dir", "/user/hive/warehouse")
                 .config("hive.warehouse.subdir.inherit.perms", "true")
                 .config("hive.execution.engine", "mr")
                 .config("hive.metastore.execute.setugi", "true")
                 .config("hive.support.concurrency", "true")
                 .config("hive.zookeeper.quorum", "&amp;lt;&amp;lt;REDACTED&amp;gt;&amp;gt;")
                 .config("hive.zookeeper.client.port", "2181")
                 .config("hive.zookeeper.namespace", "hive_zookeeper_namespace_hive")
                 .config("hive.cluster.delegation.token.store.class", "org.apache.hadoop.hive.thrift.MemoryTokenStore")
                 .config("hive.server2.enable.doAs", "false")
                 .config("hive.metastore.sasl.enabled", "true")
                 .config("hive.server2.authentication", "kerberos")
                 .config("hive.metastore.kerberos.principal", "&amp;lt;&amp;lt;REDACTED&amp;gt;&amp;gt;")
                 .config("hive.server2.authentication.kerberos.principal", "&amp;lt;&amp;lt;REDACTED&amp;gt;&amp;gt;")
                 .config("hive.server2.use.SSL", "true")
                 .config("hive.exec.dynamic.partition", "true")
                 .config("hive.exec.dynamic.partition.mode", "nonstrict")
                 .enableHiveSupport()
                 .appName('app_name')
                 .getOrCreate())&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 22 Oct 2018 21:51:13 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Oozie-shell-action-Spark2-submit-pyspark-job-having-hive/m-p/81366#M84504</guid>
      <dc:creator>Amirdha</dc:creator>
      <dc:date>2018-10-22T21:51:13Z</dc:date>
    </item>
  </channel>
</rss>

