<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Query Hive Using Python Getting below error in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357446#M237596</link>
    <description>&lt;P&gt;thank you&amp;nbsp;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/100776"&gt;@pankshiv1809&lt;/a&gt;&amp;nbsp;&lt;/P&gt;</description>
    <pubDate>Fri, 11 Nov 2022 11:14:31 GMT</pubDate>
    <dc:creator>asish</dc:creator>
    <dc:date>2022-11-11T11:14:31Z</dc:date>
    <item>
      <title>Query Hive Using Python Getting below error</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357347#M237559</link>
      <description>&lt;P&gt;How to resolve given ERROR -&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;ERROR - Unknown Exception:07&amp;lt;class 'py4j.protocol.Py4JJavaError'&amp;gt;An error occurred while calling o1140.insertInto.&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;: org.apache.spark.SparkException: Job aborted.&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 10 Nov 2022 09:10:32 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357347#M237559</guid>
      <dc:creator>pankshiv1809</dc:creator>
      <dc:date>2022-11-10T09:10:32Z</dc:date>
    </item>
    <item>
      <title>Re: Query Hive Using Python Getting below error</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357357#M237563</link>
      <description>&lt;P&gt;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/100776"&gt;@pankshiv1809&lt;/a&gt;&amp;nbsp; I see you are getting Spark related error . Are you using Spark or Hive query ?&lt;/P&gt;</description>
      <pubDate>Thu, 10 Nov 2022 13:20:16 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357357#M237563</guid>
      <dc:creator>asish</dc:creator>
      <dc:date>2022-11-10T13:20:16Z</dc:date>
    </item>
    <item>
      <title>Re: Query Hive Using Python Getting below error</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357358#M237564</link>
      <description>&lt;P&gt;&lt;SPAN&gt;Hi Asish thanks for re-veiw here We are using python script using spark parameter, Also i am sharing complete error log for to do more analysis -&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;Unknown Exception:07&amp;lt;class 'py4j.protocol.Py4JJavaError'&amp;gt;An error occurred while calling o1140.insertInto.&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;: org.apache.spark.SparkException: Job aborted.&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:224)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:154)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:104)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:102)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:122)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at 
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:664)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:664)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)&lt;/SPAN&gt;&lt;BR 
/&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:664)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.DataFrameWriter.insertInto(DataFrameWriter.scala:322)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.spark.sql.DataFrameWriter.insertInto(DataFrameWriter.scala:308)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at java.lang.reflect.Method.invoke(Method.java:498)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at py4j.Gateway.invoke(Gateway.java:282)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at 
py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at py4j.commands.CallCommand.execute(CallCommand.java:79)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at py4j.GatewayConnection.run(GatewayConnection.java:238)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at java.lang.Thread.run(Thread.java:748)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 21 in stage 107.0 failed 4 times, most recent failure: Lost task 21.3 in stage 107.0 (TID 17545, NDC3HDPPRODDN13.vodafoneidea.com, executor 75): java.io.FileNotFoundException: File does not exist: /warehouse/tablespace/external/hive/dim_cd_db.db/dim_subs_language_ivr/circle_id=14/part-00000-f53b4b78-8256-4040-9195-7af54c3365c8.c000.snappy.orc&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:86)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:76)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getBlockLocations(FSDirStatAndListingOp.java:158)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1931)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at 
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:738)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:426)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 10 Nov 2022 13:43:16 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357358#M237564</guid>
      <dc:creator>pankshiv1809</dc:creator>
      <dc:date>2022-11-10T13:43:16Z</dc:date>
    </item>
    <item>
      <title>Re: Query Hive Using Python Getting below error</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357360#M237566</link>
      <description>&lt;P&gt;This seems to be more of Spark. But I see below error:&lt;/P&gt;&lt;PRE&gt;&lt;SPAN&gt;&amp;nbsp;java.io.FileNotFoundException: File does not exist: /warehouse/tablespace/external/hive/dim_cd_db.db/dim_subs_language_ivr/circle_id=14/part-00000-f53b4b78-8256-4040-9195-7af54c3365c8.c000.snappy.orc&lt;/SPAN&gt;&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;Can you please check, if the file is present:&lt;/P&gt;&lt;PRE&gt;hdfs dfs -ls &lt;SPAN&gt;/warehouse/tablespace/external/hive/dim_cd_db.db/dim_subs_language_ivr/circle_id=14/part-00000-f53b4b78-8256-4040-9195-7af54c3365c8.c000.snappy.orc&lt;/SPAN&gt;&lt;/PRE&gt;&lt;P&gt;&lt;SPAN&gt;Please also perform and let us know&lt;/SPAN&gt;&lt;/P&gt;&lt;PRE&gt;&lt;SPAN&gt;msck repair table&amp;nbsp;dim_subs_language_ivr&lt;/SPAN&gt;&lt;/PRE&gt;&lt;P&gt;&lt;SPAN&gt;Try to run the same in hive beeline and check if the issue persists.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Thu, 10 Nov 2022 13:50:52 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357360#M237566</guid>
      <dc:creator>asish</dc:creator>
      <dc:date>2022-11-10T13:50:52Z</dc:date>
    </item>
    <item>
      <title>Re: Query Hive Using Python Getting below error</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357361#M237567</link>
      <description>&lt;P&gt;Sure Asish. Let me check and will update you accordingly with log and respective o/p.&lt;/P&gt;</description>
      <pubDate>Thu, 10 Nov 2022 13:54:45 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357361#M237567</guid>
      <dc:creator>pankshiv1809</dc:creator>
      <dc:date>2022-11-10T13:54:45Z</dc:date>
    </item>
    <item>
      <title>Re: Query Hive Using Python Getting below error</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357429#M237586</link>
      <description>&lt;P&gt;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/100776"&gt;@pankshiv1809&lt;/a&gt;&amp;nbsp; were you able to fix? Please click "Accept as solution" if this has worked&lt;/P&gt;</description>
      <pubDate>Fri, 11 Nov 2022 08:40:37 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357429#M237586</guid>
      <dc:creator>asish</dc:creator>
      <dc:date>2022-11-11T08:40:37Z</dc:date>
    </item>
    <item>
      <title>Re: Query Hive Using Python Getting below error</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357431#M237588</link>
      <description>&lt;P&gt;Hi Asish,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I have implemented the steps.. and my team will work on post dependent flow...will update you if in case any issue comes on the same.&lt;/P&gt;</description>
      <pubDate>Fri, 11 Nov 2022 08:42:51 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357431#M237588</guid>
      <dc:creator>pankshiv1809</dc:creator>
      <dc:date>2022-11-11T08:42:51Z</dc:date>
    </item>
    <item>
      <title>Re: Query Hive Using Python Getting below error</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357446#M237596</link>
      <description>&lt;P&gt;thank you&amp;nbsp;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/100776"&gt;@pankshiv1809&lt;/a&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Fri, 11 Nov 2022 11:14:31 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Query-Hive-Using-Python-Getting-below-error/m-p/357446#M237596</guid>
      <dc:creator>asish</dc:creator>
      <dc:date>2022-11-11T11:14:31Z</dc:date>
    </item>
  </channel>
</rss>

