<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Hive Table Inserts are failing after certain number of inserts in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/Hive-Table-Inserts-are-failing-after-certain-number-of/m-p/235347#M197161</link>
    <description>&lt;P&gt;There is a workaround I found for this problem: if I have 500 rows, I have to execute them in 5 beeline/jdbc/hive cli sessions, so the first session inserts 120 rows, then another new session/new jdbc connection takes over, and so on. I am not sure why this is happening in the first place, where the existing hive session is not able to see the hdfs file/directory names generated by inserts in that session and generates exactly the same file as the previous insert.&lt;/P&gt;</description>
    <pubDate>Tue, 16 Jul 2019 15:40:14 GMT</pubDate>
    <dc:creator>sujoy_neogi</dc:creator>
    <dc:date>2019-07-16T15:40:14Z</dc:date>
  </channel>
</rss>

