<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Sqoop import with warehouse-dir argument in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Sqoop-import-with-warehouse-dir-argument/m-p/67743#M78729</link>
    <description>&lt;P&gt;using --delete-target-dir argument worked for me&lt;/P&gt;</description>
    <pubDate>Wed, 30 May 2018 06:56:21 GMT</pubDate>
    <dc:creator>sim6</dc:creator>
    <dc:date>2018-05-30T06:56:21Z</dc:date>
    <item>
      <title>Sqoop import with warehouse-dir argument</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Sqoop-import-with-warehouse-dir-argument/m-p/67663#M78727</link>
      <description>&lt;P&gt;&lt;SPAN&gt;I am using --warehouse-dir argument for loading data in HDFS before sqoop puts it into hive. I am running all my sqoop jobs through oozie.&lt;/SPAN&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;SPAN&gt;Now, if the task fails for some reason, it is reattempted and the problem here is that the warehouse dir created by previous task is still there and the task re-attempt fails with error : output directory already exists.&lt;/SPAN&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;SPAN&gt;I understand I could use direct argument to skip intermediate loading in HDFS step but I need to use drop import hive delims argument as well and that's not supported with Hive. Advice, please? It's important.&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Fri, 16 Sep 2022 13:16:31 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Sqoop-import-with-warehouse-dir-argument/m-p/67663#M78727</guid>
      <dc:creator>sim6</dc:creator>
      <dc:date>2022-09-16T13:16:31Z</dc:date>
    </item>
    <item>
      <title>Re: Sqoop import with warehouse-dir argument</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Sqoop-import-with-warehouse-dir-argument/m-p/67694#M78728</link>
      <description>&lt;P&gt;This is a normal behavior.&lt;/P&gt;&lt;P&gt;You should either create dynamic folder name (e.g. output_dir_timestamp) but you may end up having a lot of directories, or add an HDFS action to delete the HDFS directory, just before the sqoop action. I recommend the last approach.&lt;/P&gt;</description>
      <pubDate>Mon, 28 May 2018 08:34:32 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Sqoop-import-with-warehouse-dir-argument/m-p/67694#M78728</guid>
      <dc:creator>GeKas</dc:creator>
      <dc:date>2018-05-28T08:34:32Z</dc:date>
    </item>
    <item>
      <title>Re: Sqoop import with warehouse-dir argument</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Sqoop-import-with-warehouse-dir-argument/m-p/67743#M78729</link>
      <description>&lt;P&gt;using --delete-target-dir argument worked for me&lt;/P&gt;</description>
      <pubDate>Wed, 30 May 2018 06:56:21 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Sqoop-import-with-warehouse-dir-argument/m-p/67743#M78729</guid>
      <dc:creator>sim6</dc:creator>
      <dc:date>2018-05-30T06:56:21Z</dc:date>
    </item>
  </channel>
</rss>

