<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Fork Record Processor cannot handle Large files in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/Fork-Record-Processor-cannot-handle-Large-files/m-p/394832#M248813</link>
    <description>&lt;P&gt;Can you break the data into smaller chunks? This way you speed up the process by taking advantage of parallelism and multi-threading.&lt;/P&gt;</description>
    <pubDate>Wed, 09 Oct 2024 15:37:45 GMT</pubDate>
    <dc:creator>SAMSAL</dc:creator>
    <dc:date>2024-10-09T15:37:45Z</dc:date>
    <item>
      <title>Fork Record Processor cannot handle Large files</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Fork-Record-Processor-cannot-handle-Large-files/m-p/394810#M248808</link>
      <description>&lt;P&gt;I'm using the ForkRecord processor in Apache NiFi for a use case where I need to retain the parent of the record, and it is working smoothly for file sizes up to 100 MB. When I pass larger files, say for example files of 250 MB or above, it takes a longer time, resulting in huge backlogs of files.&amp;nbsp;&lt;/P&gt;&lt;P&gt;Is there any alternate flow I can build to avoid such backlogs?&lt;/P&gt;&lt;P&gt;Can I completely avoid the ForkRecord processor and choose some other flow logic to solve this?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/38301"&gt;@mburgess&lt;/a&gt;&amp;nbsp;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/35454"&gt;@MattWho&lt;/a&gt;&amp;nbsp; &amp;nbsp;Please help here&lt;/P&gt;</description>
      <pubDate>Wed, 09 Oct 2024 06:25:16 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Fork-Record-Processor-cannot-handle-Large-files/m-p/394810#M248808</guid>
      <dc:creator>Jagapriyan</dc:creator>
      <dc:date>2024-10-09T06:25:16Z</dc:date>
    </item>
    <item>
      <title>Re: Fork Record Processor cannot handle Large files</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Fork-Record-Processor-cannot-handle-Large-files/m-p/394832#M248813</link>
      <description>&lt;P&gt;Can you break the data into smaller chunks? This way you speed up the process by taking advantage of parallelism and multi-threading.&lt;/P&gt;</description>
      <pubDate>Wed, 09 Oct 2024 15:37:45 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Fork-Record-Processor-cannot-handle-Large-files/m-p/394832#M248813</guid>
      <dc:creator>SAMSAL</dc:creator>
      <dc:date>2024-10-09T15:37:45Z</dc:date>
    </item>
  </channel>
</rss>

