<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Recommended value for vm.overcommit_memory for a Spark cluster? - Archives of Support Questions</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/97046#M10561</link>
    <description>&lt;P&gt;What is the recommended configuration for the &lt;I&gt;vm.overcommit_memory&lt;/I&gt; [0|1|2] and &lt;I&gt;vm.overcommit_ratio&lt;/I&gt; settings in sysctl?&lt;/P&gt;&lt;P&gt;I am asking specifically about a Spark cluster.&lt;/P&gt;&lt;P&gt;I found the following link, which suggests that vm.overcommit_memory should be set to 1 for MapReduce streaming use cases:&lt;/P&gt;&lt;P&gt;&lt;A href="https://www.safaribooksonline.com/library/view/hadoop-operations/9781449327279/ch04.html" target="_blank"&gt;https://www.safaribooksonline.com/library/view/hadoop-operations/9781449327279/ch04.html&lt;/A&gt;&lt;/P&gt;&lt;P&gt;Do we have any best practices around this?&lt;/P&gt;</description>
    <pubDate>Fri, 13 Nov 2015 13:51:02 GMT</pubDate>
    <dc:creator>ldaluz</dc:creator>
    <dc:date>2015-11-13T13:51:02Z</dc:date>
    <item>
      <title>Recommended value for vm.overcommit_memory for a Spark cluster?</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/97046#M10561</link>
      <description>&lt;P&gt;What is the recommended configuration for the &lt;I&gt;vm.overcommit_memory&lt;/I&gt; [0|1|2] and &lt;I&gt;vm.overcommit_ratio&lt;/I&gt; settings in sysctl?&lt;/P&gt;&lt;P&gt;I am asking specifically about a Spark cluster.&lt;/P&gt;&lt;P&gt;I found the following link, which suggests that vm.overcommit_memory should be set to 1 for MapReduce streaming use cases:&lt;/P&gt;&lt;P&gt;&lt;A href="https://www.safaribooksonline.com/library/view/hadoop-operations/9781449327279/ch04.html" target="_blank"&gt;https://www.safaribooksonline.com/library/view/hadoop-operations/9781449327279/ch04.html&lt;/A&gt;&lt;/P&gt;&lt;P&gt;Do we have any best practices around this?&lt;/P&gt;</description>
      <pubDate>Fri, 13 Nov 2015 13:51:02 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/97046#M10561</guid>
      <dc:creator>ldaluz</dc:creator>
      <dc:date>2015-11-13T13:51:02Z</dc:date>
    </item>
    <item>
      <title>Re: Recommended value for vm.overcommit_memory for a Spark cluster?</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/97047#M10562</link>
      <description>&lt;P&gt;&lt;A rel="user" href="https://community.cloudera.com/users/528/rsriharsha.html" nodeid="528"&gt;@Ram Sriharsha&lt;/A&gt; &lt;/P&gt;</description>
      <pubDate>Sun, 15 Nov 2015 11:10:40 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/97047#M10562</guid>
      <dc:creator>nsabharwal</dc:creator>
      <dc:date>2015-11-15T11:10:40Z</dc:date>
    </item>
    <item>
      <title>Re: Recommended value for vm.overcommit_memory for a Spark cluster?</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/97048#M10563</link>
      <description>&lt;P&gt;&lt;A href="https://community.cloudera.com/users/94/ldaluz.html" rel="user" target="_blank"&gt;@Laurence Da Luz&lt;/A&gt;&lt;/P&gt;
&lt;P&gt;This is very useful: &lt;A href="https://www.kernel.org/doc/Documentation/vm/overcommit-accounting" target="_blank" rel="noopener"&gt;https://www.kernel.org/doc/Documentation/vm/overcommit-accounting&lt;/A&gt;&lt;/P&gt;
&lt;P&gt;And you are right: the best practice is to set &lt;I&gt;vm.overcommit_memory&lt;/I&gt; to 1.&lt;/P&gt;
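&lt;P&gt;For example, a minimal sketch of how to check and apply this with sysctl (assuming root access and that settings are persisted in /etc/sysctl.conf; on newer distributions a drop-in file under /etc/sysctl.d/ may be preferred):&lt;/P&gt;
&lt;PRE&gt;# Check the current overcommit settings
sysctl vm.overcommit_memory vm.overcommit_ratio

# Apply the recommended value to the running kernel
sysctl -w vm.overcommit_memory=1

# Persist it across reboots
echo "vm.overcommit_memory = 1" &gt;&gt; /etc/sysctl.conf
sysctl -p&lt;/PRE&gt;</description>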
      <pubDate>Thu, 19 Dec 2019 21:53:06 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/97048#M10563</guid>
      <dc:creator>nsabharwal</dc:creator>
      <dc:date>2019-12-19T21:53:06Z</dc:date>
    </item>
    <item>
      <title>Re: Recommended value for vm.overcommit_memory for a Spark cluster?</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/97049#M10564</link>
      <description>&lt;P&gt;&lt;A rel="user" href="https://community.cloudera.com/users/94/ldaluz.html" nodeid="94"&gt;@Laurence Da Luz&lt;/A&gt; can you accept the answer?&lt;/P&gt;</description>
      <pubDate>Tue, 02 Feb 2016 09:46:56 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/97049#M10564</guid>
      <dc:creator>aervits</dc:creator>
      <dc:date>2016-02-02T09:46:56Z</dc:date>
    </item>
    <item>
      <title>Re: Recommended value for vm.overcommit_memory for a Spark cluster?</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/286044#M10565</link>
      <description>&lt;P&gt;This answer incorrectly summarized the content of the referenced link. The resource suggests setting vm.overcommit_memory=1, not vm.overcommit_ratio.&lt;/P&gt;
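&lt;P&gt;For context, a quick sketch of how the two settings relate, based on my reading of the kernel's overcommit-accounting documentation:&lt;/P&gt;
&lt;PRE&gt;# vm.overcommit_memory modes:
#   0 - heuristic overcommit (the kernel default)
#   1 - always overcommit; allocations are never refused
#   2 - strict accounting; vm.overcommit_ratio applies only in this mode:
#       commit limit = swap + (overcommit_ratio% of physical RAM)
sysctl vm.overcommit_memory vm.overcommit_ratio&lt;/PRE&gt;</description>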
      <pubDate>Thu, 19 Dec 2019 20:59:25 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/286044#M10565</guid>
      <dc:creator>Augustine</dc:creator>
      <dc:date>2019-12-19T20:59:25Z</dc:date>
    </item>
    <item>
      <title>Re: Recommended value for vm.overcommit_memory for a Spark cluster?</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/286050#M10566</link>
      <description>&lt;P&gt;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/35196"&gt;@Augustine&lt;/a&gt;,&lt;/P&gt;
&lt;P&gt;Thanks for the feedback; I have corrected the answer.&lt;/P&gt;
&lt;P&gt;Cheers,&lt;/P&gt;
&lt;P&gt;Li&lt;/P&gt;</description>
      <pubDate>Thu, 19 Dec 2019 21:53:57 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Recommended-value-for-vm-overcommit-memory-for-a-Spark/m-p/286050#M10566</guid>
      <dc:creator>lwang</dc:creator>
      <dc:date>2019-12-19T21:53:57Z</dc:date>
    </item>
  </channel>
</rss>