<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
  <channel>
    <title>question Re: ExecutorLostFailure Reason: Container killed by YARN for exceeding memory limits in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/ExecutorLostFailure-Reason-Container-killed-by-YARN-for/m-p/41764#M21243</link>
    <description>&lt;P&gt;This means the JVM took more memory than YARN thought it should. Usually this means you need to allocate more overhead, so that more memory is requested from YARN for the same size of JVM heap. See the&amp;nbsp;&lt;SPAN&gt;spark.yarn.executor.memoryOverhead option, which defaults to 10% of the specified executor memory. Increase it.&lt;/SPAN&gt;&lt;/P&gt;</description>
    <pubDate>Tue, 07 Jun 2016 19:31:08 GMT</pubDate>
    <dc:creator>srowen</dc:creator>
    <dc:date>2016-06-07T19:31:08Z</dc:date>
  </channel>
</rss>

