<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: getting following exception while running spark job in client mode in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/getting-following-exception-while-running-spark-job-in/m-p/145108#M107680</link>
    <description>&lt;P&gt;It seems your Spark driver is running with a very small heap size; please try increasing the driver memory (spark.driver.memory) and see if it helps.&lt;/P&gt;</description>
    <pubDate>Fri, 17 Jun 2016 18:08:21 GMT</pubDate>
    <dc:creator>rajkumar_singh</dc:creator>
    <dc:date>2016-06-17T18:08:21Z</dc:date>
    <item>
      <title>getting following exception while running spark job in client mode</title>
      <link>https://community.cloudera.com/t5/Support-Questions/getting-following-exception-while-running-spark-job-in/m-p/145107#M107679</link>
      <description>&lt;PRE&gt;16/06/15 06:52:45 ERROR SparkContext: Error initializing SparkContext.
java.lang.IllegalArgumentException: System memory 257949696 must be at least 4.718592E8. Please use a larger heap size.
at org.apache.spark.memory.UnifiedMemoryManager$.getMaxMemory(UnifiedMemoryManager.scala:193)
at org.apache.spark.memory.UnifiedMemoryManager$.apply(UnifiedMemoryManager.scala:175)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:354)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:193)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:288)
at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:457)
at com.example.project.SimpleApp$.main(SimpleApp.scala:16)
at com.example.project.SimpleApp.main(SimpleApp.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$runMain(SparkSubmit.scala:731)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
16/06/15 06:52:45 INFO SparkContext: Successfully stopped SparkContext
Exception in thread "main" java.lang.IllegalArgumentException: System memory 257949696 must be at least 4.718592E8. Please use a larger heap size.
at org.apache.spark.memory.UnifiedMemoryManager$.getMaxMemory(UnifiedMemoryManager.scala:193)
at org.apache.spark.memory.UnifiedMemoryManager$.apply(UnifiedMemoryManager.scala:175)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:354)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:193)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:288)
at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:457)
at com.example.project.SimpleApp$.main(SimpleApp.scala:16)
at com.example.project.SimpleApp.main(SimpleApp.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$runMain(SparkSubmit.scala:731)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)&lt;/PRE&gt;</description>
      <pubDate>Fri, 17 Jun 2016 18:06:05 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/getting-following-exception-while-running-spark-job-in/m-p/145107#M107679</guid>
      <dc:creator>amresh_kumar</dc:creator>
      <dc:date>2016-06-17T18:06:05Z</dc:date>
    </item>
    <item>
      <title>Re: getting following exception while running spark job in client mode</title>
      <link>https://community.cloudera.com/t5/Support-Questions/getting-following-exception-while-running-spark-job-in/m-p/145108#M107680</link>
      <description>&lt;P&gt;It seems your Spark driver is running with a very small heap size; please try increasing the driver memory (spark.driver.memory) and see if it helps.&lt;/P&gt;</description>
      <pubDate>Fri, 17 Jun 2016 18:08:21 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/getting-following-exception-while-running-spark-job-in/m-p/145108#M107680</guid>
      <dc:creator>rajkumar_singh</dc:creator>
      <dc:date>2016-06-17T18:08:21Z</dc:date>
    </item>
    <item>
      <title>Re: getting following exception while running spark job in client mode</title>
      <link>https://community.cloudera.com/t5/Support-Questions/getting-following-exception-while-running-spark-job-in/m-p/145109#M107681</link>
      <description>&lt;A rel="user" href="https://community.cloudera.com/users/11042/amreshkumar.html" nodeid="11042"&gt;@a kumar&lt;/A&gt;&lt;P&gt;You need to increase the driver memory; try at least 1G. Use the parameter below while submitting the job.&lt;/P&gt;&lt;PRE&gt; --driver-memory 1g&lt;/PRE&gt;</description>
      <pubDate>Fri, 17 Jun 2016 18:10:28 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/getting-following-exception-while-running-spark-job-in/m-p/145109#M107681</guid>
      <dc:creator>jyadav</dc:creator>
      <dc:date>2016-06-17T18:10:28Z</dc:date>
    </item>
    <item>
      <title>Re: getting following exception while running spark job in client mode</title>
      <link>https://community.cloudera.com/t5/Support-Questions/getting-following-exception-while-running-spark-job-in/m-p/145110#M107682</link>
      <description>&lt;P&gt;Increase the heap space to be at least 471 MB; try setting the VM options to -Xmx512m.&lt;/P&gt;</description>
      <pubDate>Fri, 17 Jun 2016 21:45:53 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/getting-following-exception-while-running-spark-job-in/m-p/145110#M107682</guid>
      <dc:creator>bmathew</dc:creator>
      <dc:date>2016-06-17T21:45:53Z</dc:date>
    </item>
  </channel>
</rss>

