<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: ERROR SparkContext: Error initializing SparkContext. org.apache.spark.SparkException: Yarn application has already ended! It might have been killed or unable to launch application master. in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349446#M235661</link>
    <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/78612"&gt;@RangaReddy&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thank you for the reply.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;We have a four-node HDP cluster, wherein the Java version was recently upgraded on only three nodes.&amp;nbsp;&lt;/P&gt;&lt;P&gt;On three nodes, we have upgraded the Java path manually in the files&amp;nbsp; e.g.: spark-env.sh, yarn-env.sh, hadoop-env.sh .&lt;/P&gt;&lt;P&gt;We haven't upgraded the Java path on the one node where the Java version is not upgraded.&lt;/P&gt;&lt;P&gt;We executed the above-mentioned command. Please find the attached screenshot of the application logs.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Kindly help us.&amp;nbsp;&lt;/P&gt;</description>
    <pubDate>Wed, 03 Aug 2022 12:25:06 GMT</pubDate>
    <dc:creator>ssuja</dc:creator>
    <dc:date>2022-08-03T12:25:06Z</dc:date>
    <item>
      <title>ERROR SparkContext: Error initializing SparkContext. org.apache.spark.SparkException: Yarn application has already ended! It might have been killed or unable to launch application master.</title>
      <link>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349420#M235653</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;We have got this issue post java upgradation. Kindly let us know whether this error occurred because of java upgrade or else please suggest a solution.&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;spark-shell&lt;BR /&gt;Setting default log level to "WARN".&lt;BR /&gt;To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).&lt;BR /&gt;22/08/03 15:34:49 ERROR SparkContext: Error initializing SparkContext.&lt;BR /&gt;org.apache.spark.SparkException: Yarn application has already ended! It might have been killed or unable to launch application master.&lt;BR /&gt;at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.waitForApplication(YarnClientSchedulerBackend.scala:89)&lt;BR /&gt;at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:63)&lt;BR /&gt;at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:164)&lt;BR /&gt;at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:500)&lt;BR /&gt;at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2498)&lt;BR /&gt;at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:934)&lt;BR /&gt;at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:925)&lt;BR /&gt;at scala.Option.getOrElse(Option.scala:121)&lt;BR /&gt;at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:925)&lt;BR /&gt;at org.apache.spark.repl.Main$.createSparkSession(Main.scala:103)&lt;BR /&gt;at $line3.$read$$iw$$iw.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:15)&lt;BR /&gt;at $line3.$read$$iw.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:43)&lt;BR /&gt;at $line3.$read.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:45)&lt;BR /&gt;at $line3.$read$.&amp;lt;init&amp;gt;(&amp;lt;console&amp;gt;:49)&lt;BR /&gt;at 
$line3.$read$.&amp;lt;clinit&amp;gt;(&amp;lt;console&amp;gt;)&lt;BR /&gt;at $line3.$eval$.$print$lzycompute(&amp;lt;console&amp;gt;:7)&lt;BR /&gt;at $line3.$eval$.$print(&amp;lt;console&amp;gt;:6)&lt;BR /&gt;at $line3.$eval.$print(&amp;lt;console&amp;gt;)&lt;BR /&gt;at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;BR /&gt;at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)&lt;BR /&gt;at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;BR /&gt;at java.lang.reflect.Method.invoke(Method.java:498)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:793)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1054)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:645)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:644)&lt;BR /&gt;at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)&lt;BR /&gt;at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:644)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:576)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:572)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.scala:231)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.scala:231)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:231)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$apply$mcV$sp$1.apply(SparkILoop.scala:88)&lt;BR /&gt;at 
org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$apply$mcV$sp$1.apply(SparkILoop.scala:88)&lt;BR /&gt;at scala.collection.immutable.List.foreach(List.scala:392)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV$sp(SparkILoop.scala:88)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:88)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:88)&lt;BR /&gt;at scala.tools.nsc.interpreter.ILoop.savingReplayStack(ILoop.scala:91)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:87)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply$mcV$sp(SparkILoop.scala:170)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:158)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:158)&lt;BR /&gt;at scala.tools.nsc.interpreter.ILoop$$anonfun$mumly$1.apply(ILoop.scala:189)&lt;BR /&gt;at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)&lt;BR /&gt;at scala.tools.nsc.interpreter.ILoop.mumly(ILoop.scala:186)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$process$1.org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1(SparkILoop.scala:158)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$1$1.apply(SparkILoop.scala:226)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$1$1.apply(SparkILoop.scala:206)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$process$1.withSuppressedSettings$1(SparkILoop.scala:194)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$process$1.startup$1(SparkILoop.scala:206)&lt;BR /&gt;at 
org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:241)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:141)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:141)&lt;BR /&gt;at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)&lt;BR /&gt;at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:141)&lt;BR /&gt;at org.apache.spark.repl.Main$.doMain(Main.scala:76)&lt;BR /&gt;at org.apache.spark.repl.Main$.main(Main.scala:56)&lt;BR /&gt;at org.apache.spark.repl.Main.main(Main.scala)&lt;BR /&gt;at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;BR /&gt;at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)&lt;BR /&gt;at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;BR /&gt;at java.lang.reflect.Method.invoke(Method.java:498)&lt;BR /&gt;at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)&lt;BR /&gt;at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:900)&lt;BR /&gt;at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:192)&lt;BR /&gt;at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:217)&lt;BR /&gt;at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)&lt;BR /&gt;at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)&lt;BR /&gt;22/08/03 15:34:49 WARN YarnSchedulerBackend$YarnSchedulerEndpoint: Attempted to request executors before the AM has registered!&lt;BR /&gt;22/08/03 15:34:49 WARN MetricsSystem: Stopping a MetricsSystem that is not running&lt;BR /&gt;org.apache.spark.SparkException: Yarn application has already ended! 
It might have been killed or unable to launch application master.&lt;BR /&gt;at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.waitForApplication(YarnClientSchedulerBackend.scala:89)&lt;BR /&gt;at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:63)&lt;BR /&gt;at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:164)&lt;BR /&gt;at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:500)&lt;BR /&gt;at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2498)&lt;BR /&gt;at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:934)&lt;BR /&gt;at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:925)&lt;BR /&gt;at scala.Option.getOrElse(Option.scala:121)&lt;BR /&gt;at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:925)&lt;BR /&gt;at org.apache.spark.repl.Main$.createSparkSession(Main.scala:103)&lt;BR /&gt;... 62 elided&lt;BR /&gt;&amp;lt;console&amp;gt;:14: error: not found: value spark&lt;BR /&gt;import spark.implicits._&lt;BR /&gt;^&lt;BR /&gt;&amp;lt;console&amp;gt;:14: error: not found: value spark&lt;BR /&gt;import spark.sql&lt;BR /&gt;^&lt;BR /&gt;Welcome to&lt;BR /&gt;____ __&lt;BR /&gt;/ __/__ ___ _____/ /__&lt;BR /&gt;_\ \/ _ \/ _ `/ __/ '_/&lt;BR /&gt;/___/ .__/\_,_/_/ /_/\_\ version 2.3.2.3.1.5.6189-1&lt;BR /&gt;/_/&lt;/P&gt;&lt;P&gt;Using Scala version 2.11.12 (OpenJDK 64-Bit Server VM, Java 1.8.0_342)&lt;BR /&gt;Type in expressions to have them evaluated.&lt;BR /&gt;Type :help for more information.&lt;/P&gt;&lt;P&gt;scala&amp;gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Please help out.&lt;/P&gt;</description>
      <pubDate>Wed, 03 Aug 2022 10:24:19 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349420#M235653</guid>
      <dc:creator>ssuja</dc:creator>
      <dc:date>2022-08-03T10:24:19Z</dc:date>
    </item>
    <item>
      <title>Re: ERROR SparkContext: Error initializing SparkContext. org.apache.spark.SparkException: Yarn application has already ended! It might have been killed or unable to launch application master.</title>
      <link>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349423#M235654</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/98569"&gt;@ssuja&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Please try to run the SparkPi example and download the application logs. Once downloaded, check whether there are any exceptions in the logs.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;spark-submit &lt;SPAN class="se"&gt;\&lt;/SPAN&gt;
  &lt;SPAN class="nt"&gt;--class&lt;/SPAN&gt; org.apache.spark.examples.SparkPi &lt;SPAN class="se"&gt;\&lt;/SPAN&gt;
  &lt;SPAN class="nt"&gt;--master&lt;/SPAN&gt; yarn &lt;SPAN class="se"&gt;\&lt;/SPAN&gt;
  &lt;SPAN class="nt"&gt;--deploy-mode&lt;/SPAN&gt; cluster &lt;SPAN class="se"&gt;\&lt;/SPAN&gt;
  &lt;SPAN class="nt"&gt;--num-executors&lt;/SPAN&gt; 1 &lt;SPAN class="se"&gt;\&lt;/SPAN&gt;
  &lt;SPAN class="nt"&gt;--driver-memory&lt;/SPAN&gt; 512m &lt;SPAN class="se"&gt;\&lt;/SPAN&gt;
  &lt;SPAN class="nt"&gt;--executor-memory&lt;/SPAN&gt; 512m &lt;SPAN class="se"&gt;\&lt;/SPAN&gt;
  &lt;SPAN class="nt"&gt;--executor-cores&lt;/SPAN&gt; 1 &lt;SPAN class="se"&gt;\&lt;/SPAN&gt;
  /usr/hdp/current/spark2-client/examples/jars/spark-examples_&lt;SPAN class="k"&gt;*&lt;/SPAN&gt;.jar 10&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 03 Aug 2022 10:56:04 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349423#M235654</guid>
      <dc:creator>RangaReddy</dc:creator>
      <dc:date>2022-08-03T10:56:04Z</dc:date>
    </item>
    <item>
      <title>Re: ERROR SparkContext: Error initializing SparkContext. org.apache.spark.SparkException: Yarn application has already ended! It might have been killed or unable to launch application master.</title>
      <link>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349446#M235661</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/78612"&gt;@RangaReddy&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thank you for the reply.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;We have a four-node HDP cluster, wherein the Java version was recently upgraded on only three nodes.&amp;nbsp;&lt;/P&gt;&lt;P&gt;On three nodes, we have upgraded the Java path manually in the files&amp;nbsp; e.g.: spark-env.sh, yarn-env.sh, hadoop-env.sh .&lt;/P&gt;&lt;P&gt;We haven't upgraded the Java path on the one node where the Java version is not upgraded.&lt;/P&gt;&lt;P&gt;We executed the above-mentioned command. Please find the attached screenshot of the application logs.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Kindly help us.&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 03 Aug 2022 12:25:06 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349446#M235661</guid>
      <dc:creator>ssuja</dc:creator>
      <dc:date>2022-08-03T12:25:06Z</dc:date>
    </item>
    <item>
      <title>Re: ERROR SparkContext: Error initializing SparkContext. org.apache.spark.SparkException: Yarn application has already ended! It might have been killed or unable to launch application master.</title>
      <link>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349448#M235662</link>
      <description>&lt;P&gt;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/78612"&gt;@RangaReddy&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;span class="lia-inline-image-display-wrapper lia-image-align-inline" image-alt="spark-shell_application_logs.PNG" style="width: 400px;"&gt;&lt;img src="https://community.cloudera.com/t5/image/serverpage/image-id/35102i8D292A8874ABCD3A/image-size/medium?v=v2&amp;amp;px=400" role="button" title="spark-shell_application_logs.PNG" alt="spark-shell_application_logs.PNG" /&gt;&lt;/span&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt; &lt;/P&gt;</description>
      <pubDate>Wed, 03 Aug 2022 12:26:50 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349448#M235662</guid>
      <dc:creator>ssuja</dc:creator>
      <dc:date>2022-08-03T12:26:50Z</dc:date>
    </item>
    <item>
      <title>Re: ERROR SparkContext: Error initializing SparkContext. org.apache.spark.SparkException: Yarn application has already ended! It might have been killed or unable to launch application master.</title>
      <link>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349513#M235683</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/98569"&gt;@ssuja&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;In the above screenshot, we can clearly see that the Java path location cannot be found. Please verify and update the Java path on the nodes properly.&lt;/P&gt;</description>
      <pubDate>Thu, 04 Aug 2022 05:53:00 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349513#M235683</guid>
      <dc:creator>RangaReddy</dc:creator>
      <dc:date>2022-08-04T05:53:00Z</dc:date>
    </item>
    <item>
      <title>Re: ERROR SparkContext: Error initializing SparkContext. org.apache.spark.SparkException: Yarn application has already ended! It might have been killed or unable to launch application master.</title>
      <link>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349700#M235743</link>
      <description>&lt;P&gt;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/98569"&gt;@ssuja&lt;/a&gt;,&amp;nbsp;Has the reply helped resolve your issue? If so, please mark the appropriate reply as the solution, as it will make it easier for others to find the answer in the future.&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 08 Aug 2022 09:39:45 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/ERROR-SparkContext-Error-initializing-SparkContext-org/m-p/349700#M235743</guid>
      <dc:creator>VidyaSargur</dc:creator>
      <dc:date>2022-08-08T09:39:45Z</dc:date>
    </item>
  </channel>
</rss>

