<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Spark Upgrade - How to get dependent components in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Spark-Upgrade-How-to-get-dependent-components/m-p/116299#M38601</link>
    <description>&lt;P&gt;I have upgraded HDP from 2.3 to 2.4 in TEST environment and I see there is a change in spark between these versions.&lt;/P&gt;&lt;P&gt;How can I get the list of jobs that use spark currently?&lt;/P&gt;</description>
    <pubDate>Tue, 23 Aug 2016 23:00:44 GMT</pubDate>
    <dc:creator>kums</dc:creator>
    <dc:date>2016-08-23T23:00:44Z</dc:date>
    <item>
      <title>Spark Upgrade - How to get dependent components</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Spark-Upgrade-How-to-get-dependent-components/m-p/116299#M38601</link>
      <description>&lt;P&gt;I have upgraded HDP from 2.3 to 2.4 in TEST environment and I see there is a change in spark between these versions.&lt;/P&gt;&lt;P&gt;How can I get the list of jobs that use spark currently?&lt;/P&gt;</description>
      <pubDate>Tue, 23 Aug 2016 23:00:44 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Spark-Upgrade-How-to-get-dependent-components/m-p/116299#M38601</guid>
      <dc:creator>kums</dc:creator>
      <dc:date>2016-08-23T23:00:44Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Upgrade - How to get dependent components</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Spark-Upgrade-How-to-get-dependent-components/m-p/116300#M38602</link>
      <description>&lt;P&gt;@&lt;A href="https://community.hortonworks.com/users/11304/kumarveerappan.html"&gt;Kumar Veerappan&lt;/A&gt;&lt;/P&gt;&lt;P&gt;Your question caption asked about dependent components. Your question description asked about the list of jobs that use spark currently. I assume you actually meant spark applications (AKA jobs) running on the cluster. If you have access to Ambari, you could click on the Yarn link, then on Quick Links, and then on Resource Manager UI. That assumes your Spark runs over Yarn. Otherwise, you could go directly to the Resource Manager UI. You would need to know the IP address of the server where ResourceManager runs, as well as the port. Default is 8088.&lt;/P&gt;</description>
      <pubDate>Sat, 27 Aug 2016 02:23:19 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Spark-Upgrade-How-to-get-dependent-components/m-p/116300#M38602</guid>
      <dc:creator>cstanca</dc:creator>
      <dc:date>2016-08-27T02:23:19Z</dc:date>
    </item>
  </channel>
</rss>

