<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Question: I'm running a Spark2 job but get a java.lang.NoClassDefFoundError: javax/servlet/FilterRegistration (in Support Questions)</title>
    <link>https://community.cloudera.com/t5/Support-Questions/I-m-running-a-Spark2-job-but-get-a-java-lang/m-p/156999#M119412</link>
    <description>&lt;P&gt;I keep getting a NoClassDefFoundError using the Java API for creating a SparkSession&lt;/P&gt;&lt;P&gt;My java code is just simple SparkSession:&lt;/P&gt;&lt;P&gt;spark = SparkSession.builder()
                    .master("localhost") &lt;/P&gt;&lt;P style="margin-left: 20px;"&gt;                    .config("SPARK_MAJOR_VERSION", "2") &lt;/P&gt;&lt;P style="margin-left: 20px;"&gt;                    .config("SPARK_HOME", "/usr/hdp/current/spark2-client")&lt;/P&gt;&lt;P style="margin-left: 20px;"&gt;
                .appName("Spark E2D") &lt;/P&gt;&lt;P style="margin-left: 20px;"&gt;.getOrCreate();&lt;/P&gt;&lt;P&gt;Here is the output:&lt;/P&gt;&lt;P&gt;Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
17/02/01 13:53:44 INFO SparkContext: Running Spark version 2.0.0
17/02/01 13:53:44 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
17/02/01 13:53:44 INFO SecurityManager: Changing view acls to: hdfs
17/02/01 13:53:44 INFO SecurityManager: Changing modify acls to: hdfs
17/02/01 13:53:44 INFO SecurityManager: Changing view acls groups to:
17/02/01 13:53:44 INFO SecurityManager: Changing modify acls groups to:
17/02/01 13:53:44 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users  with view permissions: Set(hdfs); groups with view permissions: Set(); users  with modify permissions: Set(hdfs); groups with modify permissions: Set()
17/02/01 13:53:44 INFO Utils: Successfully started service 'sparkDriver' on port 37105.
17/02/01 13:53:44 INFO SparkEnv: Registering MapOutputTracker
17/02/01 13:53:44 INFO SparkEnv: Registering BlockManagerMaster
17/02/01 13:53:44 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-0b01321d-cafe-4de1-b3f8-f232ce42754b
17/02/01 13:53:44 INFO MemoryStore: MemoryStore started with capacity 863.4 MB
17/02/01 13:53:44 INFO SparkEnv: Registering OutputCommitCoordinator &lt;/P&gt;&lt;P&gt;Exception in thread "main" java.lang.NoClassDefFoundError: javax/servlet/FilterRegistration
        at org.spark_project.jetty.servlet.ServletContextHandler.&amp;lt;init&amp;gt;(ServletContextHandler.java:142)
        at org.spark_project.jetty.servlet.ServletContextHandler.&amp;lt;init&amp;gt;(ServletContextHandler.java:135)
        at org.spark_project.jetty.servlet.ServletContextHandler.&amp;lt;init&amp;gt;(ServletContextHandler.java:129)
        at org.spark_project.jetty.servlet.ServletContextHandler.&amp;lt;init&amp;gt;(ServletContextHandler.java:99)
        at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:128)
        at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:115)
        at org.apache.spark.ui.WebUI.attachPage(WebUI.scala:80)
        at org.apache.spark.ui.WebUI$anonfun$attachTab$1.apply(WebUI.scala:64)
        at org.apache.spark.ui.WebUI$anonfun$attachTab$1.apply(WebUI.scala:64)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
        at org.apache.spark.ui.WebUI.attachTab(WebUI.scala:64)
        at org.apache.spark.ui.SparkUI.initialize(SparkUI.scala:68)
        at org.apache.spark.ui.SparkUI.&amp;lt;init&amp;gt;(SparkUI.scala:81)
        at org.apache.spark.ui.SparkUI$.create(SparkUI.scala:215)
        at org.apache.spark.ui.SparkUI$.createLiveUI(SparkUI.scala:157)
        at org.apache.spark.SparkContext.&amp;lt;init&amp;gt;(SparkContext.scala:443)
        at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2256)
        at org.apache.spark.sql.SparkSession$Builder$anonfun$8.apply(SparkSession.scala:831)
        at org.apache.spark.sql.SparkSession$Builder$anonfun$8.apply(SparkSession.scala:823)
        at scala.Option.getOrElse(Option.scala:121)
        at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:823) &lt;/P&gt;&lt;P&gt;My POM dependencies:&lt;/P&gt;&lt;P&gt;    &amp;lt;dependencies&amp;gt; &lt;/P&gt;&lt;P&gt;        &amp;lt;dependency&amp;gt;
            &amp;lt;groupId&amp;gt;org.apache.hbase&amp;lt;/groupId&amp;gt;
            &amp;lt;artifactId&amp;gt;hbase-client&amp;lt;/artifactId&amp;gt;
            &amp;lt;version&amp;gt;1.2.4&amp;lt;/version&amp;gt; &lt;/P&gt;&lt;P&gt; &amp;lt;/dependency&amp;gt;
        &amp;lt;dependency&amp;gt;
            &amp;lt;groupId&amp;gt;org.json&amp;lt;/groupId&amp;gt;
            &amp;lt;artifactId&amp;gt;json&amp;lt;/artifactId&amp;gt;
            &amp;lt;version&amp;gt;20090211&amp;lt;/version&amp;gt;
        &amp;lt;/dependency&amp;gt;
        &amp;lt;dependency&amp;gt;
            &amp;lt;groupId&amp;gt;junit&amp;lt;/groupId&amp;gt;
            &amp;lt;artifactId&amp;gt;junit&amp;lt;/artifactId&amp;gt;
            &amp;lt;scope&amp;gt;test&amp;lt;/scope&amp;gt;
        &amp;lt;/dependency&amp;gt;   &lt;/P&gt;&lt;P&gt;&amp;lt;dependency&amp;gt;
            &amp;lt;groupId&amp;gt;org.apache.hadoop&amp;lt;/groupId&amp;gt;
            &amp;lt;artifactId&amp;gt;hadoop-common&amp;lt;/artifactId&amp;gt;
            &amp;lt;version&amp;gt;2.7.3&amp;lt;/version&amp;gt;
            &amp;lt;exclusions&amp;gt;
            &amp;lt;exclusion&amp;gt;  
                &amp;lt;groupId&amp;gt;javax.servlet&amp;lt;/groupId&amp;gt;
                &amp;lt;artifactId&amp;gt;servlet-api&amp;lt;/artifactId&amp;gt;
            &amp;lt;/exclusion&amp;gt;
            &amp;lt;/exclusions&amp;gt; 
        &amp;lt;/dependency&amp;gt; &lt;/P&gt;&lt;P&gt;        &amp;lt;dependency&amp;gt;
            &amp;lt;groupId&amp;gt;org.slf4j&amp;lt;/groupId&amp;gt;
            &amp;lt;artifactId&amp;gt;slf4j-api&amp;lt;/artifactId&amp;gt;
        &amp;lt;/dependency&amp;gt;
        &amp;lt;dependency&amp;gt;
            &amp;lt;groupId&amp;gt;joda-time&amp;lt;/groupId&amp;gt;
            &amp;lt;artifactId&amp;gt;joda-time&amp;lt;/artifactId&amp;gt;
        &amp;lt;/dependency&amp;gt; &lt;/P&gt;&lt;P&gt;&amp;lt;dependency&amp;gt;
            &amp;lt;groupId&amp;gt;org.apache.commons&amp;lt;/groupId&amp;gt;
            &amp;lt;artifactId&amp;gt;commons-csv&amp;lt;/artifactId&amp;gt;
        &amp;lt;/dependency&amp;gt;
        &amp;lt;dependency&amp;gt;
            &amp;lt;groupId&amp;gt;commons-io&amp;lt;/groupId&amp;gt;
            &amp;lt;artifactId&amp;gt;commons-io&amp;lt;/artifactId&amp;gt;
        &amp;lt;/dependency&amp;gt; &lt;/P&gt;&lt;P&gt;        &amp;lt;!--        Spark --&amp;gt; &lt;/P&gt;&lt;P&gt;        &amp;lt;dependency&amp;gt;
            &amp;lt;groupId&amp;gt;org.apache.spark&amp;lt;/groupId&amp;gt;
            &amp;lt;artifactId&amp;gt;spark-core_2.11&amp;lt;/artifactId&amp;gt;
            &amp;lt;version&amp;gt;2.0.0&amp;lt;/version&amp;gt;
        &amp;lt;/dependency&amp;gt; &lt;/P&gt;&lt;P&gt;&amp;lt;dependency&amp;gt;
            &amp;lt;groupId&amp;gt;org.apache.spark&amp;lt;/groupId&amp;gt;
            &amp;lt;artifactId&amp;gt;spark-sql_2.11&amp;lt;/artifactId&amp;gt;
            &amp;lt;version&amp;gt;2.0.0&amp;lt;/version&amp;gt;
        &amp;lt;/dependency&amp;gt;&lt;/P&gt;</description>
    <pubDate>Thu, 02 Feb 2017 06:04:17 GMT</pubDate>
    <dc:creator>gary_d_garingo</dc:creator>
    <dc:date>2017-02-02T06:04:17Z</dc:date>
  </channel>
</rss>

