<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Question: unhandled Null pointer exception while transforming in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/unhandled-Null-pointer-exception-while-transforming/m-p/164707#M49551</link>
    <description>&lt;P&gt;While running over tutorial &lt;A href="https://community.hortonworks.com/articles/53903/spark-machine-learning-pipeline-by-example.html" target="_blank"&gt;https://community.hortonworks.com/articles/53903/spark-machine-learning-pipeline-by-example.html&lt;/A&gt;, I face issue in below line&lt;/P&gt;&lt;P&gt;val header = flight2007.first &lt;/P&gt;&lt;P&gt;val trainingData = flight2007
                    .filter(x =&amp;gt; x != header)
unhandled exception while transforming &amp;lt;console&amp;gt;
error: uncaught exception during compilation: java.lang.NullPointerException
&lt;/P&gt;&lt;P&gt;---------------------&lt;/P&gt;&lt;P&gt;Error message in detail after successful display of the header val.&lt;/P&gt;&lt;P&gt;  while compiling: &amp;lt;console&amp;gt;
        during phase: specialize
     library version: version 2.10.5
    compiler version: version 2.10.5
  reconstructed args: -classpath /usr/hdp/2.4.0.0-169/zeppelin/lib/interpreter/spark/zeppelin-spark-0.6.0.2.4.0.0-169.jar:/etc/spark/2.4.0.0-169/0:/usr/hdp/2.4.0.0-169/spark/lib/spark-assembly-1.6.0.2.4.0.0-169-hadoop2.7.1.2.4.0.0-169.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-api-jdo-3.2.6.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-core-3.2.10.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-rdbms-3.2.9.jar:/etc/hadoop/2.4.0.0-169/0:/usr/hdp/current/zeppelin-server/lib/interpreter/spark/zeppelin-spark-0.6.0.2.4.0.0-169.jar:/usr/hdp/current/spark-historyserver/conf:/usr/hdp/2.4.0.0-169/spark/lib/spark-assembly-1.6.0.2.4.0.0-169-hadoop2.7.1.2.4.0.0-169.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-api-jdo-3.2.6.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-core-3.2.10.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-rdbms-3.2.9.jar:/usr/hdp/current/hadoop-client/conf:/usr/hdp/current/zeppelin-server/lib/interpreter/spark/zeppelin-spark-0.6.0.2.4.0.0-169.jar
  last tree to typer: TypeTree(anonymous class $anonfun)
              symbol: anonymous class $anonfun (flags: final &amp;lt;synthetic&amp;gt;)
   symbol definition: final class $anonfun extends AbstractFunction1[Array[String],Flight] with Serializable
                 tpe: scala.runtime.AbstractFunction1[Array[String],Flight] with Serializable
       symbol owners: anonymous class $anonfun -&amp;gt; value trainingData -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $read -&amp;gt; package $line568
      context owners: anonymous class $anonfun -&amp;gt; value trainingData -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $read -&amp;gt; package $line568
== Enclosing template or block ==
ClassDef( // final class $anonfun extends AbstractFunction1[String,Boolean] with Serializable
  final &amp;lt;synthetic&amp;gt; @{ SerialVersionUID(0) }
  "$anonfun"
  []
  Template( // val &amp;lt;local $anonfun&amp;gt;: &amp;lt;notype&amp;gt;, tree.tpe=scala.runtime.AbstractFunction1[String,Boolean] with Serializable
    "scala.runtime.AbstractFunction1", "scala.Serializable" // parents
    ValDef(
      private
      "_"
      &amp;lt;tpt&amp;gt;
      &amp;lt;empty&amp;gt;
    )
    // 2 statements
    DefDef( // def &amp;lt;init&amp;gt;(): scala.runtime.AbstractFunction1[String,Boolean] with Serializable
      &amp;lt;method&amp;gt; &amp;lt;triedcooking&amp;gt;
      "&amp;lt;init&amp;gt;"
      []
      List(Nil)
      &amp;lt;tpt&amp;gt; // tree.tpe=scala.runtime.AbstractFunction1[String,Boolean] with Serializable
      Block( // tree.tpe=Unit
        Apply( // def &amp;lt;init&amp;gt;(): scala.runtime.AbstractFunction1[T1,R] in class AbstractFunction1, tree.tpe=scala.runtime.AbstractFunction1[String,Boolean]
          $anonfun.super."&amp;lt;init&amp;gt;" // def &amp;lt;init&amp;gt;(): scala.runtime.AbstractFunction1[T1,R] in class AbstractFunction1, tree.tpe=()scala.runtime.AbstractFunction1[String,Boolean]
          Nil
        )
        ()
      )
    )
    DefDef( // final def apply(x: String): Boolean
      &amp;lt;method&amp;gt; final
      "apply"
      []
      // 1 parameter list
      ValDef( // x: String
        &amp;lt;param&amp;gt; &amp;lt;triedcooking&amp;gt;
        "x"
        &amp;lt;tpt&amp;gt; // tree.tpe=String
        &amp;lt;empty&amp;gt;
      )
      &amp;lt;tpt&amp;gt; // tree.tpe=Boolean
      Apply( // final def !=(x$1: Object): Boolean in class Object, tree.tpe=Boolean
        "x"."$bang$eq" // final def !=(x$1: Object): Boolean in class Object, tree.tpe=(x$1: Object)Boolean
        Apply( // val header(): String, tree.tpe=String
          $iwC.this.$VAL1317().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw()."header" // val header(): String, tree.tpe=()String
          Nil
        )
      )
    )
  )
)
== Expanded type of tree ==
TypeRef(
  TypeSymbol(
    final class $anonfun extends AbstractFunction1[Array[String],Flight] with Serializable
  )
)
unhandled exception while transforming &amp;lt;console&amp;gt;
error: uncaught exception during compilation: java.lang.NullPointerException
&lt;/P&gt;&lt;P&gt;----------------------------------------------&lt;/P&gt;</description>
    <pubDate>Wed, 21 Dec 2016 01:54:52 GMT</pubDate>
    <dc:creator>mothi86</dc:creator>
    <dc:date>2016-12-21T01:54:52Z</dc:date>
    <item>
      <title>unhandled Null pointer exception while transforming</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/unhandled-Null-pointer-exception-while-transforming/m-p/164707#M49551</link>
      <description>&lt;P&gt;While running over tutorial &lt;A href="https://community.hortonworks.com/articles/53903/spark-machine-learning-pipeline-by-example.html" target="_blank"&gt;https://community.hortonworks.com/articles/53903/spark-machine-learning-pipeline-by-example.html&lt;/A&gt;, I face issue in below line&lt;/P&gt;&lt;P&gt;val header = flight2007.first &lt;/P&gt;&lt;P&gt;val trainingData = flight2007
                    .filter(x =&amp;gt; x != header)
unhandled exception while transforming &amp;lt;console&amp;gt;
error: uncaught exception during compilation: java.lang.NullPointerException
&lt;/P&gt;&lt;P&gt;---------------------&lt;/P&gt;&lt;P&gt;Error message in detail after successful display of the header val.&lt;/P&gt;&lt;P&gt;  while compiling: &amp;lt;console&amp;gt;
        during phase: specialize
     library version: version 2.10.5
    compiler version: version 2.10.5
  reconstructed args: -classpath /usr/hdp/2.4.0.0-169/zeppelin/lib/interpreter/spark/zeppelin-spark-0.6.0.2.4.0.0-169.jar:/etc/spark/2.4.0.0-169/0:/usr/hdp/2.4.0.0-169/spark/lib/spark-assembly-1.6.0.2.4.0.0-169-hadoop2.7.1.2.4.0.0-169.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-api-jdo-3.2.6.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-core-3.2.10.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-rdbms-3.2.9.jar:/etc/hadoop/2.4.0.0-169/0:/usr/hdp/current/zeppelin-server/lib/interpreter/spark/zeppelin-spark-0.6.0.2.4.0.0-169.jar:/usr/hdp/current/spark-historyserver/conf:/usr/hdp/2.4.0.0-169/spark/lib/spark-assembly-1.6.0.2.4.0.0-169-hadoop2.7.1.2.4.0.0-169.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-api-jdo-3.2.6.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-core-3.2.10.jar:/usr/hdp/2.4.0.0-169/spark/lib/datanucleus-rdbms-3.2.9.jar:/usr/hdp/current/hadoop-client/conf:/usr/hdp/current/zeppelin-server/lib/interpreter/spark/zeppelin-spark-0.6.0.2.4.0.0-169.jar
  last tree to typer: TypeTree(anonymous class $anonfun)
              symbol: anonymous class $anonfun (flags: final &amp;lt;synthetic&amp;gt;)
   symbol definition: final class $anonfun extends AbstractFunction1[Array[String],Flight] with Serializable
                 tpe: scala.runtime.AbstractFunction1[Array[String],Flight] with Serializable
       symbol owners: anonymous class $anonfun -&amp;gt; value trainingData -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $read -&amp;gt; package $line568
      context owners: anonymous class $anonfun -&amp;gt; value trainingData -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $iwC -&amp;gt; class $read -&amp;gt; package $line568
== Enclosing template or block ==
ClassDef( // final class $anonfun extends AbstractFunction1[String,Boolean] with Serializable
  final &amp;lt;synthetic&amp;gt; @{ SerialVersionUID(0) }
  "$anonfun"
  []
  Template( // val &amp;lt;local $anonfun&amp;gt;: &amp;lt;notype&amp;gt;, tree.tpe=scala.runtime.AbstractFunction1[String,Boolean] with Serializable
    "scala.runtime.AbstractFunction1", "scala.Serializable" // parents
    ValDef(
      private
      "_"
      &amp;lt;tpt&amp;gt;
      &amp;lt;empty&amp;gt;
    )
    // 2 statements
    DefDef( // def &amp;lt;init&amp;gt;(): scala.runtime.AbstractFunction1[String,Boolean] with Serializable
      &amp;lt;method&amp;gt; &amp;lt;triedcooking&amp;gt;
      "&amp;lt;init&amp;gt;"
      []
      List(Nil)
      &amp;lt;tpt&amp;gt; // tree.tpe=scala.runtime.AbstractFunction1[String,Boolean] with Serializable
      Block( // tree.tpe=Unit
        Apply( // def &amp;lt;init&amp;gt;(): scala.runtime.AbstractFunction1[T1,R] in class AbstractFunction1, tree.tpe=scala.runtime.AbstractFunction1[String,Boolean]
          $anonfun.super."&amp;lt;init&amp;gt;" // def &amp;lt;init&amp;gt;(): scala.runtime.AbstractFunction1[T1,R] in class AbstractFunction1, tree.tpe=()scala.runtime.AbstractFunction1[String,Boolean]
          Nil
        )
        ()
      )
    )
    DefDef( // final def apply(x: String): Boolean
      &amp;lt;method&amp;gt; final
      "apply"
      []
      // 1 parameter list
      ValDef( // x: String
        &amp;lt;param&amp;gt; &amp;lt;triedcooking&amp;gt;
        "x"
        &amp;lt;tpt&amp;gt; // tree.tpe=String
        &amp;lt;empty&amp;gt;
      )
      &amp;lt;tpt&amp;gt; // tree.tpe=Boolean
      Apply( // final def !=(x$1: Object): Boolean in class Object, tree.tpe=Boolean
        "x"."$bang$eq" // final def !=(x$1: Object): Boolean in class Object, tree.tpe=(x$1: Object)Boolean
        Apply( // val header(): String, tree.tpe=String
          $iwC.this.$VAL1317().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw().$iw()."header" // val header(): String, tree.tpe=()String
          Nil
        )
      )
    )
  )
)
== Expanded type of tree ==
TypeRef(
  TypeSymbol(
    final class $anonfun extends AbstractFunction1[Array[String],Flight] with Serializable
  )
)
unhandled exception while transforming &amp;lt;console&amp;gt;
error: uncaught exception during compilation: java.lang.NullPointerException
&lt;/P&gt;&lt;P&gt;----------------------------------------------&lt;/P&gt;</description>
      <pubDate>Wed, 21 Dec 2016 01:54:52 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/unhandled-Null-pointer-exception-while-transforming/m-p/164707#M49551</guid>
      <dc:creator>mothi86</dc:creator>
      <dc:date>2016-12-21T01:54:52Z</dc:date>
    </item>
    <item>
      <title>Re: unhandled Null pointer exception while transforming</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/unhandled-Null-pointer-exception-while-transforming/m-p/164708#M49552</link>
      <description>&lt;P&gt;Closed issue with data error.&lt;/P&gt;</description>
      <pubDate>Sat, 22 Apr 2017 01:14:13 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/unhandled-Null-pointer-exception-while-transforming/m-p/164708#M49552</guid>
      <dc:creator>mothi86</dc:creator>
      <dc:date>2017-04-22T01:14:13Z</dc:date>
    </item>
  </channel>
</rss>

