<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: spark-submit  - NoSuchMethodError :saveToPhoenix in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184948#M147055</link>
    <description>&lt;P&gt;Hi &lt;A rel="user" href="https://community.cloudera.com/users/11048/falbani.html" nodeid="11048"&gt;@Felix Albani&lt;/A&gt; thanks your advice,  I changed the submit command as:&lt;/P&gt;&lt;PRE&gt;spark-submit \
--class com.test.SmokeTest \
--master yarn \
--deploy-mode cluster \
--driver-memory 1g \
--executor-memory  2g \
--executor-cores 4 \
--num-executors 2 \
--files /etc/hbase/conf/hbase-site.xml \
--conf "spark.executor.extraClassPath=phoenix-4.7.0.2.6.2.0-205-spark2.jar:phoenix-client.jar:hbase-client.jar:phoenix-spark2-4.7.0.2.6.2.0-205.jar:hbase-common.jar:hbase-protocol.jar:phoenix-core-4.7.0.2.6.2.0-205.jar"  \
--conf "spark.driver.extraClassPath=/usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar:/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/lib/hbase-client.jar:/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar:/usr/hdp/current/phoenix-client/lib/hbase-common.jar:/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar:/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar" \
--jars /usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar,/usr/hdp/current/phoenix-client/phoenix-client.jar,/usr/hdp/current/phoenix-client/lib/hbase-client.jar,/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar,/usr/hdp/current/phoenix-client/lib/hbase-common.jar,/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar,/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar  \
--verbose \
/tmp/test-1.0-SNAPSHOT.jar &lt;/PRE&gt;&lt;P&gt;but no luck: &lt;/P&gt;&lt;PRE&gt;18/07/17 13:16:21 INFO CodeGenerator: Code generated in 33.11763 ms
18/07/17 13:16:22 ERROR ApplicationMaster: User class threw exception: java.lang.NoSuchMethodError: org.apache.phoenix.spark.DataFrameFunctions.saveToPhoenix$default$4()Lscala/Option;
java.lang.NoSuchMethodError: org.apache.phoenix.spark.DataFrameFunctions.saveToPhoenix$default$4()Lscala/Option;
at com.trendyglobal.bigdata.inventory.CreateTestData$anonfun$main$1.apply$mcVI$sp(CreateTestData.scala:87)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160)
at com.trendyglobal.bigdata.inventory.CreateTestData$.main(CreateTestData.scala:80)
at com.trendyglobal.bigdata.inventory.CreateTestData.main(CreateTestData.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.yarn.ApplicationMaster$anon$3.run(ApplicationMaster.scala:654)
18/07/17 13:16:22 INFO ApplicationMaster: Final app status: FAILED, exitCode: 15, (reason: User class threw exception: java.lang.NoSuchMethodError: org.apache.phoenix.spark.DataFrameFunctions.saveToPhoenix$default$4()Lscala/Option;)
18/07/17 13:16:22 INFO SparkContext: Invoking stop() from shutdown hook
18/07/17 13:16:22 INFO ServerConnector: Stopped Spark@81d2265{HTTP/1.1}{0.0.0.0:0}
18/07/17 13:16:22 INFO SparkUI: Stopped Spark web UI at &lt;A href="http://10.2.29.104:37764" target="_blank"&gt;http://10.2.29.104:37764&lt;/A&gt;
18/07/17 13:16:22 INFO YarnAllocator: Driver requested a total number of 0 executor(s).
18/07/17 13:16:22 INFO YarnClusterSchedulerBackend: Shutting down all executors
18/07/17 13:16:22 INFO YarnSchedulerBackend$YarnDriverEndpoint: Asking each executor to shut down
18/07/17 13:16:22 INFO SchedulerExtensionServices: Stopping SchedulerExtensionServices
(serviceOption=None,
 services=List(),
 started=false)&lt;BR /&gt;&lt;/PRE&gt;&lt;P&gt;I can see the phoenix-spark2-4.7.0.2.6.2.0-205.jar was in the classpath&lt;/P&gt;&lt;PRE&gt;===============================================================================
YARN executor launch context:
  env:
    CLASSPATH -&amp;gt; phoenix-4.7.0.2.6.2.0-205-spark2.jar:phoenix-client.jar:hbase-client.jar:phoenix-spark2-4.7.0.2.6.2.0-205.jar:hbase-common.jar:hbase-protocol.jar:phoenix-core-4.7.0.2.6.2.0-205.jar&amp;lt;CPS&amp;gt;{{PWD}}&amp;lt;CPS&amp;gt;{{PWD}}/__spark_conf__&amp;lt;CPS&amp;gt;{{PWD}}/__spark_libs__/*&amp;lt;CPS&amp;gt;/etc/hadoop/conf&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-client/*&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-client/lib/*&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-hdfs-client/*&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-hdfs-client/lib/*&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-yarn-client/*&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-yarn-client/lib/*&amp;lt;CPS&amp;gt;$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.6.2.0-205/hadoop/lib/hadoop-lzo-0.6.0.2.6.2.0-205.jar:/etc/hadoop/conf/secure
    SPARK_YARN_STAGING_DIR -&amp;gt; hdfs://nn1-dev1-tbdp.trendy-global.com:8020/user/nifi/.sparkStaging/application_1529853578712_0039
    SPARK_USER -&amp;gt; nifi
    SPARK_YARN_MODE -&amp;gt; true
  command:
    LD_LIBRARY_PATH="/usr/hdp/current/hadoop-client/lib/native:/usr/hdp/current/hadoop-client/lib/native/Linux-amd64-64:$LD_LIBRARY_PATH" \ 
      {{JAVA_HOME}}/bin/java \ 
      -server \ 
      -Xmx2048m \ 
      -Djava.io.tmpdir={{PWD}}/tmp \ 
      '-Dspark.history.ui.port=18081' \ 
      -Dspark.yarn.app.container.log.dir=&amp;lt;LOG_DIR&amp;gt; \ 
      -XX:OnOutOfMemoryError='kill %p' \ 
      org.apache.spark.executor.CoarseGrainedExecutorBackend \ 
      --driver-url \ 
      spark://CoarseGrainedScheduler@10.2.29.104:40401 \ 
      --executor-id \ 
      &amp;lt;executorId&amp;gt; \ 
      --hostname \ 
      &amp;lt;hostname&amp;gt; \ 
      --cores \ 
      4 \ 
      --app-id \ 
      application_1529853578712_0039 \ 
      --user-class-path \ 
      file:$PWD/__app__.jar \ 
      --user-class-path \ 
      file:$PWD/phoenix-4.7.0.2.6.2.0-205-spark2.jar \ 
      --user-class-path \ 
      file:$PWD/phoenix-client.jar \ 
      --user-class-path \ 
      file:$PWD/hbase-client.jar \ 
      --user-class-path \ 
      file:$PWD/phoenix-spark2-4.7.0.2.6.2.0-205.jar \ 
      --user-class-path \ 
      file:$PWD/hbase-common.jar \ 
      --user-class-path \ 
      file:$PWD/hbase-protocol.jar \ 
      --user-class-path \ 
      file:$PWD/phoenix-core-4.7.0.2.6.2.0-205.jar \ 
      1&amp;gt;&amp;lt;LOG_DIR&amp;gt;/stdout \ 
      2&amp;gt;&amp;lt;LOG_DIR&amp;gt;/stderr
  resources:
    hbase-common.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/hbase-common.jar" } size: 575685 timestamp: 1531804498373 type: FILE visibility: PRIVATE
    phoenix-4.7.0.2.6.2.0-205-spark2.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/phoenix-4.7.0.2.6.2.0-205-spark2.jar" } size: 87275 timestamp: 1531804497220 type: FILE visibility: PRIVATE
    __app__.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/inventory-calc-service-1.0-SNAPSHOT.jar" } size: 41478 timestamp: 1531804497134 type: FILE visibility: PRIVATE
    __spark_conf__ -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/__spark_conf__.zip" } size: 106688 timestamp: 1531804498824 type: ARCHIVE visibility: PRIVATE
    hbase-client.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/hbase-client.jar" } size: 1398707 timestamp: 1531804498300 type: FILE visibility: PRIVATE
    phoenix-spark2-4.7.0.2.6.2.0-205.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/phoenix-spark2-4.7.0.2.6.2.0-205.jar" } size: 81143 timestamp: 1531804498334 type: FILE visibility: PRIVATE
    hbase-site.xml -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/hbase-site.xml" } size: 7320 timestamp: 1531804498662 type: FILE visibility: PRIVATE
    hbase-protocol.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/hbase-protocol.jar" } size: 4941870 timestamp: 1531804498450 type: FILE visibility: PRIVATE
    __spark_libs__ -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/hdp/apps/2.6.2.0-205/spark2/spark2-hdp-yarn-archive.tar.gz" } size: 180384518 timestamp: 1507704288496 type: ARCHIVE visibility: PUBLIC
    phoenix-client.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/phoenix-client.jar" } size: 107566119 timestamp: 1531804498256 type: FILE visibility: PRIVATE
    phoenix-core-4.7.0.2.6.2.0-205.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/phoenix-core-4.7.0.2.6.2.0-205.jar" } size: 3834414 timestamp: 1531804498628 type: FILE visibility: PRIVATE
===============================================================================&lt;/PRE&gt;</description>
    <pubDate>Tue, 17 Jul 2018 12:39:40 GMT</pubDate>
    <dc:creator>xpelive</dc:creator>
    <dc:date>2018-07-17T12:39:40Z</dc:date>
    <item>
      <title>spark-submit  - NoSuchMethodError :saveToPhoenix</title>
      <link>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184945#M147052</link>
      <description>&lt;P&gt;
	Hi there,&lt;/P&gt;&lt;P&gt;
	in my hdp 2.6.2, i am using spark2.1.1, phoenix 4.7.   When start spark-shell as below, &lt;/P&gt;
&lt;PRE&gt;spark-shell --conf "spark.executor.extraClassPath=/usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar:/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/lib/hbase-client.jar:/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar:/usr/hdp/current/phoenix-client/lib/hbase-common.jar:/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar:/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar" --conf "spark.driver.extraClassPath=/usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar:/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/lib/hbase-client.jar:/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar:/usr/hdp/current/phoenix-client/lib/hbase-common.jar:/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar:/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar"
&lt;/PRE&gt;&lt;P&gt;
	It can successfully save data to table2 with code&lt;/P&gt;
&lt;PRE&gt;val phoenixOptionMap=Map("table"-&amp;gt;"TABLE1","zkUrl"-&amp;gt;"zk1:2181/hbase-secure")
val df2=spark.sqlContext.read.format("org.apache.phoenix.spark").options(phoenixOptionMap).load()
val configuration = HBaseConfiguration.create();
configuration.set("zookeeper.znode.parent", "/hbase-secure")
df2.saveToPhoenix("table2",configuration,Option("zk1:2181/hbase-secure"))
&lt;/PRE&gt;&lt;P&gt;
	Then i created a new scala program as:&lt;/P&gt;
&lt;PRE&gt;package com.test

import org.apache.spark.sql.{SQLContext, SparkSession}
import org.apache.phoenix.spark._
import org.apache.hadoop.hbase.HBaseConfiguration

object SmokeTest {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("PhoenixSmokeTest")
      .getOrCreate()

    val phoenixOptionMap=Map("table"-&amp;gt;"TABLE1","zkUrl"-&amp;gt;"zk1:2181/hbase-secure")
    val df2=spark.sqlContext.read.format("org.apache.phoenix.spark").options(phoenixOptionMap).load()
    val configuration = HBaseConfiguration.create();
    configuration.set("zookeeper.znode.parent", "/hbase-secure")
    configuration.addResource("/etc/hbase/conf/hbase-site.xml")
    df2.saveToPhoenix("table2",configuration,Option("zk1:2181/hbase-secure"))
  }

}

&lt;/PRE&gt;&lt;P&gt;
	and run it with below spark-submit script&lt;/P&gt;&lt;PRE&gt;spark-submit \
--class com.test.SmokeTest \
--master yarn \
--deploy-mode client \
--driver-memory 1g \
--executor-memory  1g \
--executor-cores 4 \
--num-executors 2 \
--conf "spark.executor.extraClassPath=/usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar:/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/lib/hbase-client.jar:/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar:/usr/hdp/current/phoenix-client/lib/hbase-common.jar:/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar:/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar"  \
--conf "spark.driver.extraClassPath=/usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar:/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/lib/hbase-client.jar:/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar:/usr/hdp/current/phoenix-client/lib/hbase-common.jar:/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar:/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar" \
--jars /usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar,/usr/hdp/current/phoenix-client/phoenix-client.jar,/usr/hdp/current/phoenix-client/lib/hbase-client.jar,/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar,/usr/hdp/current/phoenix-client/lib/hbase-common.jar,/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar,/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar  \
--verbose \
/tmp/test-1.0-SNAPSHOT.jar&lt;/PRE&gt;&lt;P&gt;It failed with  below message&lt;/P&gt;&lt;PRE&gt;18/07/16 16:30:16 INFO ClientCnxn: Session establishment complete on server zk1/10.2.29.102:2181, sessionid = 0x364270588b5472f, negotiated timeout = 60000
18/07/16 16:30:17 INFO Metrics: Initializing metrics system: phoenix
18/07/16 16:30:17 INFO MetricsConfig: loaded properties from hadoop-metrics2.properties
18/07/16 16:30:17 INFO MetricsSystemImpl: Scheduled snapshot period at 10 second(s).
18/07/16 16:30:17 INFO MetricsSystemImpl: phoenix metrics system started
Exception in thread "main" java.lang.NoSuchMethodError: org.apache.phoenix.spark.DataFrameFunctions.saveToPhoenix$default$4()Lscala/Option;
        at com.trendyglobal.bigdata.inventory.SmokeTest$.main(SmokeTest.scala:28)
        at com.trendyglobal.bigdata.inventory.SmokeTest.main(SmokeTest.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:751)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
18/07/16 16:30:20 INFO SparkContext: Invoking stop() from shutdown hook
18/07/16 16:30:20 INFO ServerConnector: Stopped Spark@38f66b77{HTTP/1.1}{0.0.0.0:4040}&lt;BR /&gt;&lt;/PRE&gt;&lt;P&gt;Would anyone have any advice?  &lt;/P&gt;&lt;P&gt;Thanks,&lt;/P&gt;&lt;P&gt;Forest&lt;/P&gt;</description>
      <pubDate>Mon, 16 Jul 2018 15:44:23 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184945#M147052</guid>
      <dc:creator>xpelive</dc:creator>
      <dc:date>2018-07-16T15:44:23Z</dc:date>
    </item>
    <item>
      <title>Re: spark-submit  - NoSuchMethodError :saveToPhoenix</title>
      <link>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184946#M147053</link>
      <description>&lt;P&gt;seems it relate to &lt;A href="https://issues.apache.org/jira/browse/PHOENIX-3333" target="_blank"&gt;https://issues.apache.org/jira/browse/PHOENIX-3333&lt;/A&gt; , however, in hdp2.6.2, it is fixed according to &lt;A href="https://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.6.5/bk_release-notes/content/patch_phoenix.html" target="_blank"&gt;https://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.6.5/bk_release-notes/content/patch_phoenix.html&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 16 Jul 2018 18:11:41 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184946#M147053</guid>
      <dc:creator>xpelive</dc:creator>
      <dc:date>2018-07-16T18:11:41Z</dc:date>
    </item>
    <item>
      <title>Re: spark-submit  - NoSuchMethodError :saveToPhoenix</title>
      <link>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184947#M147054</link>
      <description>&lt;P&gt;I see your executor has the full path to the phoenix client jars. From local mode to yarn/client mode the most relevant change is that the executors will run on cluster worker nodes. Please try running your code like this:&lt;/P&gt;&lt;PRE&gt;spark-submit \
--class com.test.SmokeTest \
--master yarn \
--deploy-mode client \
--driver-memory 1g \
--executor-memory  1g \
--executor-cores 4 \
--num-executors 2 \
--conf "spark.executor.extraClassPath=phoenix-4.7.0.2.6.2.0-205-spark2.jar:phoenix-client.jar:hbase-client.jar:phoenix-spark2-4.7.0.2.6.2.0-205.jar:hbase-common.jar:hbase-protocol.jar:phoenix-core-4.7.0.2.6.2.0-205.jar"  \
--conf "spark.driver.extraClassPath=/usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar:/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/lib/hbase-client.jar:/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar:/usr/hdp/current/phoenix-client/lib/hbase-common.jar:/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar:/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar" \
--jars /usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar,/usr/hdp/current/phoenix-client/phoenix-client.jar,/usr/hdp/current/phoenix-client/lib/hbase-client.jar,/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar,/usr/hdp/current/phoenix-client/lib/hbase-common.jar,/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar,/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar  \
--verbose \
/tmp/test-1.0-SNAPSHOT.jar&lt;/PRE&gt;&lt;P&gt;And let me know if that works.&lt;/P&gt;&lt;P&gt;HTH&lt;/P&gt;&lt;P&gt;*** If you found this answer addressed your question, please take a moment to login and click the "accept" link on the answer.&lt;/P&gt;</description>
      <pubDate>Mon, 16 Jul 2018 19:16:09 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184947#M147054</guid>
      <dc:creator>falbani</dc:creator>
      <dc:date>2018-07-16T19:16:09Z</dc:date>
    </item>
    <item>
      <title>Re: spark-submit  - NoSuchMethodError :saveToPhoenix</title>
      <link>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184948#M147055</link>
      <description>&lt;P&gt;Hi &lt;A rel="user" href="https://community.cloudera.com/users/11048/falbani.html" nodeid="11048"&gt;@Felix Albani&lt;/A&gt; thanks your advice,  I changed the submit command as:&lt;/P&gt;&lt;PRE&gt;spark-submit \
--class com.test.SmokeTest \
--master yarn \
--deploy-mode cluster \
--driver-memory 1g \
--executor-memory  2g \
--executor-cores 4 \
--num-executors 2 \
--files /etc/hbase/conf/hbase-site.xml \
--conf "spark.executor.extraClassPath=phoenix-4.7.0.2.6.2.0-205-spark2.jar:phoenix-client.jar:hbase-client.jar:phoenix-spark2-4.7.0.2.6.2.0-205.jar:hbase-common.jar:hbase-protocol.jar:phoenix-core-4.7.0.2.6.2.0-205.jar"  \
--conf "spark.driver.extraClassPath=/usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar:/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/lib/hbase-client.jar:/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar:/usr/hdp/current/phoenix-client/lib/hbase-common.jar:/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar:/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar" \
--jars /usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar,/usr/hdp/current/phoenix-client/phoenix-client.jar,/usr/hdp/current/phoenix-client/lib/hbase-client.jar,/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar,/usr/hdp/current/phoenix-client/lib/hbase-common.jar,/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar,/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar  \
--verbose \
/tmp/test-1.0-SNAPSHOT.jar &lt;/PRE&gt;&lt;P&gt;but no luck: &lt;/P&gt;&lt;PRE&gt;18/07/17 13:16:21 INFO CodeGenerator: Code generated in 33.11763 ms
18/07/17 13:16:22 ERROR ApplicationMaster: User class threw exception: java.lang.NoSuchMethodError: org.apache.phoenix.spark.DataFrameFunctions.saveToPhoenix$default$4()Lscala/Option;
java.lang.NoSuchMethodError: org.apache.phoenix.spark.DataFrameFunctions.saveToPhoenix$default$4()Lscala/Option;
at com.trendyglobal.bigdata.inventory.CreateTestData$anonfun$main$1.apply$mcVI$sp(CreateTestData.scala:87)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160)
at com.trendyglobal.bigdata.inventory.CreateTestData$.main(CreateTestData.scala:80)
at com.trendyglobal.bigdata.inventory.CreateTestData.main(CreateTestData.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.yarn.ApplicationMaster$anon$3.run(ApplicationMaster.scala:654)
18/07/17 13:16:22 INFO ApplicationMaster: Final app status: FAILED, exitCode: 15, (reason: User class threw exception: java.lang.NoSuchMethodError: org.apache.phoenix.spark.DataFrameFunctions.saveToPhoenix$default$4()Lscala/Option;)
18/07/17 13:16:22 INFO SparkContext: Invoking stop() from shutdown hook
18/07/17 13:16:22 INFO ServerConnector: Stopped Spark@81d2265{HTTP/1.1}{0.0.0.0:0}
18/07/17 13:16:22 INFO SparkUI: Stopped Spark web UI at &lt;A href="http://10.2.29.104:37764" target="_blank"&gt;http://10.2.29.104:37764&lt;/A&gt;
18/07/17 13:16:22 INFO YarnAllocator: Driver requested a total number of 0 executor(s).
18/07/17 13:16:22 INFO YarnClusterSchedulerBackend: Shutting down all executors
18/07/17 13:16:22 INFO YarnSchedulerBackend$YarnDriverEndpoint: Asking each executor to shut down
18/07/17 13:16:22 INFO SchedulerExtensionServices: Stopping SchedulerExtensionServices
(serviceOption=None,
 services=List(),
 started=false)&lt;BR /&gt;&lt;/PRE&gt;&lt;P&gt;I can see the phoenix-spark2-4.7.0.2.6.2.0-205.jar was in the classpath&lt;/P&gt;&lt;PRE&gt;===============================================================================
YARN executor launch context:
  env:
    CLASSPATH -&amp;gt; phoenix-4.7.0.2.6.2.0-205-spark2.jar:phoenix-client.jar:hbase-client.jar:phoenix-spark2-4.7.0.2.6.2.0-205.jar:hbase-common.jar:hbase-protocol.jar:phoenix-core-4.7.0.2.6.2.0-205.jar&amp;lt;CPS&amp;gt;{{PWD}}&amp;lt;CPS&amp;gt;{{PWD}}/__spark_conf__&amp;lt;CPS&amp;gt;{{PWD}}/__spark_libs__/*&amp;lt;CPS&amp;gt;/etc/hadoop/conf&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-client/*&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-client/lib/*&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-hdfs-client/*&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-hdfs-client/lib/*&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-yarn-client/*&amp;lt;CPS&amp;gt;/usr/hdp/current/hadoop-yarn-client/lib/*&amp;lt;CPS&amp;gt;$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/2.6.2.0-205/hadoop/lib/hadoop-lzo-0.6.0.2.6.2.0-205.jar:/etc/hadoop/conf/secure
    SPARK_YARN_STAGING_DIR -&amp;gt; hdfs://nn1-dev1-tbdp.trendy-global.com:8020/user/nifi/.sparkStaging/application_1529853578712_0039
    SPARK_USER -&amp;gt; nifi
    SPARK_YARN_MODE -&amp;gt; true
  command:
    LD_LIBRARY_PATH="/usr/hdp/current/hadoop-client/lib/native:/usr/hdp/current/hadoop-client/lib/native/Linux-amd64-64:$LD_LIBRARY_PATH" \ 
      {{JAVA_HOME}}/bin/java \ 
      -server \ 
      -Xmx2048m \ 
      -Djava.io.tmpdir={{PWD}}/tmp \ 
      '-Dspark.history.ui.port=18081' \ 
      -Dspark.yarn.app.container.log.dir=&amp;lt;LOG_DIR&amp;gt; \ 
      -XX:OnOutOfMemoryError='kill %p' \ 
      org.apache.spark.executor.CoarseGrainedExecutorBackend \ 
      --driver-url \ 
      spark://CoarseGrainedScheduler@10.2.29.104:40401 \ 
      --executor-id \ 
      &amp;lt;executorId&amp;gt; \ 
      --hostname \ 
      &amp;lt;hostname&amp;gt; \ 
      --cores \ 
      4 \ 
      --app-id \ 
      application_1529853578712_0039 \ 
      --user-class-path \ 
      file:$PWD/__app__.jar \ 
      --user-class-path \ 
      file:$PWD/phoenix-4.7.0.2.6.2.0-205-spark2.jar \ 
      --user-class-path \ 
      file:$PWD/phoenix-client.jar \ 
      --user-class-path \ 
      file:$PWD/hbase-client.jar \ 
      --user-class-path \ 
      file:$PWD/phoenix-spark2-4.7.0.2.6.2.0-205.jar \ 
      --user-class-path \ 
      file:$PWD/hbase-common.jar \ 
      --user-class-path \ 
      file:$PWD/hbase-protocol.jar \ 
      --user-class-path \ 
      file:$PWD/phoenix-core-4.7.0.2.6.2.0-205.jar \ 
      1&amp;gt;&amp;lt;LOG_DIR&amp;gt;/stdout \ 
      2&amp;gt;&amp;lt;LOG_DIR&amp;gt;/stderr
  resources:
    hbase-common.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/hbase-common.jar" } size: 575685 timestamp: 1531804498373 type: FILE visibility: PRIVATE
    phoenix-4.7.0.2.6.2.0-205-spark2.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/phoenix-4.7.0.2.6.2.0-205-spark2.jar" } size: 87275 timestamp: 1531804497220 type: FILE visibility: PRIVATE
    __app__.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/inventory-calc-service-1.0-SNAPSHOT.jar" } size: 41478 timestamp: 1531804497134 type: FILE visibility: PRIVATE
    __spark_conf__ -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/__spark_conf__.zip" } size: 106688 timestamp: 1531804498824 type: ARCHIVE visibility: PRIVATE
    hbase-client.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/hbase-client.jar" } size: 1398707 timestamp: 1531804498300 type: FILE visibility: PRIVATE
    phoenix-spark2-4.7.0.2.6.2.0-205.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/phoenix-spark2-4.7.0.2.6.2.0-205.jar" } size: 81143 timestamp: 1531804498334 type: FILE visibility: PRIVATE
    hbase-site.xml -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/hbase-site.xml" } size: 7320 timestamp: 1531804498662 type: FILE visibility: PRIVATE
    hbase-protocol.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/hbase-protocol.jar" } size: 4941870 timestamp: 1531804498450 type: FILE visibility: PRIVATE
    __spark_libs__ -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/hdp/apps/2.6.2.0-205/spark2/spark2-hdp-yarn-archive.tar.gz" } size: 180384518 timestamp: 1507704288496 type: ARCHIVE visibility: PUBLIC
    phoenix-client.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/phoenix-client.jar" } size: 107566119 timestamp: 1531804498256 type: FILE visibility: PRIVATE
    phoenix-core-4.7.0.2.6.2.0-205.jar -&amp;gt; resource { scheme: "hdfs" host: "nn1-dev1-tbdp.trendy-global.com" port: 8020 file: "/user/nifi/.sparkStaging/application_1529853578712_0039/phoenix-core-4.7.0.2.6.2.0-205.jar" } size: 3834414 timestamp: 1531804498628 type: FILE visibility: PRIVATE
===============================================================================&lt;/PRE&gt;</description>
      <pubDate>Tue, 17 Jul 2018 12:39:40 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184948#M147055</guid>
      <dc:creator>xpelive</dc:creator>
      <dc:date>2018-07-17T12:39:40Z</dc:date>
    </item>
    <item>
      <title>Re: spark-submit  - NoSuchMethodError :saveToPhoenix</title>
      <link>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184949#M147056</link>
      <description>&lt;P&gt;&lt;A rel="user" href="https://community.cloudera.com/users/19015/xpelive.html" nodeid="19015"&gt;@forest lin&lt;/A&gt; The above suggestion was for --deploy-mode &lt;STRONG&gt;client&lt;/STRONG&gt; and I see you used --deploy-mode &lt;STRONG&gt;cluster&lt;/STRONG&gt; instead. If you are willing to run in &lt;STRONG&gt;cluster&lt;/STRONG&gt; mode you need to do this changes:&lt;/P&gt;&lt;PRE&gt;cp /etc/hbase/conf/hbase-site.xml /etc/spark/conf
cp /etc/hbase/conf/hbase-site.xml /etc/spark2/conf


export SPARK_CLASSPATH="/usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar:/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/lib/hbase-client.jar:/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar:/usr/hdp/current/phoenix-client/lib/hbase-common.jar:/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar:/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar"

spark-submit \
--class com.test.SmokeTest \
--master yarn \
--deploy-mode client \
--driver-memory 1g \
--executor-memory  1g \
--executor-cores 4 \
--num-executors 2 \
--conf "spark.executor.extraClassPath=phoenix-4.7.0.2.6.2.0-205-spark2.jar:phoenix-client.jar:hbase-client.jar:phoenix-spark2-4.7.0.2.6.2.0-205.jar:hbase-common.jar:hbase-protocol.jar:phoenix-core-4.7.0.2.6.2.0-205.jar"  \
--conf "spark.driver.extraClassPath=phoenix-4.7.0.2.6.2.0-205-spark2.jar:phoenix-client.jar:hbase-client.jar:phoenix-spark2-4.7.0.2.6.2.0-205.jar:hbase-common.jar:hbase-protocol.jar:phoenix-core-4.7.0.2.6.2.0-205.jar"  \
--jars /usr/hdp/current/phoenix-client/phoenix-4.7.0.2.6.2.0-205-spark2.jar,/usr/hdp/current/phoenix-client/phoenix-client.jar,/usr/hdp/current/phoenix-client/lib/hbase-client.jar,/usr/hdp/current/phoenix-client/lib/phoenix-spark2-4.7.0.2.6.2.0-205.jar,/usr/hdp/current/phoenix-client/lib/hbase-common.jar,/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar,/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar  \
--files /etc/hbase/conf/hbase-site.xml \
--verbose \
/tmp/test-1.0-SNAPSHOT.jar
&lt;BR /&gt;&lt;/PRE&gt;&lt;P&gt;HTH&lt;/P&gt;&lt;P&gt;*** If you found this answer addressed your question, please take a moment to login and click the "accept" link on the answer.&lt;/P&gt;</description>
      <pubDate>Tue, 17 Jul 2018 20:09:46 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184949#M147056</guid>
      <dc:creator>falbani</dc:creator>
      <dc:date>2018-07-17T20:09:46Z</dc:date>
    </item>
    <item>
      <title>Re: spark-submit  - NoSuchMethodError :saveToPhoenix</title>
      <link>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184950#M147057</link>
      <description>&lt;PRE&gt;spark-submit \
--class com.test.SmokeTest \
--master yarn \
--deploy-mode cluster \
--driver-memory 1g \
--executor-memory  2g \
--executor-cores 2 \
--num-executors 3 \
--files /etc/hbase/conf/hbase-site.xml \
--conf "spark.executor.extraClassPath=phoenix-client.jar:hbase-client.jar:phoenix-spark-4.7.0.2.6.2.0-205.jar:hbase-common.jar:hbase-protocol.jar:phoenix-core-4.7.0.2.6.2.0-205.jar"  \
--conf "spark.driver.extraClassPath=/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/lib/hbase-client.jar:/usr/hdp/current/phoenix-client/lib/phoenix-spark-4.7.0.2.6.2.0-205.jar:/usr/hdp/current/phoenix-client/lib/hbase-common.jar:/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar:/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar" \
--jars /usr/hdp/current/phoenix-client/phoenix-client.jar,/usr/hdp/current/phoenix-client/lib/hbase-client.jar,/usr/hdp/current/phoenix-client/lib/phoenix-spark-4.7.0.2.6.2.0-205.jar,/usr/hdp/current/phoenix-client/lib/hbase-common.jar,/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar,/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar  \
--verbose \
/tmp/test-1.0-SNAPSHOT.jar&lt;/PRE&gt;&lt;P&gt;Following your advice, I set the classpath and copied the said xml, but I still have the error:&lt;/P&gt;&lt;PRE&gt;18/07/18 19:47:59 INFO Client: 
         client token: Token { kind: YARN_CLIENT_TOKEN, service:  }
         diagnostics: User class threw exception: java.lang.NoClassDefFoundError: org/apache/spark/sql/DataFrame
         ApplicationMaster host: 10.2.29.104
         ApplicationMaster RPC port: 0
         queue: default
         start time: 1531914415906
         final status: FAILED
         tracking URL: &lt;A href="http://en1-dev1-tbdp.trendy-global.com:8088/proxy/application_1531814517578_0019/" target="_blank"&gt;http://en1-dev1-tbdp.trendy-global.com:8088/proxy/application_1531814517578_0019/&lt;/A&gt;
         user: nifi
Exception in thread "main" org.apache.spark.SparkException: Application application_1531814517578_0019 finished with failed status
        at org.apache.spark.deploy.yarn.Client.run(Client.scala:1261)
        at org.apache.spark.deploy.yarn.Client$.main(Client.scala:1307)
        at org.apache.spark.deploy.yarn.Client.main(Client.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$runMain(SparkSubmit.scala:751)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
18/07/18 19:47:59 INFO ShutdownHookManager: Shutdown hook called&lt;/PRE&gt;</description>
      <pubDate>Wed, 18 Jul 2018 19:02:54 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184950#M147057</guid>
      <dc:creator>xpelive</dc:creator>
      <dc:date>2018-07-18T19:02:54Z</dc:date>
    </item>
    <item>
      <title>Re: spark-submit  - NoSuchMethodError :saveToPhoenix</title>
      <link>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184951#M147058</link>
      <description>&lt;P&gt;&lt;A href="https://community.hortonworks.com/users/19015/xpelive.html"&gt;@forest lin&lt;/A&gt; spark.driver.extraClassPath is not same as the one I shared for cluster mode. Could you confirm the code is running in client mode? And then try the exact settings I provided for cluster mode? Please let me know how it goes!&lt;/P&gt;</description>
      <pubDate>Wed, 18 Jul 2018 20:19:15 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184951#M147058</guid>
      <dc:creator>falbani</dc:creator>
      <dc:date>2018-07-18T20:19:15Z</dc:date>
    </item>
    <item>
      <title>Re: spark-submit  - NoSuchMethodError :saveToPhoenix</title>
      <link>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184952#M147059</link>
      <description>&lt;P&gt;Hi Felix, &lt;/P&gt;&lt;P&gt;I followed your guideline and change the command as following&lt;/P&gt;&lt;PRE&gt;spark-submit \
--class com.test.SmokeTest \
--master yarn \
--deploy-mode cluster \
--driver-memory 1g \
--executor-memory  2g \
--executor-cores 2 \
--num-executors 3 \
--files /etc/hbase/conf/hbase-site.xml \
--conf "spark.executor.extraClassPath=phoenix-client.jar:hbase-client.jar:phoenix-spark-4.7.0.2.6.2.0-205.jar:hbase-common.jar:hbase-protocol.jar:phoenix-core-4.7.0.2.6.2.0-205.jar"  \
--conf "spark.driver.extraClassPath=phoenix-client.jar:hbase-client.jar:phoenix-spark-4.7.0.2.6.2.0-205.jar:hbase-common.jar:hbase-protocol.jar:phoenix-core-4.7.0.2.6.2.0-205.jar" \
--jars /usr/hdp/current/phoenix-client/phoenix-client.jar,/usr/hdp/current/phoenix-client/lib/hbase-client.jar,/usr/hdp/current/phoenix-client/lib/phoenix-spark-4.7.0.2.6.2.0-205.jar,/usr/hdp/current/phoenix-client/lib/hbase-common.jar,/usr/hdp/current/phoenix-client/lib/hbase-protocol.jar,/usr/hdp/current/phoenix-client/lib/phoenix-core-4.7.0.2.6.2.0-205.jar  \
--verbose \
/tmp/test-1.0-SNAPSHOT.jar &lt;/PRE&gt;&lt;P&gt; which encountered the same error: &lt;/P&gt;&lt;PRE&gt;Userclass threw exception: java.lang.NoClassDefFoundError: org/apache/spark/sql/DataFrame&lt;/PRE&gt;&lt;P&gt;But it ran successfully with spark 1.6.3&lt;/P&gt;</description>
      <pubDate>Fri, 20 Jul 2018 11:23:02 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184952#M147059</guid>
      <dc:creator>xpelive</dc:creator>
      <dc:date>2018-07-20T11:23:02Z</dc:date>
    </item>
    <item>
      <title>Re: spark-submit  - NoSuchMethodError :saveToPhoenix</title>
      <link>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184953#M147060</link>
      <description>&lt;P&gt;&lt;A rel="user" href="https://community.cloudera.com/users/19015/xpelive.html" nodeid="19015"&gt;@forest lin&lt;/A&gt; then that is possibly a different issue. Initially you were getting &lt;/P&gt;&lt;P&gt;java.lang.NoSuchMethodError: org.apache.phoenix.spark.DataFrameFunctions.saveToPhoenix$default$4()Lscala/Option;&lt;/P&gt;&lt;P&gt;and only for spark 2 you are now getting&lt;/P&gt;&lt;P&gt;Userclass threw exception: java.lang.NoClassDefFoundError: org/apache/spark/sql/DataFrame&lt;/P&gt;&lt;P&gt;Please review the following link &lt;/P&gt;&lt;P&gt;&lt;A href="https://community.hortonworks.com/content/supportkb/150292/errorexception-in-thread-main-javalangnoclassdeffo.html" target="_blank"&gt;https://community.hortonworks.com/content/supportkb/150292/errorexception-in-thread-main-javalangnoclassdeffo.html&lt;/A&gt;&lt;/P&gt;&lt;P&gt;Also I think is best to take this error in separate thread as is not same as the initial problem which got solved by adding the configuration I mentioned before. &lt;/P&gt;&lt;P&gt;HTH&lt;/P&gt;</description>
      <pubDate>Sat, 21 Jul 2018 01:42:42 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/spark-submit-NoSuchMethodError-saveToPhoenix/m-p/184953#M147060</guid>
      <dc:creator>falbani</dc:creator>
      <dc:date>2018-07-21T01:42:42Z</dc:date>
    </item>
  </channel>
</rss>

