<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Question Re: Phoenix driver not found in Spark job (Support Questions)</title>
    <link>https://community.cloudera.com/t5/Support-Questions/Phoenix-driver-not-found-in-Spark-job/m-p/147508#M110047</link>
    <description>&lt;P&gt;Here is the output:&lt;/P&gt;&lt;PRE&gt;2017-06-28 21:28:13 INFO  ReloadInputReader          Connection: jdbc:phoenix:master:2181:/hbase-unsecure;
2017-06-28 21:28:13 INFO  ReloadInputReader          Ingest DBC: jdbc:phoenix:master:2181:/hbase-unsecure
2017-06-28 21:28:13 INFO  ReloadInputReader          driver host name: master.vm.local
2017-06-28 21:28:13 INFO  ReloadInputReader          Zookeeper quorum: master
2017-06-28 21:28:13 INFO  ReloadInputReader          Reload query: SELECT FILE_NAME, TM, DATASET, WORKER_NAME, FILE_CONTENTS FROM JOBS.FILE_CONTENTS WHERE FILE_NAME in (SELECT FILE_NAME FROM JOBS.file_loaded WHERE file_name='B162836D20090316T0854.AAD')
2017-06-28 21:28:13 INFO  MemoryStore                Block broadcast_1 stored as values in memory (estimated size 428.6 KB, free 465.7 KB)
2017-06-28 21:28:13 INFO  MemoryStore                Block broadcast_1_piece0 stored as bytes in memory (estimated size 34.9 KB, free 500.7 KB)
2017-06-28 21:28:13 INFO  BlockManagerInfo           Added broadcast_1_piece0 in memory on 192.168.56.2:51844 (size: 34.9 KB, free: 457.8 MB)
2017-06-28 21:28:13 INFO  SparkContext               Created broadcast 1 from newAPIHadoopRDD at ReloadInputReader.java:135
2017-06-28 21:28:14 INFO  SparkContext               Starting job: foreach at ReloadInputReader.java:137
2017-06-28 21:28:14 INFO  DAGScheduler               Got job 0 (foreach at ReloadInputReader.java:137) with 1 output partitions
2017-06-28 21:28:14 INFO  DAGScheduler               Final stage: ResultStage 0 (foreach at ReloadInputReader.java:137)
2017-06-28 21:28:14 INFO  DAGScheduler               Parents of final stage: List()
2017-06-28 21:28:14 INFO  DAGScheduler               Missing parents: List()
2017-06-28 21:28:14 INFO  DAGScheduler               Submitting ResultStage 0 (NewHadoopRDD[0] at newAPIHadoopRDD at ReloadInputReader.java:135), which has no missing parents
2017-06-28 21:28:14 INFO  MemoryStore                Block broadcast_2 stored as values in memory (estimated size 2.9 KB, free 503.6 KB)
2017-06-28 21:28:14 INFO  MemoryStore                Block broadcast_2_piece0 stored as bytes in memory (estimated size 1845.0 B, free 505.4 KB)
2017-06-28 21:28:14 INFO  BlockManagerInfo           Added broadcast_2_piece0 in memory on 192.168.56.2:51844 (size: 1845.0 B, free: 457.8 MB)
2017-06-28 21:28:14 INFO  SparkContext               Created broadcast 2 from broadcast at DAGScheduler.scala:1008
2017-06-28 21:28:14 INFO  DAGScheduler               Submitting 1 missing tasks from ResultStage 0 (NewHadoopRDD[0] at newAPIHadoopRDD at ReloadInputReader.java:135)
2017-06-28 21:28:14 INFO  YarnClusterScheduler       Adding task set 0.0 with 1 tasks
2017-06-28 21:28:14 INFO  TaskSetManager             Starting task 0.0 in stage 0.0 (TID 0, master.vm.local, partition 0,PROCESS_LOCAL, 2494 bytes)
2017-06-28 21:28:18 INFO  BlockManagerInfo           Added broadcast_2_piece0 in memory on master.vm.local:40246 (size: 1845.0 B, free: 511.1 MB)
2017-06-28 21:28:18 INFO  BlockManagerInfo           Added broadcast_1_piece0 in memory on master.vm.local:40246 (size: 34.9 KB, free: 511.1 MB)
2017-06-28 21:28:20 WARN  TaskSetManager             Lost task 0.0 in stage 0.0 (TID 0, master.vm.local): java.lang.RuntimeException: java.sql.SQLException: No suitable driver found for jdbc:phoenix:master:2181:/hbase-unsecure;
    at org.apache.phoenix.mapreduce.PhoenixInputFormat.getQueryPlan(PhoenixInputFormat.java:134)
    at org.apache.phoenix.mapreduce.PhoenixInputFormat.createRecordReader(PhoenixInputFormat.java:71)
    at org.apache.spark.rdd.NewHadoopRDD$$anon$1.&amp;lt;init&amp;gt;(NewHadoopRDD.scala:156)
    at org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:129)
    at org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:64)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:313)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:277)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
    at org.apache.spark.scheduler.Task.run(Task.scala:89)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:227)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
Caused by: java.sql.SQLException: No suitable driver found for jdbc:phoenix:master:2181:/hbase-unsecure;
    at java.sql.DriverManager.getConnection(DriverManager.java:689)
    at java.sql.DriverManager.getConnection(DriverManager.java:208)
    at org.apache.phoenix.mapreduce.util.ConnectionUtil.getConnection(ConnectionUtil.java:98)
    at org.apache.phoenix.mapreduce.util.ConnectionUtil.getInputConnection(ConnectionUtil.java:57)
    at org.apache.phoenix.mapreduce.PhoenixInputFormat.getQueryPlan(PhoenixInputFormat.java:116)
    ... 12 more
&lt;/PRE&gt;</description>
    <pubDate>Tue, 04 Jul 2017 07:19:31 GMT</pubDate>
    <dc:creator>jeff_watson</dc:creator>
    <dc:date>2017-07-04T07:19:31Z</dc:date>
  </channel>
</rss>

