<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Phoenix driver not found in Spark job in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/Phoenix-driver-not-found-in-Spark-job/m-p/147509#M110048</link>
    <description>&lt;P&gt;FYI, I added some code that in other parts of the app forced the Phoenix JDBC driver to load, but it doesn't seem to be working in this context.  The call to ConnectionUtil.getInputConnection(conf, props) is the code I tracked down in the stack trace below that builds the connection, and I used it to verify I was getting the correct connection (it is a valid JDBC URL).  &lt;/P&gt;&lt;PRE&gt;        final Configuration configuration = HBaseConfiguration.create();
        configuration.set(HConstants.ZOOKEEPER_CLIENT_PORT, "2181");
        configuration.set(HConstants.ZOOKEEPER_ZNODE_PARENT, quorumParentNode);
        configuration.set(HConstants.ZOOKEEPER_QUORUM, quorum);
        Properties props = new Properties();
        Connection conn = ConnectionUtil.getInputConnection(configuration, props);
        log.info("Connection: " + conn.getMetaData().getURL());
        log.info("Ingest DBC: " + ingestDbConn);

        log.info("driver host name: " + driverHost);
        log.info("Zookeeper quorum: " + quorum);
        log.info("Reload query: " + sqlQuery);
        PhoenixConfigurationUtil.setPhysicalTableName(configuration, FileContentsWritable.TABLE_NAME);
        PhoenixConfigurationUtil.setInputTableName(configuration , FileContentsWritable.TABLE_NAME);
        PhoenixConfigurationUtil.setOutputTableName(configuration , FileContentsWritable.TABLE_NAME);
        PhoenixConfigurationUtil.setInputQuery(configuration, sqlQuery);
        PhoenixConfigurationUtil.setInputClass(configuration , FileContentsWritable.class);
        PhoenixConfigurationUtil.setUpsertColumnNames(configuration, FileContentsWritable.COLUMN_NAMES);
        Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
        @SuppressWarnings("unchecked")
        JavaPairRDD&amp;lt;NullWritable, FileContentsWritable&amp;gt; fileContentsRDD = sparkContext.newAPIHadoopRDD(configuration, PhoenixInputFormat.class, NullWritable.class, FileContentsWritable.class);

        fileContentsRDD.foreach(rdd -&amp;gt;
        {
            Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
            FileContentsBean fileContentsBean = rdd._2.getFileContentsBean();
            :
            :
        };
	&lt;/PRE&gt;</description>
    <pubDate>Tue, 04 Jul 2017 07:28:33 GMT</pubDate>
    <dc:creator>jeff_watson</dc:creator>
    <dc:date>2017-07-04T07:28:33Z</dc:date>
  </channel>
</rss>

