<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: HDP 3.1 Hive Connectivity Issue in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/HDP-3-1-Hive-Connectivity-Issue/m-p/285225#M211694</link>
    <description>&lt;P&gt;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/20288"&gt;@Shelton&lt;/a&gt;&amp;nbsp;I have tried as you suggested still getting the same error.&lt;/P&gt;&lt;LI-CODE lang="markup"&gt;import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import com.hortonworks.hwc.HiveWarehouseSession;

import com.hortonworks.spark.sql.hive.llap.HiveWarehouseBuilder;

public class Hdp3MigrationMain extends CommonUtilities{

	public static void main(String[] args) {
		String hdp3Enabled = args[0];
		Dataset&amp;lt;Row&amp;gt; dataset;
		String query="select * from hive_schema.table1"; 
		try {
			if ("Y".equalsIgnoreCase(hdp3Enabled)) {
				HiveWarehouseSession hive = HiveWarehouseBuilder.session(sparkSession).build();
				dataset = hive.executeQuery(query);
			} else {
				dataset = sparkSession.sql(query);
			}
			dataset.show();
		} catch(Exception e) {
			e.printStackTrace();
		}

	}
}&lt;/LI-CODE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;And the same error occurs.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;LI-CODE lang="markup"&gt;java.util.NoSuchElementException: spark.sql.hive.hiveserver2.jdbc.url
        at org.apache.spark.sql.internal.SQLConf$$anonfun$getConfString$2.apply(SQLConf.scala:1571)
        at org.apache.spark.sql.internal.SQLConf$$anonfun$getConfString$2.apply(SQLConf.scala:1571)
        at scala.Option.getOrElse(Option.scala:121)
        at org.apache.spark.sql.internal.SQLConf.getConfString(SQLConf.scala:1571)
        at org.apache.spark.sql.RuntimeConfig.get(RuntimeConfig.scala:74)
        at com.hortonworks.spark.sql.hive.llap.HWConf.getConnectionUrlFromConf(HWConf.java:143)
        at com.hortonworks.spark.sql.hive.llap.HWConf.getConnectionUrl(HWConf.java:107)
        at com.hortonworks.spark.sql.hive.llap.HiveWarehouseBuilder.build(HiveWarehouseBuilder.java:97)
        at com.wunderman.hdp.Hdp3MigrationMain.main(Hdp3MigrationMain.java:18)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:904)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)&lt;/LI-CODE&gt;</description>
    <pubDate>Tue, 10 Dec 2019 08:21:50 GMT</pubDate>
    <dc:creator>eswarloges</dc:creator>
    <dc:date>2019-12-10T08:21:50Z</dc:date>
  </channel>
</rss>

