Support Questions
Find answers, ask questions, and share your expertise

Cannot connect to HiveServer2 running in Cloudera Docker VM with JDBC

Cannot connect to HiveServer2 running in Cloudera Docker VM with JDBC

New Contributor

I am running Cloudera Quickstart on Docker, running on a 16 GB Windows 10 machine.

Hue shows all configurations are ok, and I can create and query tables through Hive.

I have set port forwarding for 10000.

From the host Windows machine I am trying to connect to HiveServer2 using the Cloudera JDBC driver, and I get the following error. Please help.

 

java.sql.SQLException: Could not open client transport with JDBC Uri: jdbc:hive2://127.0.0.1:10000: java.net.SocketException: Software caused connection abort: socket write error
    at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:209)
    at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:107)
    at java.sql.DriverManager.getConnection(Unknown Source)
    at java.sql.DriverManager.getConnection(Unknown Source)
    at ConnectCloudera.main(ConnectCloudera.java:25)
Caused by: org.apache.thrift.transport.TTransportException: java.net.SocketException: Software caused connection abort: socket write error
    at org.apache.thrift.transport.TIOStreamTransport.flush(TIOStreamTransport.java:161)
    at org.apache.thrift.transport.TSaslTransport.sendSaslMessage(TSaslTransport.java:166)
    at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:103)
    at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
    at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
    at org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:227)
    at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:182)
    ... 4 more
Caused by: java.net.SocketException: Software caused connection abort: socket write error
    at java.net.SocketOutputStream.socketWrite0(Native Method)
    at java.net.SocketOutputStream.socketWrite(Unknown Source)
    at java.net.SocketOutputStream.write(Unknown Source)
    at java.io.BufferedOutputStream.flushBuffer(Unknown Source)
    at java.io.BufferedOutputStream.flush(Unknown Source)
    at org.apache.thrift.transport.TIOStreamTransport.flush(TIOStreamTransport.java:159)
    ... 10 more

 

 

My code is

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

//import com.cloudera.hive.jdbc4.HS2Driver;

public class ConnectCloudera {

    /**
     * Connects to HiveServer2 over JDBC, runs {@code show tables}, and prints
     * each table name. Exits with status 1 if the driver class is missing.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        System.out.println("In Progress");

        try {
            // Explicitly register the Cloudera HS2 driver.
            // NOTE(review): the stack trace shows org.apache.hive.jdbc.HiveDriver
            // answering the jdbc:hive2:// URL, so the Apache driver on the
            // classpath is the one actually used — confirm which driver you
            // intend to register, and keep only that one on the classpath.
            String driverName = "com.cloudera.hive.jdbc4.HS2Driver";
            Class.forName(driverName);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }

        String sql = "show tables";
        System.out.println("Running: " + sql);

        // try-with-resources guarantees the Connection, Statement, and
        // ResultSet are closed even when the query throws (the original
        // leaked all three).
        try (Connection con = DriverManager.getConnection(
                     "jdbc:hive2://127.0.0.1:10000", "cloudera", "cloudera");
             Statement stmt = con.createStatement();
             ResultSet res = stmt.executeQuery(sql)) {

            // The original never read the ResultSet; iterate it so the query
            // result is actually visible.
            while (res.next()) {
                System.out.println(res.getString(1));
            }
            System.out.println("Query execution complete");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

}

 

 

hive-site.xml

<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>

<configuration>

  <!-- Hive Configuration can either be stored in this file or in the hadoop configuration files  -->
  <!-- that are implied by Hadoop setup variables.                                                -->
  <!-- Aside from Hadoop setup variables - this file is provided as a convenience so that Hive    -->
  <!-- users do not have to edit hadoop configuration files (that may be managed as a centralized -->
  <!-- resource).                                                                                 -->

  <!-- Hive Execution Parameters -->

  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://127.0.0.1/metastore?createDatabaseIfNotExist=true</value>
    <description>JDBC connect string for a JDBC metastore</description>
  </property>

  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
    <description>Driver class name for a JDBC metastore</description>
  </property>

  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>hive</value>
  </property>

  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>cloudera</value>
  </property>

  <property>
    <name>hive.hwi.war.file</name>
    <value>/usr/lib/hive/lib/hive-hwi-0.8.1-cdh4.0.0.jar</value>
    <description>This is the WAR file with the jsp content for Hive Web Interface</description>
  </property>

<property>
    <name>datanucleus.fixedDatastore</name>
    <value>true</value>
  </property>

  <property>
    <name>datanucleus.autoCreateSchema</name>
    <value>false</value>
  </property>

  <property>
    <name>hive.metastore.uris</name>
    <value>thrift://127.0.0.1:9083</value>
    <description>IP address (or fully-qualified domain name) and port of the metastore host</description>
  </property>
</configuration>

 

 

 

JDBC JAR Files used

hadoop-common-2.8.0.jar
hadoop-mapreduce-client-core-2.8.0.jar
hive-jdbc-2.1.1-standalone.jar
HiveJDBC4.zip
hive_metastore.jar
hive_service.jar
JDBC Con.txt
libfb303-0.9.0.jar
libthrift-0.9.0.jar
log4j-1.2.14.jar
ql.jar
r2.8.0
slf4j-api-1.5.8.jar
slf4j-log4j12-1.5.8.jar
TCLIServiceClient.jar