package com.big.data.sparkserver2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.sql.*;

public class SparkServer2Client {

    private static final Logger LOGGER = LoggerFactory.getLogger(SparkServer2Client.class);
    private static final String HIVE_SERVER2_DRIVER = "org.apache.hive.jdbc.HiveDriver";

    public static void main(String[] args) throws SQLException, IOException {

        // Set up Kerberos authentication for the Hadoop security layer
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        UserGroupInformation.setConfiguration(conf);

        // /etc/security/keytab/hive.service.keytab is on the local machine;
        // the principal below is the user that executes the statements
        UserGroupInformation.loginUserFromKeytab("hive/hostname.com@FIELD.HORTONWORKS.COM",
                "/etc/security/keytab/hive.service.keytab");

        // Load the HiveServer2 JDBC driver
        try {
            Class.forName(HIVE_SERVER2_DRIVER);
        } catch (ClassNotFoundException e) {
            LOGGER.error("Driver not found", e);
            return;
        }

        // Connect to the Spark Thrift Server (port 10016) with the Kerberos principal in the URL
        Connection con = DriverManager.getConnection(
                "jdbc:hive2://hostname.com:10016/default;httpPath=/;principal=hive/hostname.com@FIELD.HORTONWORKS.COM");
        Statement stmt = con.createStatement();

        // Drop and recreate the test table
        String tableName = "testHiveDriverTable";
        stmt.execute("drop table if exists " + tableName);
        LOGGER.info("Table {} is dropped", tableName);
        stmt.execute("create table " + tableName + " (key int, value string)");

        // Show tables
        String sql = "show tables '" + tableName + "'";
        LOGGER.info("Running {}", sql);
        ResultSet res = stmt.executeQuery(sql);
        if (res.next()) {
            LOGGER.info("Return from HiveServer {}", res.getString(1));
        }

        // Describe the table
        sql = "describe " + tableName;
        LOGGER.info("DESCRIBE newly created table, sql command: {}", sql);
        res = stmt.executeQuery(sql);
        while (res.next()) {
            LOGGER.info("Return from HiveServer {}", res.getString(1) + "\t" + res.getString(2));
        }

        // Close the connection
        con.close();
    }
}
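If the process needs to run the queries as the keytab principal without changing the process-wide login, the same JDBC work can also be wrapped in UserGroupInformation.doAs. The following is a minimal sketch, reusing the placeholder principal, keytab, and JDBC URL from the example above; the class name SparkServer2DoAsClient is made up for illustration.

package com.big.data.sparkserver2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

import java.security.PrivilegedExceptionAction;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class SparkServer2DoAsClient {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        UserGroupInformation.setConfiguration(conf);

        // Log in from the keytab and keep the resulting UGI instead of the process-wide login
        UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                "hive/hostname.com@FIELD.HORTONWORKS.COM",
                "/etc/security/keytab/hive.service.keytab");

        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // Run the JDBC work as the logged-in principal; try-with-resources closes everything
        ugi.doAs((PrivilegedExceptionAction<Void>) () -> {
            try (Connection con = DriverManager.getConnection(
                         "jdbc:hive2://hostname.com:10016/default;httpPath=/;principal=hive/hostname.com@FIELD.HORTONWORKS.COM");
                 Statement stmt = con.createStatement();
                 ResultSet res = stmt.executeQuery("show tables")) {
                while (res.next()) {
                    System.out.println(res.getString(1));
                }
            }
            return null;
        });
    }
}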



Dependencies:
<dependencies>
  <dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>1.2.1000.2.6.0.3-8</version>
  </dependency>
  <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
  </dependency>
</dependencies>

Repository: http://repo.hortonworks.com/content/groups/public/
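If Maven cannot resolve the HDP-versioned artifacts from the default repositories, the repository above can be declared directly in the pom.xml. This is a minimal sketch; the repository id is just a label chosen for this example.

<repositories>
  <repository>
    <!-- "hortonworks" is an arbitrary id chosen for this example -->
    <id>hortonworks</id>
    <url>http://repo.hortonworks.com/content/groups/public/</url>
  </repository>
</repositories>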
