Member since: 11-22-2019
Posts: 16 | Kudos Received: 0 | Solutions: 0

01-14-2021 09:41 PM

Hi Aakulov,

Thanks for your advice. It does appear to have been a connection error: after restarting my Hive services, the error above was resolved, but I now observe the new error code below. Could you advise?

2021-01-15 13:40:44,503 WARN [Timer-Driven Process Thread-5] o.apache.nifi.processors.hive.PutHiveQL PutHiveQL[id=f821e7aa-0176-1000-9088-506b00a72e66] Administratively yielding PutHiveQL_Listings after rolling back due to org.apache.nifi.processor.exception.ProcessException: Failed to process StandardFlowFileRecord[uuid=52458c75-cdb5-415d-968b-d7b65fe6e54b,claim=StandardContentClaim [resourceClaim=StandardResourceClaim[id=1610689175761-2338, container=default, section=290], offset=139, length=139],offset=0,name=52458c75-cdb5-415d-968b-d7b65fe6e54b,size=139] due to java.sql.SQLException: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.StatsTask:
2021-01-15 13:40:44,503 ERROR [Timer-Driven Process Thread-5] o.apache.nifi.processors.hive.PutHiveQL PutHiveQL[id=f821e7aa-0176-1000-9088-506b00a72e66] Failed to process session due to org.apache.nifi.processor.exception.ProcessException: Failed to process StandardFlowFileRecord[uuid=52458c75-cdb5-415d-968b-d7b65fe6e54b,claim=StandardContentClaim [resourceClaim=StandardResourceClaim[id=1610689175761-2338, container=default, section=290], offset=139, length=139],offset=0,name=52458c75-cdb5-415d-968b-d7b65fe6e54b,size=139] due to java.sql.SQLException: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.StatsTask: org.apache.nifi.processor.exception.ProcessException: Failed to process StandardFlowFileRecord[uuid=52458c75-cdb5-415d-968b-d7b65fe6e54b,claim=StandardContentClaim [resourceClaim=StandardResourceClaim[id=1610689175761-2338, container=default, section=290], offset=139, length=139],offset=0,name=52458c75-cdb5-415d-968b-d7b65fe6e54b,size=139] due to java.sql.SQLException: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.StatsTask
org.apache.nifi.processor.exception.ProcessException: Failed to process StandardFlowFileRecord[uuid=52458c75-cdb5-415d-968b-d7b65fe6e54b,claim=StandardContentClaim [resourceClaim=StandardResourceClaim[id=1610689175761-2338, container=default, section=290], offset=139, length=139],offset=0,name=52458c75-cdb5-415d-968b-d7b65fe6e54b,size=139] due to java.sql.SQLException: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.StatsTask
        at org.apache.nifi.processor.util.pattern.ExceptionHandler.lambda$createOnGroupError$2(ExceptionHandler.java:226)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler.lambda$createOnError$1(ExceptionHandler.java:179)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler$OnError.lambda$andThen$0(ExceptionHandler.java:54)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler$OnError.lambda$andThen$0(ExceptionHandler.java:54)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler.execute(ExceptionHandler.java:148)
        at org.apache.nifi.processors.hive.PutHiveQL.lambda$new$4(PutHiveQL.java:226)
        at org.apache.nifi.processor.util.pattern.Put.putFlowFiles(Put.java:60)
        at org.apache.nifi.processor.util.pattern.Put.onTrigger(Put.java:103)
        at org.apache.nifi.processors.hive.PutHiveQL.lambda$onTrigger$6(PutHiveQL.java:295)
        at org.apache.nifi.processor.util.pattern.PartialFunctions.onTrigger(PartialFunctions.java:120)
        at org.apache.nifi.processor.util.pattern.RollbackOnFailure.onTrigger(RollbackOnFailure.java:184)
        at org.apache.nifi.processors.hive.PutHiveQL.onTrigger(PutHiveQL.java:295)
        at org.apache.nifi.controller.StandardProcessorNode.onTrigger(StandardProcessorNode.java:1174)
        at org.apache.nifi.controller.tasks.ConnectableTask.invoke(ConnectableTask.java:213)
        at org.apache.nifi.controller.scheduling.TimerDrivenSchedulingAgent$1.run(TimerDrivenSchedulingAgent.java:117)
        at org.apache.nifi.engine.FlowEngine$2.run(FlowEngine.java:110)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
Caused by: java.sql.SQLException: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.StatsTask
        at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:296)
        at org.apache.hive.jdbc.HivePreparedStatement.execute(HivePreparedStatement.java:98)
        at org.apache.commons.dbcp.DelegatingPreparedStatement.execute(DelegatingPreparedStatement.java:172)
        at org.apache.commons.dbcp.DelegatingPreparedStatement.execute(DelegatingPreparedStatement.java:172)
        at sun.reflect.GeneratedMethodAccessor789.invoke(Unknown Source)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.nifi.controller.service.StandardControllerServiceInvocationHandler.invoke(StandardControllerServiceInvocationHandler.java:254)
        at org.apache.nifi.controller.service.StandardControllerServiceInvocationHandler.access$100(StandardControllerServiceInvocationHandler.java:38)
        at org.apache.nifi.controller.service.StandardControllerServiceInvocationHandler$ProxiedReturnObjectInvocationHandler.invoke(StandardControllerServiceInvocationHandler.java:240)
        at com.sun.proxy.$Proxy227.execute(Unknown Source)
        at org.apache.nifi.processors.hive.PutHiveQL.lambda$null$3(PutHiveQL.java:254)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler.execute(ExceptionHandler.java:127)
        ... 18 common frames omitted
2021-01-15 13:40:45,504 INFO [Timer-Driven Process Thread-8] o.a.nifi.dbcp.hive.HiveConnectionPool HiveConnectionPool[id=ffd816d0-0176-1000-e684-cec6f778cb48] Simple Authentication
2021-01-15 13:40:45,504 INFO [Timer-Driven Process Thread-8] hive.ql.parse.ParseDriver Parsing command: insert into transformed_db.tbl_airbnb_listing_transformed
select a.*, 20210113 partition_date_id from
staging_db.etbl_raw_airbnb_listing a
2021-01-15 13:40:45,505 INFO [Timer-Driven Process Thread-8] hive.ql.parse.ParseDriver Parse Completed 
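
To rule out NiFi itself, the same statement can be run directly over the Hive JDBC driver. This is only a minimal sketch: the host, port, and user are placeholders and should match whatever the HiveConnectionPool controller service uses, and the hive-jdbc (standalone) jar needs to be on the classpath.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class ReproduceInsertOutsideNifi {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details -- use the same values as the
        // NiFi HiveConnectionPool (simple authentication, per the log above).
        String url = "jdbc:hive2://hive-host:10000/default";
        // The same statement PutHiveQL was sending (taken from the parse log above).
        String sql = "insert into transformed_db.tbl_airbnb_listing_transformed "
                   + "select a.*, 20210113 partition_date_id from "
                   + "staging_db.etbl_raw_airbnb_listing a";
        try (Connection conn = DriverManager.getConnection(url, "hive", "");
             PreparedStatement ps = conn.prepareStatement(sql)) {
            // Same code path as PutHiveQL: HivePreparedStatement.execute().
            ps.execute();
        }
    }
}

If the StatsTask failure also appears here, the problem is on the Hive side (HiveServer2/Metastore logs around the StatsTask) rather than in the NiFi flow.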
						
					
				
			
			
			
			
			
			
			
			
			
		
			
    
	
		
		
01-14-2021 05:36 AM

							 Hi All, 
   
 I have tried to connect a simple insert workflow to Hive, 
 but I encounter the error below. Could someone help with this? 
   
   
 2021-01-14 21:30:14,245 ERROR [Timer-Driven Process Thread-4] o.apache.nifi.processors.hive.PutHiveQL PutHiveQL[id=f821e7aa-0176-1000-9088-506b00a72e66] org.apache.nifi.processors.hive.PutHiveQL$$Lambda$1066/699859650@1c9df9e2 failed to process due to org.apache.nifi.processor.exception.ProcessException: Failed to process StandardFlowFileRecord[uuid=acf2bb1a-10fa-4dc5-8e5f-c12bf706f45f,claim=StandardContentClaim [resourceClaim=StandardResourceClaim[id=1610610213856-985, container=default, section=985], offset=89099, length=139],offset=0,name=acf2bb1a-10fa-4dc5-8e5f-c12bf706f45f,size=139] due to java.sql.SQLException: org.apache.thrift.transport.TTransportException: java.net.SocketException: Broken pipe (Write failed); rolling back session: org.apache.nifi.processor.exception.ProcessException: Failed to process StandardFlowFileRecord[uuid=acf2bb1a-10fa-4dc5-8e5f-c12bf706f45f,claim=StandardContentClaim [resourceClaim=StandardResourceClaim[id=1610610213856-985, container=default, section=985], offset=89099, length=139],offset=0,name=acf2bb1a-10fa-4dc5-8e5f-c12bf706f45f,size=139] due to java.sql.SQLException: org.apache.thrift.transport.TTransportException: java.net.SocketException: Broken pipe (Write failed)
org.apache.nifi.processor.exception.ProcessException: Failed to process StandardFlowFileRecord[uuid=acf2bb1a-10fa-4dc5-8e5f-c12bf706f45f,claim=StandardContentClaim [resourceClaim=StandardResourceClaim[id=1610610213856-985, container=default, section=985], offset=89099, length=139],offset=0,name=acf2bb1a-10fa-4dc5-8e5f-c12bf706f45f,size=139] due to java.sql.SQLException: org.apache.thrift.transport.TTransportException: java.net.SocketException: Broken pipe (Write failed)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler.lambda$createOnGroupError$2(ExceptionHandler.java:226)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler.lambda$createOnError$1(ExceptionHandler.java:179)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler$OnError.lambda$andThen$0(ExceptionHandler.java:54)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler$OnError.lambda$andThen$0(ExceptionHandler.java:54)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler.execute(ExceptionHandler.java:148)
        at org.apache.nifi.processors.hive.PutHiveQL.lambda$new$4(PutHiveQL.java:226)
        at org.apache.nifi.processor.util.pattern.Put.putFlowFiles(Put.java:60)
        at org.apache.nifi.processor.util.pattern.Put.onTrigger(Put.java:103)
        at org.apache.nifi.processors.hive.PutHiveQL.lambda$onTrigger$6(PutHiveQL.java:295)
        at org.apache.nifi.processor.util.pattern.PartialFunctions.onTrigger(PartialFunctions.java:120)
        at org.apache.nifi.processor.util.pattern.RollbackOnFailure.onTrigger(RollbackOnFailure.java:184)
        at org.apache.nifi.processors.hive.PutHiveQL.onTrigger(PutHiveQL.java:295)
        at org.apache.nifi.controller.StandardProcessorNode.onTrigger(StandardProcessorNode.java:1174)
        at org.apache.nifi.controller.tasks.ConnectableTask.invoke(ConnectableTask.java:213)
        at org.apache.nifi.controller.scheduling.TimerDrivenSchedulingAgent$1.run(TimerDrivenSchedulingAgent.java:117)
        at org.apache.nifi.engine.FlowEngine$2.run(FlowEngine.java:110)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
Caused by: java.sql.SQLException: org.apache.thrift.transport.TTransportException: java.net.SocketException: Broken pipe (Write failed)
        at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:262)
        at org.apache.hive.jdbc.HivePreparedStatement.execute(HivePreparedStatement.java:98)
        at org.apache.commons.dbcp.DelegatingPreparedStatement.execute(DelegatingPreparedStatement.java:172)
        at org.apache.commons.dbcp.DelegatingPreparedStatement.execute(DelegatingPreparedStatement.java:172)
        at sun.reflect.GeneratedMethodAccessor658.invoke(Unknown Source)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.nifi.controller.service.StandardControllerServiceInvocationHandler.invoke(StandardControllerServiceInvocationHandler.java:254)
        at org.apache.nifi.controller.service.StandardControllerServiceInvocationHandler.access$100(StandardControllerServiceInvocationHandler.java:38)
        at org.apache.nifi.controller.service.StandardControllerServiceInvocationHandler$ProxiedReturnObjectInvocationHandler.invoke(StandardControllerServiceInvocationHandler.java:240)
        at com.sun.proxy.$Proxy214.execute(Unknown Source)
        at org.apache.nifi.processors.hive.PutHiveQL.lambda$null$3(PutHiveQL.java:254)
        at org.apache.nifi.processor.util.pattern.ExceptionHandler.execute(ExceptionHandler.java:127)
        ... 18 common frames omitted
Caused by: org.apache.thrift.transport.TTransportException: java.net.SocketException: Broken pipe (Write failed)
        at org.apache.thrift.transport.TIOStreamTransport.write(TIOStreamTransport.java:147)
        at org.apache.thrift.transport.TTransport.write(TTransport.java:107)
        at org.apache.thrift.transport.TSaslTransport.writeLength(TSaslTransport.java:391)
        at org.apache.thrift.transport.TSaslTransport.flush(TSaslTransport.java:499)
        at org.apache.thrift.transport.TSaslClientTransport.flush(TSaslClientTransport.java:37)
        at org.apache.thrift.TServiceClient.sendBase(TServiceClient.java:65)
        at org.apache.hive.service.cli.thrift.TCLIService$Client.send_ExecuteStatement(TCLIService.java:219)
        at org.apache.hive.service.cli.thrift.TCLIService$Client.ExecuteStatement(TCLIService.java:211)
        at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:253)
        ... 30 common frames omitted
Caused by: java.net.SocketException: Broken pipe (Write failed)
        at java.net.SocketOutputStream.socketWrite0(Native Method)
        at java.net.SocketOutputStream.socketWrite(SocketOutputStream.java:111)
        at java.net.SocketOutputStream.write(SocketOutputStream.java:155)
        at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
        at java.io.BufferedOutputStream.write(BufferedOutputStream.java:126)
        at org.apache.thrift.transport.TIOStreamTransport.write(TIOStreamTransport.java:145)
        ... 38 common frames omitted 
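
A broken pipe means the connection to HiveServer2 was dropped mid-write, so before digging into the flow it is worth confirming the Thrift port is even reachable from the NiFi host. A minimal sketch (host and port are placeholders for the values in the HiveConnectionPool JDBC URL):

import java.net.InetSocketAddress;
import java.net.Socket;

public class CheckHiveServer2Port {
    public static void main(String[] args) throws Exception {
        // Placeholders: use the host/port from the HiveConnectionPool JDBC URL
        // (10000 is the default HiveServer2 binary port).
        String host = "hive-host";
        int port = 10000;
        try (Socket socket = new Socket()) {
            socket.connect(new InetSocketAddress(host, port), 5000);
            System.out.println("HiveServer2 is reachable at " + host + ":" + port);
        }
    }
}

If this check fails, or the connection only drops for long-running statements, the HiveServer2 side (restarts, idle-session/operation timeouts, load) is the first place to look.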
   
   
   
 Best Regards, 
 Choon Kiat 
						
					
				
			
			
			
			
			
			
			
			
			
		
		
			
				
						
Labels: Apache Hive, Apache NiFi
			
    
	
		
		
12-20-2020 06:46 AM

Additional Info:

Best Regards,
CK
						
					
				
			
			
			
			
			
			
			
			
			
		
			
    
	
		
		
12-20-2020 05:55 AM

					
Hi All,

I am facing some issues running Hive from DBeaver when executing the query:

Select count(*)

I observe the error code below:

org.apache.hive.service.cli.HiveSQLException: Error while processing statement: FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
        at org.apache.hive.service.cli.operation.Operation.toSQLException(Operation.java:335) ~[hive-service-3.1.2.jar:3.1.2]
        at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:226) ~[hive-service-3.1.2.jar:3.1.2]
        at org.apache.hive.service.cli.operation.SQLOperation.access$700(SQLOperation.java:87) ~[hive-service-3.1.2.jar:3.1.2]
        at org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:316) ~[hive-service-3.1.2.jar:3.1.2]
        at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_275]
        at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_275]
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657) ~[hadoop-common-2.7.2.jar:?]
        at org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:329) ~[hive-service-3.1.2.jar:3.1.2]
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) ~[?:1.8.0_275]
        at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_275]
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) ~[?:1.8.0_275]
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) ~[?:1.8.0_275]
        at java.lang.Thread.run(Thread.java:748) [?:1.8.0_275]    
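
"Return code 2 from ... MapRedTask" only says that the underlying MapReduce job failed; the real cause is in the job/operation log rather than in this client-side stack trace. As a rough sketch (URL, credentials, and table name are placeholders), the same query can be run over JDBC while tailing the HiveServer2 operation log through HiveStatement:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import org.apache.hive.jdbc.HiveStatement;

public class CountWithOperationLog {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details -- adjust to the DBeaver connection settings.
        String url = "jdbc:hive2://hive-host:10000/default";
        try (Connection conn = DriverManager.getConnection(url, "hive", "");
             Statement stmt = conn.createStatement()) {
            // The cast works when the connection comes straight from the Hive
            // driver (not wrapped by a connection pool).
            HiveStatement hiveStmt = (HiveStatement) stmt;
            // Tail the HiveServer2 operation log while the query runs.
            Thread logTail = new Thread(() -> {
                try {
                    while (hiveStmt.hasMoreLogs()) {
                        for (String line : hiveStmt.getQueryLog()) {
                            System.out.println(line);
                        }
                        Thread.sleep(500);
                    }
                } catch (Exception ignored) {
                    // statement closed, cancelled, or failed -- stop tailing
                }
            });
            logTail.start();
            try (ResultSet rs = stmt.executeQuery("select count(*) from some_table")) { // placeholder table
                while (rs.next()) {
                    System.out.println("count = " + rs.getLong(1));
                }
            }
            logTail.join();
        }
    }
}

The operation log printed by this sketch normally contains the failed MapReduce job ID and its diagnostics, which can then be followed up in the YARN/JobHistory UI.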
						
					
				
			
			
			
			
			
			
			
			
			
		
		
			
				
						
Labels: Apache Hive
			
    
	
		
		
03-17-2020 08:07 PM

					
Hi Eric,

After copying those two policy jar files to "$JAVA_HOME/jre/lib/security", I still get the error code below:

Mar 18 10:53:22.385 ERROR 30 com.cloudera.hiveserver2.exceptions.ExceptionConverter.toSQLException: [Cloudera][HiveJDBCDriver](500168) Error creating login context using ticket cache: Unable to obtain Principal Name for authentication .
java.sql.SQLException: [Cloudera][HiveJDBCDriver](500168) Error creating login context using ticket cache: Unable to obtain Principal Name for authentication .
	at com.cloudera.hiveserver2.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.core.HiveJDBCCommonConnection.establishConnection(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.core.LoginTimeoutConnection.connect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.AbstractDriver.connect(Unknown Source)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)
	at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)
	at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)
	at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)
	at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)
	at org.jkiss.dbeaver.runtime.jobs.ConnectJob.runSync(ConnectJob.java:98)
	at org.jkiss.dbeaver.ui.actions.datasource.DataSourceHandler.connectToDataSource(DataSourceHandler.java:106)
	at org.jkiss.dbeaver.ui.actions.datasource.UIServiceConnectionsImpl.initConnection(UIServiceConnectionsImpl.java:63)
	at org.jkiss.dbeaver.model.navigator.DBNDataSource.initializeNode(DBNDataSource.java:151)
	at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:198)
	at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:1)
	at org.jkiss.dbeaver.model.navigator.DBNUtils.getNodeChildrenFiltered(DBNUtils.java:70)
	at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:49)
	at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:1)
	at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:86)
	at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:71)
	at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)
Caused by: com.cloudera.hiveserver2.support.exceptions.GeneralException: [Cloudera][HiveJDBCDriver](500168) Error creating login context using ticket cache: Unable to obtain Principal Name for authentication .
	... 30 more
Caused by: javax.security.auth.login.LoginException: Unable to obtain Principal Name for authentication 
	at jdk.security.auth/com.sun.security.auth.module.Krb5LoginModule.promptForName(Unknown Source)
	at jdk.security.auth/com.sun.security.auth.module.Krb5LoginModule.attemptAuthentication(Unknown Source)
	at jdk.security.auth/com.sun.security.auth.module.Krb5LoginModule.login(Unknown Source)
	at java.base/javax.security.auth.login.LoginContext.invoke(Unknown Source)
	at java.base/javax.security.auth.login.LoginContext$4.run(Unknown Source)
	at java.base/javax.security.auth.login.LoginContext$4.run(Unknown Source)
	at java.base/java.security.AccessController.doPrivileged(Native Method)
	at java.base/javax.security.auth.login.LoginContext.invokePriv(Unknown Source)
	at java.base/javax.security.auth.login.LoginContext.login(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.kerberos.Kerberos.getSubjectViaTicketCache(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.core.HiveJDBCCommonConnection.establishConnection(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.core.LoginTimeoutConnection.connect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.AbstractDriver.connect(Unknown Source)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)
	at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)
	at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)
	at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)
	at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)
	at org.jkiss.dbeaver.runtime.jobs.ConnectJob.runSync(ConnectJob.java:98)
	at org.jkiss.dbeaver.ui.actions.datasource.DataSourceHandler.connectToDataSource(DataSourceHandler.java:106)
	at org.jkiss.dbeaver.ui.actions.datasource.UIServiceConnectionsImpl.initConnection(UIServiceConnectionsImpl.java:63)
	at org.jkiss.dbeaver.model.navigator.DBNDataSource.initializeNode(DBNDataSource.java:151)
	at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:198)
	at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:1)
	at org.jkiss.dbeaver.model.navigator.DBNUtils.getNodeChildrenFiltered(DBNUtils.java:70)
	at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:49)
	at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:1)
	at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:86)
	at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:71)
	at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)
	at org.eclipse.core.internal.jobs.Worker.run(Worker.java:63)

After that, I tried declaring KRB5CCNAME as an environment variable and observed a different error, as below:

Mar 18 11:06:20.072 DEBUG 31 com.cloudera.hiveserver2.hivecommon.api.HiveServer2ClientFactory.createTransport: Kerberos subject retrieved via ticket cache lookup
Mar 18 11:06:20.222 ERROR 31 com.cloudera.hiveserver2.exceptions.ExceptionConverter.toSQLException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: [Cloudera][HiveJDBCDriver](500169) Unable to connect to server: GSS initiate failed.
java.sql.SQLException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: [Cloudera][HiveJDBCDriver](500169) Unable to connect to server: GSS initiate failed.
	at com.cloudera.hiveserver2.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.core.HiveJDBCCommonConnection.establishConnection(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.core.LoginTimeoutConnection.connect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.AbstractDriver.connect(Unknown Source)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)
	at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)
	at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)
	at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)
	at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)
	at org.jkiss.dbeaver.runtime.jobs.ConnectJob.runSync(ConnectJob.java:98)
	at org.jkiss.dbeaver.ui.actions.datasource.DataSourceHandler.connectToDataSource(DataSourceHandler.java:106)
	at org.jkiss.dbeaver.ui.actions.datasource.UIServiceConnectionsImpl.initConnection(UIServiceConnectionsImpl.java:63)
	at org.jkiss.dbeaver.model.navigator.DBNDataSource.initializeNode(DBNDataSource.java:151)
	at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:198)
	at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:1)
	at org.jkiss.dbeaver.model.navigator.DBNUtils.getNodeChildrenFiltered(DBNUtils.java:70)
	at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:49)
	at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:1)
	at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:86)
	at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:71)
	at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)
Caused by: com.cloudera.hiveserver2.support.exceptions.GeneralException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: [Cloudera][HiveJDBCDriver](500169) Unable to connect to server: GSS initiate failed.
	... 30 more
Caused by: java.lang.RuntimeException: [Cloudera][HiveJDBCDriver](500169) Unable to connect to server: GSS initiate failed
	at com.cloudera.hiveserver2.hivecommon.api.HiveServerPrivilegedAction.run(Unknown Source)
	at java.base/java.security.AccessController.doPrivileged(Native Method)
	at java.base/javax.security.auth.Subject.doAs(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.core.HiveJDBCCommonConnection.establishConnection(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.core.LoginTimeoutConnection.connect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.AbstractDriver.connect(Unknown Source)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)
	at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)
	at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)
	at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)
	at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)
	at org.jkiss.dbeaver.runtime.jobs.ConnectJob.runSync(ConnectJob.java:98)
	at org.jkiss.dbeaver.ui.actions.datasource.DataSourceHandler.connectToDataSource(DataSourceHandler.java:106)
	at org.jkiss.dbeaver.ui.actions.datasource.UIServiceConnectionsImpl.initConnection(UIServiceConnectionsImpl.java:63)
	at org.jkiss.dbeaver.model.navigator.DBNDataSource.initializeNode(DBNDataSource.java:151)
	at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:198)
	at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:1)
	at org.jkiss.dbeaver.model.navigator.DBNUtils.getNodeChildrenFiltered(DBNUtils.java:70)
	at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:49)
	at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:1)
	at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:86)
	at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:71)
	at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)
	at org.eclipse.core.internal.jobs.Worker.run(Worker.java:63)
Caused by: com.cloudera.hive.jdbc4.internal.apache.thrift.transport.TTransportException: GSS initiate failed
	at com.cloudera.hive.jdbc4.internal.apache.thrift.transport.TSaslTransport.sendAndThrowMessage(TSaslTransport.java:221)
	at com.cloudera.hive.jdbc4.internal.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:297)
	at com.cloudera.hive.jdbc4.internal.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
	... 34 more     Regards,  Choon Kiat 
						
					
				
			
			
			
			
			
			
			
			
			
		
			
    
	
		
		
03-17-2020 07:01 PM

Hi Eric,

Yes, due to privacy I changed "AIU.XXXXXX" to "Domain".

Best Regards,
Choon Kiat
						
					
				
			
			
			
			
			
			
			
			
			
		
			
    
	
		
		
03-16-2020 10:16 PM

					
							 Hi Eric, 
   
 Thanks for your advice. I have been referring to the document you provided to configure the string, but I still face the same error. 
 I attach all the info below; I would appreciate your help. 
 1. jass.ini 
Client{
  com.sun.security.auth.module.Krb5LoginModule required
  useKeyTab=true
  keyTab="c:\ProgramData\MIT\Kerberos5\hive.keytab"
  principal="hive/sthdmgt1.aiu.xxxxxx@AIU.XXXXXX";
  doNotPrompt=true
};

Client{
  com.sun.security.auth.module.Krb5LoginModule required
  useKeyTab=true
  keyTab="c:\ProgramData\MIT\Kerberos5\hive.keytab"
  principal="hive/sthdmgt1.aiu.xxxxxx@Domain";
  doNotPrompt=true
};
   
 2. dbeaver.ini 
-startup
plugins/org.eclipse.equinox.launcher_1.5.600.v20191014-2022.jar
--launcher.library
plugins/org.eclipse.equinox.launcher.win32.win32.x86_64_1.1.1100.v20190907-0426
-vmargs
-XX:+IgnoreUnrecognizedVMOptions
--add-modules=ALL-SYSTEM
-Xms64m
-Xmx1024m
-Djavax.security.auth.useSubjectCredsOnly=false
-Djava.security.krb5.conf=C:\Program Files\DBeaver\krb5.conf
-Djava.security.auth.login.config=C:\Program Files\DBeaver\jaas.conf
   
 3. JDBC String 
 jdbc:hive2://{host}:{port}/{database};AuthMech=1;KrbRealm=Domain;KrbHostFQDN={server};KrbServiceName=hive;KrbAuthType=2;LogLevel=6;LogPath=c:\ProgramData\MIT\Kerberos5\log.log 
   
 4. Krb5.ini 
[libdefaults]
default_realm=Domain
dns_lookup_kdc = false
dns_lookup_realm = false
ticket_lifetime = 86400
renew_lifetime = 604800
forwardable = true
default_tgs_enctypes = aes256-cts-hmac-sha1-96
default_tkt_enctypes = aes256-cts-hmac-sha1-96
permitted_enctypes = aes256-cts-hmac-sha1-96
udp_preference_limit = 1
kdc_timeout = 3000

[realms]
AIU.XXXXXX={
  kdc=sthdnj1-pvt.Domain
  admin_server=sthdnj1-pvt.Domain
}

[domain_realm]
   
 5. Klist Info 
C:\Program Files\MIT\Kerberos\bin>klist
Ticket cache: FILE:C:\temp\krb
Default principal: hive/sthdmgt1-pvt.Domain@Domain

Valid starting        Expires               Service principal
03/17/20 13:07:24     03/18/20 13:07:24     krbtgt/Domain@Domain
        renew until 03/22/20 13:07:24
   
 Error Code: 
   
 Mar 17 13:12:05.063 ERROR 31 com.cloudera.hiveserver2.exceptions.ExceptionConverter.toSQLException: [Cloudera][HiveJDBCDriver](500168) Error creating login context using ticket cache: Unable to obtain Principal Name for authentication .
java.sql.SQLException: [Cloudera][HiveJDBCDriver](500168) Error creating login context using ticket cache: Unable to obtain Principal Name for authentication .
	at com.cloudera.hiveserver2.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.core.HiveJDBCCommonConnection.establishConnection(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.core.LoginTimeoutConnection.connect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.AbstractDriver.connect(Unknown Source)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)
	at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)
	at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)
	at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)
	at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)
	at org.jkiss.dbeaver.ui.dialogs.connection.ConnectionWizard$ConnectionTester.run(ConnectionWizard.java:247)
	at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)
Caused by: com.cloudera.hiveserver2.support.exceptions.GeneralException: [Cloudera][HiveJDBCDriver](500168) Error creating login context using ticket cache: Unable to obtain Principal Name for authentication .
	... 20 more
Caused by: javax.security.auth.login.LoginException: Unable to obtain Principal Name for authentication 
	at jdk.security.auth/com.sun.security.auth.module.Krb5LoginModule.promptForName(Unknown Source)
	at jdk.security.auth/com.sun.security.auth.module.Krb5LoginModule.attemptAuthentication(Unknown Source)
	at jdk.security.auth/com.sun.security.auth.module.Krb5LoginModule.login(Unknown Source)
	at java.base/javax.security.auth.login.LoginContext.invoke(Unknown Source)
	at java.base/javax.security.auth.login.LoginContext$4.run(Unknown Source)
	at java.base/javax.security.auth.login.LoginContext$4.run(Unknown Source)
	at java.base/java.security.AccessController.doPrivileged(Native Method)
	at java.base/javax.security.auth.login.LoginContext.invokePriv(Unknown Source)
	at java.base/javax.security.auth.login.LoginContext.login(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.kerberos.Kerberos.getSubjectViaTicketCache(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)
	at com.cloudera.hiveserver2.hivecommon.core.HiveJDBCCommonConnection.establishConnection(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.core.LoginTimeoutConnection.connect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)
	at com.cloudera.hiveserver2.jdbc.common.AbstractDriver.connect(Unknown Source)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)
	at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)
	at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)
	at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)
	at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)
	at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)
	at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)
	at org.jkiss.dbeaver.ui.dialogs.connection.ConnectionWizard$ConnectionTester.run(ConnectionWizard.java:247)
	at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)
	at org.eclipse.core.internal.jobs.Worker.run(Worker.java:63) 
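
As a side check, the Kerberos login itself can be exercised with the same jaas.conf and krb5 settings but without DBeaver or the JDBC driver in the picture. This is only a sketch; the paths mirror the dbeaver.ini flags in item 2 above and should be adjusted to the actual environment. If it fails with the same "Unable to obtain Principal Name for authentication", the problem is in the JAAS/krb5 configuration rather than in the driver or the connection string.

import javax.security.auth.login.LoginContext;

public class JaasLoginCheck {
    public static void main(String[] args) throws Exception {
        // Same settings dbeaver.ini passes via -D flags (item 2 above); adjust paths as needed.
        System.setProperty("java.security.krb5.conf",
                "C:\\Program Files\\DBeaver\\krb5.conf");
        System.setProperty("java.security.auth.login.config",
                "C:\\Program Files\\DBeaver\\jaas.conf");
        // "Client" must match the entry name in the JAAS file (item 1 above).
        LoginContext lc = new LoginContext("Client");
        lc.login();
        System.out.println("Kerberos login OK, principals: "
                + lc.getSubject().getPrincipals());
    }
}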
   
   
 Thanks & Regards, 
 Tan Choon Kiat 
						
					
				
			
			
			
			
			
			
			
			
			
		
			
    
	
		
		
03-16-2020 01:20 AM

					
							 @EricL  
   
 Are you referring to the JDBC URL? If yes, below is the JDBC URL that I am using: 

 jdbc:hive2://10.11.121.20:10001/default;AuthMech=1;principal=hive/domain@domain;KrbHostFQDN=10.11.121.21;KrbServiceName=hive;KrbAuthType=2;LogLevel=6;LogPath=c:\ProgramData\MIT\Kerberos5\log.log 

 and below is the error log from my latest test: 
   
 Mar 16 17:24:25.121 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.DSIConnection(com.cloudera.hive.hive.core.HiveJDBCEnvironment@3c7b0e50): +++++ enter +++++  Mar 16 17:24:25.122 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(101, Variant[type: TYPE_WSTRING, value: HiveJDBC]): +++++ enter +++++  Mar 16 17:24:25.122 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(139, Variant[type: TYPE_WSTRING, value: User]): +++++ enter +++++  Mar 16 17:24:25.123 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(22, Variant[type: TYPE_WSTRING, value: Hive]): +++++ enter +++++  Mar 16 17:24:25.127 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(58, Variant[type: TYPE_WSTRING, value: `]): +++++ enter +++++  Mar 16 17:24:25.127 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(66, Variant[type: TYPE_UINT16, value: -1]): +++++ enter +++++  Mar 16 17:24:25.127 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(68, Variant[type: TYPE_UINT16, value: -1]): +++++ enter +++++  Mar 16 17:24:25.128 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(76, Variant[type: TYPE_UINT16, value: -1]): +++++ enter +++++  Mar 16 17:24:25.128 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(81, Variant[type: TYPE_UINT16, value: -1]): +++++ enter +++++  Mar 16 17:24:25.128 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(83, Variant[type: TYPE_UINT16, value: -1]): +++++ enter +++++  Mar 16 17:24:25.129 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(80, Variant[type: TYPE_WSTRING, value: N]): +++++ enter +++++  Mar 16 17:24:25.129 TRACE 41 com.cloudera.hive.hive.core.HiveJDBCConnection.HiveJDBCConnection(com.cloudera.hive.hive.core.HiveJDBCEnvironment@3c7b0e50): +++++ enter +++++  Mar 16 17:24:25.147 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.registerWarningListener(com.cloudera.hive.jdbc.common.SWarningListener@755cbca6): +++++ enter +++++  Mar 16 17:24:25.147 TRACE 41 com.cloudera.hive.hivecommon.core.HiveJDBCCommonConnection.updateConnectionSettings(): +++++ enter +++++  Mar 16 17:24:25.151 TRACE 41 com.cloudera.hive.jdbc.common.CommonCoreUtils.logConnectionFunctionEntrance({AuthMech=Variant[type: TYPE_WSTRING, value: 1], ConnSchema=Variant[type: TYPE_WSTRING, value: default], DatabaseType=Variant[type: TYPE_WSTRING, value: Hive], HiveServerType=Variant[type: TYPE_WSTRING, value: 2], Host=Variant[type: TYPE_WSTRING, value: 10.11.121.20], KrbAuthType=Variant[type: TYPE_WSTRING, value: 2], KrbHostFQDN=Variant[type: TYPE_WSTRING, value: 10.11.121.21], KrbRealm=Variant[type: TYPE_WSTRING, value: AIU.XXXXX], KrbServiceName=Variant[type: TYPE_WSTRING, value: hive], LogLevel=Variant[type: TYPE_WSTRING, value: 6], LogPath=Variant[type: TYPE_WSTRING, value: c:\ProgramData\MIT\Kerberos5\log.log], Port=Variant[type: TYPE_WSTRING, value: 10001], principal=Variant[type: TYPE_WSTRING, value: hive/sthdmgt1-pvt.aiu.xxxxxx@AIU.XXXXXX], sskTrustStore=Variant[type: TYPE_WSTRING, value: C:\ProgramData\MIT\Kerberos5\hive.truststore], ssl=Variant[type: TYPE_WSTRING, value: 1], trustStorePassword=Variant[type: TYPE_WSTRING, value: "AiuHive"]}, "Major Version: 2", "Minor Version: 5", "Hot Fix Version: 15", "Build Number: 1040", "java.vendor:AdoptOpenJDK", "java.version:11.0.5", "os.arch:amd64", "os.name:Windows 10", "os.version:10.0", "Runtime.totalMemory:82837504", "Runtime.maxMemory:1073741824", "Runtime.avaialableProcessors:8", 
URLClassLoader.getURLs(): No URLClassLoader available.): +++++ enter +++++  Mar 16 17:24:25.395 TRACE 41 com.cloudera.hive.jdbc.kerberos.Kerberos.getSubjectViaAccessControlContext(): +++++ enter +++++  Mar 16 17:24:25.406 TRACE 41 com.cloudera.hive.jdbc.kerberos.Kerberos.getSubjectViaJAASConfig(): +++++ enter +++++  Mar 16 17:24:25.406 TRACE 41 com.cloudera.hive.jdbc.kerberos.Kerberos.getSubjectViaTicketCache(): +++++ enter +++++  Mar 16 17:24:25.441 ERROR 41 com.cloudera.hive.exceptions.ExceptionConverter.toSQLException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: CONN_KERBEROS_AUTHENTICATION_ERROR_GET_TICKETCACHE.  java.sql.SQLException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: CONN_KERBEROS_AUTHENTICATION_ERROR_GET_TICKETCACHE.  at com.cloudera.hive.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)  at com.cloudera.hive.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)  at com.cloudera.hive.hivecommon.core.HiveJDBCCommonConnection.connect(Unknown Source)  at com.cloudera.hive.hive.core.HiveJDBCConnection.connect(Unknown Source)  at com.cloudera.hive.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)  at com.cloudera.hive.jdbc.common.AbstractDriver.connect(Unknown Source)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)  at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)  at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)  at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)  at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)  at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)  at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)  at org.jkiss.dbeaver.runtime.jobs.ConnectJob.runSync(ConnectJob.java:98)  at org.jkiss.dbeaver.ui.actions.datasource.DataSourceHandler.connectToDataSource(DataSourceHandler.java:106)  at org.jkiss.dbeaver.ui.actions.datasource.UIServiceConnectionsImpl.initConnection(UIServiceConnectionsImpl.java:63)  at org.jkiss.dbeaver.model.navigator.DBNDataSource.initializeNode(DBNDataSource.java:151)  at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:198)  at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:1)  at org.jkiss.dbeaver.model.navigator.DBNUtils.getNodeChildrenFiltered(DBNUtils.java:70)  at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:49)  at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:1)  at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:86)  at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:71)  at 
org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)  Caused by: com.cloudera.hive.support.exceptions.GeneralException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: CONN_KERBEROS_AUTHENTICATION_ERROR_GET_TICKETCACHE.  ... 30 more  Caused by: com.cloudera.hive.support.exceptions.GeneralException: CONN_KERBEROS_AUTHENTICATION_ERROR_GET_TICKETCACHE  ... 30 more  Caused by: javax.security.auth.login.LoginException: Unable to obtain Principal Name for authentication  at jdk.security.auth/com.sun.security.auth.module.Krb5LoginModule.promptForName(Unknown Source)  at jdk.security.auth/com.sun.security.auth.module.Krb5LoginModule.attemptAuthentication(Unknown Source)  at jdk.security.auth/com.sun.security.auth.module.Krb5LoginModule.login(Unknown Source)  at java.base/javax.security.auth.login.LoginContext.invoke(Unknown Source)  at java.base/javax.security.auth.login.LoginContext$4.run(Unknown Source)  at java.base/javax.security.auth.login.LoginContext$4.run(Unknown Source)  at java.base/java.security.AccessController.doPrivileged(Native Method)  at java.base/javax.security.auth.login.LoginContext.invokePriv(Unknown Source)  at java.base/javax.security.auth.login.LoginContext.login(Unknown Source)  at com.cloudera.hive.jdbc.kerberos.Kerberos.getSubjectViaTicketCache(Unknown Source)  at com.cloudera.hive.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)  at com.cloudera.hive.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)  at com.cloudera.hive.hivecommon.core.HiveJDBCCommonConnection.connect(Unknown Source)  at com.cloudera.hive.hive.core.HiveJDBCConnection.connect(Unknown Source)  at com.cloudera.hive.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)  at com.cloudera.hive.jdbc.common.AbstractDriver.connect(Unknown Source)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)  at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)  at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)  at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)  at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)  at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)  at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)  at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)  at org.jkiss.dbeaver.runtime.jobs.ConnectJob.runSync(ConnectJob.java:98)  at org.jkiss.dbeaver.ui.actions.datasource.DataSourceHandler.connectToDataSource(DataSourceHandler.java:106)  at org.jkiss.dbeaver.ui.actions.datasource.UIServiceConnectionsImpl.initConnection(UIServiceConnectionsImpl.java:63)  at org.jkiss.dbeaver.model.navigator.DBNDataSource.initializeNode(DBNDataSource.java:151)  at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:198)  at 
org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:1)  at org.jkiss.dbeaver.model.navigator.DBNUtils.getNodeChildrenFiltered(DBNUtils.java:70)  at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:49)  at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:1)  at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:86)  at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:71)  at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)  at org.eclipse.core.internal.jobs.Worker.run(Worker.java:63) 
Mar 16 17:33:37.773 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.DSIConnection(com.cloudera.hive.hive.core.HiveJDBCEnvironment@327a194b): +++++ enter +++++
Mar 16 17:33:37.778 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(101, Variant[type: TYPE_WSTRING, value: HiveJDBC]): +++++ enter +++++
Mar 16 17:33:37.778 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(139, Variant[type: TYPE_WSTRING, value: User]): +++++ enter +++++
Mar 16 17:33:37.779 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(22, Variant[type: TYPE_WSTRING, value: Hive]): +++++ enter +++++
Mar 16 17:33:37.780 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(58, Variant[type: TYPE_WSTRING, value: `]): +++++ enter +++++
Mar 16 17:33:37.780 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(66, Variant[type: TYPE_UINT16, value: -1]): +++++ enter +++++
Mar 16 17:33:37.781 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(68, Variant[type: TYPE_UINT16, value: -1]): +++++ enter +++++
Mar 16 17:33:37.782 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(76, Variant[type: TYPE_UINT16, value: -1]): +++++ enter +++++
Mar 16 17:33:37.782 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(81, Variant[type: TYPE_UINT16, value: -1]): +++++ enter +++++
Mar 16 17:33:37.783 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(83, Variant[type: TYPE_UINT16, value: -1]): +++++ enter +++++
Mar 16 17:33:37.784 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.setProperty(80, Variant[type: TYPE_WSTRING, value: N]): +++++ enter +++++
Mar 16 17:33:37.785 TRACE 41 com.cloudera.hive.hive.core.HiveJDBCConnection.HiveJDBCConnection(com.cloudera.hive.hive.core.HiveJDBCEnvironment@327a194b): +++++ enter +++++
Mar 16 17:33:37.805 TRACE 41 com.cloudera.hive.dsi.core.impl.DSIConnection.registerWarningListener(com.cloudera.hive.jdbc.common.SWarningListener@13ff0f12): +++++ enter +++++
Mar 16 17:33:37.805 TRACE 41 com.cloudera.hive.hivecommon.core.HiveJDBCCommonConnection.updateConnectionSettings(): +++++ enter +++++
Mar 16 17:33:37.805 TRACE 41 com.cloudera.hive.jdbc.common.CommonCoreUtils.logConnectionFunctionEntrance({AuthMech=Variant[type: TYPE_WSTRING, value: 1], ConnSchema=Variant[type: TYPE_WSTRING, value: default], DatabaseType=Variant[type: TYPE_WSTRING, value: Hive], HiveServerType=Variant[type: TYPE_WSTRING, value: 2], Host=Variant[type: TYPE_WSTRING, value: 10.11.121.20], KrbAuthType=Variant[type: TYPE_WSTRING, value: 2], KrbHostFQDN=Variant[type: TYPE_WSTRING, value: 10.11.121.21], KrbRealm=Variant[type: TYPE_WSTRING, value: AIU.XXXXXX], KrbServiceName=Variant[type: TYPE_WSTRING, value: hive], LogLevel=Variant[type: TYPE_WSTRING, value: 6], LogPath=Variant[type: TYPE_WSTRING, value: c:\ProgramData\MIT\Kerberos5\log.log], Port=Variant[type: TYPE_WSTRING, value: 10001], principal=Variant[type: TYPE_WSTRING, value: hive/sthdmgt1-pvt.aiu.xxxxxx@AIU.XXXXXX], sskTrustStore=Variant[type: TYPE_WSTRING, value: C:\ProgramData\MIT\Kerberos5\hive.truststore], ssl=Variant[type: TYPE_WSTRING, value: 1], trustStorePassword=Variant[type: TYPE_WSTRING, value: "AiuHive"]}, "Major Version: 2", "Minor Version: 5", "Hot Fix Version: 15", "Build Number: 1040", "java.vendor:AdoptOpenJDK", "java.version:11.0.5", "os.arch:amd64", "os.name:Windows 10", "os.version:10.0", "Runtime.totalMemory:80740352", "Runtime.maxMemory:1073741824", "Runtime.avaialableProcessors:8", URLClassLoader.getURLs(): No URLClassLoader available.): +++++ enter +++++
Mar 16 17:33:38.213 TRACE 41 com.cloudera.hive.jdbc.kerberos.Kerberos.getSubjectViaAccessControlContext(): +++++ enter +++++
Mar 16 17:33:38.231 TRACE 41 com.cloudera.hive.jdbc.kerberos.Kerberos.getSubjectViaJAASConfig(): +++++ enter +++++
Mar 16 17:33:38.231 TRACE 41 com.cloudera.hive.jdbc.kerberos.Kerberos.getSubjectViaTicketCache(): +++++ enter +++++
Mar 16 17:33:38.302 ERROR 41 com.cloudera.hive.hivecommon.api.HiveServer2ClientFactory.createTransport: Kerberos subject retrieved via ticket cache lookup
Mar 16 17:33:39.169 ERROR 41 com.cloudera.hive.exceptions.ExceptionConverter.toSQLException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: [Cloudera][HiveJDBCDriver](500168) Unable to connect to server: GSS initiate failed
Also, could not send response: org.apache.thrift.transport.TTransportException: javax.net.ssl.SSLHandshakeException: PKIX path building failed: sun.security.provider.certpath.SunCertPathBuilderException: unable to find valid certification path to requested target.
java.sql.SQLException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: [Cloudera][HiveJDBCDriver](500168) Unable to connect to server: GSS initiate failed
Also, could not send response: org.apache.thrift.transport.TTransportException: javax.net.ssl.SSLHandshakeException: PKIX path building failed: sun.security.provider.certpath.SunCertPathBuilderException: unable to find valid certification path to requested target.
        at com.cloudera.hive.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)
        at com.cloudera.hive.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)
        at com.cloudera.hive.hivecommon.core.HiveJDBCCommonConnection.connect(Unknown Source)
        at com.cloudera.hive.hive.core.HiveJDBCConnection.connect(Unknown Source)
        at com.cloudera.hive.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)
        at com.cloudera.hive.jdbc.common.AbstractDriver.connect(Unknown Source)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)
        at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)
        at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)
        at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)
        at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)
        at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)
        at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)
        at org.jkiss.dbeaver.runtime.jobs.ConnectJob.runSync(ConnectJob.java:98)
        at org.jkiss.dbeaver.ui.actions.datasource.DataSourceHandler.connectToDataSource(DataSourceHandler.java:106)
        at org.jkiss.dbeaver.ui.actions.datasource.UIServiceConnectionsImpl.initConnection(UIServiceConnectionsImpl.java:63)
        at org.jkiss.dbeaver.model.navigator.DBNDataSource.initializeNode(DBNDataSource.java:151)
        at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:198)
        at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:1)
        at org.jkiss.dbeaver.model.navigator.DBNUtils.getNodeChildrenFiltered(DBNUtils.java:70)
        at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:49)
        at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:1)
        at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:86)
        at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:71)
        at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)
Caused by: com.cloudera.hive.support.exceptions.GeneralException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: [Cloudera][HiveJDBCDriver](500168) Unable to connect to server: GSS initiate failed
Also, could not send response: org.apache.thrift.transport.TTransportException: javax.net.ssl.SSLHandshakeException: PKIX path building failed: sun.security.provider.certpath.SunCertPathBuilderException: unable to find valid certification path to requested target.
        ... 30 more
Caused by: java.lang.RuntimeException: [Cloudera][HiveJDBCDriver](500168) Unable to connect to server: GSS initiate failed
Also, could not send response: org.apache.thrift.transport.TTransportException: javax.net.ssl.SSLHandshakeException: PKIX path building failed: sun.security.provider.certpath.SunCertPathBuilderException: unable to find valid certification path to requested target
        at com.cloudera.hive.hivecommon.api.HiveServerPrivilegedAction.run(Unknown Source)
        at java.base/java.security.AccessController.doPrivileged(Native Method)
        at java.base/javax.security.auth.Subject.doAs(Unknown Source)
        at com.cloudera.hive.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)
        at com.cloudera.hive.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)
        at com.cloudera.hive.hivecommon.core.HiveJDBCCommonConnection.connect(Unknown Source)
        at com.cloudera.hive.hive.core.HiveJDBCConnection.connect(Unknown Source)
        at com.cloudera.hive.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)
        at com.cloudera.hive.jdbc.common.AbstractDriver.connect(Unknown Source)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)
        at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)
        at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)
        at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)
        at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)
        at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)
        at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)
        at org.jkiss.dbeaver.runtime.jobs.ConnectJob.runSync(ConnectJob.java:98)
        at org.jkiss.dbeaver.ui.actions.datasource.DataSourceHandler.connectToDataSource(DataSourceHandler.java:106)
        at org.jkiss.dbeaver.ui.actions.datasource.UIServiceConnectionsImpl.initConnection(UIServiceConnectionsImpl.java:63)
        at org.jkiss.dbeaver.model.navigator.DBNDataSource.initializeNode(DBNDataSource.java:151)
        at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:198)
        at org.jkiss.dbeaver.model.navigator.DBNDatabaseNode.getChildren(DBNDatabaseNode.java:1)
        at org.jkiss.dbeaver.model.navigator.DBNUtils.getNodeChildrenFiltered(DBNUtils.java:70)
        at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:49)
        at org.jkiss.dbeaver.ui.navigator.database.load.TreeLoadService.evaluate(TreeLoadService.java:1)
        at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:86)
        at org.jkiss.dbeaver.ui.LoadingJob.run(LoadingJob.java:71)
        at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)
        at org.eclipse.core.internal.jobs.Worker.run(Worker.java:63)
Caused by: org.apache.thrift.transport.TTransportException: GSS initiate failed
Also, could not send response: org.apache.thrift.transport.TTransportException: javax.net.ssl.SSLHandshakeException: PKIX path building failed: sun.security.provider.certpath.SunCertPathBuilderException: unable to find valid certification path to requested target
        at org.apache.thrift.transport.TSaslTransport.sendAndThrowMessage(TSaslTransport.java:221)
        at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:297)
        at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
        ... 34 more
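
The root cause in the chain above is the javax.net.ssl.SSLHandshakeException: PKIX path building failed, i.e. the JVM cannot validate the HiveServer2 TLS certificate against the truststore it actually uses. Note also that this connection log records the property names sskTrustStore and trustStorePassword, while the Cloudera Hive JDBC driver documents SSLTrustStore and SSLTrustStorePwd (verify against your driver version's install guide), so the configured truststore may not be picked up at all. The following Java snippet is a minimal, hypothetical sketch (not part of the original posts) for checking that the truststore file opens with the expected password and actually contains the HiveServer2 certificate or its issuing CA; the path is taken from the log, the password is a placeholder.

import java.io.FileInputStream;
import java.security.KeyStore;
import java.security.cert.X509Certificate;
import java.util.Collections;

// Sanity check for the truststore configured in DBeaver (path from the
// connection log above; password is a placeholder for the real one).
public class TrustStoreCheck {
    public static void main(String[] args) throws Exception {
        String path = "C:\\ProgramData\\MIT\\Kerberos5\\hive.truststore"; // from the log
        char[] password = "changeit".toCharArray();                        // placeholder

        KeyStore ts = KeyStore.getInstance("JKS"); // use "PKCS12" if created as PKCS#12
        try (FileInputStream in = new FileInputStream(path)) {
            ts.load(in, password); // throws if the file is missing or the password is wrong
        }

        // List every certificate entry. The HiveServer2 host certificate (or its
        // issuing CA) must appear here, otherwise the driver fails with
        // "PKIX path building failed".
        for (String alias : Collections.list(ts.aliases())) {
            if (ts.isCertificateEntry(alias)) {
                X509Certificate cert = (X509Certificate) ts.getCertificate(alias);
                System.out.println(alias + " -> " + cert.getSubjectX500Principal());
            }
        }
    }
}

If the HiveServer2 certificate or its CA is missing, importing it with keytool -importcert into this truststore and pointing the driver at it via SSLTrustStore / SSLTrustStorePwd should clear the PKIX error.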
						
					

03-15-2020 09:04 PM

The authentication is based on Kerberos.

I have obtained a Kerberos ticket in MIT Kerberos on Windows, but I receive the following error when trying to connect to Cloudera Hive with DBeaver:

Mar 16 11:56:00.397 TRACE 33 com.cloudera.hive.jdbc.kerberos.Kerberos.getSubjectViaAccessControlContext(): +++++ enter +++++
Mar 16 11:56:00.398 TRACE 33 com.cloudera.hive.jdbc.kerberos.Kerberos.getSubjectViaJAASConfig(): +++++ enter +++++
Mar 16 11:56:00.399 ERROR 33 com.cloudera.hive.hivecommon.api.HiveServer2ClientFactory.createTransport: Kerberos subject retrieved via JAAS config
Mar 16 11:56:00.920 ERROR 33 com.cloudera.hive.exceptions.ExceptionConverter.toSQLException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: [Cloudera][HiveJDBCDriver](500168) Unable to connect to server: GSS initiate failed.
java.sql.SQLException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: [Cloudera][HiveJDBCDriver](500168) Unable to connect to server: GSS initiate failed.
        at com.cloudera.hive.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)
        at com.cloudera.hive.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)
        at com.cloudera.hive.hivecommon.core.HiveJDBCCommonConnection.connect(Unknown Source)
        at com.cloudera.hive.hive.core.HiveJDBCConnection.connect(Unknown Source)
        at com.cloudera.hive.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)
        at com.cloudera.hive.jdbc.common.AbstractDriver.connect(Unknown Source)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)
        at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)
        at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)
        at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)
        at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)
        at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)
        at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)
        at org.jkiss.dbeaver.ui.dialogs.connection.ConnectionWizard$ConnectionTester.run(ConnectionWizard.java:247)
        at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)
Caused by: com.cloudera.hive.support.exceptions.GeneralException: [Cloudera][HiveJDBCDriver](500164) Error initialized or created transport for authentication: [Cloudera][HiveJDBCDriver](500168) Unable to connect to server: GSS initiate failed.
        ... 20 more
Caused by: java.lang.RuntimeException: [Cloudera][HiveJDBCDriver](500168) Unable to connect to server: GSS initiate failed
        at com.cloudera.hive.hivecommon.api.HiveServerPrivilegedAction.run(Unknown Source)
        at java.base/java.security.AccessController.doPrivileged(Native Method)
        at java.base/javax.security.auth.Subject.doAs(Unknown Source)
        at com.cloudera.hive.hivecommon.api.HiveServer2ClientFactory.createTransport(Unknown Source)
        at com.cloudera.hive.hivecommon.api.ZooKeeperEnabledExtendedHS2Factory.createClient(Unknown Source)
        at com.cloudera.hive.hivecommon.core.HiveJDBCCommonConnection.connect(Unknown Source)
        at com.cloudera.hive.hive.core.HiveJDBCConnection.connect(Unknown Source)
        at com.cloudera.hive.jdbc.common.BaseConnectionFactory.doConnect(Unknown Source)
        at com.cloudera.hive.jdbc.common.AbstractDriver.connect(Unknown Source)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.lambda$0(JDBCDataSource.java:157)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.openConnection(JDBCDataSource.java:174)
        at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.openConnection(GenericDataSource.java:124)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext.connect(JDBCExecutionContext.java:91)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.initializeMainContext(JDBCRemoteInstance.java:86)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance.<init>(JDBCRemoteInstance.java:52)
        at org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource.initializeRemoteInstance(JDBCDataSource.java:109)
        at org.jkiss.dbeaver.ext.generic.model.GenericDataSource.<init>(GenericDataSource.java:106)
        at org.jkiss.dbeaver.ext.generic.model.meta.GenericMetaModel.createDataSourceImpl(GenericMetaModel.java:72)
        at org.jkiss.dbeaver.ext.generic.GenericDataSourceProvider.openDataSource(GenericDataSourceProvider.java:95)
        at org.jkiss.dbeaver.registry.DataSourceDescriptor.connect(DataSourceDescriptor.java:801)
        at org.jkiss.dbeaver.runtime.jobs.ConnectJob.run(ConnectJob.java:70)
        at org.jkiss.dbeaver.ui.dialogs.connection.ConnectionWizard$ConnectionTester.run(ConnectionWizard.java:247)
        at org.jkiss.dbeaver.model.runtime.AbstractJob.run(AbstractJob.java:103)
        at org.eclipse.core.internal.jobs.Worker.run(Worker.java:63)
Caused by: org.apache.thrift.transport.TTransportException: GSS initiate failed
        at org.apache.thrift.transport.TSaslTransport.sendAndThrowMessage(TSaslTransport.java:221)
        at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:297)
        at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
        ... 24 more
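
A standalone JDBC test outside DBeaver can help narrow down whether the GSS failure comes from the Kerberos ticket and service principal or from the DBeaver driver configuration. The sketch below is illustrative only: the host, realm, FQDN, and truststore password are placeholders, and the URL property names (AuthMech, KrbRealm, KrbHostFQDN, KrbServiceName, SSL, SSLTrustStore, SSLTrustStorePwd) follow the Cloudera Hive JDBC driver documentation, so verify them against your driver version. KrbHostFQDN normally has to be the HiveServer2 host's fully qualified domain name matching the hive/<fqdn>@REALM service principal; the connection log earlier in this thread shows an IP address there, which usually cannot be mapped to a valid SPN and then surfaces as GSS initiate failed.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Requires the Cloudera Hive JDBC driver jar on the classpath; it registers
// itself with DriverManager. Host, realm, FQDN and password are placeholders.
public class HiveKerberosTest {
    public static void main(String[] args) throws Exception {
        // Point the JVM at the same Kerberos configuration MIT Kerberos uses.
        System.setProperty("java.security.krb5.conf", "C:\\ProgramData\\MIT\\Kerberos5\\krb5.ini");
        // Often set so that GSS-API may use the ticket cache instead of requiring
        // credentials on the current Subject; adjust if your environment differs.
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");

        String url = "jdbc:hive2://hs2-host.example.com:10001;"
                + "AuthMech=1;"                               // Kerberos
                + "KrbRealm=EXAMPLE.COM;"
                + "KrbHostFQDN=hs2-host.example.com;"          // must match the SPN
                + "KrbServiceName=hive;"
                + "SSL=1;"
                + "SSLTrustStore=C:\\ProgramData\\MIT\\Kerberos5\\hive.truststore;"
                + "SSLTrustStorePwd=changeit";                 // placeholder

        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SHOW DATABASES")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}

If this standalone test succeeds, the remaining difference is in the DBeaver driver properties; if it fails the same way, the problem lies with the Kerberos ticket, the krb5 configuration, or the service principal rather than with DBeaver.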
						
					
Labels:
- Apache Hive
- Kerberos