Member since 03-24-2021 | 4 Posts | 0 Kudos Received | 0 Solutions
04-30-2021
09:12 AM
@Daming Xue, it looks like it is caused by an issue in Java from JDK 8u171 onwards: there has been a switch from JKS to PKCS12 in the crypto layer, and KMS is giving out errors.

hive.log:

2021-04-30 15:57:10,951 WARN [main]: retry.RetryInvocationHandler (RetryInvocationHandler.java:invoke(126)) - Exception while invoking class org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create over null. Not retrying because try once and fail.
org.apache.hadoop.ipc.RemoteException(java.io.IOException): java.util.concurrent.ExecutionException: java.io.IOException: com.google.common.util.concurrent.UncheckedExecutionException: java.lang.NullPointerException: No KeyVersion exists for key 'hivekey'
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider.generateEncryptedKey(KMSClientProvider.java:802)
    at org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.generateEncryptedKey(KeyProviderCryptoExtension.java:371)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.generateEncryptedDataEncryptionKey(FSNamesystem.java:2355)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2481)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2380)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:716)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:405)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2273)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2269)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1724)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2267)
Caused by: java.util.concurrent.ExecutionException: java.io.IOException: com.google.common.util.concurrent.UncheckedExecutionException: java.lang.NullPointerException: No KeyVersion exists for key 'hivekey'
    at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:289)
    at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:276)
    at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:111)
    at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:132)
    at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2381)
    at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2351)
    at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2313)
    at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2228)
    at com.google.common.cache.LocalCache.get(LocalCache.java:3965)
    at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3969)
    at com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4829)
    at org.apache.hadoop.crypto.key.kms.ValueQueue.getAtMost(ValueQueue.java:266)
    at org.apache.hadoop.crypto.key.kms.ValueQueue.getNext(ValueQueue.java:226)
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider.generateEncryptedKey(KMSClientProvider.java:797)
    ... 15 more
Caused by: java.io.IOException: com.google.common.util.concurrent.UncheckedExecutionException: java.lang.NullPointerException: No KeyVersion exists for key 'hivekey'
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.util.HttpExceptionUtils.validateResponse(HttpExceptionUtils.java:157)
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider.call(KMSClientProvider.java:611)
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider.call(KMSClientProvider.java:569)
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider.access$200(KMSClientProvider.java:90)
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider$EncryptedQueueRefiller.fillQueueForKey(KMSClientProvider.java:145)
    at org.apache.hadoop.crypto.key.kms.ValueQueue$1.load(ValueQueue.java:181)
    at org.apache.hadoop.crypto.key.kms.ValueQueue$1.load(ValueQueue.java:175)
    at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3568)
    at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2350)
    ... 23 more
    at org.apache.hadoop.ipc.Client.call(Client.java:1455)
    at org.apache.hadoop.ipc.Client.call(Client.java:1392)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
    at com.sun.proxy.$Proxy12.create(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:298)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:258)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
    at com.sun.proxy.$Proxy13.create(Unknown Source)
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1719)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1699)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1634)
    at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:456)
    at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:452)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:452)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:395)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:914)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:895)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:792)
    at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:365)
    at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:338)
    at org.apache.hadoop.fs.FileSystem.copyFromLocalFile(FileSystem.java:1972)
    at org.apache.hadoop.hive.ql.exec.tez.DagUtils.localizeResource(DagUtils.java:956)
    at org.apache.hadoop.hive.ql.exec.tez.DagUtils.addTempResources(DagUtils.java:862)
    at org.apache.hadoop.hive.ql.exec.tez.DagUtils.localizeTempFilesFromConf(DagUtils.java:805)
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.refreshLocalResourcesFromConf(TezSessionState.java:233)
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.open(TezSessionState.java:158)
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.open(TezSessionState.java:117)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:541)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:680)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:624)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
2021-04-30 15:57:10,958 INFO [main]: tez.DagUtils (DagUtils.java:localizeResource(958)) - Looks like another thread is writing the same file will wait.
2021-04-30 15:57:10,958 INFO [main]: tez.DagUtils (DagUtils.java:localizeResource(965)) - Number of wait attempts: 5. Wait interval: 5000
2021-04-30 15:57:35,968 ERROR [main]: tez.DagUtils (DagUtils.java:localizeResource(981)) - Could not find the jar that was being uploaded

kms.log:

2021-04-30 15:57:10,901 ERROR RangerKeyStore - com.sun.crypto.provider.KeyProtector.unseal(javax.crypto.SealedObject)
2021-04-30 15:57:10,902 ERROR RangerKeyStore - com.sun.crypto.provider.KeyProtector.unseal(javax.crypto.SealedObject)

We still don't have any solution at this point.
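For reference, the KeyProtector.unseal errors on the KMS side look consistent with the keystore hardening that came in with 8u171: the JDK added a jceks.key.serialFilter security property, and by default only a handful of JDK classes are allowed when keys are deserialized from a JCEKS store, which would explain the KMS suddenly failing to read its keys. A minimal diagnostic sketch (nothing cluster-specific assumed, just the property names from the 8u171 release notes) to see what filter the KMS JVM actually applies:

import java.security.Security;

public class CheckSerialFilter {
    public static void main(String[] args) {
        // Security property added by the JDK 8u171 keystore hardening; on older JDKs
        // it is simply absent, and on 8u171+ a restrictive default applies even if
        // nothing is set explicitly in java.security.
        System.out.println("jceks.key.serialFilter (security property) = "
                + Security.getProperty("jceks.key.serialFilter"));
        // A -Djceks.key.serialFilter=... system property, if present, takes precedence.
        System.out.println("jceks.key.serialFilter (system property)   = "
                + System.getProperty("jceks.key.serialFilter"));
    }
}

If that filter really is what is biting us, the workaround people report is to relax it for the KMS JVM so that Ranger's key-metadata class is accepted again; the exact filter string is documented in the Ranger KMS / vendor notes, so treat any value you find as something to verify rather than copy blindly.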
04-05-2021
05:13 AM
Hi @raghu9raghavend, did you manage to upgrade Java to the latest version, or to any version above 8u161? We are getting the same error: any version below 8u161 doesn't give this error, but as soon as we upgrade to any version above 8u161 the error shows up.
03-30-2021
08:21 AM
Currently we have the 2 jar files under lib/security, which work only for JDK versions 8u161 and below. We then tried removing the jar files without defining crypto.policy either (so it defaults to "unlimited"), but it still doesn't work. We also observed that when we query the existing tables in the default Hive location, we get the same error; but if we create a new table outside the default Hive location, we can query the new table. Just a quick correction on the versions we are using:
Ambari 2.2.2
Hive 1.2.1
Hadoop 2.7
Spark 1.6
HDP 2.4
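As a sanity check on the crypto.policy side, something as small as the snippet below tells us whether unrestricted crypto is really in effect in the JVM that Hive/KMS runs with. This is only a generic check; nothing in it is specific to our cluster.

import javax.crypto.Cipher;

public class CheckCryptoPolicy {
    public static void main(String[] args) throws Exception {
        // 2147483647 (Integer.MAX_VALUE) means the unlimited policy is active,
        // whether via crypto.policy=unlimited or the old policy jar files;
        // 128 means the JVM is still running with the limited policy.
        System.out.println("Max allowed AES key length: "
                + Cipher.getMaxAllowedKeyLength("AES"));
    }
}

Given that only the tables under the default Hive warehouse path fail, the symptom does look tied to the encryption zone that path lives in rather than to anything on the Hive side.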
03-24-2021
08:02 AM
Has anyone encountered this issue with Hive CLI? We get this error if we upgrade to any version of Oracle JDK between 8u171 and 8u281. Hive CLI works perfectly fine if we stay on 8u161 or older. Below are the versions we are running:
Ambari 2.0.0
Hive 1.2.1
Tez 0.7.0.2.4

Below is the error we are getting in hive.log when running the hive command.

WARN [main]: retry.RetryInvocationHandler (RetryInvocationHandler.java:invoke(126)) - Exception while invoking class org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create over null. Not retrying because try once and fail.
org.apache.hadoop.ipc.RemoteException(java.io.IOException): java.util.concurrent.ExecutionException: java.io.IOException: com.google.common.util.concurrent.UncheckedExecutionException: java.lang.NullPointerException: No KeyVersion exists for key 'hivekey'
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider.generateEncryptedKey(KMSClientProvider.java:802)
    at org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.generateEncryptedKey(KeyProviderCryptoExtension.java:371)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.generateEncryptedDataEncryptionKey(FSNamesystem.java:2355)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2481)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2380)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:716)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:405)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2273)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2269)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1724)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2267)
Caused by: java.util.concurrent.ExecutionException: java.io.IOException: com.google.common.util.concurrent.UncheckedExecutionException: java.lang.NullPointerException: No KeyVersion exists for key 'hivekey'
    at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:289)
    at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:276)
    at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:111)
    at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:132)
    at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2381)
    at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2351)
    at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2313)
    at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2228)
    at com.google.common.cache.LocalCache.get(LocalCache.java:3965)
    at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3969)
    at com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4829)
    at org.apache.hadoop.crypto.key.kms.ValueQueue.getAtMost(ValueQueue.java:266)
    at org.apache.hadoop.crypto.key.kms.ValueQueue.getNext(ValueQueue.java:226)
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider.generateEncryptedKey(KMSClientProvider.java:797)
    ... 15 more
Caused by: java.io.IOException: com.google.common.util.concurrent.UncheckedExecutionException: java.lang.NullPointerException: No KeyVersion exists for key 'hivekey'
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.util.HttpExceptionUtils.validateResponse(HttpExceptionUtils.java:157)
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider.call(KMSClientProvider.java:611)
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider.call(KMSClientProvider.java:569)
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider.access$200(KMSClientProvider.java:90)
    at org.apache.hadoop.crypto.key.kms.KMSClientProvider$EncryptedQueueRefiller.fillQueueForKey(KMSClientProvider.java:145)
    at org.apache.hadoop.crypto.key.kms.ValueQueue$1.load(ValueQueue.java:181)
    at org.apache.hadoop.crypto.key.kms.ValueQueue$1.load(ValueQueue.java:175)
    at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3568)
    at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2350)
    ... 23 more
    at org.apache.hadoop.ipc.Client.call(Client.java:1455)
    at org.apache.hadoop.ipc.Client.call(Client.java:1392)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
    at com.sun.proxy.$Proxy12.create(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:298)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:258)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
    at com.sun.proxy.$Proxy13.create(Unknown Source)
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1719)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1699)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1634)
    at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:456)
    at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:452)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:452)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:395)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:914)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:895)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:792)
    at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:365)
    at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:338)
    at org.apache.hadoop.fs.FileSystem.copyFromLocalFile(FileSystem.java:1972)
    at org.apache.hadoop.hive.ql.exec.tez.DagUtils.localizeResource(DagUtils.java:956)
    at org.apache.hadoop.hive.ql.exec.tez.DagUtils.addTempResources(DagUtils.java:862)
    at org.apache.hadoop.hive.ql.exec.tez.DagUtils.localizeTempFilesFromConf(DagUtils.java:805)
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.refreshLocalResourcesFromConf(TezSessionState.java:233)
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.open(TezSessionState.java:158)
    at org.apache.hadoop.hive.ql.exec.tez.TezSessionState.open(TezSessionState.java:117)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:541)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:680)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:624)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
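Since everything bottoms out in "No KeyVersion exists for key 'hivekey'", a quick client-side probe of the key provider from the same JDK the CLI uses shows whether the key versions can be read at all. This is just a sketch against the stock Hadoop KeyProvider API; the key name comes from the error above, and the provider is whatever kms:// URI the cluster is configured with.

import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProviderFactory;

public class ProbeHiveKey {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Picks up the key provider(s) configured for the cluster
        // (hadoop.security.key.provider.path in core-site.xml).
        List<KeyProvider> providers = KeyProviderFactory.getProviders(conf);
        for (KeyProvider provider : providers) {
            System.out.println("Provider: " + provider);
            // An empty/null result or an exception here mirrors the
            // "No KeyVersion exists for key 'hivekey'" failure in the trace.
            System.out.println("  metadata: " + provider.getMetadata("hivekey"));
            System.out.println("  versions: " + provider.getKeyVersions("hivekey"));
        }
    }
}

The same information is available from the shell with hadoop key list -metadata, which is an easier first check before digging into the KMS logs.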