<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question After enabling kerberos (local MIT) unable to access the HDFS. in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/After-enabling-kerberos-local-MIT-unable-to-access-the-HDFS/m-p/280094#M208673</link>
    <description>&lt;P&gt;Please find the logs.&lt;/P&gt;
&lt;P&gt;$ HADOOP_ROOT_LOGGER=DEBUG,console hdfs dfs -ls /&lt;BR /&gt;19/10/14 08:59:25 DEBUG util.Shell: setsid exited with exit code 0&lt;BR /&gt;19/10/14 08:59:25 DEBUG conf.Configuration: parsing URL jar:file:/usr/hdp/3.0.1.0-187/hadoop/hadoop-common-3.1.1.3.0.1.0-187.jar!/core-default.xml&lt;BR /&gt;19/10/14 08:59:25 DEBUG conf.Configuration: parsing input stream sun.net.www.protocol.jar.JarURLConnection$JarURLInputStream@66480dd7&lt;BR /&gt;19/10/14 08:59:25 DEBUG conf.Configuration: parsing URL file:/etc/hadoop/3.0.1.0-187/0/core-site.xml&lt;BR /&gt;19/10/14 08:59:25 DEBUG conf.Configuration: parsing input stream java.io.BufferedInputStream@1877ab81&lt;BR /&gt;19/10/14 08:59:25 DEBUG security.SecurityUtil: Setting hadoop.security.token.service.use_ip to true&lt;BR /&gt;19/10/14 08:59:25 DEBUG security.Groups: Creating new Groups object&lt;BR /&gt;19/10/14 08:59:25 DEBUG util.NativeCodeLoader: Trying to load the custom-built native-hadoop library...&lt;BR /&gt;19/10/14 08:59:25 DEBUG util.NativeCodeLoader: Loaded the native-hadoop library&lt;BR /&gt;19/10/14 08:59:25 DEBUG security.JniBasedUnixGroupsMapping: Using JniBasedUnixGroupsMapping for Group resolution&lt;BR /&gt;19/10/14 08:59:25 DEBUG security.JniBasedUnixGroupsMappingWithFallback: Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMapping&lt;BR /&gt;19/10/14 08:59:25 DEBUG security.Groups: Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000&lt;BR /&gt;19/10/14 08:59:25 DEBUG core.Tracer: sampler.classes = ; loaded no samplers&lt;BR /&gt;19/10/14 08:59:25 DEBUG core.Tracer: span.receiver.classes = ; loaded no span receivers&lt;BR /&gt;19/10/14 08:59:25 DEBUG security.UserGroupInformation: hadoop login&lt;BR /&gt;19/10/14 08:59:25 DEBUG security.UserGroupInformation: hadoop login commit&lt;BR /&gt;19/10/14 
08:59:25 DEBUG security.UserGroupInformation: using local user:UnixPrincipal: hdfs&lt;BR /&gt;19/10/14 08:59:25 DEBUG security.UserGroupInformation: Using user: "UnixPrincipal: hdfs" with name hdfs&lt;BR /&gt;19/10/14 08:59:25 DEBUG security.UserGroupInformation: User entry: "hdfs"&lt;BR /&gt;19/10/14 08:59:25 DEBUG security.UserGroupInformation: UGI loginUser:hdfs (auth:SIMPLE)&lt;BR /&gt;19/10/14 08:59:25 DEBUG core.Tracer: sampler.classes = ; loaded no samplers&lt;BR /&gt;19/10/14 08:59:25 DEBUG core.Tracer: span.receiver.classes = ; loaded no span receivers&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: Loading filesystems&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: file:// = class org.apache.hadoop.fs.LocalFileSystem from /usr/hdp/3.0.1.0-187/hadoop/hadoop-common-3.1.1.3.0.1.0-187.jar&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: viewfs:// = class org.apache.hadoop.fs.viewfs.ViewFileSystem from /usr/hdp/3.0.1.0-187/hadoop/hadoop-common-3.1.1.3.0.1.0-187.jar&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: har:// = class org.apache.hadoop.fs.HarFileSystem from /usr/hdp/3.0.1.0-187/hadoop/hadoop-common-3.1.1.3.0.1.0-187.jar&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: http:// = class org.apache.hadoop.fs.http.HttpFileSystem from /usr/hdp/3.0.1.0-187/hadoop/hadoop-common-3.1.1.3.0.1.0-187.jar&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: https:// = class org.apache.hadoop.fs.http.HttpsFileSystem from /usr/hdp/3.0.1.0-187/hadoop/hadoop-common-3.1.1.3.0.1.0-187.jar&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: hdfs:// = class org.apache.hadoop.hdfs.DistributedFileSystem from /usr/hdp/3.0.1.0-187/hadoop-hdfs/hadoop-hdfs-client-3.1.1.3.0.1.0-187.jar&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: webhdfs:// = class org.apache.hadoop.hdfs.web.WebHdfsFileSystem from /usr/hdp/3.0.1.0-187/hadoop-hdfs/hadoop-hdfs-client-3.1.1.3.0.1.0-187.jar&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: swebhdfs:// = class 
org.apache.hadoop.hdfs.web.SWebHdfsFileSystem from /usr/hdp/3.0.1.0-187/hadoop-hdfs/hadoop-hdfs-client-3.1.1.3.0.1.0-187.jar&lt;BR /&gt;19/10/14 08:59:25 DEBUG gcs.GoogleHadoopFileSystemBase: GHFS version: 1.9.0.3.0.1.0-187&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: gs:// = class com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem from /usr/hdp/3.0.1.0-187/hadoop-mapreduce/gcs-connector-1.9.0.3.0.1.0-187-shaded.jar&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: s3n:// = class org.apache.hadoop.fs.s3native.NativeS3FileSystem from /usr/hdp/3.0.1.0-187/hadoop-mapreduce/hadoop-aws-3.1.1.3.0.1.0-187.jar&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: Looking for FS supporting hdfs&lt;BR /&gt;19/10/14 08:59:25 DEBUG fs.FileSystem: looking for configuration option fs.hdfs.impl&lt;BR /&gt;19/10/14 08:59:26 DEBUG fs.FileSystem: Looking in service filesystems for implementation class&lt;BR /&gt;19/10/14 08:59:26 DEBUG fs.FileSystem: FS for hdfs is class org.apache.hadoop.hdfs.DistributedFileSystem&lt;BR /&gt;19/10/14 08:59:26 DEBUG impl.DfsClientConf: dfs.client.use.legacy.blockreader.local = false&lt;BR /&gt;19/10/14 08:59:26 DEBUG impl.DfsClientConf: dfs.client.read.shortcircuit = true&lt;BR /&gt;19/10/14 08:59:26 DEBUG impl.DfsClientConf: dfs.client.domain.socket.data.traffic = false&lt;BR /&gt;19/10/14 08:59:26 DEBUG impl.DfsClientConf: dfs.domain.socket.path = /var/lib/hadoop-hdfs/dn_socket&lt;BR /&gt;19/10/14 08:59:26 DEBUG hdfs.DFSClient: Sets dfs.client.block.write.replace-datanode-on-failure.min-replication to 0&lt;BR /&gt;19/10/14 08:59:26 DEBUG hdfs.HAUtilClient: No HA service delegation token found for logical URI hdfs://datalakeqa&lt;BR /&gt;19/10/14 08:59:26 DEBUG impl.DfsClientConf: dfs.client.use.legacy.blockreader.local = false&lt;BR /&gt;19/10/14 08:59:26 DEBUG impl.DfsClientConf: dfs.client.read.shortcircuit = true&lt;BR /&gt;19/10/14 08:59:26 DEBUG impl.DfsClientConf: dfs.client.domain.socket.data.traffic = false&lt;BR /&gt;19/10/14 08:59:26 
DEBUG impl.DfsClientConf: dfs.domain.socket.path = /var/lib/hadoop-hdfs/dn_socket&lt;BR /&gt;19/10/14 08:59:26 DEBUG retry.RetryUtils: multipleLinearRandomRetry = null&lt;BR /&gt;19/10/14 08:59:26 DEBUG ipc.Server: rpcKind=RPC_PROTOCOL_BUFFER, rpcRequestWrapperClass=class org.apache.hadoop.ipc.ProtobufRpcEngine$RpcProtobufRequest, rpcInvoker=org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker@6babf3bf&lt;BR /&gt;19/10/14 08:59:26 DEBUG ipc.Client: getting client out of cache: org.apache.hadoop.ipc.Client@3d6f0054&lt;BR /&gt;19/10/14 08:59:26 DEBUG unix.DomainSocketWatcher: org.apache.hadoop.net.unix.DomainSocketWatcher$2@6129020f: starting with interruptCheckPeriodMs = 60000&lt;BR /&gt;19/10/14 08:59:26 DEBUG shortcircuit.DomainSocketFactory: The short-circuit local reads feature is enabled.&lt;BR /&gt;19/10/14 08:59:26 DEBUG sasl.DataTransferSaslUtil: DataTransferProtocol using SaslPropertiesResolver, configured QOP dfs.data.transfer.protection = authentication,privacy, configured class dfs.data.transfer.saslproperties.resolver.class = class org.apache.hadoop.security.SaslPropertiesResolver&lt;BR /&gt;19/10/14 08:59:26 DEBUG ipc.Client: The ping interval is 60000 ms.&lt;BR /&gt;19/10/14 08:59:26 DEBUG ipc.Client: Connecting to /10.49.70.13:8020&lt;BR /&gt;19/10/14 08:59:26 DEBUG security.UserGroupInformation: PrivilegedAction as:hdfs (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:796)&lt;BR /&gt;19/10/14 08:59:26 DEBUG security.SaslRpcClient: Sending sasl message state: NEGOTIATE&lt;/P&gt;
&lt;P&gt;19/10/14 08:59:26 DEBUG security.SaslRpcClient: Get token info proto:interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB info:@org.apache.hadoop.security.token.TokenInfo(value=class org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSelector)&lt;BR /&gt;19/10/14 08:59:26 DEBUG security.SaslRpcClient: tokens aren't supported for this protocol or user doesn't have one&lt;BR /&gt;19/10/14 08:59:26 DEBUG security.SaslRpcClient: client isn't using kerberos&lt;BR /&gt;19/10/14 08:59:26 DEBUG security.UserGroupInformation: PrivilegedActionException as:hdfs (auth:SIMPLE) cause:org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]&lt;BR /&gt;19/10/14 08:59:26 DEBUG security.UserGroupInformation: PrivilegedAction as:hdfs (auth:SIMPLE) from:org.apache.hadoop.ipc.Client$Connection.handleSaslConnectionFailure(Client.java:720)&lt;BR /&gt;19/10/14 08:59:26 WARN ipc.Client: Exception encountered while connecting to the server : org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]&lt;BR /&gt;19/10/14 08:59:26 DEBUG security.UserGroupInformation: PrivilegedActionException as:hdfs (auth:SIMPLE) cause:java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]&lt;BR /&gt;19/10/14 08:59:26 DEBUG ipc.Client: closing ipc connection to /10.49.70.13:8020: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]&lt;BR /&gt;java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]&lt;BR /&gt;at org.apache.hadoop.ipc.Client$Connection$1.run(Client.java:757)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:422)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)&lt;BR 
/&gt;at org.apache.hadoop.ipc.Client$Connection.handleSaslConnectionFailure(Client.java:720)&lt;BR /&gt;at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:813)&lt;BR /&gt;at org.apache.hadoop.ipc.Client$Connection.access$3600(Client.java:410)&lt;BR /&gt;at org.apache.hadoop.ipc.Client.getConnection(Client.java:1558)&lt;BR /&gt;at org.apache.hadoop.ipc.Client.call(Client.java:1389)&lt;BR /&gt;at org.apache.hadoop.ipc.Client.call(Client.java:1353)&lt;BR /&gt;at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)&lt;BR /&gt;at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)&lt;BR /&gt;at com.sun.proxy.$Proxy9.getFileInfo(Unknown Source)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:900)&lt;BR /&gt;at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;BR /&gt;at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)&lt;BR /&gt;at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;BR /&gt;at java.lang.reflect.Method.invoke(Method.java:498)&lt;BR /&gt;at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)&lt;BR /&gt;at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)&lt;BR /&gt;at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)&lt;BR /&gt;at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)&lt;BR /&gt;at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)&lt;BR /&gt;at com.sun.proxy.$Proxy10.getFileInfo(Unknown Source)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1654)&lt;BR /&gt;at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1583)&lt;BR 
/&gt;at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1580)&lt;BR /&gt;at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)&lt;BR /&gt;at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1595)&lt;BR /&gt;at org.apache.hadoop.fs.Globber.getFileStatus(Globber.java:65)&lt;BR /&gt;at org.apache.hadoop.fs.Globber.doGlob(Globber.java:283)&lt;BR /&gt;at org.apache.hadoop.fs.Globber.glob(Globber.java:149)&lt;BR /&gt;at org.apache.hadoop.fs.FileSystem.globStatus(FileSystem.java:2067)&lt;BR /&gt;at org.apache.hadoop.fs.shell.PathData.expandAsGlob(PathData.java:353)&lt;BR /&gt;at org.apache.hadoop.fs.shell.Command.expandArgument(Command.java:250)&lt;BR /&gt;at org.apache.hadoop.fs.shell.Command.expandArguments(Command.java:233)&lt;BR /&gt;at org.apache.hadoop.fs.shell.FsCommand.processRawArguments(FsCommand.java:104)&lt;BR /&gt;at org.apache.hadoop.fs.shell.Command.run(Command.java:177)&lt;BR /&gt;at org.apache.hadoop.fs.FsShell.run(FsShell.java:328)&lt;BR /&gt;at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)&lt;BR /&gt;at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:90)&lt;BR /&gt;at org.apache.hadoop.fs.FsShell.main(FsShell.java:391)&lt;BR /&gt;Caused by: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]&lt;BR /&gt;at org.apache.hadoop.security.SaslRpcClient.selectSaslClient(SaslRpcClient.java:173)&lt;BR /&gt;at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:390)&lt;BR /&gt;at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:614)&lt;BR /&gt;at org.apache.hadoop.ipc.Client$Connection.access$2300(Client.java:410)&lt;BR /&gt;at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:800)&lt;BR /&gt;at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:796)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at 
javax.security.auth.Subject.doAs(Subject.java:422)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)&lt;BR /&gt;at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:796)&lt;/P&gt;</description>
    <pubDate>Mon, 14 Oct 2019 22:47:06 GMT</pubDate>
    <dc:creator>saivenkatg55</dc:creator>
    <dc:date>2019-10-14T22:47:06Z</dc:date>
  </channel>
</rss>

