<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: org.apache.hadoop.security.AccessControlException: Permission denied - Need c in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/org-apache-hadoop-security-AccessControlException-Permission/m-p/48950#M30031</link>
    <description>&lt;P&gt;Can this be done in production ? mate&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
    <pubDate>Mon, 02 Jan 2017 02:47:54 GMT</pubDate>
    <dc:creator>csguna</dc:creator>
    <dc:date>2017-01-02T02:47:54Z</dc:date>
    <item>
      <title>org.apache.hadoop.security.AccessControlException: Permission denied - Need c</title>
      <link>https://community.cloudera.com/t5/Support-Questions/org-apache-hadoop-security-AccessControlException-Permission/m-p/48930#M30029</link>
      <description>&lt;P&gt;The MapReduce job is completing successfully and I am able to check the results in HDFS.&amp;nbsp;&lt;/P&gt;&lt;P&gt;The problem is that when I try to view the jobs in the history server they are not there; I checked the logs and found this error.&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;16/12/31 06:34:27 ERROR hs.HistoryFileManager: Error while trying to move a job to done
org.apache.hadoop.security.AccessControlException: Permission denied: user=mapred, access=READ, inode="/user/history/done_intermediate/matt/job_1483174306930_0005.summary":matt:hadoop:-rwxrwx---
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkFsPermission(FSPermissionChecker.java:265)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:251)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:182)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:5461)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:5443)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPathAccess(FSNamesystem.java:5405)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1680)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1632)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1612)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1586)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:482)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:322)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:585)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1026)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1986)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1982)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1548)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1980)

	at sun.reflect.GeneratedConstructorAccessor29.newInstance(Unknown Source)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
	at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1139)
	at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1127)
	at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1117)
	at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:264)
	at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:231)
	at org.apache.hadoop.hdfs.DFSInputStream.&amp;lt;init&amp;gt;(DFSInputStream.java:224)
	at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1290)
	at org.apache.hadoop.fs.Hdfs.open(Hdfs.java:309)
	at org.apache.hadoop.fs.Hdfs.open(Hdfs.java:54)
	at org.apache.hadoop.fs.AbstractFileSystem.open(AbstractFileSystem.java:619)
	at org.apache.hadoop.fs.FileContext$6.next(FileContext.java:785)
	at org.apache.hadoop.fs.FileContext$6.next(FileContext.java:781)
	at org.apache.hadoop.fs.FSLinkResolver.resolve(FSLinkResolver.java:90)
	at org.apache.hadoop.fs.FileContext.open(FileContext.java:781)
	at org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.getJobSummary(HistoryFileManager.java:953)
	at org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.access$400(HistoryFileManager.java:82)
	at org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager$HistoryFileInfo.moveToDone(HistoryFileManager.java:370)
	at org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager$HistoryFileInfo.access$1400(HistoryFileManager.java:295)
	at org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager$1.run(HistoryFileManager.java:843)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:744)&lt;/PRE&gt;&lt;P&gt;This looks like a permissions issue, but I am not sure where I should change them or what the chmod value should be.&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Below is my current configuration.&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;sudo -u hdfs hadoop fs -mkdir /user
$ sudo -u hdfs hadoop fs -mkdir /user/matt
$ sudo -u hdfs hadoop fs -chown matt /user/matt
$ sudo -u hdfs hadoop fs -mkdir /user/history
$ sudo -u hdfs hadoop fs -chmod 1777 /user/history
$ sudo -u hdfs hadoop fs -chown mapred:hadoop \
/user/history

&lt;/PRE&gt;&lt;P&gt;Can someone please help me with this issue?&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Fri, 16 Sep 2022 10:52:43 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/org-apache-hadoop-security-AccessControlException-Permission/m-p/48930#M30029</guid>
      <dc:creator>csguna</dc:creator>
      <dc:date>2022-09-16T10:52:43Z</dc:date>
    </item>
    <item>
      <title>Re: org.apache.hadoop.security.AccessControlException: Permission denied - Need c</title>
      <link>https://community.cloudera.com/t5/Support-Questions/org-apache-hadoop-security-AccessControlException-Permission/m-p/48933#M30030</link>
      <description>I'd put the mapred account in the hadoop group. It will then have the needed access. The hdfs, yarn, and mapred accounts should all be in the hadoop group.</description>
      <pubDate>Sun, 01 Jan 2017 05:59:57 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/org-apache-hadoop-security-AccessControlException-Permission/m-p/48933#M30030</guid>
      <dc:creator>mbigelow</dc:creator>
      <dc:date>2017-01-01T05:59:57Z</dc:date>
    </item>
    <item>
      <title>Re: org.apache.hadoop.security.AccessControlException: Permission denied - Need c</title>
      <link>https://community.cloudera.com/t5/Support-Questions/org-apache-hadoop-security-AccessControlException-Permission/m-p/48950#M30031</link>
      <description>&lt;P&gt;Can this be done in production ? mate&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 02 Jan 2017 02:47:54 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/org-apache-hadoop-security-AccessControlException-Permission/m-p/48950#M30031</guid>
      <dc:creator>csguna</dc:creator>
      <dc:date>2017-01-02T02:47:54Z</dc:date>
    </item>
    <item>
      <title>Re: org.apache.hadoop.security.AccessControlException: Permission denied - Need c</title>
      <link>https://community.cloudera.com/t5/Support-Questions/org-apache-hadoop-security-AccessControlException-Permission/m-p/48964#M30032</link>
      <description>Yes. Go through your process. It is granting more access, which is generally less risky.&lt;BR /&gt;&lt;BR /&gt;Also, it is the correct way to install Hadoop/CDH.&lt;BR /&gt;&lt;BR /&gt;&lt;A href="https://www.cloudera.com/documentation/enterprise/5-6-x/topics/cm_sg_cm_users_principals.html" target="_blank"&gt;https://www.cloudera.com/documentation/enterprise/5-6-x/topics/cm_sg_cm_users_principals.html&lt;/A&gt;</description>
      <pubDate>Mon, 02 Jan 2017 21:19:36 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/org-apache-hadoop-security-AccessControlException-Permission/m-p/48964#M30032</guid>
      <dc:creator>mbigelow</dc:creator>
      <dc:date>2017-01-02T21:19:36Z</dc:date>
    </item>
  </channel>
</rss>

