<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Permission denied: user=mapred, access=WRITE, inode=&quot;/&quot;:hdfs:supergroup:drwxr-xr-x in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44282#M2477</link>
    <description>&lt;P&gt;Hi,&lt;BR /&gt;&lt;BR /&gt;I'm getting a similar error, while starting the HBase Region Server. I'm not pretty sure, which permissions I have to set.. &lt;span class="lia-unicode-emoji" title=":confused_face:"&gt;😕&lt;/span&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;2016-08-24 15:47:49,361 ERROR org.apache.hadoop.hbase.coprocessor.CoprocessorHost: The coprocessor org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint threw java.lang.IllegalStateException: Failed to get FileSystem instance
java.lang.IllegalStateException: Failed to get FileSystem instance
	at org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint.start(SecureBulkLoadEndpoint.java:152)
	at org.apache.hadoop.hbase.coprocessor.CoprocessorHost$Environment.startup(CoprocessorHost.java:414)
	at org.apache.hadoop.hbase.coprocessor.CoprocessorHost.loadInstance(CoprocessorHost.java:255)
	at org.apache.hadoop.hbase.coprocessor.CoprocessorHost.loadSystemCoprocessors(CoprocessorHost.java:161)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.&amp;lt;init&amp;gt;(RegionCoprocessorHost.java:218)
	at org.apache.hadoop.hbase.regionserver.HRegion.&amp;lt;init&amp;gt;(HRegion.java:720)
	at org.apache.hadoop.hbase.regionserver.HRegion.&amp;lt;init&amp;gt;(HRegion.java:628)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
	at org.apache.hadoop.hbase.regionserver.HRegion.newHRegion(HRegion.java:6128)
	at org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:6432)
	at org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:6404)
	at org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:6360)
	at org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:6311)
	at org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.openRegion(OpenRegionHandler.java:362)
	at org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.process(OpenRegionHandler.java:129)
	at org.apache.hadoop.hbase.executor.EventHandler.run(EventHandler.java:129)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=hbase, access=WRITE, inode="/tmp":hdfs:supergroup:drwxr-xr-x
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:281)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:262)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:242)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:169)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6590)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6572)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6524)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4322)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4292)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4265)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:867)
	at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:322)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:603)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
	at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3084)
	at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:3049)
	at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:957)
	at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:953)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:953)
	at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:946)
	at org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint.start(SecureBulkLoadEndpoint.java:139)
	... 21 more
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=hbase, access=WRITE, inode="/tmp":hdfs:supergroup:drwxr-xr-x
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:281)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:262)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:242)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:169)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6590)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6572)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6524)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4322)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4292)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4265)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:867)
	at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:322)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:603)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

	at org.apache.hadoop.ipc.Client.call(Client.java:1471)
	at org.apache.hadoop.ipc.Client.call(Client.java:1408)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
	at com.sun.proxy.$Proxy23.mkdirs(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:544)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:256)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
	at com.sun.proxy.$Proxy24.mkdirs(Unknown Source)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:279)
	at com.sun.proxy.$Proxy25.mkdirs(Unknown Source)
	at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3082)
	... 28 more&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I am running a CDH 5.7 Cluster on 4 Ubuntu 14.04 machines.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;It would be nice if someone could help me out and thanks a lot.&lt;/P&gt;</description>
    <pubDate>Wed, 24 Aug 2016 14:09:54 GMT</pubDate>
    <dc:creator>MaxM</dc:creator>
    <dc:date>2016-08-24T14:09:54Z</dc:date>
    <item>
      <title>Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/16318#M2470</link>
      <description>&lt;P&gt;When I try to start the job traker using this command&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;service hadoop-0.20-mapreduce-jobtracker start&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;I can see this error&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;org.apache.hadoop.security.AccessControlException: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:224)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:204)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:149)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:4891)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:4873)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:4847)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:3192)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:3156)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:3137)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:669)&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I found this blog post which tries to address this issue&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;A target="_blank" href="http://blog.spryinc.com/2013/06/hdfs-permissions-overcoming-permission.html"&gt;http://blog.spryinc.com/2013/06/hdfs-permissions-overcoming-permission.html&lt;/A&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I followed the steps here and did&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;groupadd supergroup
usermod -a -G supergroup mapred
usermod -a -G supergroup hdfs&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;but i still get this problem. The only different between the blog entry and me is that for me the error is on the "root" dir whereas for the blog it is for the "/user"&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Here is my mapred-site.xml&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;&amp;lt;?xml version="1.0"?&amp;gt;
&amp;lt;?xml-stylesheet type="text/xsl" href="configuration.xsl"?&amp;gt;
&amp;lt;configuration&amp;gt;
  &amp;lt;property&amp;gt;
    &amp;lt;name&amp;gt;mapred.job.tracker&amp;lt;/name&amp;gt;
    &amp;lt;value&amp;gt;jt1:8021&amp;lt;/value&amp;gt;
  &amp;lt;/property&amp;gt;
  &amp;lt;property&amp;gt;
    &amp;lt;name&amp;gt;mapred.local.dir&amp;lt;/name&amp;gt;
    &amp;lt;value&amp;gt;/tmp/mapred/jt&amp;lt;/value&amp;gt;
  &amp;lt;/property&amp;gt;
  &amp;lt;property&amp;gt;
    &amp;lt;name&amp;gt;mapred.system.dir&amp;lt;/name&amp;gt;
    &amp;lt;value&amp;gt;/tmp/mapred/system&amp;lt;/value&amp;gt;
  &amp;lt;/property&amp;gt;
  &amp;lt;property&amp;gt;
    &amp;lt;name&amp;gt;mapreduce.jobtracker.staging.root.dir&amp;lt;/name&amp;gt;
    &amp;lt;value&amp;gt;/user&amp;lt;/value&amp;gt;
  &amp;lt;/property&amp;gt;
  &amp;lt;property&amp;gt;
    &amp;lt;name&amp;gt;mapred.job.tracker.persist.jobstatus.active&amp;lt;/name&amp;gt;
    &amp;lt;value&amp;gt;true&amp;lt;/value&amp;gt;
  &amp;lt;/property&amp;gt;
  &amp;lt;property&amp;gt;
    &amp;lt;name&amp;gt;mapred.job.tracker.persist.jobstatus.hours&amp;lt;/name&amp;gt;
    &amp;lt;value&amp;gt;24&amp;lt;/value&amp;gt;
  &amp;lt;/property&amp;gt;
  &amp;lt;property&amp;gt;
    &amp;lt;name&amp;gt;mapred.jobtracker.taskScheduler&amp;lt;/name&amp;gt;
    &amp;lt;value&amp;gt;org.apache.hadoop.mapred.FairScheduler&amp;lt;/value&amp;gt;
  &amp;lt;/property&amp;gt;
  &amp;lt;property&amp;gt;
    &amp;lt;name&amp;gt;mapred.fairscheduler.poolnameproperty&amp;lt;/name&amp;gt;
    &amp;lt;value&amp;gt;user.name&amp;lt;/value&amp;gt;
  &amp;lt;/property&amp;gt;
  &amp;lt;property&amp;gt;
    &amp;lt;name&amp;gt;mapred.fairscheduler.allocation.file&amp;lt;/name&amp;gt;
    &amp;lt;value&amp;gt;/etc/hadoop/conf/fair-scheduler.xml&amp;lt;/value&amp;gt;
  &amp;lt;/property&amp;gt;
  &amp;lt;property&amp;gt;
    &amp;lt;name&amp;gt;mapred.fairscheduler.allow.undeclared.pools&amp;lt;/name&amp;gt;
    &amp;lt;value&amp;gt;true&amp;lt;/value&amp;gt;
  &amp;lt;/property&amp;gt;
&amp;lt;/configuration&amp;gt;&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I also found &amp;nbsp;this blog&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;A target="_blank" href="http://www.hadoopinrealworld.com/fixing-org-apache-hadoop-security-accesscontrolexception-permission-denied/"&gt;http://www.hadoopinrealworld.com/fixing-org-apache-hadoop-security-accesscontrolexception-permission-denied/&lt;/A&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I did&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;sudo -u hdfs hdfs dfs -mkdir /home&lt;/P&gt;&lt;P&gt;sudo -u hdfs hdfs dfs -chown mapred:mapred /home&lt;/P&gt;&lt;P&gt;sudo -u hdfs hdfs dfs -mkdir /home/mapred&lt;/P&gt;&lt;P&gt;sudo -u hdfs hdfs dfs -chown mapred /home/mapred&lt;/P&gt;&lt;P&gt;sudo -u hdfs hdfs dfs -chown hdfs:supergroup /&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;but still problem is not resolved &lt;span class="lia-unicode-emoji" title=":disappointed_face:"&gt;😞&lt;/span&gt; Please help.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I wonder why it is going for the "root" dir&amp;nbsp;inode=&lt;SPAN style="color: #ff0000;"&gt;&lt;STRONG&gt;"/"&lt;/STRONG&gt;&lt;/SPAN&gt;:hdfs:supergroup:drwxr-xr-x&lt;/P&gt;</description>
      <pubDate>Fri, 16 Sep 2022 09:04:10 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/16318#M2470</guid>
      <dc:creator>abhishes</dc:creator>
      <dc:date>2022-09-16T09:04:10Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/16420#M2471</link>
      <description>&lt;P&gt;The error indicates that mapreduce wants to be able to write to /. &amp;nbsp;you have the owner as hdfs with rwx, you have groups with r-x, &amp;nbsp;and others set to r-x. Since you added mapred to the groups membership earlier by adding it to supergroup and making supergroup the group for / it is the group level permissions that we will need to modify. &amp;nbsp;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;To get it working you can do the following:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;&lt;SPAN&gt;sudo -u hdfs&amp;nbsp;&lt;/SPAN&gt;hdfs dfs -chmod 775 /&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;this will change the permissions on / to drwxrwxr-x&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;as for why mapreduce is trying to write to / it may be that it's trying to create /user and /tmp that you have defined as the user space and the temporary space. &amp;nbsp;if you don't have those directories you could instead do the following:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;sudo -u hdfs hdfs dfs -mkdir /user&lt;/P&gt;&lt;P&gt;sudo -u hdfs hdfs dfs -chown mapred:mapred /user&lt;/P&gt;&lt;P&gt;sudo -u hdfs hdfs dfs -mkdir /tmp&lt;/P&gt;&lt;P&gt;sudo -u hdfs hdfs dfs -chown mapred:mapred /tmp&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 04 Aug 2014 20:12:57 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/16420#M2471</guid>
      <dc:creator>ben.hemphill</dc:creator>
      <dc:date>2014-08-04T20:12:57Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/21116#M2472</link>
      <description>&lt;P&gt;I was able to resolve the AccessControLException by using "sudo -u hdfs" and pushing my data file to hdfs (using the full path) as below:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;sudo -u hdfs spark-submit --class com.cloudera.sparkwordcount.JavaWordCount --master local target/sparkwordcount-0.0.1-SNAPSHOT.jar /user/cloudera/data/inputfile.txt 2&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I thought it would then be simple to switch the user to 'cloudera' (e.g. "sudo -u cloudera"), since I was putting the data file under the cloudera user path it seemed reasonable, but gave me the same exception. Not sure why?&lt;/P&gt;</description>
      <pubDate>Fri, 31 Oct 2014 22:50:53 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/21116#M2472</guid>
      <dc:creator>mister</dc:creator>
      <dc:date>2014-10-31T22:50:53Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/36274#M2473</link>
      <description>&lt;P&gt;A quick update to this thread to advise of a new Community Knowledge Article on this subject.&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;&lt;A href="https://community.cloudera.com/t5/CDH-Manual-Installation/How-to-resolve-quot-Permission-denied-quot-errors-in-CDH/ta-p/36141" target="_blank"&gt;&lt;SPAN class="lia-link-navigation blog-article-link lia-link-disabled"&gt;How to resolve "Permissio&lt;WBR /&gt;n denied" errors in CDH&lt;/SPAN&gt;&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 14 Jan 2016 21:19:35 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/36274#M2473</guid>
      <dc:creator>cjervis</dc:creator>
      <dc:date>2016-01-14T21:19:35Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/40624#M2474</link>
      <description>I had a similar issue starting up pyspark shell, spark 1.6 and turned out that my program is writing log info to /user/spark/applhistorylogs and it does not have sufficient permissions to write to this path on hdfs. Changing permissions to 777 helped.&lt;BR /&gt;&lt;BR /&gt;Any idea why this issue popped up all of a sudden. I have been using same environment for last 2 months</description>
      <pubDate>Mon, 09 May 2016 15:49:57 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/40624#M2474</guid>
      <dc:creator>Abhiyada</dc:creator>
      <dc:date>2016-05-09T15:49:57Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44244#M2475</link>
      <description>&lt;P&gt;I am trying to invoke a sqoop oozie job from a oozie shell action . But i am getting following error and oozie sqoop job is in suspended status.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;JA009: Permission denied: user=yarn, access=WRITE,&lt;BR /&gt;inode="/user":hdfs:supergroup:drwxr-xr-x at&lt;BR /&gt;org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission&lt;BR /&gt;(DefaultAuthorizationProvider.java:257) at&lt;BR /&gt;org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check&lt;BR /&gt;(DefaultAuthorizationProvider.java:238) at&lt;BR /&gt;org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check&lt;BR /&gt;(DefaultAuthorizationProvider.java:216) at org.apache.hadoop.hdfs.server.namenode.De&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I know the issue is the job is invoked by yarn and its not having WRITE permission on /user folder.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;As per your solution i need to change permission rights of /user.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;But in my company it's not possible since i don't have rights to do that and there are so many users there.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I have tried to change the sqoop import operation to a /temp folder where there is WRITE access to all users.But i am still getting same error. I don't know why its is always referring to /user folder.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Is there any other way i can resolve this issue?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks in advace.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Tue, 23 Aug 2016 13:29:29 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44244#M2475</guid>
      <dc:creator>thachil</dc:creator>
      <dc:date>2016-08-23T13:29:29Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44255#M2476</link>
      <description>&lt;P&gt;I would say that you should work with your cluster administrator to update the permissions, since your user will not be able to create the subfolder that YARN is trying to create for your user either.&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Tue, 23 Aug 2016 22:06:20 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44255#M2476</guid>
      <dc:creator>ben.hemphill</dc:creator>
      <dc:date>2016-08-23T22:06:20Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44282#M2477</link>
      <description>&lt;P&gt;Hi,&lt;BR /&gt;&lt;BR /&gt;I'm getting a similar error, while starting the HBase Region Server. I'm not pretty sure, which permissions I have to set.. &lt;span class="lia-unicode-emoji" title=":confused_face:"&gt;😕&lt;/span&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;2016-08-24 15:47:49,361 ERROR org.apache.hadoop.hbase.coprocessor.CoprocessorHost: The coprocessor org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint threw java.lang.IllegalStateException: Failed to get FileSystem instance
java.lang.IllegalStateException: Failed to get FileSystem instance
	at org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint.start(SecureBulkLoadEndpoint.java:152)
	at org.apache.hadoop.hbase.coprocessor.CoprocessorHost$Environment.startup(CoprocessorHost.java:414)
	at org.apache.hadoop.hbase.coprocessor.CoprocessorHost.loadInstance(CoprocessorHost.java:255)
	at org.apache.hadoop.hbase.coprocessor.CoprocessorHost.loadSystemCoprocessors(CoprocessorHost.java:161)
	at org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.&amp;lt;init&amp;gt;(RegionCoprocessorHost.java:218)
	at org.apache.hadoop.hbase.regionserver.HRegion.&amp;lt;init&amp;gt;(HRegion.java:720)
	at org.apache.hadoop.hbase.regionserver.HRegion.&amp;lt;init&amp;gt;(HRegion.java:628)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
	at org.apache.hadoop.hbase.regionserver.HRegion.newHRegion(HRegion.java:6128)
	at org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:6432)
	at org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:6404)
	at org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:6360)
	at org.apache.hadoop.hbase.regionserver.HRegion.openHRegion(HRegion.java:6311)
	at org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.openRegion(OpenRegionHandler.java:362)
	at org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.process(OpenRegionHandler.java:129)
	at org.apache.hadoop.hbase.executor.EventHandler.run(EventHandler.java:129)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=hbase, access=WRITE, inode="/tmp":hdfs:supergroup:drwxr-xr-x
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:281)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:262)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:242)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:169)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6590)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6572)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6524)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4322)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4292)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4265)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:867)
	at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:322)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:603)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
	at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3084)
	at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:3049)
	at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:957)
	at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:953)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:953)
	at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:946)
	at org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint.start(SecureBulkLoadEndpoint.java:139)
	... 21 more
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=hbase, access=WRITE, inode="/tmp":hdfs:supergroup:drwxr-xr-x
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:281)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:262)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:242)
	at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:169)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6590)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6572)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6524)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4322)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4292)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4265)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:867)
	at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:322)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:603)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2086)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2082)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2080)

	at org.apache.hadoop.ipc.Client.call(Client.java:1471)
	at org.apache.hadoop.ipc.Client.call(Client.java:1408)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
	at com.sun.proxy.$Proxy23.mkdirs(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:544)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:256)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
	at com.sun.proxy.$Proxy24.mkdirs(Unknown Source)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:279)
	at com.sun.proxy.$Proxy25.mkdirs(Unknown Source)
	at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3082)
	... 28 more&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I am running a CDH 5.7 Cluster on 4 Ubuntu 14.04 machines.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;It would be nice if someone could help me out and thanks a lot.&lt;/P&gt;</description>
      <pubDate>Wed, 24 Aug 2016 14:09:54 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44282#M2477</guid>
      <dc:creator>MaxM</dc:creator>
      <dc:date>2016-08-24T14:09:54Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44286#M2478</link>
      <description>Already fixed it</description>
      <pubDate>Wed, 24 Aug 2016 14:43:15 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44286#M2478</guid>
      <dc:creator>MaxM</dc:creator>
      <dc:date>2016-08-24T14:43:15Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44485#M2479</link>
      <description>&lt;P&gt;Instead of invoking the sqoop action from shell, i created a sub workflow that does the sqoop job and then called the sub workflow from the main Oozie workflow. The subworkflow will be invoked with submitter as the current user.&lt;/P&gt;&lt;P&gt;So problem solved for me.&lt;/P&gt;</description>
      <pubDate>Mon, 29 Aug 2016 17:13:12 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/44485#M2479</guid>
      <dc:creator>thachil</dc:creator>
      <dc:date>2016-08-29T17:13:12Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/52135#M2480</link>
      <description>&lt;P&gt;I have some strange issue going on with spark jobs.&lt;/P&gt;&lt;P&gt;CDH 5.8.3.&amp;nbsp;&lt;/P&gt;&lt;P&gt;Even hive on spark jobs.&lt;/P&gt;&lt;P&gt;Job seems to run successfully. While the job is running i can go thorough the Resource manager to application master which leads me to spark execution web UI.&lt;/P&gt;&lt;P&gt;But after the job finishes, even though the job is moved to Job history server, when i click on histroy server webui it doesnt take me to spark history web UI.&lt;/P&gt;&lt;P&gt;Instead the job remains under /tmp/logs/user/logs/applicationid&lt;/P&gt;&lt;P&gt;eg.&amp;nbsp;drwxrwx--- &amp;nbsp; - bigdata hadoop &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp;0 2017-03-13 15:26 /tmp/logs/bigdata/logs/application_1489248168306_0076&lt;/P&gt;&lt;P&gt;drwxrwxrwt+ &amp;nbsp;- mapred hadoop &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp;0 2017-03-09 17:39 /tmp/logs&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Permissions for /tmp is 1777&lt;/P&gt;&lt;P&gt;/user/bigdata is 755&lt;/P&gt;&lt;P&gt;drwxrwx---+ - mapred hadoop 0 2017-01-03 13:09 /user/history/done&lt;BR /&gt;drwxrwxrwt+ - mapred hadoop 0 2017-03-09 17:39 /user/history/done_intermediate&lt;/P&gt;&lt;P&gt;uid=489(mapred) gid=486(mapred) groups=486(mapred),493(hadoop)&lt;/P&gt;&lt;P&gt;uid=517(bigdata) gid=522(bigdata) groups=522(bigdata),528(hdpdev)&lt;/P&gt;&lt;P&gt;hadoop:x:493:hdfs,mapred,yarn&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;All of below is done.&amp;nbsp;&lt;A href="https://www.cloudera.com/documentation/enterprise/5-4-x/topics/admin_spark_history_server.html" target="_blank"&gt;https://www.cloudera.com/documentation/enterprise/5-4-x/topics/admin_spark_history_server.html&amp;nbsp;&lt;/A&gt;&lt;/P&gt;&lt;PRE&gt;$ sudo -u hdfs hadoop fs -mkdir /user/spark
$ sudo -u hdfs hadoop fs -mkdir /user/spark/applicationHistory
$ sudo -u hdfs hadoop fs -chown -R spark:spark /user/spark
$ sudo -u hdfs hadoop fs -chmod 1777 /user/spark/applicationHistory&lt;/PRE&gt;&lt;PRE&gt;spark.eventLog.dir=/user/spark/applicationHistory
spark.eventLog.enabled=true&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Not sure whats going on here. Everything seems to be in order.&lt;BR /&gt;Surprisingly for some of the jobs i was able to be redirected from job history server to spark history server.&lt;/P&gt;</description>
      <pubDate>Mon, 13 Mar 2017 21:05:55 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/52135#M2480</guid>
      <dc:creator>ABaaya</dc:creator>
      <dc:date>2017-03-13T21:05:55Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/52225#M2481</link>
      <description>&lt;P&gt;Figuered out the issue.&lt;/P&gt;&lt;P&gt;The issue was we were passing a spark.conf file while submitting the spark job hoping the config changes would be aggregated with default parameters from default spark.conf.&lt;/P&gt;&lt;P&gt;Turns out it overrides the default spark config file. Even if you pass blank spark conf it will not consider the default spark.conf for the job.&lt;/P&gt;&lt;P&gt;We had to below 3 lines on the custom spark conf file to enable log aggregation at spark history server and URL at resource manager to point to spark history server.&lt;/P&gt;&lt;P&gt;This has to be done with every spark job. If a job is submitted with below 3 parms it will not be available in spark history server even if u restart anything.&lt;/P&gt;&lt;P&gt;```spark.eventLog.enabled=true&lt;BR /&gt;spark.eventLog.dir=hdfs://nameservice1/user/spark/applicationHistory&lt;BR /&gt;spark.yarn.historyServer.address=&lt;A href="http://sparkhist-dev.visibleworld.com:18088" target="_blank"&gt;http://sparkhist-dev.visibleworld.com:18088&lt;/A&gt;```&lt;/P&gt;</description>
      <pubDate>Thu, 16 Mar 2017 01:16:24 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/52225#M2481</guid>
      <dc:creator>ABaaya</dc:creator>
      <dc:date>2017-03-16T01:16:24Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/53173#M2482</link>
      <description>&lt;P&gt;&amp;nbsp;how did you solve it Max?&lt;/P&gt;</description>
      <pubDate>Tue, 04 Apr 2017 08:48:28 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/53173#M2482</guid>
      <dc:creator>jack0188</dc:creator>
      <dc:date>2017-04-04T08:48:28Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied: user=mapred, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/53174#M2483</link>
      <description>How did u solved it ??? Which things one has to check ?</description>
      <pubDate>Tue, 04 Apr 2017 08:50:06 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Permission-denied-user-mapred-access-WRITE-inode-quot-quot/m-p/53174#M2483</guid>
      <dc:creator>jack0188</dc:creator>
      <dc:date>2017-04-04T08:50:06Z</dc:date>
    </item>
  </channel>
</rss>

