<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question spark not started on ambari server in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/spark-not-started-on-ambari-server/m-p/179012#M70521</link>
    <description>&lt;P&gt;spark history server not started and from the logs under /var/log/spark2 we see the following&lt;/P&gt;&lt;PRE&gt;17/10/31 21:00:23 ERROR FsHistoryProvider: Exception encountered when attempting to load application log hdfs://hdfsha/spark2-history/application_1507958402099_0822_1
org.apache.hadoop.security.AccessControlException: Permission denied: user=root, access=READ, inode="/spark2-history/application_1507958402099_0822_1":bipflop:hadoop:-rwxrwx---
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:319)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:219)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:190)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1955)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1939)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPathAccess(FSDirectory.java:1913)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2000)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1969)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1882)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:699)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345)&lt;/PRE&gt;&lt;P&gt;please advise how to resolve this?&lt;/P&gt;&lt;P&gt;do you think that we need to do a restore of postgresql from backup in order to solve this?&lt;/P&gt;</description>
    <pubDate>Wed, 01 Nov 2017 03:00:45 GMT</pubDate>
    <dc:creator>mike_bronson7</dc:creator>
    <dc:date>2017-11-01T03:00:45Z</dc:date>
    <item>
      <title>spark not started on ambari server</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/spark-not-started-on-ambari-server/m-p/179012#M70521</link>
      <description>&lt;P&gt;spark history server not started and from the logs under /var/log/spark2 we see the following&lt;/P&gt;&lt;PRE&gt;17/10/31 21:00:23 ERROR FsHistoryProvider: Exception encountered when attempting to load application log hdfs://hdfsha/spark2-history/application_1507958402099_0822_1
org.apache.hadoop.security.AccessControlException: Permission denied: user=root, access=READ, inode="/spark2-history/application_1507958402099_0822_1":bipflop:hadoop:-rwxrwx---
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:319)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:219)
        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:190)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1955)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1939)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPathAccess(FSDirectory.java:1913)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:2000)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1969)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1882)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:699)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:376)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345)&lt;/PRE&gt;&lt;P&gt;please advise how to resolve this?&lt;/P&gt;&lt;P&gt;do you think that we need to do a restore of postgresql from backup in order to solve this?&lt;/P&gt;</description>
      <pubDate>Wed, 01 Nov 2017 03:00:45 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/spark-not-started-on-ambari-server/m-p/179012#M70521</guid>
      <dc:creator>mike_bronson7</dc:creator>
      <dc:date>2017-11-01T03:00:45Z</dc:date>
    </item>
    <item>
      <title>Re: spark not started on ambari server</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/spark-not-started-on-ambari-server/m-p/179013#M70522</link>
      <description>&lt;P&gt; &lt;A rel="user" href="https://community.cloudera.com/users/26229/uribarih.html" nodeid="26229"&gt;@Michael Bronson&lt;/A&gt;,&lt;/P&gt;&lt;P&gt;This looks like a permission issue. The /spark2-history should belong to the spark user. You can change it as below&lt;/P&gt;&lt;PRE&gt;hdfs dfs -chown spark /spark2-history
hdfs dfs -chown spark /spark-history&lt;/PRE&gt;&lt;P&gt;Thanks,&lt;/P&gt;&lt;P&gt;Aditya&lt;/P&gt;</description>
      <pubDate>Sun, 03 Dec 2017 13:05:13 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/spark-not-started-on-ambari-server/m-p/179013#M70522</guid>
      <dc:creator>asirna</dc:creator>
      <dc:date>2017-12-03T13:05:13Z</dc:date>
    </item>
  </channel>
</rss>

