<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Quick Start VM : Hive Editor Parsing Error : Exercise 3 in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/28175#M5764</link>
    <description>&lt;P&gt;I thought I was?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;[cloudera@quickstart ~]$ sudo -u hdfs hadoop fs -mkdir /user/hive/warehouse/original_access_logs&lt;BR /&gt;[cloudera@quickstart ~]$ sudo -u hdfs hadoop fs -copyFromLocal /opt/examples/log_files/access.log.2 /user/hive/warehouse/original_access_logs&lt;BR /&gt;[cloudera@quickstart ~]$ hadoop fs -ls /user/hive/warehouse/original_access_logs&lt;BR /&gt;Found 1 items&lt;BR /&gt;-rw-r--r-- 1 hdfs hive 39593868 2015-06-03 11:43 /user/hive/warehouse/original_access_logs/access.log.2&lt;BR /&gt;[cloudera@quickstart ~]$ beeline -u jdbc:hive2://quickstart:10000/default -n admin -d org.apache.hive.jdbc.HiveDriver&lt;BR /&gt;Connecting to jdbc:hive2://quickstart:10000/default&lt;BR /&gt;Connected to: Apache Hive (version 1.1.0-cdh5.4.0)&lt;BR /&gt;Driver: Hive JDBC (version 1.1.0-cdh5.4.0)&lt;BR /&gt;Transaction isolation: TRANSACTION_REPEATABLE_READ&lt;BR /&gt;Beeline version 1.1.0-cdh5.4.0 by Apache Hive&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; CREATE EXTERNAL TABLE intermediate_access_logs (&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; ip STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; date STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; method STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; url STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; http_version STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; code1 STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; code2 STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; dash STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; user_agent STRING)&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . 
&amp;gt; WITH SERDEPROPERTIES (&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; 'input.regex' = '([^ ]*) - - \\[([^\\]]*)\\] "([^\ ]*) ([^\ ]*) ([^\ ]*)" (\\d*) (\\d*) "([^"]*)" "([^"]*)"',&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; 'output.format.string' = '%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s'&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; )&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; LOCATION '/user/hive/warehouse/original_access_logs';&lt;BR /&gt;No rows affected (1.786 seconds)&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt;&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; CREATE EXTERNAL TABLE tokenized_access_logs (&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; ip STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; date STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; method STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; url STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; http_version STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; code1 STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; code2 STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; dash STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; user_agent STRING)&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; ROW FORMAT DELIMITED FIELDS TERMINATED BY ','&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . 
&amp;gt; LOCATION '/user/hive/warehouse/tokenized_access_logs';&lt;BR /&gt;No rows affected (0.215 seconds)&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt;&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; ADD JAR /usr/lib/hive/lib/hive-contrib.jar;&lt;BR /&gt;INFO : Added [/usr/lib/hive/lib/hive-contrib.jar] to class path&lt;BR /&gt;INFO : Added resources: [/usr/lib/hive/lib/hive-contrib.jar]&lt;BR /&gt;No rows affected (0.051 seconds)&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt;&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; INSERT OVERWRITE TABLE tokenized_access_logs SELECT * FROM intermediate_access_logs;&lt;BR /&gt;INFO : Number of reduce tasks is set to 0 since there's no reduce operator&lt;BR /&gt;ERROR : Job Submission failed with exception 'org.apache.hadoop.security.AccessControlException(Permission denied: user=admin, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:257)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:238)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:216)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:145)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:138)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6553)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6535)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6487)&lt;BR /&gt;at 
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4291)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4261)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4234)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:817)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:321)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:601)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)&lt;BR /&gt;at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)&lt;BR /&gt;at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)&lt;BR /&gt;)'&lt;BR /&gt;org.apache.hadoop.security.AccessControlException: Permission denied: user=admin, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:257)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:238)&lt;BR /&gt;at 
org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:216)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:145)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:138)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6553)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6535)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6487)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4291)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4261)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4234)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:817)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:321)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:601)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)&lt;BR /&gt;at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)&lt;BR /&gt;at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at 
javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)&lt;/P&gt;&lt;P&gt;at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)&lt;BR /&gt;at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)&lt;BR /&gt;at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)&lt;BR /&gt;at java.lang.reflect.Constructor.newInstance(Constructor.java:526)&lt;BR /&gt;at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)&lt;BR /&gt;at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2760)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2729)&lt;BR /&gt;at org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:870)&lt;BR /&gt;at org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:866)&lt;BR /&gt;at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)&lt;BR /&gt;at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:866)&lt;BR /&gt;at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:859)&lt;BR /&gt;at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:133)&lt;BR /&gt;at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:437)&lt;BR /&gt;at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1306)&lt;BR /&gt;at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1303)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at org.apache.hadoop.mapreduce.Job.submit(Job.java:1303)&lt;BR /&gt;at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:564)&lt;BR /&gt;at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:559)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:559)&lt;BR /&gt;at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:550)&lt;BR /&gt;at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:428)&lt;BR /&gt;at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:137)&lt;BR /&gt;at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)&lt;BR /&gt;at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:88)&lt;BR /&gt;at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1638)&lt;BR /&gt;at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1397)&lt;BR /&gt;at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1181)&lt;BR /&gt;at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1047)&lt;BR /&gt;at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1042)&lt;BR /&gt;at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:145)&lt;BR /&gt;at org.apache.hive.service.cli.operation.SQLOperation.access$100(SQLOperation.java:70)&lt;BR /&gt;at org.apache.hive.service.cli.operation.SQLOperation$1$1.run(SQLOperation.java:197)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at 
org.apache.hive.service.cli.operation.SQLOperation$1.run(SQLOperation.java:209)&lt;BR /&gt;at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)&lt;BR /&gt;at java.util.concurrent.FutureTask.run(FutureTask.java:262)&lt;BR /&gt;at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)&lt;BR /&gt;at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)&lt;BR /&gt;at java.lang.Thread.run(Thread.java:745)&lt;BR /&gt;Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=admin, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:257)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:238)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:216)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:145)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:138)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6553)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6535)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6487)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4291)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4261)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4234)&lt;BR /&gt;at 
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:817)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:321)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:601)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)&lt;BR /&gt;at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)&lt;BR /&gt;at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)&lt;/P&gt;&lt;P&gt;at org.apache.hadoop.ipc.Client.call(Client.java:1468)&lt;BR /&gt;at org.apache.hadoop.ipc.Client.call(Client.java:1399)&lt;BR /&gt;at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)&lt;BR /&gt;at com.sun.proxy.$Proxy17.mkdirs(Unknown Source)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:539)&lt;BR /&gt;at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;BR /&gt;at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)&lt;BR /&gt;at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;BR /&gt;at java.lang.reflect.Method.invoke(Method.java:606)&lt;BR /&gt;at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)&lt;BR /&gt;at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)&lt;BR /&gt;at com.sun.proxy.$Proxy18.mkdirs(Unknown Source)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2758)&lt;BR /&gt;... 42 more&lt;/P&gt;&lt;P&gt;Error: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask (state=08S01,code=1)&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt;&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; DROP TABLE intermediate_access_logs;&lt;BR /&gt;No rows affected (0.16 seconds)&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt;&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; !quit&lt;BR /&gt;Closing: 0: jdbc:hive2://quickstart:10000/default&lt;/P&gt;</description>
    <pubDate>Wed, 03 Jun 2015 21:05:58 GMT</pubDate>
    <dc:creator>dbassassin</dc:creator>
    <dc:date>2015-06-03T21:05:58Z</dc:date>
    <item>
      <title>Quick Start VM : Hive Editor Parsing Error : Exercise 3</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/27038#M5760</link>
      <description>&lt;P&gt;I am working through the quickstart tutorial and am experiencing errors with the HUE Hive Editor using the below SQL; upon execution the editor parses the output.format.string property and throws up a dialog requesting parameter "s"...&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Sorry if this is a FAQ, but couldn't find any reference to it in the community forums&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;mnsm&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;FONT face="courier new,courier"&gt;CREATE EXTERNAL TABLE intermediate_access_logs (&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;ip STRING,&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;date STRING,&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;method STRING,&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;url STRING,&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;http_version STRING,&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;code1 STRING,&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;code2 STRING,&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;dash STRING,&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;user_agent STRING)&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;WITH SERDEPROPERTIES (&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;'input.regex' = '([^ ]*) - - \\[([^\\]]*)\\] "([^\ ]*) ([^\ ]*) ([^\ ]*)" (\\d*) (\\d*) "([^"]*)" "([^"]*)"',&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;'output.format.string' = '%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s'&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;)&lt;/FONT&gt;&lt;BR /&gt;&lt;FONT face="courier new,courier"&gt;LOCATION 
'/user/hive/warehouse/original_access_logs';&lt;/FONT&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;/*... downstream activities elided */&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;FONT face="courier new,courier"&gt;DROP TABLE intermediate_access_logs;&lt;/FONT&gt;&lt;/P&gt;&lt;P&gt;&lt;FONT face="courier new,courier"&gt;!quit&lt;/FONT&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 21 Apr 2015 13:58:39 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/27038#M5760</guid>
      <dc:creator>makenosuddenmovements</dc:creator>
      <dc:date>2015-04-21T13:58:39Z</dc:date>
    </item>
    <item>
      <title>Re: Quick Start VM : Hive Editor Parsing Error : Exercise 3</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/27039#M5761</link>
      <description>I see there's an error in the tutorial. These queries are intended to be in the Beeline shell that is described above this step. The statements should be pasted into that shell, not into the Hive Query Editor app. I'll get that corrected...&lt;BR /&gt;&lt;BR /&gt;The "$s" in the format string is a special syntax interpreted by Hue as a parameter for the user to provide. To make it send the query to Hive as intended, you would need to escape the $ signs (e.g. %1$$s, etc.) However you may also run into some permissions issues querying the dataset via Hue that were beyond the scope of the tutorial - hence using Beeline.&lt;BR /&gt;&lt;BR /&gt;Thanks for reporting this!</description>
      <pubDate>Tue, 05 May 2015 20:40:16 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/27039#M5761</guid>
      <dc:creator>Sean</dc:creator>
      <dc:date>2015-05-05T20:40:16Z</dc:date>
    </item>
    <item>
      <title>Re: Quick Start VM : Hive Editor Parsing Error : Exercise 3</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/28171#M5762</link>
      <description>&lt;P&gt;I ran into a similar issue, however when I used the copy text button and pasted into Putty I received the permission denied error. &amp;nbsp;Do you know how to fix the permissions?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 03 Jun 2015 19:06:22 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/28171#M5762</guid>
      <dc:creator>dbassassin</dc:creator>
      <dc:date>2015-06-03T19:06:22Z</dc:date>
    </item>
    <item>
      <title>Re: Quick Start VM : Hive Editor Parsing Error : Exercise 3</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/28173#M5763</link>
      <description>I think the point of Sean's post is that you shouldn't use the standard shell -- Try using the Beeline shell instead -- if that doesn't help maybe the community will weigh in. Good luck.</description>
      <pubDate>Wed, 03 Jun 2015 19:40:34 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/28173#M5763</guid>
      <dc:creator>makenosuddenmovements</dc:creator>
      <dc:date>2015-06-03T19:40:34Z</dc:date>
    </item>
    <item>
      <title>Re: Quick Start VM : Hive Editor Parsing Error : Exercise 3</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/28175#M5764</link>
      <description>&lt;P&gt;I thought I was?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;[cloudera@quickstart ~]$ sudo -u hdfs hadoop fs -mkdir /user/hive/warehouse/original_access_logs&lt;BR /&gt;[cloudera@quickstart ~]$ sudo -u hdfs hadoop fs -copyFromLocal /opt/examples/log_files/access.log.2 /user/hive/warehouse/original_access_logs&lt;BR /&gt;[cloudera@quickstart ~]$ hadoop fs -ls /user/hive/warehouse/original_access_logs&lt;BR /&gt;Found 1 items&lt;BR /&gt;-rw-r--r-- 1 hdfs hive 39593868 2015-06-03 11:43 /user/hive/warehouse/original_access_logs/access.log.2&lt;BR /&gt;[cloudera@quickstart ~]$ beeline -u jdbc:hive2://quickstart:10000/default -n admin -d org.apache.hive.jdbc.HiveDriver&lt;BR /&gt;Connecting to jdbc:hive2://quickstart:10000/default&lt;BR /&gt;Connected to: Apache Hive (version 1.1.0-cdh5.4.0)&lt;BR /&gt;Driver: Hive JDBC (version 1.1.0-cdh5.4.0)&lt;BR /&gt;Transaction isolation: TRANSACTION_REPEATABLE_READ&lt;BR /&gt;Beeline version 1.1.0-cdh5.4.0 by Apache Hive&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; CREATE EXTERNAL TABLE intermediate_access_logs (&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; ip STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; date STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; method STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; url STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; http_version STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; code1 STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; code2 STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; dash STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; user_agent STRING)&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . 
&amp;gt; WITH SERDEPROPERTIES (&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; 'input.regex' = '([^ ]*) - - \\[([^\\]]*)\\] "([^\ ]*) ([^\ ]*) ([^\ ]*)" (\\d*) (\\d*) "([^"]*)" "([^"]*)"',&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; 'output.format.string' = '%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s'&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; )&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; LOCATION '/user/hive/warehouse/original_access_logs';&lt;BR /&gt;No rows affected (1.786 seconds)&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt;&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; CREATE EXTERNAL TABLE tokenized_access_logs (&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; ip STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; date STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; method STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; url STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; http_version STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; code1 STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; code2 STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; dash STRING,&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; user_agent STRING)&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . &amp;gt; ROW FORMAT DELIMITED FIELDS TERMINATED BY ','&lt;BR /&gt;. . . . . . . . . . . . . . . . . . . . 
&amp;gt; LOCATION '/user/hive/warehouse/tokenized_access_logs';&lt;BR /&gt;No rows affected (0.215 seconds)&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt;&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; ADD JAR /usr/lib/hive/lib/hive-contrib.jar;&lt;BR /&gt;INFO : Added [/usr/lib/hive/lib/hive-contrib.jar] to class path&lt;BR /&gt;INFO : Added resources: [/usr/lib/hive/lib/hive-contrib.jar]&lt;BR /&gt;No rows affected (0.051 seconds)&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt;&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; INSERT OVERWRITE TABLE tokenized_access_logs SELECT * FROM intermediate_access_logs;&lt;BR /&gt;INFO : Number of reduce tasks is set to 0 since there's no reduce operator&lt;BR /&gt;ERROR : Job Submission failed with exception 'org.apache.hadoop.security.AccessControlException(Permission denied: user=admin, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:257)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:238)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:216)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:145)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:138)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6553)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6535)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6487)&lt;BR /&gt;at 
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4291)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4261)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4234)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:817)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:321)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:601)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)&lt;BR /&gt;at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)&lt;BR /&gt;at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)&lt;BR /&gt;)'&lt;BR /&gt;org.apache.hadoop.security.AccessControlException: Permission denied: user=admin, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:257)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:238)&lt;BR /&gt;at 
org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:216)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:145)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:138)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6553)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6535)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6487)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4291)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4261)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4234)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:817)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:321)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:601)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)&lt;BR /&gt;at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)&lt;BR /&gt;at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at 
javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)&lt;/P&gt;&lt;P&gt;at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)&lt;BR /&gt;at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)&lt;BR /&gt;at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)&lt;BR /&gt;at java.lang.reflect.Constructor.newInstance(Constructor.java:526)&lt;BR /&gt;at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)&lt;BR /&gt;at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2760)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2729)&lt;BR /&gt;at org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:870)&lt;BR /&gt;at org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:866)&lt;BR /&gt;at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)&lt;BR /&gt;at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:866)&lt;BR /&gt;at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:859)&lt;BR /&gt;at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:133)&lt;BR /&gt;at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:437)&lt;BR /&gt;at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1306)&lt;BR /&gt;at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1303)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at org.apache.hadoop.mapreduce.Job.submit(Job.java:1303)&lt;BR /&gt;at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:564)&lt;BR /&gt;at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:559)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:559)&lt;BR /&gt;at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:550)&lt;BR /&gt;at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:428)&lt;BR /&gt;at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:137)&lt;BR /&gt;at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)&lt;BR /&gt;at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:88)&lt;BR /&gt;at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1638)&lt;BR /&gt;at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1397)&lt;BR /&gt;at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1181)&lt;BR /&gt;at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1047)&lt;BR /&gt;at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1042)&lt;BR /&gt;at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:145)&lt;BR /&gt;at org.apache.hive.service.cli.operation.SQLOperation.access$100(SQLOperation.java:70)&lt;BR /&gt;at org.apache.hive.service.cli.operation.SQLOperation$1$1.run(SQLOperation.java:197)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at 
org.apache.hive.service.cli.operation.SQLOperation$1.run(SQLOperation.java:209)&lt;BR /&gt;at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)&lt;BR /&gt;at java.util.concurrent.FutureTask.run(FutureTask.java:262)&lt;BR /&gt;at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)&lt;BR /&gt;at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)&lt;BR /&gt;at java.lang.Thread.run(Thread.java:745)&lt;BR /&gt;Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=admin, access=WRITE, inode="/user":hdfs:supergroup:drwxr-xr-x&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:257)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:238)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:216)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:145)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:138)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6553)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6535)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:6487)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4291)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4261)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4234)&lt;BR /&gt;at 
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:817)&lt;BR /&gt;at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:321)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:601)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)&lt;BR /&gt;at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)&lt;BR /&gt;at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)&lt;BR /&gt;at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)&lt;/P&gt;&lt;P&gt;at org.apache.hadoop.ipc.Client.call(Client.java:1468)&lt;BR /&gt;at org.apache.hadoop.ipc.Client.call(Client.java:1399)&lt;BR /&gt;at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)&lt;BR /&gt;at com.sun.proxy.$Proxy17.mkdirs(Unknown Source)&lt;BR /&gt;at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:539)&lt;BR /&gt;at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;BR /&gt;at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)&lt;BR /&gt;at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;BR /&gt;at java.lang.reflect.Method.invoke(Method.java:606)&lt;BR /&gt;at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)&lt;BR /&gt;at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)&lt;BR /&gt;at com.sun.proxy.$Proxy18.mkdirs(Unknown Source)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2758)&lt;BR /&gt;... 42 more&lt;/P&gt;&lt;P&gt;Error: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask (state=08S01,code=1)&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt;&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; DROP TABLE intermediate_access_logs;&lt;BR /&gt;No rows affected (0.16 seconds)&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt;&lt;BR /&gt;0: jdbc:hive2://quickstart:10000/default&amp;gt; !quit&lt;BR /&gt;Closing: 0: jdbc:hive2://quickstart:10000/default&lt;/P&gt;</description>
      <pubDate>Wed, 03 Jun 2015 21:05:58 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/28175#M5764</guid>
      <dc:creator>dbassassin</dc:creator>
      <dc:date>2015-06-03T21:05:58Z</dc:date>
    </item>
    <item>
      <title>Re: Quick Start VM : Hive Editor Parsing Error : Exercise 3</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/28615#M5765</link>
      <description>&lt;P&gt;You probably figured this out, but your step to start beeline should use cloudera as the username. The tutorial has a typographic error:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;This: [cloudera@quickstart ~]$ beeline -u jdbc:hive2://quickstart:10000/default -n admin -d org.apache.hive.jdbc.HiveDriver should be&lt;/P&gt;&lt;P&gt;[cloudera@quickstart ~]$ beeline -u jdbc:hive2://quickstart:10000/default -n cloudera -d org.apache.hive.jdbc.HiveDriver&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Delete the two log tables you created and then start this section again using -n cloudera.&lt;/P&gt;</description>
      <pubDate>Wed, 17 Jun 2015 14:35:39 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/28615#M5765</guid>
      <dc:creator>rim</dc:creator>
      <dc:date>2015-06-17T14:35:39Z</dc:date>
    </item>
    <item>
      <title>Re: Quick Start VM : Hive Editor Parsing Error : Exercise 3</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/31104#M5766</link>
      <description>&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class="hps"&gt;This&lt;/SPAN&gt; &lt;SPAN class="hps"&gt;topic&lt;/SPAN&gt; &lt;SPAN class="hps"&gt;is interesting&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;FONT size="1 2 3 4 5 6 7" color="#FFFFFF"&gt;&lt;A href="http://www.tinytanksunblocked.com/" target="_self"&gt;Tiny Tanks&lt;/A&gt;&lt;/FONT&gt;&lt;/P&gt;&lt;P&gt;&lt;FONT size="1 2 3 4 5 6 7" color="#FFFFFF"&gt;&lt;A href="http://www.tinytanksunblocked.com/" target="_self"&gt;Tiny Tanks Unblocked&lt;/A&gt;&lt;/FONT&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 24 Aug 2015 01:41:48 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Quick-Start-VM-Hive-Editor-Parsing-Error-Exercise-3/m-p/31104#M5766</guid>
      <dc:creator>leanhnam220</dc:creator>
      <dc:date>2015-08-24T01:41:48Z</dc:date>
    </item>
  </channel>
</rss>

