<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>rss.livelink.threads-in-node</title>
    <link>http://community.cloudera.com/</link>
    <description>Cloudera Community</description>
    <pubDate>Tue, 26 Jun 2018 21:37:55 GMT</pubDate>
    <dc:creator>Community</dc:creator>
    <dc:date>2018-06-26T21:37:55Z</dc:date>
    <item>
      <title>Hbase Role Log is not generated.</title>
      <link>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Hbase-Role-Log-is-not-generated/m-p/69393#M14135</link>
      <description>&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;Hbase Role Log is not generated.&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p2"&gt;I got errors below when I clicked on Role Log -&amp;gt; Full Log File&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;There was an error communicating with the server:&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;[Errno 2] No such file or directory: '/var/log/hbase/hbase-cmf-HBASE-MASTER-ip-10-11-1-105.ec2.internal.log.out'&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;sudo ls -alF /var/log/hbase&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;total 4&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;3 hbase hbase &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;20 Jun 26 11:20 ./&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x. 17 root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;4096 Jun 26 11:20 ../&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;2 hbase hbase&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;6 Jun 26 11:20 stacks/&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Tue, 26 Jun 2018 19:12:54 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Hbase-Role-Log-is-not-generated/m-p/69393#M14135</guid>
      <dc:creator>PeterLuo</dc:creator>
      <dc:date>2018-06-26T19:12:54Z</dc:date>
    </item>
    <item>
      <title>Could not find yarn-site.xml, make sure to deploy yarn client in UI</title>
      <link>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Could-not-find-yarn-site-xml-make-sure-to-deploy-yarn-client/m-p/69391#M14133</link>
      <description>&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;Deploy hbase failed. It looks for the folder /etc/hadoop/conf.cloudera.yarn. But, at the master server. the folder name is conf.cloudera.YARN and conf.cloudera.HDFS. "YARN" and "HDFS" are capital case. See below. How to solve it?&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p2"&gt;Error Messages:&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;++ echo 'Copying yarn-site.xml to hbase directory'&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;++ '[' -r /etc/hadoop/conf.cloudera.yarn/yarn-site.xml ']'&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;++ echo 'Could not find yarn-site.xml, make sure to deploy yarn client in UI'&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;Could not find yarn-site.xml, make sure to deploy yarn client in UI&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;ls -alF /etc/hadoop&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;total 12&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;4 root root &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;70 Jun 26 11:29 ./&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x. 
101 root root 8192 Jun 26 11:27 ../&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;lrwxrwxrwx&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;1 root root &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;29 Jun 26 11:29 conf -&amp;gt; /etc/alternatives/hadoop-conf/&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;2 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;214 Jun 26 11:29 conf.cloudera.HDFS/&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;2 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;258 Jun 26 11:29 conf.cloudera.YARN/&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;ls -alF /etc/hadoop/conf.cloudera.YARN&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;total 48&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x 2 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;258 Jun 26 11:29 ./&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x 4 root root &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;70 Jun 26 11:29 ../&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;20 Jun 26 11:29 __cloudera_generation__&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;66 Jun 26 11:29 __cloudera_metadata__&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root 3870 Jun 26 11:29 core-site.xml&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN 
class="s1"&gt;-rw-r--r-- 1 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;617 Jun 26 11:29 hadoop-env.sh&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root 1772 Jun 26 11:29 hdfs-site.xml&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;314 Jun 26 11:29 log4j.properties&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root 5107 Jun 26 11:29 mapred-site.xml&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;315 Jun 26 11:29 ssl-client.xml&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;630 Jun 26 11:29 topology.map&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rwxr-xr-x 1 root root 1594 Jun 26 11:29 topology.py*&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root 3661 Jun 26 11:29 yarn-site.xml&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;ls -alF /etc/hadoop/conf.cloudera.HDFS&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;total 36&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x 2 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;214 Jun 26 11:29 ./&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x 4 root root &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;70 Jun 26 11:29 ../&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;20 Jun 26 11:29 __cloudera_generation__&lt;/SPAN&gt;&lt;/P&gt;&lt;P 
class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;66 Jun 26 11:29 __cloudera_metadata__&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root 3557 Jun 26 11:29 core-site.xml&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root 2696 Jun 26 11:29 hadoop-env.sh&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root 1772 Jun 26 11:29 hdfs-site.xml&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;314 Jun 26 11:29 log4j.properties&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;315 Jun 26 11:29 ssl-client.xml&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rw-r--r-- 1 root root&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;630 Jun 26 11:29 topology.map&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-rwxr-xr-x 1 root root 1594 Jun 26 11:29 topology.py*&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 26 Jun 2018 18:56:27 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Could-not-find-yarn-site-xml-make-sure-to-deploy-yarn-client/m-p/69391#M14133</guid>
      <dc:creator>PeterLuo</dc:creator>
      <dc:date>2018-06-26T18:56:27Z</dc:date>
    </item>
    <item>
      <title>Limit Query Result Rows from a Hive Table on Hue</title>
      <link>http://community.cloudera.com/t5/Web-UI-Hue-Beeswax/Limit-Query-Result-Rows-from-a-Hive-Table-on-Hue/m-p/69390#M2748</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;how could I limit the rows returned from a query on a Hive table using Hue 4.1.0 without using the LIMIT syntax or the parameters %_partitions_limit.&lt;/P&gt;</description>
      <pubDate>Tue, 26 Jun 2018 18:05:51 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Web-UI-Hue-Beeswax/Limit-Query-Result-Rows-from-a-Hive-Table-on-Hue/m-p/69390#M2748</guid>
      <dc:creator>rengon</dc:creator>
      <dc:date>2018-06-26T18:05:51Z</dc:date>
    </item>
    <item>
      <title>Failed to install Cloudera Manager Agent on all Nodes</title>
      <link>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Failed-to-install-Cloudera-Manager-Agent-on-all-Nodes/m-p/69387#M14130</link>
      <description>&lt;P&gt;Good Day,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Setting up my first 17 node cluster and have gotten to the point of deploying Cloudera Manager Agent (5.15.0) through Cloudera Manager across the nodes in the cluster (running CentOS 7.5).&amp;nbsp; Every nodes reports the following error:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;&amp;nbsp;---------------------&lt;/SPAN&gt;&lt;SPAN&gt;-----------------------------------------&lt;/SPAN&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;Error: Package: glibc-2.17-222.el7.i686 (CentOS)&amp;nbsp;&lt;/SPAN&gt;&lt;BR /&gt;Requires: libfreebl3.so&amp;nbsp;&lt;BR /&gt;&lt;SPAN&gt;Error: Package: krb5-devel-1.15.1-18.el7.x86_64 (CentOS)&amp;nbsp;&lt;/SPAN&gt;&lt;BR /&gt;Requires: libkadm5(x86-64) = 1.15.1-18.el7&amp;nbsp;&lt;BR /&gt;Installed: libkadm5-1.15.1-19.el7.x86_64 (@updates)&amp;nbsp;&lt;BR /&gt;libkadm5(x86-64) = 1.15.1-19.el7&amp;nbsp;&lt;BR /&gt;Available: libkadm5-1.15.1-18.el7.x86_64 (CentOS)&amp;nbsp;&lt;BR /&gt;libkadm5(x86-64) = 1.15.1-18.el7&amp;nbsp;&lt;BR /&gt;&lt;SPAN&gt;Error: Package: glibc-2.17-222.el7.i686 (CentOS)&amp;nbsp;&lt;/SPAN&gt;&lt;BR /&gt;Requires: libfreebl3.so(NSSRAWHASH_3.12.3)&amp;nbsp;&lt;BR /&gt;&lt;SPAN&gt;Error: Package: krb5-devel-1.15.1-18.el7.x86_64 (CentOS)&amp;nbsp;&lt;/SPAN&gt;&lt;BR /&gt;Requires: krb5-libs(x86-64) = 1.15.1-18.el7&amp;nbsp;&lt;BR /&gt;Installed: krb5-libs-1.15.1-19.el7.x86_64 (@updates)&amp;nbsp;&lt;BR /&gt;krb5-libs(x86-64) = 1.15.1-19.el7&amp;nbsp;&lt;BR /&gt;You could try using --skip-broken to work around the problem&amp;nbsp;&lt;BR /&gt;You could try running: rpm -Va --nofiles --nodigest&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;&amp;nbsp;--------------------------------------------------------------&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I spoke to my server admin and was informed that we cannot deploy the version of the components it is requesting on CentOS 
7.5, as these are core components that come at this version level with the OS template.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I did open a case and the Cloudera support person is attempting to setup an environment using the same versions, although I thought that maybe someone could offer some additional suggestions or advice as this is a high-priority issue.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Any assistance would be greatly appreciated.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Greg&lt;/P&gt;</description>
      <pubDate>Tue, 26 Jun 2018 14:28:06 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Failed-to-install-Cloudera-Manager-Agent-on-all-Nodes/m-p/69387#M14130</guid>
      <dc:creator>gfrair</dc:creator>
      <dc:date>2018-06-26T14:28:06Z</dc:date>
    </item>
    <item>
      <title>How to automatically sync a Hive external table with a MySQL table without using Sqoop?</title>
      <link>http://community.cloudera.com/t5/Batch-SQL-Apache-Hive/How-to-automatically-sync-a-Hive-external-table-with-a-MySQL/m-p/69383#M2737</link>
      <description>&lt;P&gt;Hi All,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I'm already having a MySQL table in my local machine (Linux) itself, and I have a Hive external table with the same schema as the MySQL table.&lt;/P&gt;&lt;P&gt;I want to sync my hive external table whenever new record is inserted or updated.Batch update is ok with me say hourly.&lt;BR /&gt;What is the best possible approach to achieve the same.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks,&lt;/P&gt;&lt;P&gt;Sumit&lt;/P&gt;</description>
      <pubDate>Tue, 26 Jun 2018 13:55:56 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Batch-SQL-Apache-Hive/How-to-automatically-sync-a-Hive-external-table-with-a-MySQL/m-p/69383#M2737</guid>
      <dc:creator>sumit2687</dc:creator>
      <dc:date>2018-06-26T13:55:56Z</dc:date>
    </item>
    <item>
      <title>Kudu start up - ksck: table consistency check errors</title>
      <link>http://community.cloudera.com/t5/Interactive-Short-cycle-SQL/Kudu-start-up-ksck-table-consistency-check-errors/m-p/69381#M4647</link>
      <description>&lt;P&gt;Hi everyone,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;We use CDH to manage our data using Kudu and Impala. Whenever we restart Kudu, we see very high start up times to the tune of an hour or more. When we run ksck during this time we get the following (summary at the end):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;==================
Errors:
==================
table consistency check error: Corruption: 51 out of 74 table(s) are bad

FAILED
Runtime error: ksck discovered errors&lt;/PRE&gt;&lt;P&gt;All the other tables are either under replicated or unavailable. Is this normal? Any help in making this faster will be appreciated.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks,&lt;/P&gt;&lt;P&gt;--&lt;/P&gt;&lt;P&gt;Razee&lt;/P&gt;</description>
      <pubDate>Tue, 26 Jun 2018 13:22:37 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Interactive-Short-cycle-SQL/Kudu-start-up-ksck-table-consistency-check-errors/m-p/69381#M4647</guid>
      <dc:creator>razee</dc:creator>
      <dc:date>2018-06-26T13:22:37Z</dc:date>
    </item>
    <item>
      <title>Failed to become active master org.apache.hadoop.security.AccessControlException: Permission denied:</title>
      <link>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Failed-to-become-active-master-org-apache-hadoop-security/m-p/69371#M14124</link>
      <description>&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;I cannot start hbase service. I got Permission denied.&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;Failed to become active master&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;org.apache.hadoop.security.AccessControlException: Permission denied: user=hbase, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:279)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:260)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:240)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:162)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;......&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;Unhandled exception. 
Starting shutdown.&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;org.apache.hadoop.security.AccessControlException: Permission denied: user=hbase, access=WRITE, inode="/":hdfs:supergroup:drwxr-xr-x&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:279)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:260)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;......&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;Output of ls:&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;sudo -u hdfs hdfs dfs -ls /user&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;Found 2 items&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxrwxrwx &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;- mapred mapred&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;0 2018-06-25 14:48 /user/history&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxrwxrwx &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;- mapred mapred&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;0 2018-06-25 13:59 /user/spark&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;sudo -u hdfs hdfs 
dfs -ls /hbase&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;Found 1 items&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;drwxr-xr-x &lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &lt;/SPAN&gt;- hdfs hbase&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;0 2018-06-25 15:06 /hbase/data&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;sudo -u hdfs hdfs dfs -ls /hbase/data/hbase&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p2"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;The /hbase/data/hbase is empty.&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 25 Jun 2018 22:20:48 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Failed-to-become-active-master-org-apache-hadoop-security/m-p/69371#M14124</guid>
      <dc:creator>PeterLuo</dc:creator>
      <dc:date>2018-06-25T22:20:48Z</dc:date>
    </item>
    <item>
      <title>Failed to start Cloudera Manager Agent</title>
      <link>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Failed-to-start-Cloudera-Manager-Agent/m-p/69368#M14123</link>
      <description>&lt;P&gt;Hi, Please help me to address below issue.. after agent installation , while starting cloudera manager agent on second VM machine I am getting following error ..&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;----------------- logs is as shown below --------------&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;Starting cloudera-scm-agent: &amp;#27;[60G[&amp;#27;[0;31mFAILED&amp;#27;[0;39m]&lt;BR /&gt;END (1)&lt;BR /&gt;agent logs:&lt;BR /&gt;BEGIN tail -n 50 /var/log/cloudera-scm-agent//cloudera-scm-agent.out | sed 's/^/&amp;gt;&amp;gt;/'&lt;BR /&gt;&amp;gt;&amp;gt; [--orphan_process_directory_refresh_interval ORPHAN_PROCESS_DIRECTORY_REFRESH_INTERVAL]&lt;BR /&gt;&amp;gt;&amp;gt; [--agent_httpd_port AGENT_HTTPD_PORT] --package_dir&lt;BR /&gt;&amp;gt;&amp;gt; PACKAGE_DIR [--parcel_dir PARCEL_DIR]&lt;BR /&gt;&amp;gt;&amp;gt; [--supervisord_path SUPERVISORD_PATH]&lt;BR /&gt;&amp;gt;&amp;gt; [--supervisord_httpd_port SUPERVISORD_HTTPD_PORT]&lt;BR /&gt;&amp;gt;&amp;gt; [--standalone STANDALONE] [--master MASTER]&lt;BR /&gt;&amp;gt;&amp;gt; [--environment ENVIRONMENT] [--host_id HOST_ID]&lt;BR /&gt;&amp;gt;&amp;gt; [--disable_supervisord_events] --hostname HOSTNAME&lt;BR /&gt;&amp;gt;&amp;gt; --ip_address IP_ADDRESS&lt;BR /&gt;&amp;gt;&amp;gt; [--reported_hostname REPORTED_HOSTNAME] [--use_tls]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_key_file CLIENT_KEY_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_cert_file CLIENT_CERT_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--verify_cert_file VERIFY_CERT_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--verify_cert_dir VERIFY_CERT_DIR]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_keypw_file CLIENT_KEYPW_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_keypw_cmd CLIENT_KEYPW_CMD]&lt;BR /&gt;&amp;gt;&amp;gt; [--max_cert_depth MAX_CERT_DEPTH] [--logfile LOGFILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--logdir LOGDIR] [--optional_token] [--clear_agent_dir]&lt;BR /&gt;&amp;gt;&amp;gt; [--sudo_command SUDO_COMMAND] [--pidfile PIDFILE]&lt;BR /&gt;&amp;gt;&amp;gt; 
[--comm_name COMM_NAME]&lt;BR /&gt;&amp;gt;&amp;gt;cmf-agent: error: argument --hostname is required&lt;BR /&gt;&amp;gt;&amp;gt;[26/Jun/2018 00:17:43 +0000] 11328 MainThread agent INFO SCM Agent Version: 5.13.0&lt;BR /&gt;&amp;gt;&amp;gt;[26/Jun/2018 00:17:43 +0000] 11328 MainThread agent ERROR Could not determine hostname or ip address; proceeding.&lt;BR /&gt;&amp;gt;&amp;gt;Traceback (most recent call last):&lt;BR /&gt;&amp;gt;&amp;gt; File "/usr/lib64/cmf/agent/build/env/lib/python2.6/site-packages/cmf-5.13.0-py2.6.egg/cmf/agent.py", line 2686, in parse_arguments&lt;BR /&gt;&amp;gt;&amp;gt; ip_address = socket.gethostbyname(fqdn)&lt;BR /&gt;&amp;gt;&amp;gt;gaierror: [Errno -3] Temporary failure in name resolution&lt;BR /&gt;&amp;gt;&amp;gt;usage: cmf-agent [-h] [--agent_dir AGENT_DIR] [--daemon] [--lib_dir LIB_DIR]&lt;BR /&gt;&amp;gt;&amp;gt; [--orphan_process_directory_staleness_threshold ORPHAN_PROCESS_DIRECTORY_STALENESS_THRESHOLD]&lt;BR /&gt;&amp;gt;&amp;gt; [--orphan_process_directory_refresh_interval ORPHAN_PROCESS_DIRECTORY_REFRESH_INTERVAL]&lt;BR /&gt;&amp;gt;&amp;gt; [--agent_httpd_port AGENT_HTTPD_PORT] --package_dir&lt;BR /&gt;&amp;gt;&amp;gt; PACKAGE_DIR [--parcel_dir PARCEL_DIR]&lt;BR /&gt;&amp;gt;&amp;gt; [--supervisord_path SUPERVISORD_PATH]&lt;BR /&gt;&amp;gt;&amp;gt; [--supervisord_httpd_port SUPERVISORD_HTTPD_PORT]&lt;BR /&gt;&amp;gt;&amp;gt; [--standalone STANDALONE] [--master MASTER]&lt;BR /&gt;&amp;gt;&amp;gt; [--environment ENVIRONMENT] [--host_id HOST_ID]&lt;BR /&gt;&amp;gt;&amp;gt; [--disable_supervisord_events] --hostname HOSTNAME&lt;BR /&gt;&amp;gt;&amp;gt; --ip_address IP_ADDRESS&lt;BR /&gt;&amp;gt;&amp;gt; [--reported_hostname REPORTED_HOSTNAME] [--use_tls]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_key_file CLIENT_KEY_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_cert_file CLIENT_CERT_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--verify_cert_file VERIFY_CERT_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--verify_cert_dir VERIFY_CERT_DIR]&lt;BR 
/&gt;&amp;gt;&amp;gt; [--client_keypw_file CLIENT_KEYPW_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_keypw_cmd CLIENT_KEYPW_CMD]&lt;BR /&gt;&amp;gt;&amp;gt; [--max_cert_depth MAX_CERT_DEPTH] [--logfile LOGFILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--logdir LOGDIR] [--optional_token] [--clear_agent_dir]&lt;BR /&gt;&amp;gt;&amp;gt; [--sudo_command SUDO_COMMAND] [--pidfile PIDFILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--comm_name COMM_NAME]&lt;BR /&gt;&amp;gt;&amp;gt;cmf-agent: error: argument --hostname is required&lt;BR /&gt;&amp;gt;&amp;gt; [--orphan_process_directory_refresh_interval ORPHAN_PROCESS_DIRECTORY_REFRESH_INTERVAL]&lt;BR /&gt;&amp;gt;&amp;gt; [--agent_httpd_port AGENT_HTTPD_PORT] --package_dir&lt;BR /&gt;&amp;gt;&amp;gt; PACKAGE_DIR [--parcel_dir PARCEL_DIR]&lt;BR /&gt;&amp;gt;&amp;gt; [--supervisord_path SUPERVISORD_PATH]&lt;BR /&gt;&amp;gt;&amp;gt; [--supervisord_httpd_port SUPERVISORD_HTTPD_PORT]&lt;BR /&gt;&amp;gt;&amp;gt; [--standalone STANDALONE] [--master MASTER]&lt;BR /&gt;&amp;gt;&amp;gt; [--environment ENVIRONMENT] [--host_id HOST_ID]&lt;BR /&gt;&amp;gt;&amp;gt; [--disable_supervisord_events] --hostname HOSTNAME&lt;BR /&gt;&amp;gt;&amp;gt; --ip_address IP_ADDRESS&lt;BR /&gt;&amp;gt;&amp;gt; [--reported_hostname REPORTED_HOSTNAME] [--use_tls]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_key_file CLIENT_KEY_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_cert_file CLIENT_CERT_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--verify_cert_file VERIFY_CERT_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--verify_cert_dir VERIFY_CERT_DIR]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_keypw_file CLIENT_KEYPW_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_keypw_cmd CLIENT_KEYPW_CMD]&lt;BR /&gt;&amp;gt;&amp;gt; [--max_cert_depth MAX_CERT_DEPTH] [--logfile LOGFILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--logdir LOGDIR] [--optional_token] [--clear_agent_dir]&lt;BR /&gt;&amp;gt;&amp;gt; [--sudo_command SUDO_COMMAND] [--pidfile PIDFILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--comm_name COMM_NAME]&lt;BR /&gt;&amp;gt;&amp;gt;cmf-agent: 
error: argument --hostname is required&lt;BR /&gt;&amp;gt;&amp;gt;[26/Jun/2018 00:17:43 +0000] 11328 MainThread agent INFO SCM Agent Version: 5.13.0&lt;BR /&gt;&amp;gt;&amp;gt;[26/Jun/2018 00:17:43 +0000] 11328 MainThread agent ERROR Could not determine hostname or ip address; proceeding.&lt;BR /&gt;&amp;gt;&amp;gt;Traceback (most recent call last):&lt;BR /&gt;&amp;gt;&amp;gt; File "/usr/lib64/cmf/agent/build/env/lib/python2.6/site-packages/cmf-5.13.0-py2.6.egg/cmf/agent.py", line 2686, in parse_arguments&lt;BR /&gt;&amp;gt;&amp;gt; ip_address = socket.gethostbyname(fqdn)&lt;BR /&gt;&amp;gt;&amp;gt;gaierror: [Errno -3] Temporary failure in name resolution&lt;BR /&gt;&amp;gt;&amp;gt;usage: cmf-agent [-h] [--agent_dir AGENT_DIR] [--daemon] [--lib_dir LIB_DIR]&lt;BR /&gt;&amp;gt;&amp;gt; [--orphan_process_directory_staleness_threshold ORPHAN_PROCESS_DIRECTORY_STALENESS_THRESHOLD]&lt;BR /&gt;&amp;gt;&amp;gt; [--orphan_process_directory_refresh_interval ORPHAN_PROCESS_DIRECTORY_REFRESH_INTERVAL]&lt;BR /&gt;&amp;gt;&amp;gt; [--agent_httpd_port AGENT_HTTPD_PORT] --package_dir&lt;BR /&gt;&amp;gt;&amp;gt; PACKAGE_DIR [--parcel_dir PARCEL_DIR]&lt;BR /&gt;&amp;gt;&amp;gt; [--supervisord_path SUPERVISORD_PATH]&lt;BR /&gt;&amp;gt;&amp;gt; [--supervisord_httpd_port SUPERVISORD_HTTPD_PORT]&lt;BR /&gt;&amp;gt;&amp;gt; [--standalone STANDALONE] [--master MASTER]&lt;BR /&gt;&amp;gt;&amp;gt; [--environment ENVIRONMENT] [--host_id HOST_ID]&lt;BR /&gt;&amp;gt;&amp;gt; [--disable_supervisord_events] --hostname HOSTNAME&lt;BR /&gt;&amp;gt;&amp;gt; --ip_address IP_ADDRESS&lt;BR /&gt;&amp;gt;&amp;gt; [--reported_hostname REPORTED_HOSTNAME] [--use_tls]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_key_file CLIENT_KEY_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_cert_file CLIENT_CERT_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--verify_cert_file VERIFY_CERT_FILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--verify_cert_dir VERIFY_CERT_DIR]&lt;BR /&gt;&amp;gt;&amp;gt; [--client_keypw_file CLIENT_KEYPW_FILE]&lt;BR 
/&gt;&amp;gt;&amp;gt; [--client_keypw_cmd CLIENT_KEYPW_CMD]&lt;BR /&gt;&amp;gt;&amp;gt; [--max_cert_depth MAX_CERT_DEPTH] [--logfile LOGFILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--logdir LOGDIR] [--optional_token] [--clear_agent_dir]&lt;BR /&gt;&amp;gt;&amp;gt; [--sudo_command SUDO_COMMAND] [--pidfile PIDFILE]&lt;BR /&gt;&amp;gt;&amp;gt; [--comm_name COMM_NAME]&lt;BR /&gt;&amp;gt;&amp;gt;cmf-agent: error: argument --hostname is required&lt;BR /&gt;END (0)&lt;BR /&gt;BEGIN tail -n 50 /var/log/cloudera-scm-agent//cloudera-scm-agent.log | sed 's/^/&amp;gt;&amp;gt;/'&lt;BR /&gt;tail: cannot open `/var/log/cloudera-scm-agent//cloudera-scm-agent.log' for reading: No such file or directory&lt;BR /&gt;tail: cannot open `/var/log/cloudera-scm-agent//cloudera-scm-agent.log' for reading: No such file or directory&lt;BR /&gt;END (0)&lt;BR /&gt;&lt;BR /&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 25 Jun 2018 19:39:05 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Failed-to-start-Cloudera-Manager-Agent/m-p/69368#M14123</guid>
      <dc:creator>Siv</dc:creator>
      <dc:date>2018-06-25T19:39:05Z</dc:date>
    </item>
    <item>
      <title>Summary: Exchange operation taking too much time on a single node Impala</title>
      <link>http://community.cloudera.com/t5/Interactive-Short-cycle-SQL/Summary-Exchange-operation-taking-too-much-time-on-a-single/m-p/69364#M4642</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;
&lt;P&gt;Our&amp;nbsp;system has single node impala cluster on top of 3 kudu nodes.&amp;nbsp;As you can see the summary below, Exchange is taking too much time. I want to understand is this normal, or do I need to make any changes in Impala configurations.&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;&lt;span class="lia-inline-image-display-wrapper lia-image-align-inline" style="width: 600px;"&gt;&lt;img src="http://xgkfq28377.i.lithium.com/t5/image/serverpage/image-id/4278i132B1B8DD83C6757/image-size/large?v=1.0&amp;amp;px=600" alt="Screen Shot 2018-06-25 at 9.48.04 PM.png" title="Screen Shot 2018-06-25 at 9.48.04 PM.png" /&gt;&lt;/span&gt;&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;Thanks!&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 25 Jun 2018 17:28:16 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Interactive-Short-cycle-SQL/Summary-Exchange-operation-taking-too-much-time-on-a-single/m-p/69364#M4642</guid>
      <dc:creator>himanshuag</dc:creator>
      <dc:date>2018-06-25T17:28:16Z</dc:date>
    </item>
    <item>
      <title>Full authentication is required to access this resource</title>
      <link>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Full-authentication-is-required-to-access-this-resource/m-p/69359#M14122</link>
      <description>&lt;P&gt;I am currently using Unix to access Cloudera manager (SAML protected). I have access to links&amp;nbsp;such as&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;A href="https://servername.net:1111/static/release/js/cloudera/cmf/" target="_blank"&gt;https://servername.net:1111/static/release/js/cloudera/cmf/&lt;/A&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;but, I am unable to gain access to links with "/api"&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;example:&lt;/P&gt;&lt;P&gt;&lt;A href="https://servername.net:1111/api/v13/clusters/" target="_blank"&gt;https://servername.net:1111/api/v13/clusters/&lt;/A&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Here is my error message that I managed to get.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I am using get requests from python to gain access to Cloudera.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;lt;html&amp;gt;&lt;BR /&gt;&amp;lt;head&amp;gt;&lt;BR /&gt;&amp;lt;meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1"/&amp;gt;&lt;BR /&gt;&amp;lt;title&amp;gt;Error 401 Full authentication is required to access this resource&amp;lt;/title&amp;gt;&lt;BR /&gt;&amp;lt;/head&amp;gt;&lt;BR /&gt;&amp;lt;body&amp;gt;&amp;lt;h2&amp;gt;HTTP ERROR 401&amp;lt;/h2&amp;gt;&lt;BR /&gt;&amp;lt;p&amp;gt;Problem accessing /api/v13/clusters/. 
Reason:&lt;BR /&gt;&amp;lt;pre&amp;gt;&amp;nbsp;&amp;nbsp;&amp;nbsp; Full authentication is required to access this resource&amp;lt;/pre&amp;gt;&amp;lt;/p&amp;gt;&amp;lt;hr /&amp;gt;&amp;lt;i&amp;gt;&amp;lt;small&amp;gt;Powered by Jetty://&amp;lt;/small&amp;gt;&amp;lt;/i&amp;gt;&amp;lt;br/&amp;gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&lt;BR /&gt;&amp;lt;br/&amp;gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&lt;BR /&gt;&amp;lt;br/&amp;gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 25 Jun 2018 15:42:15 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Full-authentication-is-required-to-access-this-resource/m-p/69359#M14122</guid>
      <dc:creator>don1123</dc:creator>
      <dc:date>2018-06-25T15:42:15Z</dc:date>
    </item>
    <item>
      <title>Maven Error With Cloudera Repository</title>
      <link>http://community.cloudera.com/t5/CDH-Manual-Installation/Maven-Error-With-Cloudera-Repository/m-p/69355#M1729</link>
      <description>&lt;P&gt;When trying to compile a maven project, I'm receiving the following error:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;"(&lt;A href="https://repository.cloudera.com/artifactory/cloudera-repos/" target="_blank"&gt;https://repository.cloudera.com/artifactory/cloudera-repos/&lt;/A&gt;): Remote host closed connection during handshake: SSL peer shut down incorrectly"&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Here is how I have the repository configured:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;lt;repository&amp;gt;&lt;BR /&gt;&amp;lt;id&amp;gt;cloudera&amp;lt;/id&amp;gt;&lt;BR /&gt;&amp;lt;url&amp;gt;&lt;A href="https://repository.cloudera.com/artifactory/cloudera-repos/&amp;lt;/url" target="_blank"&gt;https://repository.cloudera.com/artifactory/cloudera-repos/&amp;lt;/url&lt;/A&gt;&amp;gt;&lt;BR /&gt;&amp;lt;/repository&amp;gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;My maven version is 3.0.5&lt;/P&gt;</description>
      <pubDate>Mon, 25 Jun 2018 14:14:26 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/CDH-Manual-Installation/Maven-Error-With-Cloudera-Repository/m-p/69355#M1729</guid>
      <dc:creator>bthelm2</dc:creator>
      <dc:date>2018-06-25T14:14:26Z</dc:date>
    </item>
    <item>
      <title>.sanpshot file in kafka log directory</title>
      <link>http://community.cloudera.com/t5/Data-Ingestion-Integration/sanpshot-file-in-kafka-log-directory/m-p/69352#M3139</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;What is the use of .sanpshot files in the data.log directory of a kafka topic and how do I make use of it? Is this present in apache kafka as well or available only on cloudera?&lt;/P&gt;</description>
      <pubDate>Mon, 25 Jun 2018 13:24:00 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Data-Ingestion-Integration/sanpshot-file-in-kafka-log-directory/m-p/69352#M3139</guid>
      <dc:creator>RajeshBodolla</dc:creator>
      <dc:date>2018-06-25T13:24:00Z</dc:date>
    </item>
    <item>
      <title>spark 1.6 csd for CDH5.5</title>
      <link>http://community.cloudera.com/t5/Advanced-Analytics-Apache-Spark/spark-1-6-csd-for-CDH5-5/m-p/69350#M3366</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Do we have csd for spark 1.6 supporting on CDH5.5.X? Any pointer are appreciated.&lt;/P&gt;</description>
      <pubDate>Mon, 25 Jun 2018 09:25:26 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Advanced-Analytics-Apache-Spark/spark-1-6-csd-for-CDH5-5/m-p/69350#M3366</guid>
      <dc:creator>RajeshBodolla</dc:creator>
      <dc:date>2018-06-25T09:25:26Z</dc:date>
    </item>
    <item>
      <title>[kafka] Error sending message when leader is not master node</title>
      <link>http://community.cloudera.com/t5/Data-Ingestion-Integration/kafka-Error-sending-message-when-leader-is-not-master-node/m-p/69349#M3138</link>
      <description>Hi,&lt;BR /&gt;&lt;BR /&gt;My current setup has 1 management node and 3 worker node.&lt;BR /&gt;Server A(master) - kafka broker, ...&lt;BR /&gt;Server B - zookeeper, kafka broker, ...&lt;BR /&gt;Server C - zookeeper, kafka broker, ...&lt;BR /&gt;Server D - zookeeper, kafka broker, ...&lt;BR /&gt;&lt;BR /&gt;KAFKA-3.0.0-1.3.0.0.p0.40&lt;BR /&gt;CDH-5.14.2-1.cdh5.14.2.p0.3&lt;BR /&gt;&lt;BR /&gt;I am using a client machine in the same network to create topic, produce and consume message using kafka bat files.&lt;BR /&gt;&lt;BR /&gt;I realise when i create a topic, and the leader of the topic happens to be the broker on server A, everything works.&lt;BR /&gt;&lt;BR /&gt;But if the leader of the topic is any broker on server b/c/d, when i attempt to send any messages, it prompt the following message.&lt;BR /&gt;WARN - Got error produce response with correlation id _ on topic-partition ___, retrying. Error: UNKNOWN_TOPIC_OR_PARTITION&lt;BR /&gt;WARN - Receive unknown topic or partition error in produce request on partition ___. The topic/partition may not exist or the user may not have Describe access to it&lt;BR /&gt;ERROR - Error when sending message to topic ___ with key: null, value... UnknownTopicOrPartitionException: This server does not host this topic-partition.&lt;BR /&gt;&lt;BR /&gt;I am suspecting something to do with the zookeeper, but have no idea where to look deeper into.&lt;BR /&gt;I am a beginner in this area, any help would be appreciated.&lt;BR /&gt;&lt;BR /&gt;Thanks in advance!&lt;BR /&gt;</description>
      <pubDate>Mon, 25 Jun 2018 08:04:05 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Data-Ingestion-Integration/kafka-Error-sending-message-when-leader-is-not-master-node/m-p/69349#M3138</guid>
      <dc:creator>Qing</dc:creator>
      <dc:date>2018-06-25T08:04:05Z</dc:date>
    </item>
    <item>
      <title>Cloudera Navigator Error Message</title>
      <link>http://community.cloudera.com/t5/Data-Discovery-Optimization/Cloudera-Navigator-Error-Message/m-p/69343#M267</link>
      <description>&lt;P&gt;Hello everyone,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I'm using an application to connect to Cloudera Navigator API (v10), scan some databases and collect metrics and I'm receiving the following return error:&lt;/P&gt;&lt;P&gt;&lt;FONT color="#FF0000"&gt;Can not map JSON result for API call to relations?entityIds=3125134&amp;amp;types=PARENT_CHILD&amp;amp;roles=PARENT&lt;/FONT&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;It looks like it is not finding a JSON clause.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;I checked my configurations&amp;nbsp;and everything is correct.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;Could someone please explain to me what this error means? And what can I do to solve this?&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;P.s: In my application, I received a warning message before this error:&lt;FONT color="#FFCC00"&gt; "Exception has occurred while scanning database "databaseName". Error message: "null".&amp;nbsp;&lt;/FONT&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;Can these two messages be related?&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;Thanks,&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;Danilo&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Sun, 24 Jun 2018 04:54:07 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Data-Discovery-Optimization/Cloudera-Navigator-Error-Message/m-p/69343#M267</guid>
      <dc:creator>dans_vander</dc:creator>
      <dc:date>2018-06-24T04:54:07Z</dc:date>
    </item>
    <item>
      <title>NULL columns importing csv data into table</title>
      <link>http://community.cloudera.com/t5/Batch-SQL-Apache-Hive/NULL-columns-importing-csv-data-into-table/m-p/69340#M2735</link>
      <description>&lt;P&gt;Hi everyone,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I'm trying to import a csv file to a table. But after I created the table and load the data into the table some columns (data types except STRING) is getting NULL. Here is the create table statement I used:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;CREATE TABLE deneme6 (framenumber int,frametime TIMESTAMP, ipsrc STRING, ipdst STRING, protocol STRING, flag int, windowsize int, info STRING)

ROW FORMAT DELIMITED

FIELDS TERMINATED BY ','

STORED AS TEXTFILE ;

&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;And then I'm loading my data file:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;load data inpath 'hdfs:///user/hive/warehouse/deneme4/deneme4.csv' into table deneme6;&lt;/PRE&gt;&lt;P&gt;But for the columns framenumber,&amp;nbsp; frametime, flag, windowsize data is returning NULL. These are the columns whose data types are not STRING. What can I do for the issue? And here is an example of the csv file:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;frame.number,frame.time_relative,ip.src,ip.dst,_ws.col.Protocol,tcp.flags,tcp.window_size_value,_ws.col.Info&amp;nbsp;&lt;/P&gt;&lt;P&gt;1,"0.000000000","147.32.84.165","91.212.135.158","TCP","0x00000002","64240","1040 → 5678 [SYN] Seq=0 Win=64240 Len=0 MSS=1460 SACK_PERM=1"&lt;/P&gt;&lt;P&gt;2,"0.000009000","147.32.84.165","91.212.135.158","TCP","0x00000002","64240","[TCP Out-Of-Order] 1040 → 5678 [SYN] Seq=0 Win=64240 Len=0 MSS=1460 SACK_PERM=1"&lt;/P&gt;&lt;P&gt;3,"0.062970000","91.212.135.158","147.32.84.165","TCP","0x00000012","65535","5678 → 1040 [SYN, ACK] Seq=0 Ack=1 Win=65535 Len=0 MSS=1460 SACK_PERM=1"&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;And this is the result for the table:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;NULL NULL "147.32.84.165" "91.212.135.158" "TCP" NULL NULL "1040 → 5678 [SYN] Seq=0 Win=64240 Len=0 MSS=1460 SACK_PERM=1"&lt;BR /&gt;NULL NULL "147.32.84.165" "91.212.135.158" "TCP" NULL NULL "[TCP Out-Of-Order] 1040 → 5678 [SYN] Seq=0 Win=64240 Len=0 MSS=1460 SACK_PERM=1"&lt;/P&gt;&lt;P&gt;NULL NULL "91.212.135.158" "147.32.84.165" "TCP" NULL NULL "5678 → 1040 [SYN&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Sat, 23 Jun 2018 11:59:31 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Batch-SQL-Apache-Hive/NULL-columns-importing-csv-data-into-table/m-p/69340#M2735</guid>
      <dc:creator>uur</dc:creator>
      <dc:date>2018-06-23T11:59:31Z</dc:date>
    </item>
    <item>
      <title>Something similar to PL/HQL available on Cloudera?</title>
      <link>http://community.cloudera.com/t5/Batch-SQL-Apache-Hive/Something-similar-to-PL-HQL-available-on-Cloudera/m-p/69339#M2736</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I need to process some data from Hive tables through loops. I have seen that PL/HQL would be perfect for my goal but it is not available on Cloudera. Does anybody know about something similar?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks for your help!&lt;/P&gt;</description>
      <pubDate>Sat, 23 Jun 2018 07:23:57 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Batch-SQL-Apache-Hive/Something-similar-to-PL-HQL-available-on-Cloudera/m-p/69339#M2736</guid>
      <dc:creator>Nachopa</dc:creator>
      <dc:date>2018-06-23T07:23:57Z</dc:date>
    </item>
    <item>
      <title>Name node not starting after computer restarts</title>
      <link>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Name-node-not-starting-after-computer-restarts/m-p/69338#M14114</link>
      <description>&lt;P&gt;Hello,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;My cluster has the following configuration&amp;nbsp; 2 NN , 5 DN , 3 JN.&lt;/P&gt;&lt;P&gt;After restarting the cluster ,none of the name node started .Please see the attached screenshot &amp;amp; logs.&lt;/P&gt;&lt;P&gt;Is there any suggestion to start the name node ? This is DEV enviroment,so if i lost the data i can easily reload it.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;span class="lia-inline-image-display-wrapper lia-image-align-inline" style="width: 200px;"&gt;&lt;img src="http://xgkfq28377.i.lithium.com/t5/image/serverpage/image-id/4275i5B38E6CC64C913F2/image-size/small?v=1.0&amp;amp;px=200" alt="2018-06-22_21-24-22.png" title="2018-06-22_21-24-22.png" /&gt;&lt;/span&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;TABLE&gt;&lt;TBODY&gt;&lt;TR&gt;&lt;TD&gt;9:02:01.896 PM&lt;/TD&gt;&lt;TD&gt;WARN&lt;/TD&gt;&lt;TD&gt;FSEditLog&lt;/TD&gt;&lt;TD&gt;&lt;PRE&gt;Unable to determine input streams from QJM to [192.168.83.30:8485, 192.168.83.28:8485, 192.168.83.5:8485]. Skipping.
org.apache.hadoop.hdfs.qjournal.client.QuorumException: Got too many exceptions to achieve quorum size 2/3. 1 successful responses:
192.168.83.5:8485: [[1292263,1292312], [1292313,1292376], [1292377,1292426], [1292427,1292488], [1292489,1292538], [1292539,1292588], [1292589,1292664], [1292665,1292714], [1292715,1292778], [1292779,1292846], [1292847,1292985], [1292986,1293049], [1293050,1293099], [1293100,1293164], [1293165,1293214], [1293215,1293264], [1293265,1293355], [1293356,1293405], [1293406,1293469], [1293470,1293520], [1293521,1293579], [1293580,1293643], [1293644,1293706], [1293707,1293775], [1293776,1293796]]
2 exceptions thrown:
192.168.83.28:8485: Journal Storage Directory /tmp/jrnldata/HAnameService not formatted
	at org.apache.hadoop.hdfs.qjournal.server.Journal.checkFormatted(Journal.java:472)
	at org.apache.hadoop.hdfs.qjournal.server.Journal.getEditLogManifest(Journal.java:655)&lt;/PRE&gt;&lt;/TD&gt;&lt;/TR&gt;&lt;/TBODY&gt;&lt;/TABLE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;TABLE&gt;&lt;TBODY&gt;&lt;TR&gt;&lt;TD&gt;9:02:02.665 PM&lt;/TD&gt;&lt;TD&gt;FATAL&lt;/TD&gt;&lt;TD&gt;FSEditLog&lt;/TD&gt;&lt;TD&gt;&lt;PRE&gt;Error: recoverUnfinalizedSegments failed for required journal (JournalAndStream(mgr=QJM to [192.168.83.30:8485, 192.168.83.28:8485, 192.168.83.5:8485], stream=null))
org.apache.hadoop.hdfs.qjournal.client.QuorumException: Got too many exceptions to achieve quorum size 2/3. 1 successful responses:
192.168.83.5:8485: lastPromisedEpoch: 23
httpPort: 8480
fromURL: "http://ntiger:8480"

2 exceptions thrown:
192.168.83.30:8485: Journal Storage Directory /tmp/jrnldata/HAnameService not formatted
	at org.apache.hadoop.hdfs.qjournal.server.Journal.checkFormatted(Journal.java:472)
	at org.apache.hadoop.hdfs.qjournal.server.Journal.getLastPromisedEpoch(Journal.java:245)&lt;/PRE&gt;&lt;/TD&gt;&lt;/TR&gt;&lt;/TBODY&gt;&lt;/TABLE&gt;</description>
      <pubDate>Sat, 23 Jun 2018 01:40:19 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Cloudera-Manager-Installation/Name-node-not-starting-after-computer-restarts/m-p/69338#M14114</guid>
      <dc:creator>kokokoko</dc:creator>
      <dc:date>2018-06-23T01:40:19Z</dc:date>
    </item>
    <item>
      <title>java.lang.ClassNotFoundException: com.splicemachine.hbase.SpliceMasterObserver</title>
      <link>http://community.cloudera.com/t5/Storage-Random-Access-HDFS/java-lang-ClassNotFoundException-com-splicemachine-hbase/m-p/69332#M3646</link>
      <description>&lt;P&gt;I am using CDH5.12.2. I Cannot start HBASE Master. I Got,&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;java.lang.ClassNotFoundException: com.splicemachine.hbase.SpliceMasterObserver&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;At Java Configuration Option for HBASE Master in Cloudera Manager, I configured,&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-Dsplice.spark.driver.extraClassPath=/opt/cloudera/parcels/&lt;/SPAN&gt;&lt;SPAN class="s2"&gt;SPLICEMACHINE&lt;/SPAN&gt;&lt;SPAN class="s1"&gt;/lib/*:/opt/cloudera/parcels/CDH/lib/hbase/conf&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-Dsplice.spark.executor.extraClassPath=/opt/cloudera/parcels/CDH/lib/hbase/conf:/opt/cloudera/parcels/&lt;/SPAN&gt;&lt;SPAN class="s2"&gt;SPLICEMACHINE&lt;/SPAN&gt;&lt;SPAN class="s1"&gt;/lib/*&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;-Dsplice.spark.yarn.jars=/opt/cloudera/parcels/&lt;/SPAN&gt;&lt;SPAN class="s2"&gt;SPLICEMACHINE&lt;/SPAN&gt;&lt;SPAN class="s1"&gt;/lib/*&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;The jars are at&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;/opt/cloudera/parcels/SPLICEMACHINE/lib/.&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;But, I got errors:&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;ERROR org.apache.hadoop.hbase.coprocessor.CoprocessorHost: The coprocessor com.splicemachine.hbase.SpliceMasterObserver threw java.lang.ClassNotFoundException: com.splicemachine.hbase.SpliceMasterObserver&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;java.lang.ClassNotFoundException: com.splicemachine.hbase.SpliceMasterObserver&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN 
class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at java.net.URLClassLoader.findClass(URLClassLoader.java:381)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at java.lang.ClassLoader.loadClass(ClassLoader.java:424)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at java.lang.ClassLoader.loadClass(ClassLoader.java:357)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hbase.coprocessor.CoprocessorHost.loadSystemCoprocessors(CoprocessorHost.java:158)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hbase.master.MasterCoprocessorHost.&amp;lt;init&amp;gt;(MasterCoprocessorHost.java:92)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hbase.master.HMaster.finishActiveMasterInitialization(HMaster.java:741)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hbase.master.HMaster.access$500(HMaster.java:194)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; 
&amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hbase.master.HMaster$1.run(HMaster.java:1834)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at java.lang.Thread.run(Thread.java:745)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;2018-06-22 09:24:15,482 FATAL org.apache.hadoop.hbase.master.HMaster: The coprocessor com.splicemachine.hbase.SpliceMasterObserver threw java.lang.ClassNotFoundException: com.splicemachine.hbase.SpliceMasterObserver&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;java.lang.ClassNotFoundException: com.splicemachine.hbase.SpliceMasterObserver&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at java.net.URLClassLoader.findClass(URLClassLoader.java:381)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at java.lang.ClassLoader.loadClass(ClassLoader.java:424)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at java.lang.ClassLoader.loadClass(ClassLoader.java:357)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hbase.coprocessor.CoprocessorHost.loadSystemCoprocessors(CoprocessorHost.java:158)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN 
class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hbase.master.MasterCoprocessorHost.&amp;lt;init&amp;gt;(MasterCoprocessorHost.java:92)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hbase.master.HMaster.finishActiveMasterInitialization(HMaster.java:741)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hbase.master.HMaster.access$500(HMaster.java:194)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at org.apache.hadoop.hbase.master.HMaster$1.run(HMaster.java:1834)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&lt;SPAN class="s1"&gt;&lt;SPAN class="Apple-converted-space"&gt;&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &lt;/SPAN&gt;at java.lang.Thread.run(Thread.java:745)&lt;/SPAN&gt;&lt;/P&gt;&lt;P class="p1"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&amp;nbsp;&lt;/P&gt;&lt;P class="p1"&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Sat, 23 Jun 2018 00:55:17 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Storage-Random-Access-HDFS/java-lang-ClassNotFoundException-com-splicemachine-hbase/m-p/69332#M3646</guid>
      <dc:creator>PeterLuo</dc:creator>
      <dc:date>2018-06-23T00:55:17Z</dc:date>
    </item>
    <item>
      <title>[ANNOUNCE] Cloudera Data Science Workbench 1.4 Released</title>
      <link>http://community.cloudera.com/t5/Community-News-Release/ANNOUNCE-Cloudera-Data-Science-Workbench-1-4-Released/m-p/69325#M235</link>
      <description>&lt;P&gt;&lt;SPAN&gt;Cloudera is delighted to announce the release of Cloudera Data Science Workbench 1.4.0. With this release, &lt;/SPAN&gt;&lt;SPAN&gt;Cloudera Data Science Workbench extends the machine learning platform experience from research to production. Data scientists can now build, train, and deploy models in a unified workflow with two new key capabilities: experiments and models. &lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;A href="https://www.cloudera.com/documentation/data-science-workbench/latest/topics/cdsw_experiments.html" target="_blank"&gt;&lt;STRONG&gt;Experiments&lt;/STRONG&gt;&lt;/A&gt;&lt;STRONG&gt;: &lt;/STRONG&gt;&lt;SPAN&gt;Experiments let data scientists train, compare, and reproduce versioned models. With this feature,, data scientists can run a batch job that will:&lt;/SPAN&gt;&lt;SPAN&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;OL&gt;&lt;LI&gt;&lt;SPAN&gt;create a snapshot of model code, dependencies, and configuration parameters necessary to train the model&lt;/SPAN&gt;&lt;/LI&gt;&lt;LI&gt;&lt;SPAN&gt;build and execute the training run in an isolated container&lt;/SPAN&gt;&lt;/LI&gt;&lt;LI&gt;&lt;SPAN&gt;track model metrics, performance, and any model artifacts the user specifies&lt;/SPAN&gt;&lt;/LI&gt;&lt;/OL&gt;&lt;P&gt;&lt;A href="https://www.cloudera.com/documentation/data-science-workbench/latest/topics/cdsw_models.html" target="_blank"&gt;&lt;STRONG&gt;Models&lt;/STRONG&gt;&lt;/A&gt;&lt;STRONG&gt;: &lt;/STRONG&gt;&lt;SPAN&gt;Models let data scientists build, deploy, and manage models as REST APIs to serve predictions. 
With this feature, data scientists can simply select a Python or R function within a project file, and Cloudera Data Science Workbench will:&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;&lt;OL&gt;&lt;LI&gt;&lt;SPAN&gt;create a snapshot of model code, saved model parameters, and dependencies&lt;/SPAN&gt;&lt;/LI&gt;&lt;LI&gt;&lt;SPAN&gt;build an immutable executable container with the trained model and serving code&lt;/SPAN&gt;&lt;/LI&gt;&lt;LI&gt;&lt;SPAN&gt;add a REST endpoint that automatically accepts input parameters matching the function signature, and that returns a data structure matching the function’s return type&lt;/SPAN&gt;&lt;/LI&gt;&lt;LI&gt;&lt;SPAN&gt;save the built model container, along with metadata like who built or deployed it&lt;/SPAN&gt;&lt;/LI&gt;&lt;LI&gt;&lt;SPAN&gt;deploy and start a specified number of model API replicas, automatically load balanced&lt;/SPAN&gt;&lt;/LI&gt;&lt;LI&gt;&lt;SPAN&gt;let the user document, test, and share the model&lt;/SPAN&gt;&amp;nbsp;&lt;/LI&gt;&lt;/OL&gt;&lt;P&gt;&lt;SPAN&gt;In addition, Cloudera Data Science Workbench 1.4 also includes security enhancements that help automate user administration.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;Simplified user administration&lt;/STRONG&gt;&lt;SPAN&gt;: &lt;/SPAN&gt;&lt;SPAN&gt;Previous CDSW releases offered LDAP and SAML authentication but allowed every user to log in. The consequence was user sprawl and unintended license consumption. Site administrators had to be manually entitled in the Cloudera Data Science Workbench UI.&lt;/SPAN&gt;&lt;SPAN&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;SPAN&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;SPAN&gt;With version 1.4 you can now designate LDAP and SAML groups for both users and administrators. With automatic synchronisation, the ability to log in or administer CDSW now depends on a user’s group membership. 
These groups can be assigned in your existing centralised LDAP/SAML authentication system.&lt;/SPAN&gt;&lt;SPAN&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;SPAN&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;SPAN&gt;We’ve added two new properties to CDSW:&lt;/SPAN&gt;&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;&lt;STRONG&gt;LDAP/SAML User Groups - &lt;/STRONG&gt;&lt;SPAN&gt;Groups whose users can log in to CDSW&lt;/SPAN&gt;&lt;/LI&gt;&lt;LI&gt;&lt;STRONG&gt;LDAP/SAML Admin Groups - &lt;/STRONG&gt;&lt;SPAN&gt;Groups whose users are automatically made site administrators in CDSW&lt;/SPAN&gt;&lt;/LI&gt;&lt;/UL&gt;&lt;P&gt;&lt;SPAN&gt;For a complete list of new features and bug fixes in this release, please see the &lt;/SPAN&gt;&lt;A href="https://www.cloudera.com/documentation/data-science-workbench/latest/topics/cdsw_release_notes.html#rel_140" target="_blank"&gt;&lt;SPAN&gt;Release Notes&lt;/SPAN&gt;&lt;/A&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;For more information on downloading, installing and using Cloudera Data Science Workbench, please see the links below:&lt;/SPAN&gt;&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;&lt;A href="https://www.cloudera.com/downloads/workbench.html" target="_blank"&gt;&lt;SPAN&gt;Download Cloudera Data Science Workbench&lt;/SPAN&gt;&lt;/A&gt;&lt;/LI&gt;&lt;LI&gt;&lt;A href="https://www.cloudera.com/documentation/data-science-workbench/latest/topics/cdsw_overview.html" target="_blank"&gt;&lt;SPAN&gt;Product Overview&lt;/SPAN&gt;&lt;/A&gt;&lt;/LI&gt;&lt;LI&gt;&lt;A href="https://www.cloudera.com/documentation/data-science-workbench/latest/topics/cdsw_install.html" target="_blank"&gt;&lt;SPAN&gt;Installation Guide&lt;/SPAN&gt;&lt;/A&gt;&lt;/LI&gt;&lt;LI&gt;&lt;A href="https://www.cloudera.com/documentation/data-science-workbench/latest/topics/cdsw_quickstart.html" target="_blank"&gt;&lt;SPAN&gt;Quickstart Guide&lt;/SPAN&gt;&lt;/A&gt;&lt;/LI&gt;&lt;LI&gt;&lt;A 
href="https://www.cloudera.com/documentation/data-science-workbench/latest/topics/cdsw_release_notes.html" target="_blank"&gt;&lt;SPAN&gt;Release Notes&lt;/SPAN&gt;&lt;SPAN&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;/A&gt;&lt;/LI&gt;&lt;/UL&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;As always, we welcome your feedback. Please send your comments and suggestions on our &lt;/SPAN&gt;&lt;A href="http://community.cloudera.com/" target="_blank"&gt;&lt;SPAN&gt;community forums.&lt;/SPAN&gt;&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Fri, 22 Jun 2018 23:32:15 GMT</pubDate>
      <guid>http://community.cloudera.com/t5/Community-News-Release/ANNOUNCE-Cloudera-Data-Science-Workbench-1-4-Released/m-p/69325#M235</guid>
      <dc:creator>ameet</dc:creator>
      <dc:date>2018-06-22T23:32:15Z</dc:date>
    </item>
  </channel>
</rss>

