Member since: 01-05-2016
      
56 Posts | 23 Kudos Received | 9 Solutions
My Accepted Solutions

| Title | Views | Posted |
|---|---|---|
|  | 2442 | 09-27-2017 06:11 AM |
|  | 2927 | 09-21-2017 06:36 PM |
|  | 1384 | 06-15-2017 01:28 PM |
|  | 2617 | 12-09-2016 08:39 PM |
|  | 2728 | 09-06-2016 04:57 PM |
			
    
	
		
		
09-27-2017 06:11 AM | 1 Kudo

Deleting the Kafka topics for Atlas and restarting Atlas fixed the issue.
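For anyone hitting the same problem, a sketch of that cleanup, assuming the default Atlas notification topics (ATLAS_HOOK and ATLAS_ENTITIES), the standard HDP Kafka path, and a hypothetical ZooKeeper address zk1:2181:

# List topics first to confirm the Atlas topic names on your cluster
/usr/hdp/current/kafka-broker/bin/kafka-topics.sh --zookeeper zk1:2181 --list

# Delete the Atlas notification topics (requires delete.topic.enable=true)
/usr/hdp/current/kafka-broker/bin/kafka-topics.sh --zookeeper zk1:2181 --delete --topic ATLAS_HOOK
/usr/hdp/current/kafka-broker/bin/kafka-topics.sh --zookeeper zk1:2181 --delete --topic ATLAS_ENTITIES

# Then restart Atlas, e.g. from Ambari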
			
    
	
		
		
03-07-2017 05:07 PM | 3 Kudos

Assumption: HDP 2.5.3 or later and a Kerberized cluster.

Create an hplsql-site.xml from the template below.
	 <configuration>
<property>
<name>hplsql.conn.default</name>
<value>hive2conn</value>
<description>The default connection profile</description>
</property>
<property>
<name>hplsql.conn.hiveconn</name>
<value>org.apache.hive.jdbc.HiveDriver;jdbc:hive2://</value>
<description>HiveServer2 JDBC connection (embedded mode)</description>
</property>
<property>
<name>hplsql.conn.init.hiveconn</name>
<value>
set hive.execution.engine=mr;
use default;
</value>
<description>Statements to execute after connecting to the database</description>
</property>
<property>
<name>hplsql.conn.convert.hiveconn</name>
<value>true</value>
<description>Convert SQL statements before execution</description>
</property>
<property>
<name>hplsql.conn.hive1conn</name>
<value>org.apache.hadoop.hive.jdbc.HiveDriver;jdbc:hive://</value>
<description>Hive embedded JDBC (not requiring HiveServer)</description>
</property>
<property>
<name>hplsql.conn.hive2conn</name>
<value>org.apache.hive.jdbc.HiveDriver;jdbc:hive2://node1.field.hortonworks.com:10500/default;principal=hive/node1.field.hortonworks.com@REALM</value>
<description>HiveServer2 JDBC connection</description>
</property>
<property>
<name>hplsql.conn.init.hive2conn</name>
<value>
set hive.execution.engine=tez;
use default;
</value>
<description>Statements to execute after connecting to the database</description>
</property>
<property>
<name>hplsql.conn.convert.hive2conn</name>
<value>true</value>
<description>Convert SQL statements before execution</description>
</property>
<property>
<name>hplsql.conn.db2conn</name>
<value>com.ibm.db2.jcc.DB2Driver;jdbc:db2://localhost:50001/dbname;user;password</value>
<description>IBM DB2 connection</description>
</property>
<property>
<name>hplsql.conn.tdconn</name>
<value>com.teradata.jdbc.TeraDriver;jdbc:teradata://localhost/database=dbname,logmech=ldap;user;password</value>
<description>Teradata connection</description>
</property>
<property>
<name>hplsql.conn.mysqlconn</name>
<value>com.mysql.jdbc.Driver;jdbc:mysql://localhost/test;user;password</value>
<description>MySQL connection</description>
</property>
<property>
<name>hplsql.dual.table</name>
<value>default.dual</value>
<description>Single row, single column table for internal operations</description>
</property>
<property>
<name>hplsql.insert.values</name>
<value>native</value>
<description>How to execute INSERT VALUES statement: native (default) and select</description>
</property>
<property>
<name>hplsql.onerror</name>
<value>exception</value>
<description>Error handling behavior: exception (default), seterror and stop</description>
</property>
<property>
<name>hplsql.temp.tables</name>
<value>native</value>
<description>Temporary tables: native (default) and managed</description>
</property>
<property>
<name>hplsql.temp.tables.schema</name>
<value></value>
<description>Schema for managed temporary tables</description>
</property>
<property>
<name>hplsql.temp.tables.location</name>
<value>/tmp/plhql</value>
<description>Location for managed temporary tables in HDFS</description>
</property>
</configuration>
	   
Modify the LLAP hostname and the Hive principal in the following section to match your cluster environment. Note: this assumes a Kerberized cluster.
 <property>
  <name>hplsql.conn.hive2conn</name>
  <value>org.apache.hive.jdbc.HiveDriver;jdbc:hive2://<<LLAP_HOSTNAME>>:10500/default;principal=hive/<<LLAP_HOSTNAME>>@<<KERBEROS_REALM>></value>
  <description>HiveServer2 JDBC connection</description>
</property>
<property>
  <name>hplsql.conn.init.hive2conn</name>
  <value>
     set hive.execution.engine=tez;
     use default;
  </value>
  <description>Statements to execute after connecting to the database</description>
</property>
  
Update the hive-hplsql jar with the modified hplsql-site.xml:

cd /usr/hdp/current/hive-server2-hive2/lib
/usr/jdk64/jdk1.8.0_77/bin/jar uf hive-hplsql-2.1.0.XXX.jar hplsql-site.xml

Note: adjust the path to match your JDK version.
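Optionally, verify that the jar now carries the updated file (same JDK and jar paths as above):

/usr/jdk64/jdk1.8.0_77/bin/jar tf hive-hplsql-2.1.0.XXX.jar | grep hplsql-site.xml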
  
Authenticate the user with the KDC:

kinit <user principal>
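For example, with a hypothetical principal (substitute your own), then klist to confirm the ticket:

kinit alice@EXAMPLE.COM
klist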
  
Execute the HPL/SQL code:

./hplsql -f /root/myhpl.sql
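The post does not include the contents of myhpl.sql; a minimal hypothetical script for a smoke test might look like this (PRINT and FOR ... LOOP are standard HPL/SQL constructs, and default.dual matches the hplsql.dual.table setting above):

cat > /root/myhpl.sql <<'EOF'
-- trivial loop to verify procedural execution
FOR i IN 1..2 LOOP
  PRINT 'iteration ' || i;
END LOOP;
-- and one SQL statement pushed to HiveServer2
SELECT COUNT(*) FROM default.dual;
EOF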
  
On success, you should see log output like the following:

Starting SQL statement
SQL statement executed successfully (128 ms)
Starting SQL statement
SQL statement executed successfully (145 ms)
			
    
	
		
		
12-07-2016 03:26 PM

@Baruch AMOUSSOU DJANGBAN, you can do this as well. If you have ClusterShell installed on the cluster, you can restart the Ambari agent on every node with one simple script:

#!/bin/sh
clush -g all ambari-agent restart

See the open-source ClusterShell project for more information: https://github.com/cea-hpc/clustershell/downloads
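Note that the "all" group used with clush -g has to be defined first. A minimal sketch of the ClusterShell groups file, with hypothetical hostnames:

# /etc/clustershell/groups
all: node[1-4].example.com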
			
    
	
		
		
09-01-2016 03:32 PM

It is pretty simple to configure; see http://hdb.docs.pivotal.io/20/pxf/ConfigurePXF.html#topic_i3f_hvm_ss
			
    
	
		
		
06-13-2016 08:48 PM

Well, it seems that AMS metrics aren't consumable by Ambari alerts until version 2.4.0 comes out: https://issues.apache.org/jira/browse/AMBARI-15766

This means that JMX metrics are the only ones that work by default, and I don't think Kafka exposes them. You could use a third-party plugin that exposes them, but that's probably not ideal.

So, to monitor Kafka metrics today, you'd need to write a SCRIPT-based alert that checks a specific metric from AMS. You could write a single, generic SCRIPT alert that takes the metric to check as a parameter; that way you'd create several different alert definitions (one per metric) and re-use the same Python script.

If you wanted to expose Kafka metrics via JMX, one of these might help: https://cwiki.apache.org/confluence/display/KAFKA/JMX+Reporters. You'd expose JMX via Kafka and then publish that JMX data via something like JMXTrans, which alerts could consume.

Or you could wait for Ambari 2.4.0, which should let you consume AMS metrics natively, and just write a simple alert definition for it.
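As a rough sketch, such a SCRIPT alert definition could be registered through the Ambari REST API like this (the cluster name, script path, and most field values here are hypothetical, and the Python check script itself is not shown):

curl -u admin:admin -H 'X-Requested-By: ambari' -X POST \
  http://ambari-host:8080/api/v1/clusters/MyCluster/alert_definitions \
  -d '{
    "AlertDefinition": {
      "name": "kafka_metric_check",
      "label": "Kafka Metric Check",
      "description": "Generic SCRIPT alert; the AMS metric to check is a parameter",
      "service_name": "KAFKA",
      "component_name": "KAFKA_BROKER",
      "interval": 5,
      "scope": "ANY",
      "enabled": true,
      "source": {
        "type": "SCRIPT",
        "path": "/var/lib/ambari-server/resources/host_scripts/check_kafka_metric.py"
      }
    }
  }'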
			
    
	
		
		
06-06-2016 07:25 PM

You can try the following:

li <- read.table(textConnection(c), sep = ",")
			
    
	
		
		
06-20-2017 04:59 AM

Hi, are there any steps for getting our own Kafka server metrics into Elasticsearch? We have Grafana, which holds all the dashboards, but a project requirement is to keep a few Kafka metrics in a Kibana visualization, so we want to index the Kafka metrics logs into Elasticsearch. Can we consume Kafka metrics into Elasticsearch?