Member since
12-09-2015
15
Posts
1
Kudos Received
1
Solution
My Accepted Solutions
Title | Views | Posted |
---|---|---|
4497 | 12-16-2015 07:00 PM |
01-04-2016
07:32 PM
1 Kudo
thank you . Created JIRA : https://issues.apache.org/jira/browse/PHOENIX-2561
... View more
12-29-2015
08:38 PM
Exception I get when I use ';' as the delimiter in the connection string:
org.apache.phoenix.exception.PhoenixIOException: com.google.protobuf.ServiceException: java.io.IOException: Could not set up IO Streams
at org.apache.phoenix.util.ServerUtil.parseServerException(ServerUtil.java:108)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.ensureTableCreated(ConnectionQueryServicesImpl.java:840)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.createTable(ConnectionQueryServicesImpl.java:1134)
at org.apache.phoenix.query.DelegateConnectionQueryServices.createTable(DelegateConnectionQueryServices.java:110)
at org.apache.phoenix.schema.MetaDataClient.createTableInternal(MetaDataClient.java:1591)
at org.apache.phoenix.schema.MetaDataClient.createTable(MetaDataClient.java:569)
at org.apache.phoenix.compile.CreateTableCompiler$2.execute(CreateTableCompiler.java:175)
at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:271)
at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:263)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:261)
at org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1043)
at org.apache.phoenix.query.ConnectionQueryServicesImpl$9.call(ConnectionQueryServicesImpl.java:1561)
at org.apache.phoenix.query.ConnectionQueryServicesImpl$9.call(ConnectionQueryServicesImpl.java:1530)
at org.apache.phoenix.util.PhoenixContextExecutor.call(PhoenixContextExecutor.java:77)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.init(ConnectionQueryServicesImpl.java:1530)
at org.apache.phoenix.jdbc.PhoenixDriver.getConnectionQueryServices(PhoenixDriver.java:162)
at org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.connect(PhoenixEmbeddedDriver.java:126)
at org.apache.phoenix.jdbc.PhoenixDriver.connect(PhoenixDriver.java:133)
at java.sql.DriverManager.getConnection(DriverManager.java:571)
at java.sql.DriverManager.getConnection(DriverManager.java:233)
at simpleHBase.PhoenixConnectionFactory.<init>(PhoenixConnectionFactory.java:33)
at simpleHBase.PhoenixClient.<init>(PhoenixClient.java:27)
at simpleHBase.actionClass.main(actionClass.java:118)
Caused by: org.apache.hadoop.hbase.MasterNotRunningException: com.google.protobuf.ServiceException: java.io.IOException: Could not set up IO Streams
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$StubMaker.makeStub(HConnectionManager.java:1650)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$MasterServiceStubMaker.makeStub(HConnectionManager.java:1676)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getKeepAliveMasterService(HConnectionManager.java:1884)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getHTableDescriptor(HConnectionManager.java:2655)
at org.apache.hadoop.hbase.client.HBaseAdmin.getTableDescriptor(HBaseAdmin.java:388)
at org.apache.hadoop.hbase.client.HBaseAdmin.getTableDescriptor(HBaseAdmin.java:393)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.ensureTableCreated(ConnectionQueryServicesImpl.java:772)
... 22 more
Caused by: com.google.protobuf.ServiceException: java.io.IOException: Could not set up IO Streams
at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1666)
at org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1707)
at org.apache.hadoop.hbase.protobuf.generated.MasterProtos$MasterService$BlockingStub.isMasterRunning(MasterProtos.java:42561)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$MasterServiceStubMaker.isMasterRunning(HConnectionManager.java:1687)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$StubMaker.makeStubNoRetries(HConnectionManager.java:1596)
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$StubMaker.makeStub(HConnectionManager.java:1622)
... 28 more
Caused by: java.io.IOException: Could not set up IO Streams
at org.apache.hadoop.hbase.ipc.RpcClient$Connection.setupIOstreams(RpcClient.java:927)
at org.apache.hadoop.hbase.ipc.RpcClient.getConnection(RpcClient.java:1531)
at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1430)
at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1649)
... 33 more
Caused by: java.lang.RuntimeException: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
at org.apache.hadoop.hbase.ipc.RpcClient$Connection$1.run(RpcClient.java:832)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
at org.apache.hadoop.hbase.ipc.RpcClient$Connection.handleSaslConnectionFailure(RpcClient.java:793)
at org.apache.hadoop.hbase.ipc.RpcClient$Connection.setupIOstreams(RpcClient.java:895)
... 36 more
Caused by: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:212)
at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:152)
at org.apache.hadoop.hbase.ipc.RpcClient$Connection.setupSaslConnection(RpcClient.java:767)
at org.apache.hadoop.hbase.ipc.RpcClient$Connection.access$600(RpcClient.java:356)
at org.apache.hadoop.hbase.ipc.RpcClient$Connection$2.run(RpcClient.java:888)
at org.apache.hadoop.hbase.ipc.RpcClient$Connection$2.run(RpcClient.java:885)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
at org.apache.hadoop.hbase.ipc.RpcClient$Connection.setupIOstreams(RpcClient.java:885)
... 36 more
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:121)
at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:223)
at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:193)
... 45 more
... View more
12-29-2015
08:00 PM
It doesn't work. Could you please open an Apache JIRA?
... View more
12-28-2015
06:15 PM
Hey guys, I get the below error when I use the keytab path as shown for the Phoenix JDBC connection. Can anyone please help me here? I have the keytab file locally on my laptop. keytab.path=C\:\\Users\\VBorhad\\krbProperties\\hdpsrvc.keytab Error : java.sql.SQLException: ERROR 102 (08001): Malformed connection url.
jdbc:phoenix:p006.unix.gsm1900.org,p001.unix.gsm1900.org,p002.unix.gsm1900.org,p003.unix.gsm1900.org:2181:/hbase-secure:srvc@HDP_EIT_DEV.com:C:\Users\VBorhad\krbProperties\hdpsrvc.keytab
at
org.apache.phoenix.exception.SQLExceptionCode$Factory$1.newException(SQLExceptionCode.java:337)
at
org.apache.phoenix.exception.SQLExceptionInfo.buildException(SQLExceptionInfo.java:133)
... View more
12-23-2015
10:03 PM
Hi, my code works fine and I get results when my keytab path does not contain ":" . I understand it's a token used by Phoenix internally, but if my path contains this token and I try to escape it using "\" , I still get a malformed URL error. Can you please guide me here? Is it a bug in the Phoenix connection-string code, or am I missing something? My keytab path in my property file: keytab.path=C\:\\Users\\VBorhad\\krbProperties\\hdpsrvc.keytab Error :
java.sql.SQLException: ERROR 102 (08001): Malformed connection url. jdbc:phoenix:p006.unix.gsm1900.org,p001.unix.gsm1900.org,p002.unix.gsm1900.org,p003.unix.gsm1900.org:2181:/hbase-secure:srvc@HDP_EIT_DEV.com:C:\Users\VBorhad\krbProperties\hdpsrvc.keytab
at org.apache.phoenix.exception.SQLExceptionCode$Factory$1.newException(SQLExceptionCode.java:337)
at org.apache.phoenix.exception.SQLExceptionInfo.buildException(SQLExceptionInfo.java:133)
... View more
Labels:
- Labels:
-
Apache Phoenix
12-23-2015
09:21 PM
thank you . It works now . I was missing maven dependency . steps : in pom.xml I added <repositories>
<repository>
<id>repo.hortonworks.com</id>
<name>Hortonworks HDP Maven Repository</name>
<url>http://repo.hortonworks.com/content/repositories/releases/</url>
</repository>
</repositories> <dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-core</artifactId>
<version>4.2.0.2.2.4.2-2</version>
</dependency> My connection url : conn = DriverManager.getConnection("jdbc:phoenix:" + zookeeperQuorum + ":" + port + ":" + "/hbase-secure" + ":" + krb_principal + ":" + kerberosKeytab);
... View more
12-23-2015
04:09 AM
I think I do not have the correct dependency. What Maven dependency should I use for phoenix-client.jar? I used the Phoenix incubating 4.0 version and it gives the above error. When I explicitly add the jar, it causes a different Kerberos issue, which is wrong.
... View more
12-23-2015
02:43 AM
Error : It enters this loop since it has more than 3 tokens . The
code is fine with first 3 tokens but not more than that.protected static
ConnectionInfo create(String url) { ........ if (!isMalformedUrl) {
if (tokenizer.hasMoreTokens() && !TERMINATOR.equals(token)) {
isMalformedUrl = true;
} else if (i > 1)
... View more
12-23-2015
02:06 AM
I get a Malformed connection url error from the ConnectionInfo class (PhoenixEmbeddedDriver) when I use the URL suggestion above; I am not sure what I am doing wrong. Removing credentials for obvious reasons and replacing just the names of the server and principal: jdbc:phoenix:d001.unix.gsm1900.org,d002.unix.gsm1900.org,d003.unix.gsm1900.org:2181:/hbase-secure:srvc@DEFAULT_DEV.com:/home/myname/krb/hdpsrvc.keytab
... View more
12-22-2015
11:54 PM
Hi, I have an HBase server that has Phoenix installed. It is using Kerberos. phoenix-4.2.0.2.2.4.2-2-client.jar phoenix-4.2.0.2.2.4.2-2-server.jar . I am trying to write Java code where I can use the Phoenix JDBC driver and run a simple SQL statement: " select * from emp ". What POM dependencies do I need for Phoenix? What should my connection string look like? I am using: conn = DriverManager.getConnection("jdbc:" + zookeeper + ":2181/hbase-secure:principal@default_domain.com:" + keytabPath); where principal@default_domain.com : krb.principal
... View more
Labels:
- Labels:
-
Apache HBase
-
Apache Phoenix
12-16-2015
07:00 PM
So the main problem was that I was using an HTTP REST webservice (running on a Linux server) to call HBase. Since HBase is kerberized, it rejected any request over HTTP, as that is unsecured. I had to change server.xml (Tomcat) on my Linux server to allow HTTPS requests, open port 8443, and restart Tomcat. Once I followed the above steps, my webservice REST requests were going through. References : https://steveloughran.gitbooks.io/kerberos_and_had... and https://tomcat.apache.org/tomcat-7.0-doc/ssl-howto... and http://stackoverflow.com/questions/22469838/implem...
... View more
12-15-2015
11:38 PM
I have krb5.conf set in my properties and i have verified it exists on my server at below location: in my property file : krb.principal=hdpsrvc@HDP_EIT_DEV.com
krb.config=/opt/app/apache-tomcat-8.0.15/conf/krb5.conf
keytab.path=/opt/app/apache-tomcat-8.0.15/conf/hdpsrvc.keytab and i verified that its being picked up. by below code : kerberosUser = props.getProperty("krb.principal");
kerberosKeytab = props.getProperty("keytab.path");
kerberoseConfig = props.getProperty("krb.config");
zookeeperQuorum = props.getProperty("zookeeperQuorum"); System.out.println("krb : principal :"+ kerberoseConfig); Hbase Code that I use to set kerberose: System.setProperty("javax.security.auth.useSubjectCredsOnly", "true");
System.setProperty("java.security.krb5.conf", kerberoseConfig);
org.apache.hadoop.conf.Configuration config = new org.apache.hadoop.conf.Configuration();
config.set("hadoop.security.authentication", "kerberos");
UserGroupInformation.setConfiguration(config); --- my code dies here and gives above error .
... View more
12-15-2015
06:31 AM
Hi, I think I am missing some dependency in my pom.xml to get my kerberose realm picked up correctly via rest webservice call . The code works fine and i get results when I deploy simple java project and run jar file directly from the server . But I get below error when I try to make rest service call from the Linux server . The code works fine on my laptop which is windows laptop . I verified that I have below config on my hbase box : $ hadoop version
Hadoop 2.6.0.2.2.4.2-2
Subversion git@github.com:hortonworks/hadoop.git -r 22a563ebe448969d07902aed869ac13c652b2872
Compiled by jenkins on 2015-03-31T19:49Z
Compiled with protoc 2.5.0
From source with checksum b3481c2cdbe2d181f2621331926e267
This command was run using /usr/hdp/2.2.4.2-2/hadoop/hadoop-common-2.6.0.2.2.4.2-2.jar
$ hadoop-client version
-ksh: hadoop-client: not found [No such file or directory]
$ hbase version
2015-12-14 15:18:10,348 INFO [main] util.VersionInfo: HBase 0.98.4.2.2.4.2-2-hadoop2
2015-12-14 15:18:10,348 INFO [main] util.VersionInfo: Subversion git://ip-10-0-0-5.ec2.internal/grid/0/jenkins/workspace/HDP-2.2.4.1-centos6/bigtop/build/hbase/rpm/BUILD/hbase-0.98.4.2.2.4.2 -r dd8a499345afc1ac49dc5ef212ba64b23abfe110 Error I am getting : at org.apache.hadoop.security.authentication.util.KerberosUtil.getDefaultRealm(KerberosUtil.java:84)
at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:63) Caused by: KrbException: Generic error (description in e-text) (60) - Unable to locate Kerberos realm
at sun.security.krb5.Config.getRealmFromDNS(Config.java:1102)
at sun.security.krb5.Config.getDefaultRealm(Config.java:987) My pom.xml : <projectxmlns="http://maven.apache.org/POM/4.0.0"xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>msoa.hbaseSampleOlderVersion</groupId> <artifactId>hbaseBeanType</artifactId> <packaging>war</packaging> <version>0.0.1-SNAPSHOT</version> <name>hbaseBeanType</name> <build> <finalName>hbaseBeanType</finalName> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>2.5.1</version> <inherited>true</inherited> <configuration> <source>1.7</source> <target>1.7</target> </configuration> </plugin> </plugins> </build> <dependencyManagement> <dependencies> <dependency> <groupId>org.glassfish.jersey</groupId> <artifactId>jersey-bom</artifactId> <version>${jersey.version}</version> <type>pom</type> <scope>import</scope> </dependency> </dependencies> </dependencyManagement> <!-- not req for local --> <repositories> <repository> <releases> <enabled>true</enabled> <updatePolicy>always</updatePolicy> <checksumPolicy>warn</checksumPolicy> </releases> <snapshots> <enabled>false</enabled> <updatePolicy>never</updatePolicy> <checksumPolicy>fail</checksumPolicy> </snapshots> <id>HDPReleases</id> <name>HDP Releases</name> <url>http://repo.hortonworks.com/content/repositories/releases/</url> <layout>default</layout> </repository> </repositories> <dependencies> <dependency> <groupId>org.glassfish.jersey.containers</groupId> <artifactId>jersey-container-servlet-core</artifactId> <!-- use the following artifactId if you don't need servlet 2.x compatibility <artifactId>jersey-container-servlet</artifactId>--> </dependency> <dependency> <groupId>org.glassfish.jersey.media</groupId> <artifactId>jersey-media-moxy</artifactId> </dependency> <dependency> <groupId>org.apache.cxf</groupId> 
<artifactId>cxf-bundle-jaxrs</artifactId> <version>2.7.7</version> </dependency> <!-- HBASE DEPENDENCY --> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>3.8.1</version> <scope>test</scope> </dependency> <!-- not req for local --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>2.6.0</version> </dependency> <!-- not req for local --> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-core</artifactId> <version>1.2.1</version> </dependency> <dependency> <groupId>org.apache.hbase</groupId> <artifactId>hbase-client</artifactId> <version>0.98.0-hadoop2</version> </dependency> <dependency> <groupId>org.apache.hbase</groupId> <artifactId>hbase-common</artifactId> <version>0.98.0-hadoop2</version> </dependency> <dependency> <groupId>org.apache.hbase</groupId> <artifactId>hbase-protocol</artifactId> <version>0.98.0-hadoop2</version> </dependency> <dependency> <groupId>jdk.tools</groupId> <artifactId>jdk.tools</artifactId> <scope>system</scope> <version>1.8.0_60</version> <systemPath>C:\Program Files\Java\jdk1.8.0_60\lib\tools.jar</systemPath> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> <version>4.2.3.RELEASE</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>4.2.3.RELEASE</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> <version>4.2.3.RELEASE</version> </dependency> </dependencies> <properties> <!-- <jersey.version>1.1</jersey.version> --> <jersey.version>2.16</jersey.version> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <hadoop.version>2.6.0.2.2.0.0-2041</hadoop.version> </properties> </project>
... View more
Labels:
- Labels:
-
Apache Hadoop
-
Apache HBase
12-10-2015
06:43 PM
Awesome ... I just did exactly that and it worked. Thanks for looking at it.
... View more
12-09-2015
07:55 PM
I am trying to connect to an HBase server running on a different server
from a Linux client and I get the below error. The code works fine from my
Windows laptop and I am able to connect to the HBase server and get results.
I think I am missing some dependency jars for my Linux server, because
when I added the hbase-client jar it worked from my laptop, which indicates
my code logic is correct. All of the configuration is being picked up
correctly, as I have verified it from my laptop. Please provide some
suggestions. I am passing hbase-site.xml,core-site.xml,hdfs-site.xml in
my resources. My port and ZooKeeper quorum are correct. My Kerberos code
works fine. I don't understand if this can be a permission issue too, or why this is happening and when it happens. Any help or suggestion is much appreciated. Code : connection is returned as null 😞 this.conf =HBaseConfiguration.create();
this.conf.set("hbase.zookeeper.quorum", zookeeperQuorum);
this.conf.set("hbase.zookeeper.property.clientPort", port);
this.conf.set("zookeeper.znode.parent","/hbase-secure");
// this.conf.set("hbase.client.retries.number", Integer.toString(35));
// this.conf.set("zookeeper.session.timeout", Integer.toString(20000));
//this.conf.set("zookeeper.recovery.retry", Integer.toString(1));
this.conf.set("hadoop.security.authentication","kerberos");
this.conf.set("hbase.security.authentication","kerberos");
this.conf.set("hbase.master.kerberos.principal", userName);this.conf.set("user.name",userName)
;try{this.connection =HConnectionManager.createConnection(conf);}catch(IOException e){// TODO Auto-generated catch block
e.printStackTrace();}
pom.xml :<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 <a href="http://maven.apache.org">http://maven.apache.org</a> /xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.msoa.hbase.client</groupId>
<artifactId>simpleHBase</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<name>HbaseWrite</name>
<url>http://maven.apache.org</url>
<build>
<plugins>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration><archive>
<manifest>
<mainClass>simpleHBase.actionClass</mainClass>
</manifest>
</archive>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs></configuration>
</plugin></plugins></build>
<!-- added for dev box -->
<repositories><repository><id>repo.hortonworks.com</id><name>Hortonworks HDP MavenRepository</name><url>http://repo.hortonworks.com/content/repositories/releases/</url></repository></repositories><!-- end dev box -->
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>jdk.tools</groupId><artifactId>jdk.tools</artifactId><scope>system</scope><version>1.7.0_60</version>
<systemPath>C:\Program Files\Java\jdk1.7.0_60\lib\tools.jar</systemPath></dependency>
<!-- adding to test on beam -->
<dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-common</artifactId><version>2.2.0</version></dependency>
<dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-hdfs</artifactId><version>2.2.0</version></dependency>
<dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-client</artifactId><version>2.2.0</version></dependency>
<!-- add protocol for beam test-->
<dependency><groupId>org.apache.hbase</groupId><artifactId>hbase-protocol</artifactId><version>0.98.0-hadoop2</version></dependency>
<dependency><groupId>org.apache.hbase</groupId><artifactId>hbase-client</artifactId><version>0.98.0-hadoop2</version></dependency>
<dependency><groupId>org.apache.hbase</groupId><artifactId>hbase-common</artifactId><version>0.98.0-hadoop2</version></dependency>
<dependency><groupId>org.apache.hbase</groupId><artifactId>hbase-protocol</artifactId><version>0.98.0-hadoop2</version></dependency>
<dependency><groupId>org.apache.hbase</groupId><artifactId>hbase-server</artifactId><version>0.98.0-hadoop2</version></dependency>
<dependency><groupId>org.springframework</groupId><artifactId>spring-core</artifactId><version>4.2.3.RELEASE</version></dependency>
<dependency><groupId>org.springframework</groupId><artifactId>spring-context</artifactId><version>4.2.3.RELEASE</version></dependency>
<dependency><groupId>org.springframework</groupId><artifactId>spring-beans</artifactId><version>4.2.3.RELEASE</version></dependency>
</dependencies>
Error :
java.io.IOException: java.lang.reflect.InvocationTargetException
at
org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:416)
at
org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:309)
at
simpleHBase.HBaseConnectionFactory.(HBaseConnectionFactory.java:99)
at simpleHBase.HBaseClient.(HBaseClient.java:26)
at simpleHBase.actionClass.main(actionClass.java:118) Caused
by: java.lang.reflect.InvocationTargetException
at
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at
java.lang.reflect.Constructor.newInstance(Constructor.java:408)
at
org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:414)
... 4 more Caused by: java.lang.ExceptionInInitializerError
at
org.apache.hadoop.hbase.ClusterId.parseFrom(ClusterId.java:64)
at
org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:69)
at
org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:83)
at
org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.retrieveClusterId(HConnectionManager.java:857)
at
org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.(HConnectionManager.java:662)
... 9 more Caused by: java.lang.RuntimeException: Failed to
create local dir /data0/hadoop/hbase/local/jars, DynamicClassLoader
failed to init
at
org.apache.hadoop.hbase.util.DynamicClassLoader.(DynamicClassLoader.java:94)
at
org.apache.hadoop.hbase.protobuf.ProtobufUtil.(ProtobufUtil.java:201)
... 14 more
... View more
Labels:
- Labels:
-
Apache Hadoop
-
Apache HBase