Support Questions
Find answers, ask questions, and share your expertise

I am using the Cloudera Docker image. In it, the HBase service is up and running, and I am able to perform all sorts of commands, such as list and scan, on a particular table from the HBase shell; but whenever I try to do the same through the Java API, I keep getting the error below.

I am using the Cloudera Docker image. In it, the HBase service is up and running, and I am able to perform all sorts of commands, such as list and scan, on a particular table from the HBase shell; but whenever I try to do the same through the Java API, I keep getting the error below.

Explorer
hbase(main):001:0> list
TABLE
ATLAS_ENTITY_AUDIT_EVENTS
atlas_titan
employee
iemployee
test

a. The hbase-site.xml is found in my classpath, and the configuration has been loaded successfully.

 

<!--    <property>
        <name>dfs.domain.socket.path</name>
        <value>/var/lib/hadoop-hdfs/dn_socket</value>
    </property>
 -->
    <property>
        <name>hbase.bucketcache.ioengine</name>
        <value>offheap</value>
    </property>

    <property>
        <name>hbase.bucketcache.percentage.in.combinedcache</name>
        <value></value>
    </property>

    <property>
        <name>hbase.bucketcache.size</name>
        <value>1024</value>
    </property>
<!-- 
    <property>
        <name>hbase.bulkload.staging.dir</name>
        <value>/apps/hbase/staging</value>
    </property> -->

    <property>
        <name>hbase.client.keyvalue.maxsize</name>
        <value>1048576</value>
    </property>

    <property>
        <name>hbase.client.retries.number</name>
        <value>35</value>
    </property>

    <property>
        <name>hbase.client.scanner.caching</name>
        <value>100</value>
    </property>

    <property>
        <name>hbase.cluster.distributed</name>
        <value>true</value>
    </property>

    <property>
        <name>hbase.coprocessor.master.classes</name>
        <value>org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor
        </value>
    </property>

    <property>
        <name>hbase.coprocessor.region.classes</name>
        <value>org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor
        </value>
    </property>

    <property>
        <name>hbase.coprocessor.regionserver.classes</name>
        <value>org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor
        </value>
    </property>

    <property>
        <name>hbase.defaults.for.version.skip</name>
        <value>true</value>
    </property>

    <property>
        <name>hbase.hregion.majorcompaction</name>
        <value>604800000</value>
    </property>

    <property>
        <name>hbase.hregion.majorcompaction.jitter</name>
        <value>0.50</value>
    </property>

    <property>
        <name>hbase.hregion.max.filesize</name>
        <value>10737418240</value>
    </property>

    <property>
        <name>hbase.hregion.memstore.block.multiplier</name>
        <value>4</value>
    </property>

    <property>
        <name>hbase.hregion.memstore.flush.size</name>
        <value>134217728</value>
    </property>

    <property>
        <name>hbase.hregion.memstore.mslab.enabled</name>
        <value>true</value>
    </property>

    <property>
        <name>hbase.hstore.blockingStoreFiles</name>
        <value>10</value>
    </property>

    <property>
        <name>hbase.hstore.compaction.max</name>
        <value>10</value>
    </property>

    <property>
        <name>hbase.hstore.compactionThreshold</name>
        <value>3</value>
    </property>

    <property>
        <name>hbase.local.dir</name>
        <value>${hbase.tmp.dir}/local</value>
    </property>

    <property>
        <name>hbase.master.info.bindAddress</name>
        <value>0.0.0.0</value>
    </property>

    <property>
        <name>hbase.master.info.port</name>
        <value>16010</value>
    </property>

    <property>
        <name>hbase.master.port</name>
        <value>16000</value>
    </property>

    <property>
        <name>hbase.master.ui.readonly</name>
        <value>false</value>
    </property>

    <property>
        <name>hbase.region.server.rpc.scheduler.factory.class</name>
        <value></value>
    </property>

    <property>
        <name>hbase.regionserver.global.memstore.size</name>
        <value>0.4</value>
    </property>

    <property>
        <name>hbase.regionserver.handler.count</name>
        <value>30</value>
    </property>

    <property>
        <name>hbase.regionserver.info.port</name>
        <value>16030</value>
    </property>

    <property>
        <name>hbase.regionserver.port</name>
        <value>16020</value>
    </property>

    <property>
        <name>hbase.regionserver.wal.codec</name>
        <value>org.apache.hadoop.hbase.regionserver.wal.WALCellCodec</value>
    </property>

    <property>
        <name>hbase.rootdir</name>
        <value>hdfs://sandbox.hortonworks.com:8020/apps/hbase/data</value>
    </property>

    <property>
        <name>hbase.rpc.controllerfactory.class</name>
        <value></value>
    </property>

    <property>
        <name>hbase.rpc.protection</name>
        <value>authentication</value>
    </property>

    <property>
        <name>hbase.rpc.timeout</name>
        <value>90000</value>
    </property>

    <property>
        <name>hbase.security.authentication</name>
        <value>simple</value>
    </property>

    <property>
        <name>hbase.security.authorization</name>
        <value>true</value>
    </property>

    <property>
        <name>hbase.superuser</name>
        <value>hbase</value>
    </property>

    <property>
        <name>hbase.tmp.dir</name>
        <value>/tmp/hbase-${user.name}</value>
    </property>

    <property>
        <name>hbase.zookeeper.property.clientPort</name>
        <value>2181</value>
    </property>

    <property>
        <name>hbase.zookeeper.quorum</name>
        <value>sandbox.hortonworks.com</value>
    </property>

    <property>
        <name>hbase.zookeeper.useMulti</name>
        <value>true</value>
    </property>

    <property>
        <name>hfile.block.cache.size</name>
        <value>0.4</value>
    </property>

    <property>
        <name>phoenix.functions.allowUserDefinedFunctions</name>
        <value>
        </value>
    </property>

    <property>
        <name>phoenix.query.timeoutMs</name>
        <value>60000</value>
    </property>

    <property>
        <name>zookeeper.recovery.retry</name>
        <value>6</value>
    </property>

    <property>
        <name>zookeeper.session.timeout</name>
        <value>90000</value>
    </property>

    <property>
        <name>zookeeper.znode.parent</name>
        <value>/hbase-unsecure</value>
    </property>
    <!-- <property> <name>hbase.thrift.connection.max-idletime</name> <value>1800000</value> 
        </property> <property> <name>hbase.regionserver.executor.openregion.threads</name> 
        <value>100</value> </property> -->
    <property>
        <name>hbase.client.ipc.pool.type</name>
        <value>RoundRobinPool</value>
    </property>
    <property>
        <name>hbase.client.ipc.pool.size</name>
        <value>10</value>
    </property>

</configuration>

b. The Java connector for HBase:

 
/**
 * Connects to a remote HBase cluster (discovered via ZooKeeper) and creates
 * the "employee1" table with a single column family "cf" if it does not
 * already exist.
 */
public class HbaseConnector {
    private static final Logger LOGGER = LoggerFactory.getLogger(HbaseConnector.class);

    @SuppressWarnings("deprecation")
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // BUG FIX: 16000 is the HMaster RPC port (hbase.master.port in the
        // hbase-site.xml above). 16010 is the master *web UI* port
        // (hbase.master.info.port) and cannot serve client RPCs.
        conf.set("hbase.master", "sandbox.hortonworks.com:16000");
        conf.set("hbase.zookeeper.quorum", "sandbox.hortonworks.com");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        // Unsecured cluster: the HBase root znode is /hbase-unsecure
        // (see zookeeper.znode.parent in the pasted hbase-site.xml).
        conf.set("zookeeper.znode.parent", "/hbase-unsecure");

        final String TABLE_NAME = "employee1";
        // try-with-resources guarantees the connection and admin handle are
        // closed even when an exception is thrown mid-operation.
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Admin admin = connection.getAdmin()) {

            HTableDescriptor table = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
            table.addFamily(new HColumnDescriptor("cf"));

            if (!admin.tableExists(table.getTableName())) {
                System.out.print("Creating table. ");
                admin.createTable(table);
                System.out.println(" Done.");
            } else {
                // Previously the no-op path was silent; report it for clarity.
                LOGGER.info("Table {} already exists, nothing to do.", TABLE_NAME);
            }
        }
    }
}
c. The logs for the exception:
3.9.9.RELEASE-e4.12.0-win32-x86_64\sts-bundle\sts-3.9.9.RELEASE;;.
    2020-05-03 17:22:56 INFO  ZooKeeper:100 - Client environment:java.io.tmpdir=C:\Users\username\AppData\Local\Temp\
    2020-05-03 17:22:56 INFO  ZooKeeper:100 - Client environment:java.compiler=<NA>
    2020-05-03 17:22:56 INFO  ZooKeeper:100 - Client environment:os.name=Windows 10
    2020-05-03 17:22:56 INFO  ZooKeeper:100 - Client environment:os.arch=amd64
    2020-05-03 17:22:56 INFO  ZooKeeper:100 - Client environment:os.version=10.0
    2020-05-03 17:22:56 INFO  ZooKeeper:100 - Client environment:user.name=username
    2020-05-03 17:22:56 INFO  ZooKeeper:100 - Client environment:user.home=C:\Users\username
    2020-05-03 17:22:56 INFO  ZooKeeper:100 - Client environment:user.dir=C:\Users\username\Documents\workspace-sts-3.9.9.RELEASE\GDPR\HiveDataLoad
    2020-05-03 17:22:56 INFO  ZooKeeper:438 - Initiating client connection, connectString=localhost:2181 sessionTimeout=90000 watcher=hconnection-0x7b227d8d0x0, quorum=localhost:2181, baseZNode=/hbase-unsecure
    2020-05-03 17:22:57 INFO  ClientCnxn:975 - Opening socket connection to server 0:0:0:0:0:0:0:1/0:0:0:0:0:0:0:1:2181. Will not attempt to authenticate using SASL (unknown error)
    2020-05-03 17:22:59 WARN  ClientCnxn:1102 - Session 0x0 for server null, unexpected error, closing socket connection and attempting reconnect
    java.net.ConnectException: Connection refused: no further information
        at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
        at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
        at org.apache.zookeeper.ClientCnxnSocketNIO.doTransport(ClientCnxnSocketNIO.java:350)
        at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1081)
    2020-05-03 17:22:59 INFO  ClientCnxn:975 - Opening socket connection to server sandbox.hortonworks.com/127.0.0.1:2181. Will not attempt to authenticate using SASL (unknown error)
    2020-05-03 17:22:59 INFO  ClientCnxn:852 - Socket connection established, initiating session, client: /127.0.0.1:59128, server: sandbox.hortonworks.com/127.0.0.1:2181
    2020-05-03 17:22:59 INFO  ClientCnxn:1235 - Session establishment complete on server sandbox.hortonworks.com/127.0.0.1:2181, sessionid = 0x171d6619b910111, negotiated timeout = 40000
    2020-05-03 17:23:02 WARN  DomainSocketFactory:117 - The short-circuit local reads feature cannot be used because UNIX Domain sockets are not available on Windows.
    2020-05-03 17:23:58 INFO  ConnectionManager$HConnectionImplementation:1709 - Closing zookeeper sessionid=0x171d6619b910111
    2020-05-03 17:23:58 INFO  ZooKeeper:684 - Session: 0x171d6619b910111 closed
    2020-05-03 17:23:58 INFO  ClientCnxn:512 - EventThread shut down
    Exception in thread "main" org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after attempts=36, exceptions:
    Sun May 03 17:23:58 IST 2020, null, java.net.SocketTimeoutException: callTimeout=60000, callDuration=61129: row 'employee1,,' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=sandbox.hortonworks.com,16020,1588492448730, seqNum=0

        at org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.throwEnrichedException(RpcRetryingCallerWithReadReplicas.java:276)
        at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:207)
        at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:60)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
        at org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:320)
        at org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:295)
        at org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:160)
        at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:155)
        at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:802)
        at org.apache.hadoop.hbase.MetaTableAccessor.fullScan(MetaTableAccessor.java:602)
        at org.apache.hadoop.hbase.MetaTableAccessor.tableExists(MetaTableAccessor.java:366)
        at org.apache.hadoop.hbase.client.HBaseAdmin.tableExists(HBaseAdmin.java:396)
        at HbaseConnector.main(HbaseConnector.java:79)
    Caused by: java.net.SocketTimeoutException: callTimeout=60000, callDuration=61129: row 'employee1,,' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=sandbox.hortonworks.com,16020,1588492448730, seqNum=0
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:159)
        at org.apache.hadoop.hbase.client.ResultBoundedCompletionService$QueueingFuture.run(ResultBoundedCompletionService.java:65)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
    Caused by: java.net.ConnectException: Connection refused: no further information
        at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
        at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
        at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
        at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupConnection(RpcClientImpl.java:416)
        at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupIOstreams(RpcClientImpl.java:722)
        at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.writeRequest(RpcClientImpl.java:906)
        at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.tracedWriteRequest(RpcClientImpl.java:873)
        at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1242)
        at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:226)
        at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:331)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:34094)
        at org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:394)
        at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:203)
        at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:64)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
        at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:360)
        at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:334)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
        ... 4 more