<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Spark Hbase  connection issue in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/269586#M206947</link>
    <description>&lt;P&gt;Can you please elaborate in detail with commands you used to resolve?&lt;/P&gt;</description>
    <pubDate>Tue, 03 Sep 2019 03:33:46 GMT</pubDate>
    <dc:creator>sagar_shimpi_16</dc:creator>
    <dc:date>2019-09-03T03:33:46Z</dc:date>
    <item>
      <title>Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106691#M69569</link>
      <description>&lt;P&gt;Hi All,&lt;/P&gt;&lt;P&gt;Hitting the following error while I am trying to connect to HBase through Spark (using newHadoopAPIRDD) in HDP 2.4.2. Already tried increasing the RPC time in the hbase-site.xml file; it's not working. Any idea how to fix this?&lt;/P&gt;&lt;PRE&gt;Exception in thread "main" org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after attempts=36, exceptions:
Wed Nov 16 14:59:36 IST 2016, null, java.net.SocketTimeoutException: callTimeout=60000, callDuration=71216: row 'scores,,00000000000000' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=hklvadcnc06.hk.standardchartered.com,16020,1478491683763, seqNum=0


       at org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.throwEnrichedException(RpcRetryingCallerWithReadReplicas.java:271)
       at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:195)
       at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:59)
       at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
       at org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:320)
       at org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:295)
       at org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:160)
       at org.apache.hadoop.hbase.client.ClientScanner.&amp;lt;init&amp;gt;(ClientScanner.java:155)
       at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:821)
       at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:193)
       at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:89)
       at org.apache.hadoop.hbase.client.MetaScanner.allTableRegions(MetaScanner.java:324)
       at org.apache.hadoop.hbase.client.HRegionLocator.getAllRegionLocations(HRegionLocator.java:88)
       at org.apache.hadoop.hbase.util.RegionSizeCalculator.init(RegionSizeCalculator.java:94)
       at org.apache.hadoop.hbase.util.RegionSizeCalculator.&amp;lt;init&amp;gt;(RegionSizeCalculator.java:81)
       at org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.getSplits(TableInputFormatBase.java:256)
       at org.apache.hadoop.hbase.mapreduce.TableInputFormat.getSplits(TableInputFormat.java:237)
       at org.apache.spark.rdd.NewHadoopRDD.getPartitions(NewHadoopRDD.scala:120)
       at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
       at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
       at scala.Option.getOrElse(Option.scala:120)
       at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
       at org.apache.spark.SparkContext.runJob(SparkContext.scala:1929)
       at org.apache.spark.rdd.RDD.count(RDD.scala:1157)
       at scb.Hbasetest$.main(Hbasetest.scala:85)
       at scb.Hbasetest.main(Hbasetest.scala)
Caused by: java.net.SocketTimeoutException: callTimeout=60000, callDuration=71216: row 'scores,,00000000000000' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=hklvadcnc06.hk.standardchartered.com,16020,1478491683763, seqNum=0
       at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:159)
       at org.apache.hadoop.hbase.client.ResultBoundedCompletionService$QueueingFuture.run(ResultBoundedCompletionService.java:64)
       at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
       at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
       at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hbase.exceptions.ConnectionClosingException: Call to hklvadcnc06.hk.standardchartered.com/10.20.235.13:16020 failed on local exception: org.apache.hadoop.hbase.exceptions.ConnectionClosingException: Connection to hklvadcnc06.hk.standardchartered.com/10.20.235.13:16020 is closing. Call id=9, waitTime=171
       at org.apache.hadoop.hbase.ipc.RpcClientImpl.wrapException(RpcClientImpl.java:1281)
       at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1252)
       at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
       at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:287)
       at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:32651)
       at org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:372)
       at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:199)
       at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:62)
       at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
       at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:346)
       at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:320)
       at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
       ... 4 more
Caused by: org.apache.hadoop.hbase.exceptions.ConnectionClosingException: Connection to hklvadcnc06.hk.standardchartered.com/10.20.235.13:16020 is closing. Call id=9, waitTime=171
       at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.cleanupCalls(RpcClientImpl.java:1078)
       at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.close(RpcClientImpl.java:879)
       at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.run(RpcClientImpl.java:604)
16/11/16 14:59:36 INFO SparkContext: Invoking stop() from shutdown hook
&lt;/PRE&gt;</description>
      <pubDate>Wed, 16 Nov 2016 17:48:26 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106691#M69569</guid>
      <dc:creator>senthilkumarP</dc:creator>
      <dc:date>2016-11-16T17:48:26Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106692#M69570</link>
      <description>&lt;P&gt;Are you using secured cluster?&lt;/P&gt;</description>
      <pubDate>Wed, 16 Nov 2016 17:52:23 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106692#M69570</guid>
      <dc:creator>ssoldatov</dc:creator>
      <dc:date>2016-11-16T17:52:23Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106693#M69571</link>
      <description>&lt;P&gt;yes...using kerberos..&lt;/P&gt;&lt;P&gt;Already i have mentioned the kerberos auth in my coding&lt;/P&gt;&lt;P&gt;// 
UserGroupInformation.setConfiguration(&lt;U&gt;conf&lt;/U&gt;)&lt;/P&gt;&lt;P&gt;//  &lt;U&gt;val&lt;/U&gt;
userGroupInformation =
UserGroupInformation.loginUserFromKeytabAndReturnUGI("&lt;A href="mailto:hadoop1@ZONE1.SCB.NET"&gt;hadoop1@ZONE1.SCB.NET&lt;/A&gt;","C:\\Users\\1554160\\Downloads\\hadoop1.&lt;U&gt;keytab&lt;/U&gt;")&lt;/P&gt;&lt;P&gt;// 
UserGroupInformation.setLoginUser(userGroupInformation)&lt;/P&gt;</description>
      <pubDate>Wed, 16 Nov 2016 18:01:52 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106693#M69571</guid>
      <dc:creator>senthilkumarP</dc:creator>
      <dc:date>2016-11-16T18:01:52Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106694#M69572</link>
      <description>&lt;P&gt;check this topic how to use doAs: &lt;/P&gt;&lt;P&gt;&lt;A href="https://community.hortonworks.com/questions/46500/spark-cant-connect-to-hbase-using-kerberos-in-clus.html"&gt;https://community.hortonworks.com/questions/46500/spark-cant-connect-to-hbase-using-kerberos-in-clus.html&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Wed, 16 Nov 2016 18:11:05 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106694#M69572</guid>
      <dc:creator>ssoldatov</dc:creator>
      <dc:date>2016-11-16T18:11:05Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106695#M69573</link>
      <description>&lt;P&gt;Are you running the Spark job on the same cluster or from a different cluster? If it's from a different cluster, then check whether the nodes on the cluster have access to hklvadcnc06.hk.standardchartered.com&lt;/P&gt;</description>
      <pubDate>Wed, 16 Nov 2016 18:15:42 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106695#M69573</guid>
      <dc:creator>asinghal</dc:creator>
      <dc:date>2016-11-16T18:15:42Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106696#M69574</link>
      <description>&lt;P&gt;Already I have verified the above link, but it's not useful for my case.&lt;/P&gt;&lt;P&gt;Thanks!&lt;/P&gt;</description>
      <pubDate>Wed, 16 Nov 2016 18:45:48 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106696#M69574</guid>
      <dc:creator>senthilkumarP</dc:creator>
      <dc:date>2016-11-16T18:45:48Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106697#M69575</link>
      <description>&lt;P&gt;You can try to increase the log level to DEBUG for HBase and look at the RegionServer's log which your receive the Connection closing error from. This is likely the HBase server denying your RPC for some reason. There should be a DEBUG message which informs you why the RPC was rejected.&lt;/P&gt;</description>
      <pubDate>Wed, 16 Nov 2016 23:35:27 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106697#M69575</guid>
      <dc:creator>elserj</dc:creator>
      <dc:date>2016-11-16T23:35:27Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106698#M69576</link>
      <description>&lt;P&gt;Where can I find the region server logs?&lt;/P&gt;</description>
      <pubDate>Thu, 17 Nov 2016 00:54:48 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106698#M69576</guid>
      <dc:creator>senthilkumarP</dc:creator>
      <dc:date>2016-11-17T00:54:48Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106699#M69577</link>
      <description>&lt;P&gt;Wherever you configured them to be stored. It defaults to the standard log directory on Linux: /var/log/hbase&lt;/P&gt;</description>
      <pubDate>Thu, 17 Nov 2016 01:02:07 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106699#M69577</guid>
      <dc:creator>elserj</dc:creator>
      <dc:date>2016-11-17T01:02:07Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106700#M69578</link>
      <description>&lt;P&gt;Pointed hbase -conf to the hadoop classpath; that resolved the above problem. Thanks!&lt;/P&gt;</description>
      <pubDate>Fri, 18 Nov 2016 14:21:24 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106700#M69578</guid>
      <dc:creator>senthilkumarP</dc:creator>
      <dc:date>2016-11-18T14:21:24Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106701#M69579</link>
      <description>&lt;P&gt;thanks for your reply ..issue resolved &lt;/P&gt;</description>
      <pubDate>Fri, 18 Nov 2016 14:22:01 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106701#M69579</guid>
      <dc:creator>senthilkumarP</dc:creator>
      <dc:date>2016-11-18T14:22:01Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106702#M69580</link>
      <description>&lt;P&gt;Can you please let us know how do you pointed hbase -conf to hadoop classpath?&lt;/P&gt;&lt;P&gt;I am stuck in the same problem.&lt;/P&gt;</description>
      <pubDate>Wed, 18 Jul 2018 04:54:58 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/106702#M69580</guid>
      <dc:creator>pritam_paul19</dc:creator>
      <dc:date>2018-07-18T04:54:58Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Hbase  connection issue</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/269586#M206947</link>
      <description>&lt;P&gt;Can you please elaborate in detail with commands you used to resolve?&lt;/P&gt;</description>
      <pubDate>Tue, 03 Sep 2019 03:33:46 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Spark-Hbase-connection-issue/m-p/269586#M206947</guid>
      <dc:creator>sagar_shimpi_16</dc:creator>
      <dc:date>2019-09-03T03:33:46Z</dc:date>
    </item>
  </channel>
</rss>

