<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: java.net.SocketTimeoutException on table 'hbase:meta' at region=hbase:meta while reading hbase s in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/88807#M21542</link>
    <description>&lt;P&gt;I had a similar issue while connecting HBase with Hive.&lt;/P&gt;&lt;P&gt;I solved it by providing the correct HBase table metadata structure (column family) in my "hbase.columns.mapping" property.&lt;/P&gt;</description>
    <pubDate>Mon, 08 Apr 2019 04:53:33 GMT</pubDate>
    <dc:creator>Kaizen</dc:creator>
    <dc:date>2019-04-08T04:53:33Z</dc:date>
    <item>
      <title>java.net.SocketTimeoutException on table 'hbase:meta' at region=hbase:meta while reading hbase stats</title>
      <link>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/45074#M21538</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;Environment: CDH 5.7.1, HBase 1.2.0, Java 1.7, Spark 1.6&lt;/P&gt;&lt;P&gt;We are reading stats of an HBase table through a Spark program and getting the error below while running it:&lt;/P&gt;&lt;PRE&gt;16/09/13 15:39:21 INFO client.ConnectionManager$HConnectionImplementation: Closing zookeeper sessionid=0x15706638bd6428f
16/09/13 15:39:21 INFO zookeeper.ZooKeeper: Session: 0x15706638bd6428f closed
16/09/13 15:39:21 INFO zookeeper.ClientCnxn: EventThread shut down
16/09/13 15:39:21 ERROR yarn.ApplicationMaster: User class threw exception: org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after attempts=36, exceptions:
Tue Sep 13 15:39:21 CDT 2016, null, java.net.SocketTimeoutException: callTimeout=60000, callDuration=68387: row 'hbase table,,00000000000000' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=cdh_host,60020,1473796634780, seqNum=0

org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after attempts=36, exceptions:
Tue Sep 13 15:39:21 CDT 2016, null, java.net.SocketTimeoutException: callTimeout=60000, callDuration=68387: row 'hbase table,,00000000000000' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=cdh_host,60020,1473796634780, seqNum=0

	at org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.throwEnrichedException(RpcRetryingCallerWithReadReplicas.java:276)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:207)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:60)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
	at org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:320)
	at org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:295)
	at org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:160)
	at org.apache.hadoop.hbase.client.ClientScanner.&amp;lt;init&amp;gt;(ClientScanner.java:155)
	at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:867)
	at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:193)
	at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:89)
	at org.apache.hadoop.hbase.client.MetaScanner.allTableRegions(MetaScanner.java:324)
	at org.apache.hadoop.hbase.client.HRegionLocator.getAllRegionLocations(HRegionLocator.java:88)
	at org.apache.hadoop.hbase.util.RegionSizeCalculator.init(RegionSizeCalculator.java:94)
	at org.apache.hadoop.hbase.util.RegionSizeCalculator.&amp;lt;init&amp;gt;(RegionSizeCalculator.java:81)
	at org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.getSplits(TableInputFormatBase.java:256)
	at org.apache.hadoop.hbase.mapreduce.TableInputFormat.getSplits(TableInputFormat.java:239)
	at org.apache.spark.rdd.NewHadoopRDD.getPartitions(NewHadoopRDD.scala:120)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1963)
	at org.apache.spark.rdd.RDD$$anonfun$reduce$1.apply(RDD.scala:1025)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
	at org.apache.spark.rdd.RDD.reduce(RDD.scala:1007)
	at org.apache.spark.rdd.DoubleRDDFunctions$$anonfun$stats$1.apply(DoubleRDDFunctions.scala:42)
	at org.apache.spark.rdd.DoubleRDDFunctions$$anonfun$stats$1.apply(DoubleRDDFunctions.scala:42)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
	at org.apache.spark.rdd.DoubleRDDFunctions.stats(DoubleRDDFunctions.scala:41)
	at org.apache.spark.api.java.JavaDoubleRDD.stats(JavaDoubleRDD.scala:183)
	at com.cisco.eng.sdaf.profiler.hbase.HbaseDataProfiler.sdafCatalogProfile(HbaseDataProfiler.java:227)
	at com.cisco.eng.sdaf.profiler.hbase.HbaseDataProfiler.main(HbaseDataProfiler.java:72)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:542)
Caused by: java.net.SocketTimeoutException: callTimeout=60000, callDuration=68387: row 'hbase table,,00000000000000' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=cdh_host,60020,1473796634780, seqNum=0
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:159)
	at org.apache.hadoop.hbase.client.ResultBoundedCompletionService$QueueingFuture.run(ResultBoundedCompletionService.java:65)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hbase.exceptions.ConnectionClosingException: Call to cdh_host/ip:60020 failed on local exception: org.apache.hadoop.hbase.exceptions.ConnectionClosingException: Connection to cdh_host/ip:60020 is closing. Call id=9, waitTime=4
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient.wrapException(AbstractRpcClient.java:288)
	at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1272)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:226)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:331)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:34094)
	at org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:394)
	at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:203)
	at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:64)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:360)
	at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:334)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
	... 4 more
Caused by: org.apache.hadoop.hbase.exceptions.ConnectionClosingException: Connection to cdh_host/ip:60020 is closing. Call id=9, waitTime=4
	at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.cleanupCalls(RpcClientImpl.java:1084)
	at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.close(RpcClientImpl.java:863)
	at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.run(RpcClientImpl.java:580)
16/09/13 15:39:21 INFO yarn.ApplicationMaster: Final app status: FAILED, exitCode: 15, (reason: User class threw exception: org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after attempts=36, exceptions:
Tue Sep 13 15:39:21 CDT 2016, null, java.net.SocketTimeoutException: callTimeout=60000, callDuration=68387: row 'hbase table,,00000000000000' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=cdh_host,60020,1473796634780, seqNum=0
)
16/09/13 15:39:21 INFO spark.SparkContext: Invoking stop() from shutdown hook&lt;/PRE&gt;&lt;P&gt;Appreciate any help here.&lt;/P&gt;</description>
      <pubDate>Fri, 16 Sep 2022 10:39:10 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/45074#M21538</guid>
      <dc:creator>RaviPapisetti</dc:creator>
      <dc:date>2022-09-16T10:39:10Z</dc:date>
    </item>
    <item>
      <title>Re: java.net.SocketTimeoutException on table 'hbase:meta' at region=hbase:meta while reading hbase s</title>
      <link>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/45389#M21539</link>
      <description>&lt;P&gt;This was solved by passing hbase-site.xml to the job via the spark-submit --files parameter.&lt;/P&gt;
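&lt;P&gt;For example, an invocation along these lines (the class name, jar, and paths are just placeholders):&lt;/P&gt;&lt;PRE&gt;spark-submit \
  --master yarn \
  --deploy-mode cluster \
  --class com.example.HbaseStatsJob \
  --files /etc/hbase/conf/hbase-site.xml \
  hbase-stats-job.jar&lt;/PRE&gt;</description>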
      <pubDate>Wed, 21 Sep 2016 17:55:01 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/45389#M21539</guid>
      <dc:creator>RaviPapisetti</dc:creator>
      <dc:date>2016-09-21T17:55:01Z</dc:date>
    </item>
    <item>
      <title>Re: java.net.SocketTimeoutException on table 'hbase:meta' at region=hbase:meta while reading hbase s</title>
      <link>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/84955#M21540</link>
      <description>&lt;P&gt;It still fails after adding the --files parameter. Could you post your code? Thanks.&lt;/P&gt;</description>
      <pubDate>Wed, 16 Jan 2019 02:29:19 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/84955#M21540</guid>
      <dc:creator>edcSam</dc:creator>
      <dc:date>2019-01-16T02:29:19Z</dc:date>
    </item>
    <item>
      <title>Re: java.net.SocketTimeoutException on table 'hbase:meta' at region=hbase:meta while reading hbase s</title>
      <link>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/88517#M21541</link>
      <description>&lt;P&gt;Can you explain or share the code, please?&lt;/P&gt;</description>
      <pubDate>Sun, 31 Mar 2019 19:39:47 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/88517#M21541</guid>
      <dc:creator>nayha</dc:creator>
      <dc:date>2019-03-31T19:39:47Z</dc:date>
    </item>
    <item>
      <title>Re: java.net.SocketTimeoutException on table 'hbase:meta' at region=hbase:meta while reading hbase s</title>
      <link>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/88807#M21542</link>
      <description>&lt;P&gt;I had a similar issue while connecting HBase with Hive.&lt;/P&gt;&lt;P&gt;I solved it by providing the correct HBase table metadata structure (column family) in my "hbase.columns.mapping" property.&lt;/P&gt;
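&lt;P&gt;For example, a Hive table definition along these lines (the table, column, and column-family names are just placeholders):&lt;/P&gt;&lt;PRE&gt;CREATE EXTERNAL TABLE hbase_profile (
  rowkey STRING,
  val DOUBLE
)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
  "hbase.columns.mapping" = ":key,cf1:val"
)
TBLPROPERTIES ("hbase.table.name" = "my_hbase_table");&lt;/PRE&gt;</description>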
      <pubDate>Mon, 08 Apr 2019 04:53:33 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/88807#M21542</guid>
      <dc:creator>Kaizen</dc:creator>
      <dc:date>2019-04-08T04:53:33Z</dc:date>
    </item>
    <item>
      <title>Re: java.net.SocketTimeoutException on table 'hbase:meta' at region=hbase:meta while reading hbase s</title>
      <link>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/281892#M209638</link>
      <description>&lt;P&gt;Did you resolve this error? I'm facing the same problem.&lt;/P&gt;</description>
      <pubDate>Sat, 02 Nov 2019 08:00:02 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/java-net-SocketTimeoutException-on-table-hbase-meta-at/m-p/281892#M209638</guid>
      <dc:creator>ChuHop</dc:creator>
      <dc:date>2019-11-02T08:00:02Z</dc:date>
    </item>
  </channel>
</rss>