<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Unable to copy hdfs files to hdp sandbox docker in Archives of Support Questions (Read Only)</title>
    <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239301#M85633</link>
    <description>&lt;P&gt;I needed to open a TCP port and not an HTTP port. That resolved the issue.&lt;/P&gt;</description>
    <pubDate>Thu, 03 Jan 2019 13:44:11 GMT</pubDate>
    <dc:creator>aasha_medhi2004</dc:creator>
    <dc:date>2019-01-03T13:44:11Z</dc:date>
    <item>
      <title>Unable to copy hdfs files to hdp sandbox docker</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239299#M85631</link>
      <description>&lt;P&gt;I have set up the HDP Docker sandbox as explained in the docs. The containers are up and running and Ambari is also running. I am trying to do a copyFromLocal (HDFS copy) from the host machine, but it gives this error.&lt;/P&gt;&lt;PRE&gt;amedhi:~ amedhi$ hadoop fs -copyFromLocal trial.txt hdfs://sandbox-hdp.hortonworks.com:8020/tmp/
2018-12-19 14:27:33,103 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2018-12-19 14:28:34,062 INFO hdfs.DataStreamer: Exception in createBlockOutputStream blk_1073743875_3061
org.apache.hadoop.net.ConnectTimeoutException: 60000 millis timeout while waiting for channel to be ready for connect. ch : java.nio.channels.SocketChannel[connection-pending remote=/172.18.0.2:50010]
                at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:534)
                at org.apache.hadoop.hdfs.DataStreamer.createSocketForPipeline(DataStreamer.java:253)
                at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1725)
                at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1679)
                at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
2018-12-19 14:28:34,063 WARN hdfs.DataStreamer: Abandoning BP-1419118625-172.17.0.2-1543512323726:blk_1073743875_3061
2018-12-19 14:28:34,078 WARN hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[172.18.0.2:50010,DS-6c34ba72-0587-4927-88a1-781ba7d444d9,DISK]
2018-12-19 14:28:34,105 WARN hdfs.DataStreamer: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /tmp/trial.txt._COPYING_ could only be written to 0 of the 1 minReplication nodes. There are 1 datanode(s) running and 1 node(s) are excluded in this operation.
                at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2121)
                at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:286)
                at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2706)
                at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
                at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
                at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
                at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
                at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)
                at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:876)
                at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:822)
                at java.security.AccessController.doPrivileged(Native Method)
                at javax.security.auth.Subject.doAs(Subject.java:422)
                at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
                at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2682)
 
                at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1497)
                at org.apache.hadoop.ipc.Client.call(Client.java:1443)
                at org.apache.hadoop.ipc.Client.call(Client.java:1353)
                at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
                at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
                at com.sun.proxy.$Proxy11.addBlock(Unknown Source)
                at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:510)
                at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
                at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
                at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
                at java.lang.reflect.Method.invoke(Method.java:498)
                at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
                at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
                at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
                at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
                at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
                at com.sun.proxy.$Proxy12.addBlock(Unknown Source)
                at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1078)
                at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
                at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
                at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
copyFromLocal: File /tmp/trial.txt._COPYING_ could only be written to 0 of the 1 minReplication nodes. There are 1 datanode(s) running and 1 node(s) are excluded in this operation.


&lt;/PRE&gt;&lt;P&gt;I made the following fixes to try to solve the above issue.&lt;/P&gt;&lt;P&gt;1. Opened HTTP port 50010 on the Docker container&lt;/P&gt;&lt;P&gt;2. Set the property dfs.client.use.datanode.hostname to true in my host machine's Hadoop conf and also in the Ambari conf&lt;/P&gt;&lt;P&gt;Now I am facing the following issue. Please note that the Hadoop version is the same on my host machine and in Docker.&lt;/P&gt;&lt;PRE&gt;2018-12-20 09:55:05,045 INFO hdfs.DataStreamer: Exception in createBlockOutputStream blk_1073743815_2999
com.google.protobuf.InvalidProtocolBufferException: Protocol message end-group tag did not match expected tag.
	at com.google.protobuf.InvalidProtocolBufferException.invalidEndTag(InvalidProtocolBufferException.java:94)
	at com.google.protobuf.CodedInputStream.checkLastTagWas(CodedInputStream.java:124)
	at com.google.protobuf.AbstractParser.parsePartialFrom(AbstractParser.java:202)
	at com.google.protobuf.AbstractParser.parseFrom(AbstractParser.java:217)
	at com.google.protobuf.AbstractParser.parseFrom(AbstractParser.java:223)
	at com.google.protobuf.AbstractParser.parseFrom(AbstractParser.java:49)
	at org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$BlockOpResponseProto.parseFrom(DataTransferProtos.java:23592)
	at org.apache.hadoop.hdfs.DataStreamer.createBlockOutputStream(DataStreamer.java:1761)
	at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1679)
	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
2018-12-20 09:55:05,048 WARN hdfs.DataStreamer: Abandoning BP-1419118625-172.17.0.2-1543512323726:blk_1073743815_2999
2018-12-20 09:55:05,055 WARN hdfs.DataStreamer: Excluding datanode DatanodeInfoWithStorage[172.18.0.2:50010,DS-6c34ba72-0587-4927-88a1-781ba7d444d9,DISK]
2018-12-20 09:55:05,075 WARN hdfs.DataStreamer: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /tmp/hdfs-site.xml._COPYING_ could only be written to 0 of the 1 minReplication nodes. There are 1 datanode(s) running and 1 node(s) are excluded in this operation.
	at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2121)
	at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:286)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2706)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:876)
	at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:822)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2682)


	at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1497)
	at org.apache.hadoop.ipc.Client.call(Client.java:1443)
	at org.apache.hadoop.ipc.Client.call(Client.java:1353)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
	at com.sun.proxy.$Proxy11.addBlock(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:510)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
	at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
	at com.sun.proxy.$Proxy12.addBlock(Unknown Source)
	at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1078)
	at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
	at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
copyFromLocal: File /tmp/hdfs-site.xml._COPYING_ could only be written to 0 of the 1 minReplication nodes. There are 1 datanode(s) running and 1 node(s) are excluded in this operation.
&lt;/PRE&gt;</description>
      <pubDate>Thu, 20 Dec 2018 21:36:48 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239299#M85631</guid>
      <dc:creator>aasha_medhi2004</dc:creator>
      <dc:date>2018-12-20T21:36:48Z</dc:date>
    </item>
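    <!-- A minimal sketch of the two fixes described in the question above, for a host-side HDFS
         client writing into the HDP sandbox. The image name "hortonworks/sandbox-hdp" and the use
         of plain "docker run" port publishing are assumptions; the sandbox start script may manage
         the container differently.

         # Publish the DataNode transfer port (50010) alongside the NameNode RPC port (8020) when
         # the sandbox container is created, so the host-side client can reach both (hypothetical):
         #   docker run ... -p 8020:8020 -p 50010:50010 hortonworks/sandbox-hdp

         # On the host, tell the HDFS client to connect to DataNodes by hostname rather than by
         # their container-internal IP (172.18.0.2 is unreachable from the host). Add this to the
         # client's hdfs-site.xml and mirror the setting in Ambari, as the question describes:
         #   <property>
         #     <name>dfs.client.use.datanode.hostname</name>
         #     <value>true</value>
         #   </property>
    -->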
    <item>
      <title>Re: Unable to copy hdfs files to hdp sandbox docker</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239300#M85632</link>
      <description>&lt;P&gt;Not very sure, but can you try the hdfs command instead? It should be configured to include the necessary jars for the execution:&lt;/P&gt;&lt;P&gt;hdfs dfs -copyFromLocal trial.txt hdfs://sandbox-hdp.hortonworks.com:8020/tmp/&lt;/P&gt;</description>
      <pubDate>Sun, 23 Dec 2018 21:25:02 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239300#M85632</guid>
      <dc:creator>arald</dc:creator>
      <dc:date>2018-12-23T21:25:02Z</dc:date>
    </item>
    <item>
      <title>Re: Unable to copy hdfs files to hdp sandbox docker</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239301#M85633</link>
      <description>&lt;P&gt;I needed to open a TCP port and not an HTTP port. That resolved the issue.&lt;/P&gt;</description>
      <pubDate>Thu, 03 Jan 2019 13:44:11 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239301#M85633</guid>
      <dc:creator>aasha_medhi2004</dc:creator>
      <dc:date>2019-01-03T13:44:11Z</dc:date>
    </item>
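    <!-- The accepted fix above says port 50010 had to be opened as a TCP port, not an HTTP port.
         If the sandbox deployment fronts the containers with an nginx proxy (as the HDP sandbox
         scripts do), 50010 carries the binary HDFS data-transfer protocol and must be forwarded
         as a raw TCP stream; forwarding it through an http server block would mangle the traffic,
         which is likely what produced the InvalidProtocolBufferException in the question. A
         hedged sketch of such a stream rule; the upstream name "sandbox-hdp" is an assumption:

         stream {
           server {
             listen 50010;
             proxy_pass sandbox-hdp:50010;
           }
         }
    -->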
    <item>
      <title>Re: Unable to copy hdfs files to hdp sandbox docker</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239302#M85634</link>
      <description>&lt;P&gt;Same error&lt;/P&gt;</description>
      <pubDate>Thu, 03 Jan 2019 13:44:30 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239302#M85634</guid>
      <dc:creator>aasha_medhi2004</dc:creator>
      <dc:date>2019-01-03T13:44:30Z</dc:date>
    </item>
    <item>
      <title>Re: Unable to copy hdfs files to hdp sandbox docker</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239303#M85635</link>
      <description>&lt;P&gt;@Aasha Medhi, could you please share more details about which TCP port to open, and how you opened it? Thanks.&lt;/P&gt;</description>
      <pubDate>Fri, 26 Jul 2019 11:50:32 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/239303#M85635</guid>
      <dc:creator>figo1984</dc:creator>
      <dc:date>2019-07-26T11:50:32Z</dc:date>
    </item>
    <item>
      <title>Re: Unable to copy hdfs files to hdp sandbox docker</title>
      <link>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/269160#M85636</link>
      <description>&lt;P&gt;Same error here. Port 50010 is already open, since I can telnet from the namenode to the datanode, but I still get the same error. Does anyone have a different solution?&lt;/P&gt;</description>
      <pubDate>Wed, 28 Aug 2019 12:25:46 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Archives-of-Support-Questions/Unable-to-copy-hdfs-files-to-hdp-sandbox-docker/m-p/269160#M85636</guid>
      <dc:creator>stelsavva</dc:creator>
      <dc:date>2019-08-28T12:25:46Z</dc:date>
    </item>
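    <!-- The reachability test in the last reply runs inside the cluster (namenode to datanode),
         while the failing hop in this thread is host to datanode. A quick check from the host,
         assuming the usual sandbox hostname mapping in /etc/hosts:

         # Resolve the hostname the client was given; it should point at the Docker host
         # (e.g. 127.0.0.1), not the container-internal 172.18.x.x address:
         getent hosts sandbox-hdp.hortonworks.com

         # Verify the DataNode transfer port is reachable from the host itself:
         nc -vz sandbox-hdp.hortonworks.com 50010
    -->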
  </channel>
</rss>

