<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Question: HDFS command hdfs dfs -ls throws fatal internal error java.lang.ArrayIndexOutOfBoundsException: 1 (Support Questions)</title>
    <link>https://community.cloudera.com/t5/Support-Questions/HDFS-command-hdfs-dfs-ls-throws-fatal-internal-error-java/m-p/60939#M55614</link>
    <description>&lt;P&gt;Hello All,&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I have a .har file on HDFS for which I am trying to check the list of files that it archived, but I am getting the error below on a CDH 5.9.2 cluster.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;[user1@usnbka700p ~]$ hdfs dfs -ls har:///user/user1/HDFSArchival/Output1/Archive-13-10-2017-03-10.har&lt;BR /&gt;-ls: Fatal internal error&lt;BR /&gt;java.lang.ArrayIndexOutOfBoundsException: 1&lt;BR /&gt;at org.apache.hadoop.fs.HarFileSystem$HarStatus.&amp;lt;init&amp;gt;(HarFileSystem.java:597)&lt;BR /&gt;at org.apache.hadoop.fs.HarFileSystem$HarMetaData.parseMetaData(HarFileSystem.java:1201)&lt;BR /&gt;at org.apache.hadoop.fs.HarFileSystem$HarMetaData.access$000(HarFileSystem.java:1098)&lt;BR /&gt;at org.apache.hadoop.fs.HarFileSystem.initialize(HarFileSystem.java:166)&lt;BR /&gt;at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2711)&lt;BR /&gt;at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:382)&lt;BR /&gt;at org.apache.hadoop.fs.Path.getFileSystem(Path.java:296)&lt;BR /&gt;at org.apache.hadoop.fs.shell.PathData.expandAsGlob(PathData.java:325)&lt;BR /&gt;at org.apache.hadoop.fs.shell.Command.expandArgument(Command.java:235)&lt;BR /&gt;at org.apache.hadoop.fs.shell.Command.expandArguments(Command.java:218)&lt;BR /&gt;at org.apache.hadoop.fs.shell.FsCommand.processRawArguments(FsCommand.java:102)&lt;BR /&gt;at org.apache.hadoop.fs.shell.Command.run(Command.java:165)&lt;BR /&gt;at org.apache.hadoop.fs.FsShell.run(FsShell.java:315)&lt;BR /&gt;at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)&lt;BR /&gt;at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)&lt;BR /&gt;at org.apache.hadoop.fs.FsShell.main(FsShell.java:372)&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;However, I can see the size of the .har file as below.&lt;/P&gt;&lt;P&gt;hdfs dfs -du -s -h /user/user1/HDFSArchival/Output1/Archive-13-10-2017-03-10.har&lt;BR /&gt;16.5 G 49.5 G 
/user/user1/HDFSArchival/Output1/Archive-13-10-2017-03-10.har&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Also, the hdfs command hdfs dfs -ls works for other files. Kindly refer to the logs below.&lt;/P&gt;&lt;P&gt;hdfs dfs -ls har:///user/user1/HDFSArchival/Output1/Archive-12-10-2017-07-10.har&lt;BR /&gt;Found 1 items&lt;BR /&gt;drwxr-xr-x - user1 user1 0 2017-10-12 07:12 har:///user/user1/HDFSArchival/Output1/Archive-12-10-2017-07-10.har/ArchivalTemp&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Can you please advise on this?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks,&lt;/P&gt;&lt;P&gt;Priya&lt;/P&gt;</description>
    <pubDate>Mon, 16 Oct 2017 04:25:56 GMT</pubDate>
    <dc:creator>cdhhadoop</dc:creator>
    <dc:date>2017-10-16T04:25:56Z</dc:date>
  </channel>
</rss>

