<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>question Re: Exercise 1 Sqoop import fails in Support Questions</title>
    <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46890#M6065</link>
    <description>Thanks !</description>
    <pubDate>Wed, 02 Nov 2016 14:13:21 GMT</pubDate>
    <dc:creator>csguna</dc:creator>
    <dc:date>2016-11-02T14:13:21Z</dc:date>
    <item>
      <title>Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46864#M6059</link>
      <description>&lt;P&gt;All the Hadoop services in my Cloudera Manager are green, and yet my Sqoop import still fails badly.&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:34 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:34 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:36 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:36 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:36 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeInternal(DFSOutputStream.java:830)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:826)&lt;BR /&gt;16/11/01 19:13:40 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:40 WARN hdfs.DFSClient: Caught exception&lt;BR 
/&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:40 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:40 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:40 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:40 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:40 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:40 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at 
java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:40 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeInternal(DFSOutputStream.java:830)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:826)&lt;BR /&gt;16/11/01 19:13:41 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:41 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:42 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/01 19:13:43 INFO db.DBInputFormat: Using read commited transaction isolation&lt;BR /&gt;16/11/01 19:13:43 INFO mapreduce.JobSubmitter: number of splits:1&lt;BR /&gt;16/11/01 19:13:43 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1478051991187_0002&lt;BR /&gt;16/11/01 19:13:45 INFO impl.YarnClientImpl: Submitted application application_1478051991187_0002&lt;BR /&gt;16/11/01 19:13:45 INFO mapreduce.Job: The url to track the job: &lt;A href="http://quickstart.cloudera:8088/proxy/application_1478051991187_0002/" target="_blank"&gt;http://quickstart.cloudera:8088/proxy/application_1478051991187_0002/&lt;/A&gt;&lt;BR /&gt;16/11/01 19:13:45 INFO mapreduce.Job: Running job: job_1478051991187_0002&lt;BR /&gt;16/11/01 19:14:09 INFO mapreduce.Job: Job job_1478051991187_0002 running in uber mode : false&lt;BR /&gt;16/11/01 19:14:09 
INFO mapreduce.Job: map 0% reduce 0%&lt;BR /&gt;16/11/01 19:14:30 INFO mapreduce.Job: Task Id : attempt_1478051991187_0002_m_000000_0, Status : FAILED&lt;BR /&gt;Error: org.kitesdk.data.DatasetOperationException: Failed to append {"customer_id": 1, "customer_fname": "Richard", "customer_lname": "Hernandez", "customer_email": "XXXXXXXXX", "customer_password": "XXXXXXXXX", "customer_street": "6303 Heather Plaza", "customer_city": "Brownsville", "customer_state": "TX", "customer_zipcode": "78521"} to ParquetAppender{path=hdfs://quickstart.cloudera:8020/tmp/default/.temp/job_1478051991187_0002/mr/attempt_1478051991187_0002_m_000000_0/.50037358-6ff7-4964-8742-d6736bbbacb7.parquet.tmp, schema={"type":"record","name":"customers","fields":[{"name":"id","type":["null","int"],"doc":"Converted from 'int'","default":null},{"name":"name","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"email_preferences","type":["null",{"type":"record","name":"email_preferences","fields":[{"name":"email_format","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"frequency","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"categories","type":["null",{"type":"record","name":"categories","fields":[{"name":"promos","type":["null","boolean"],"doc":"Converted from 'boolean'","default":null},{"name":"surveys","type":["null","boolean"],"doc":"Converted from 'boolean'","default":null}]}],"default":null}]}],"default":null},{"name":"addresses","type":["null",{"type":"map","values":["null",{"type":"record","name":"addresses","fields":[{"name":"street_1","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"street_2","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"city","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"state","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"zip_code","type":["null","string"],"doc":"Converted from 'string'","default":null}]}]}],"doc":"Converted from 'map&amp;lt;string,struct&amp;lt;street_1:string,street_2:string,city:string,state:string,zip_code:string&amp;gt;&amp;gt;'","default":null},{"name":"orders","type":["null",{"type":"array","items":["null",{"type":"record","name":"orders","fields":[{"name":"order_id","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"order_date","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"items","type":["null",{"type":"array","items":["null",{"type":"record","name":"items","fields":[{"name":"product_id","type":["null","int"],"doc":"Converted from 'int'","default":null},{"name":"sku","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"name","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"price","type":["null","double"],"doc":"Converted from 'double'","default":null},{"name":"qty","type":["null","int"],"doc":"Converted from 'int'","default":null}]}]}],"doc":"Converted from 'array&amp;lt;struct&amp;lt;product_id:int,sku:string,name:string,price:double,qty:int&amp;gt;&amp;gt;'","default":null}]}]}],"doc":"Converted from 'array&amp;lt;struct&amp;lt;order_id:string,order_date:string,items:array&amp;lt;struct&amp;lt;product_id:int,sku:string,name:string,price:double,qty:int&amp;gt;&amp;gt;&amp;gt;&amp;gt;'","default":null}]}, fileSystem=DFS[DFSClient[clientName=DFSClient_attempt_1478051991187_0002_m_000000_0_-1040737999_1, 
ugi=cloudera (auth:SIMPLE)]], avroParquetWriter=parquet.avro.AvroParquetWriter@4c380929}&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.FileSystemWriter.write(FileSystemWriter.java:184)&lt;BR /&gt;at org.kitesdk.data.mapreduce.DatasetKeyOutputFormat$DatasetRecordWriter.write(DatasetKeyOutputFormat.java:325)&lt;BR /&gt;at org.kitesdk.data.mapreduce.DatasetKeyOutputFormat$DatasetRecordWriter.write(DatasetKeyOutputFormat.java:304)&lt;BR /&gt;at org.apache.hadoop.mapred.MapTask$NewDirectOutputCollector.write(MapTask.java:658)&lt;BR /&gt;at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)&lt;BR /&gt;at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)&lt;BR /&gt;at org.apache.sqoop.mapreduce.ParquetImportMapper.map(ParquetImportMapper.java:70)&lt;BR /&gt;at org.apache.sqoop.mapreduce.ParquetImportMapper.map(ParquetImportMapper.java:39)&lt;BR /&gt;at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)&lt;BR /&gt;at org.apache.sqoop.mapreduce.AutoProgressMapper.run(AutoProgressMapper.java:64)&lt;BR /&gt;at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)&lt;BR /&gt;at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)&lt;BR /&gt;at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)&lt;BR /&gt;at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)&lt;BR /&gt;Caused by: java.lang.ClassCastException: java.lang.String cannot be cast to org.apache.avro.generic.IndexedRecord&lt;BR /&gt;at org.apache.avro.generic.GenericData.getField(GenericData.java:658)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:164)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeRecord(AvroWriteSupport.java:149)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeValue(AvroWriteSupport.java:262)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:167)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.write(AvroWriteSupport.java:142)&lt;BR /&gt;at parquet.hadoop.InternalParquetRecordWriter.write(InternalParquetRecordWriter.java:116)&lt;BR /&gt;at parquet.hadoop.ParquetWriter.write(ParquetWriter.java:324)&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.ParquetAppender.append(ParquetAppender.java:75)&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.ParquetAppender.append(ParquetAppender.java:36)&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.FileSystemWriter.write(FileSystemWriter.java:178)&lt;BR /&gt;... 16 more&lt;/P&gt;
&lt;P&gt;16/11/01 19:14:47 INFO mapreduce.Job: Task Id : attempt_1478051991187_0002_m_000000_1, Status : FAILED&lt;BR /&gt;Error: org.kitesdk.data.DatasetOperationException: Failed to append {"customer_id": 1, "customer_fname": "Richard", "customer_lname": "Hernandez", "customer_email": "XXXXXXXXX", "customer_password": "XXXXXXXXX", "customer_street": "6303 Heather Plaza", "customer_city": "Brownsville", "customer_state": "TX", "customer_zipcode": "78521"} to ParquetAppender{path=hdfs://quickstart.cloudera:8020/tmp/default/.temp/job_1478051991187_0002/mr/attempt_1478051991187_0002_m_000000_1/.28314686-e026-4280-b624-260a2a95e50b.parquet.tmp, schema={"type":"record","name":"customers","fields":[{"name":"id","type":["null","int"],"doc":"Converted from 'int'","default":null},{"name":"name","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"email_preferences","type":["null",{"type":"record","name":"email_preferences","fields":[{"name":"email_format","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"frequency","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"categories","type":["null",{"type":"record","name":"categories","fields":[{"name":"promos","type":["null","boolean"],"doc":"Converted from 'boolean'","default":null},{"name":"surveys","type":["null","boolean"],"doc":"Converted from 'boolean'","default":null}]}],"default":null}]}],"default":null},{"name":"addresses","type":["null",{"type":"map","values":["null",{"type":"record","name":"addresses","fields":[{"name":"street_1","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"street_2","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"city","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"state","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"zip_code","type":["null","string"],"doc":"Converted from 'string'","default":null}]}]}],"doc":"Converted from 'map&amp;lt;string,struct&amp;lt;street_1:string,street_2:string,city:string,state:string,zip_code:string&amp;gt;&amp;gt;'","default":null},{"name":"orders","type":["null",{"type":"array","items":["null",{"type":"record","name":"orders","fields":[{"name":"order_id","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"order_date","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"items","type":["null",{"type":"array","items":["null",{"type":"record","name":"items","fields":[{"name":"product_id","type":["null","int"],"doc":"Converted from 'int'","default":null},{"name":"sku","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"name","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"price","type":["null","double"],"doc":"Converted from 'double'","default":null},{"name":"qty","type":["null","int"],"doc":"Converted from 'int'","default":null}]}]}],"doc":"Converted from 'array&amp;lt;struct&amp;lt;product_id:int,sku:string,name:string,price:double,qty:int&amp;gt;&amp;gt;'","default":null}]}]}],"doc":"Converted from 'array&amp;lt;struct&amp;lt;order_id:string,order_date:string,items:array&amp;lt;struct&amp;lt;product_id:int,sku:string,name:string,price:double,qty:int&amp;gt;&amp;gt;&amp;gt;&amp;gt;'","default":null}]}, fileSystem=DFS[DFSClient[clientName=DFSClient_attempt_1478051991187_0002_m_000000_1_-1610591894_1, ugi=cloudera (auth:SIMPLE)]], 
avroParquetWriter=parquet.avro.AvroParquetWriter@43569009}&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.FileSystemWriter.write(FileSystemWriter.java:184)&lt;BR /&gt;at org.kitesdk.data.mapreduce.DatasetKeyOutputFormat$DatasetRecordWriter.write(DatasetKeyOutputFormat.java:325)&lt;BR /&gt;at org.kitesdk.data.mapreduce.DatasetKeyOutputFormat$DatasetRecordWriter.write(DatasetKeyOutputFormat.java:304)&lt;BR /&gt;at org.apache.hadoop.mapred.MapTask$NewDirectOutputCollector.write(MapTask.java:658)&lt;BR /&gt;at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)&lt;BR /&gt;at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)&lt;BR /&gt;at org.apache.sqoop.mapreduce.ParquetImportMapper.map(ParquetImportMapper.java:70)&lt;BR /&gt;at org.apache.sqoop.mapreduce.ParquetImportMapper.map(ParquetImportMapper.java:39)&lt;BR /&gt;at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)&lt;BR /&gt;at org.apache.sqoop.mapreduce.AutoProgressMapper.run(AutoProgressMapper.java:64)&lt;BR /&gt;at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)&lt;BR /&gt;at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)&lt;BR /&gt;at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)&lt;BR /&gt;at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)&lt;BR /&gt;Caused by: java.lang.ClassCastException: java.lang.String cannot be cast to org.apache.avro.generic.IndexedRecord&lt;BR /&gt;at org.apache.avro.generic.GenericData.getField(GenericData.java:658)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:164)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeRecord(AvroWriteSupport.java:149)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeValue(AvroWriteSupport.java:262)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:167)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.write(AvroWriteSupport.java:142)&lt;BR /&gt;at parquet.hadoop.InternalParquetRecordWriter.write(InternalParquetRecordWriter.java:116)&lt;BR /&gt;at parquet.hadoop.ParquetWriter.write(ParquetWriter.java:324)&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.ParquetAppender.append(ParquetAppender.java:75)&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.ParquetAppender.append(ParquetAppender.java:36)&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.FileSystemWriter.write(FileSystemWriter.java:178)&lt;BR /&gt;... 16 more&lt;/P&gt;
&lt;P&gt;16/11/01 19:15:07 INFO mapreduce.Job: Task Id : attempt_1478051991187_0002_m_000000_2, Status : FAILED&lt;BR /&gt;Error: org.kitesdk.data.DatasetOperationException: Failed to append {"customer_id": 1, "customer_fname": "Richard", "customer_lname": "Hernandez", "customer_email": "XXXXXXXXX", "customer_password": "XXXXXXXXX", "customer_street": "6303 Heather Plaza", "customer_city": "Brownsville", "customer_state": "TX", "customer_zipcode": "78521"} to ParquetAppender{path=hdfs://quickstart.cloudera:8020/tmp/default/.temp/job_1478051991187_0002/mr/attempt_1478051991187_0002_m_000000_2/.3a99f42c-4eb1-4485-a3c3-6ba06bd52148.parquet.tmp, schema={"type":"record","name":"customers","fields":[{"name":"id","type":["null","int"],"doc":"Converted from 'int'","default":null},{"name":"name","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"email_preferences","type":["null",{"type":"record","name":"email_preferences","fields":[{"name":"email_format","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"frequency","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"categories","type":["null",{"type":"record","name":"categories","fields":[{"name":"promos","type":["null","boolean"],"doc":"Converted from 'boolean'","default":null},{"name":"surveys","type":["null","boolean"],"doc":"Converted from 'boolean'","default":null}]}],"default":null}]}],"default":null},{"name":"addresses","type":["null",{"type":"map","values":["null",{"type":"record","name":"addresses","fields":[{"name":"street_1","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"street_2","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"city","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"state","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"zip_code","type":["null","string"],"doc":"Converted from 'string'","default":null}]}]}],"doc":"Converted from 'map&amp;lt;string,struct&amp;lt;street_1:string,street_2:string,city:string,state:string,zip_code:string&amp;gt;&amp;gt;'","default":null},{"name":"orders","type":["null",{"type":"array","items":["null",{"type":"record","name":"orders","fields":[{"name":"order_id","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"order_date","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"items","type":["null",{"type":"array","items":["null",{"type":"record","name":"items","fields":[{"name":"product_id","type":["null","int"],"doc":"Converted from 'int'","default":null},{"name":"sku","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"name","type":["null","string"],"doc":"Converted from 'string'","default":null},{"name":"price","type":["null","double"],"doc":"Converted from 'double'","default":null},{"name":"qty","type":["null","int"],"doc":"Converted from 'int'","default":null}]}]}],"doc":"Converted from 'array&amp;lt;struct&amp;lt;product_id:int,sku:string,name:string,price:double,qty:int&amp;gt;&amp;gt;'","default":null}]}]}],"doc":"Converted from 'array&amp;lt;struct&amp;lt;order_id:string,order_date:string,items:array&amp;lt;struct&amp;lt;product_id:int,sku:string,name:string,price:double,qty:int&amp;gt;&amp;gt;&amp;gt;&amp;gt;'","default":null}]}, fileSystem=DFS[DFSClient[clientName=DFSClient_attempt_1478051991187_0002_m_000000_2_-1940758529_1, ugi=cloudera (auth:SIMPLE)]], 
avroParquetWriter=parquet.avro.AvroParquetWriter@249cfd31}&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.FileSystemWriter.write(FileSystemWriter.java:184)&lt;BR /&gt;at org.kitesdk.data.mapreduce.DatasetKeyOutputFormat$DatasetRecordWriter.write(DatasetKeyOutputFormat.java:325)&lt;BR /&gt;at org.kitesdk.data.mapreduce.DatasetKeyOutputFormat$DatasetRecordWriter.write(DatasetKeyOutputFormat.java:304)&lt;BR /&gt;at org.apache.hadoop.mapred.MapTask$NewDirectOutputCollector.write(MapTask.java:658)&lt;BR /&gt;at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)&lt;BR /&gt;at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)&lt;BR /&gt;at org.apache.sqoop.mapreduce.ParquetImportMapper.map(ParquetImportMapper.java:70)&lt;BR /&gt;at org.apache.sqoop.mapreduce.ParquetImportMapper.map(ParquetImportMapper.java:39)&lt;BR /&gt;at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)&lt;BR /&gt;at org.apache.sqoop.mapreduce.AutoProgressMapper.run(AutoProgressMapper.java:64)&lt;BR /&gt;at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)&lt;BR /&gt;at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)&lt;BR /&gt;at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:415)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1693)&lt;BR /&gt;at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)&lt;BR /&gt;Caused by: java.lang.ClassCastException: java.lang.String cannot be cast to org.apache.avro.generic.IndexedRecord&lt;BR /&gt;at org.apache.avro.generic.GenericData.getField(GenericData.java:658)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:164)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeRecord(AvroWriteSupport.java:149)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeValue(AvroWriteSupport.java:262)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:167)&lt;BR /&gt;at parquet.avro.AvroWriteSupport.write(AvroWriteSupport.java:142)&lt;BR /&gt;at parquet.hadoop.InternalParquetRecordWriter.write(InternalParquetRecordWriter.java:116)&lt;BR /&gt;at parquet.hadoop.ParquetWriter.write(ParquetWriter.java:324)&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.ParquetAppender.append(ParquetAppender.java:75)&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.ParquetAppender.append(ParquetAppender.java:36)&lt;BR /&gt;at org.kitesdk.data.spi.filesystem.FileSystemWriter.write(FileSystemWriter.java:178)&lt;BR /&gt;... 16 more&lt;/P&gt;
&lt;P&gt;16/11/01 19:15:25 INFO mapreduce.Job: map 100% reduce 0%&lt;BR /&gt;16/11/01 19:15:27 INFO mapreduce.Job: Job job_1478051991187_0002 failed with state FAILED due to: Task failed task_1478051991187_0002_m_000000&lt;BR /&gt;Job failed as tasks failed. failedMaps:1 failedReduces:0&lt;/P&gt;
&lt;P&gt;16/11/01 19:15:27 INFO mapreduce.Job: Counters: 8&lt;BR /&gt;Job Counters&lt;BR /&gt;Failed map tasks=4&lt;BR /&gt;Launched map tasks=4&lt;BR /&gt;Other local map tasks=4&lt;BR /&gt;Total time spent by all maps in occupied slots (ms)=33132032&lt;BR /&gt;Total time spent by all reduces in occupied slots (ms)=0&lt;BR /&gt;Total time spent by all map tasks (ms)=64711&lt;BR /&gt;Total vcore-seconds taken by all map tasks=64711&lt;BR /&gt;Total megabyte-seconds taken by all map tasks=33132032&lt;BR /&gt;16/11/01 19:15:27 WARN mapreduce.Counters: Group FileSystemCounters is deprecated. Use org.apache.hadoop.mapreduce.FileSystemCounter instead&lt;BR /&gt;16/11/01 19:15:27 INFO mapreduce.ImportJobBase: Transferred 0 bytes in 122.9588 seconds (0 bytes/sec)&lt;BR /&gt;16/11/01 19:15:27 WARN mapreduce.Counters: Group org.apache.hadoop.mapred.Task$Counter is deprecated. Use org.apache.hadoop.mapreduce.TaskCounter instead&lt;BR /&gt;16/11/01 19:15:27 INFO mapreduce.ImportJobBase: Retrieved 0 records.&lt;BR /&gt;16/11/01 19:15:27 ERROR tool.ImportAllTablesTool: Error during import: Import job failed!&lt;/P&gt;</description>
      <pubDate>Fri, 16 Sep 2022 10:46:23 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46864#M6059</guid>
      <dc:creator>harish172</dc:creator>
      <dc:date>2022-09-16T10:46:23Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46865#M6060</link>
      <description>&lt;P&gt;It's throwing a ClassCastException, meaning it is trying to cast&amp;nbsp;&lt;SPAN&gt;java.lang.String to org.apache.avro.generic.IndexedRecord, which is not compatible. Could you provide the table schema and your sqoop import command?&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;
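&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;If you are not sure how to get the schema, something along these lines should print it straight from MySQL (just a sketch, assuming the tutorial's retail_db connection details and the customers table):&lt;/P&gt;&lt;PRE&gt;sqoop eval \
    --connect jdbc:mysql://quickstart:3306/retail_db \
    --username retail_dba \
    --password cloudera \
    --query "DESCRIBE customers"&lt;/PRE&gt;</description>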
      <pubDate>Wed, 02 Nov 2016 02:43:21 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46865#M6060</guid>
      <dc:creator>csguna</dc:creator>
      <dc:date>2016-11-02T02:43:21Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46866#M6061</link>
      <description>&lt;P&gt;Here is my sqoop command from Exercise 1. I am not sure how to find out the table schema; I will research that shortly.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;sqoop import-all-tables \
    -m 1 \
    --connect jdbc:mysql://quickstart:3306/retail_db \
    --username=retail_dba \
    --password=cloudera \
    --compression-codec=snappy \
    --as-parquetfile \
    --warehouse-dir=/user/hive/warehouse \
    --hive-import&lt;/PRE&gt;</description>
      <pubDate>Wed, 02 Nov 2016 02:53:45 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46866#M6061</guid>
      <dc:creator>harish172</dc:creator>
      <dc:date>2016-11-02T02:53:45Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46871#M6062</link>
      <description>&lt;P&gt;Could you replace&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;--as-parquetfile&lt;/PRE&gt;&lt;P&gt;with&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;--as-sequencefile&lt;/PRE&gt;&lt;P&gt;and let me know if you are able to get past the error.&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 02 Nov 2016 04:35:14 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46871#M6062</guid>
      <dc:creator>csguna</dc:creator>
      <dc:date>2016-11-02T04:35:14Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46880#M6063</link>
      <description>&lt;P&gt;Doesn't work&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;[cloudera@quickstart ~]$ sqoop import-all-tables \&lt;BR /&gt;&amp;gt; -m 1 \&lt;BR /&gt;&amp;gt; --connect jdbc:mysql://quickstart:3306/retail_db \&lt;BR /&gt;&amp;gt; --username=retail_dba \&lt;BR /&gt;&amp;gt; --password=cloudera \&lt;BR /&gt;&amp;gt; --compression-codec=snappy \&lt;BR /&gt;&amp;gt; --as-sequencefile \&lt;BR /&gt;&amp;gt; --warehouse-dir=/user/hive/warehouse \&lt;BR /&gt;&amp;gt; --hive-import&lt;BR /&gt;Warning: /usr/lib/sqoop/../accumulo does not exist! Accumulo imports will fail.&lt;BR /&gt;Please set $ACCUMULO_HOME to the root of your Accumulo installation.&lt;BR /&gt;16/11/02 04:11:22 INFO sqoop.Sqoop: Running Sqoop version: 1.4.6-cdh5.8.0&lt;BR /&gt;16/11/02 04:11:22 WARN tool.BaseSqoopTool: Setting your password on the command-line is insecure. Consider using -P instead.&lt;BR /&gt;16/11/02 04:11:22 INFO tool.BaseSqoopTool: Using Hive-specific delimiters for output. You can override&lt;BR /&gt;16/11/02 04:11:22 INFO tool.BaseSqoopTool: delimiters with --fields-terminated-by, etc.&lt;BR /&gt;Hive import is not compatible with importing into SequenceFile format.&lt;BR /&gt;[cloudera@quickstart ~]$ ^C&lt;/P&gt;</description>
      <pubDate>Wed, 02 Nov 2016 11:12:35 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46880#M6063</guid>
      <dc:creator>harish172</dc:creator>
      <dc:date>2016-11-02T11:12:35Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46883#M6064</link>
      <description>&lt;P&gt;&lt;SPAN&gt;1. Type "show tables" in Hive and note down the tables.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;2. Check under&amp;nbsp;/user/hive/warehouse/, using&amp;nbsp;Hue -&amp;gt; File Browser or the command line (see the commands after the script below),&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;whether the customers or categories folders are already populated.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;If so,&amp;nbsp;&lt;/SPAN&gt;&lt;SPAN&gt;remove them using Hue -&amp;gt; File Browser -&amp;gt; Delete, or&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;a DROP TABLE command from Hive.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;Then re-run the script and please let me know.&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;&lt;EM&gt;Or&amp;nbsp;&lt;/EM&gt;&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;U&gt;Simply change the last line of the script:&amp;nbsp;&lt;/U&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;sqoop import-all-tables \
 -m 1 \
 --connect jdbc:mysql://quickstart:3306/retail_db \
 --username=retail_dba \
 --password=cloudera \
 --compression-codec=snappy \
 --as-sequencefile \
 --warehouse-dir=/user/hive/warehouse \
 --hive-overwrite&lt;/PRE&gt;
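&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;For the command-line check and clean-up in step 2, something like this should work (a rough sketch, assuming the default warehouse path from the exercise; adjust the folder names to whatever "show tables" listed):&lt;/P&gt;&lt;PRE&gt;hadoop fs -ls /user/hive/warehouse
hadoop fs -rm -r /user/hive/warehouse/categories
hadoop fs -rm -r /user/hive/warehouse/customers&lt;/PRE&gt;</description>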
      <pubDate>Wed, 02 Nov 2016 11:33:43 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46883#M6064</guid>
      <dc:creator>csguna</dc:creator>
      <dc:date>2016-11-02T11:33:43Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46890#M6065</link>
      <description>Thanks !</description>
      <pubDate>Wed, 02 Nov 2016 14:13:21 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46890#M6065</guid>
      <dc:creator>csguna</dc:creator>
      <dc:date>2016-11-02T14:13:21Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46891#M6066</link>
      <description>&lt;P&gt;Things I did to fix the issue:&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;1) Created a new VirtualBox sandbox environment.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;2) Launched the Cloudera Manager Express environment.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;3) Went to Hue &amp;gt; Hive editor and deleted all the tables within it.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;4) Re-ran Exercise 1:&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;sqoop import-all-tables \
    -m {{cluster_data.worker_node_hostname.length}} \
    --connect jdbc:mysql://{{cluster_data.manager_node_hostname}}:3306/retail_db \
    --username=retail_dba \
    --password=cloudera \
    --compression-codec=snappy \
    --as-parquetfile \
    --warehouse-dir=/user/hive/warehouse \
    --hive-import&lt;/PRE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;5) Check all the tables within Hive and you are good to go.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;But there is a minor glitch, i.e. an exception, which I am curious about.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;/11/02 07:08:32 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/02 07:08:33 INFO db.DBInputFormat: Using read commited transaction isolation&lt;BR /&gt;16/11/02 07:08:33 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/11/02 07:08:33 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 02 Nov 2016 14:15:28 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/46891#M6066</guid>
      <dc:creator>harish172</dc:creator>
      <dc:date>2016-11-02T14:15:28Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/48427#M6067</link>
      <description>&lt;P&gt;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/16544"&gt;@csguna&lt;/a&gt;&lt;/P&gt;&lt;P&gt;hadoop fs -ls /user/hive/warehouse//categories shows only .metadata&lt;/P&gt;&lt;P&gt;I cannot remove that file using hadoop fs -ls rm /user/hive/warehouse/categories/.metadata&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Also, in Hue &amp;gt; Query editor,&lt;/P&gt;&lt;P&gt;select * from categories keeps running for a while with no result. Also, delete from categories in the Hive query editor fails with "attempt to delete using transaction manager that does not support these operations".&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Lastly, hive on the command line shows nothing:&lt;/P&gt;&lt;P&gt;hive&amp;gt; show_tables&lt;/P&gt;&lt;P&gt;&amp;nbsp; &amp;nbsp; &amp;gt;&lt;/P&gt;&lt;P&gt;How do I get rid of the categories table so I can run the command with overwrite as you mentioned?&lt;/P&gt;</description>
      <pubDate>Sat, 10 Dec 2016 18:21:39 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/48427#M6067</guid>
      <dc:creator>anotherrohit</dc:creator>
      <dc:date>2016-12-10T18:21:39Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/48634#M6068</link>
      <description>&lt;P&gt;If you want to "drop" the categories table, you should run a Hive query like this:&lt;/P&gt;&lt;P&gt;DROP TABLE categories;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;If you want to "delete" only the content of the table, then try "TRUNCATE TABLE categories;". That should work, or try deleting the table content in HDFS directly.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;As for your use of "hadoop fs", you should know that "hadoop fs -ls rm" does not exist.&lt;/P&gt;&lt;P&gt;For deleting HDFS files or folders it is simply "hadoop fs -rm".&lt;/P&gt;
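&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Putting it together, a minimal clean-up from the shell could look like this (a sketch, assuming the default warehouse path from the exercise; -r is needed because the target is a directory):&lt;/P&gt;&lt;PRE&gt;hive -e "DROP TABLE IF EXISTS categories;"
hadoop fs -rm -r /user/hive/warehouse/categories&lt;/PRE&gt;</description>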
      <pubDate>Mon, 19 Dec 2016 13:09:49 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/48634#M6068</guid>
      <dc:creator>mathieu.d</dc:creator>
      <dc:date>2016-12-19T13:09:49Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/48658#M6069</link>
      <description>&lt;P&gt;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/11415"&gt;@mathieu.d&lt;/a&gt;&lt;/P&gt;&lt;P&gt;Thanks, I was able to delete using hadoop fs -rm; however, I get an exception like the user above as well. The import does complete, though, and hadoop fs -ls /user/hive/warehouse shows me all the tables as below.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;[cloudera@quickstart ~]$ hadoop fs -ls /user/hive/warehouse&lt;BR /&gt;Found 6 items&lt;BR /&gt;drwxr-xr-x - cloudera supergroup 0 2016-12-19 14:10 /user/hive/warehouse/categories&lt;BR /&gt;drwxr-xr-x - cloudera supergroup 0 2016-12-19 14:11 /user/hive/warehouse/customers&lt;BR /&gt;drwxr-xr-x - cloudera supergroup 0 2016-12-19 14:11 /user/hive/warehouse/departments&lt;BR /&gt;drwxr-xr-x - cloudera supergroup 0 2016-12-19 14:11 /user/hive/warehouse/order_items&lt;BR /&gt;drwxr-xr-x - cloudera supergroup 0 2016-12-19 14:12 /user/hive/warehouse/orders&lt;BR /&gt;drwxr-xr-x - cloudera supergroup 0 2016-12-19&amp;nbsp; 14:12 /user/hive/warehouse/products&lt;BR /&gt;[cloudera@quickstart ~]$ hadoop fs -ls /user/hive/warehouse/categories&lt;BR /&gt;Found 2 items&lt;BR /&gt;-rw-r--r-- 1 cloudera supergroup 0 2016-12-19 14:10 /user/hive/warehouse/categories/_SUCCESS&lt;BR /&gt;-rw-r--r-- 1 cloudera supergroup 1427 2016-12-19 14:10 /user/hive/warehouse/categories/part-m-00000&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;But in the Hue browser &amp;gt; Impala, show tables; still shows only the categories table. Any idea what went wrong? Below is the command I used as suggested.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;sqoop import-all-tables -m 1 --connect jdbc:mysql://quickstart.cloudera:3306/retail_db --username=retail_dba --password=cloudera --compression-codec=snappy --as-sequencefile --warehouse-dir=/user/hive/warehouse --hive-overwrite&lt;/P&gt;&lt;P&gt;16/12/19 14:12:14 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/12/19 14:12:14 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/12/19 14:12:15 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;BR /&gt;at java.lang.Object.wait(Native Method)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1281)&lt;BR /&gt;at java.lang.Thread.join(Thread.java:1355)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:862)&lt;BR /&gt;at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:600)&lt;BR /&gt;at 
org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:789)&lt;BR /&gt;16/12/19 14:12:15 INFO db.DBInputFormat: Using read commited transaction isolation&lt;BR /&gt;16/12/19 14:12:15 WARN hdfs.DFSClient: Caught exception&lt;BR /&gt;java.lang.InterruptedException&lt;/P&gt;</description>
      <pubDate>Mon, 19 Dec 2016 22:44:06 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/48658#M6069</guid>
      <dc:creator>anotherrohit</dc:creator>
      <dc:date>2016-12-19T22:44:06Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/48662#M6070</link>
      <description>&lt;P&gt;&lt;a href="https://community.cloudera.com/t5/user/viewprofilepage/user-id/19341"&gt;@harish172&lt;/a&gt; were you able to figure out the cause of the exception? Are you able to see all the tables, including customers, departments, order_items, and products, under the warehouse in the Hive query browser? I only see the categories table after the exception was seen. The command I used is typed below in the thread.&lt;/P&gt;</description>
      <pubDate>Tue, 20 Dec 2016 01:33:27 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/48662#M6070</guid>
      <dc:creator>anotherrohit</dc:creator>
      <dc:date>2016-12-20T01:33:27Z</dc:date>
    </item>
    <item>
      <title>Re: Exercise 1 Sqoop import fails</title>
      <link>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/60923#M6071</link>
      <description>&lt;P&gt;Do you need the --override?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;I reran my Tutorial 1 and it didn't append new records... I thought it would. Why do you think it allowed it?&lt;/P&gt;</description>
      <pubDate>Sat, 14 Oct 2017 14:05:01 GMT</pubDate>
      <guid>https://community.cloudera.com/t5/Support-Questions/Exercise-1-Sqoop-import-fails/m-p/60923#M6071</guid>
      <dc:creator>oraman</dc:creator>
      <dc:date>2017-10-14T14:05:01Z</dc:date>
    </item>
  </channel>
</rss>

