
Mkdirs failed to create


Hi community,

I need to run HBaseBulkLoad, but it fails with an exception in main: "Mkdirs failed to create".

This is my code:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class HBaseBulkLoad {

  public static class BulkLoadMap extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {

    @Override
    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
      String line = value.toString();
      String[] parts = line.split(",");
      String rowKey = parts[0];
      ImmutableBytesWritable HKey = new ImmutableBytesWritable(Bytes.toBytes(rowKey));
      Put HPut = new Put(Bytes.toBytes(rowKey));
      HPut.addColumn(Bytes.toBytes("id"), Bytes.toBytes("name"), Bytes.toBytes(parts[1]));
      HPut.addColumn(Bytes.toBytes("id"), Bytes.toBytes("mail_id"), Bytes.toBytes(parts[2]));
      HPut.addColumn(Bytes.toBytes("id"), Bytes.toBytes("sal"), Bytes.toBytes(parts[3]));
      context.write(HKey, HPut);
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    String inputPath = args[0];
    // Define and set the host and the port
    conf.set("hbase.master", "tcb-inspiron-5559:16000");
    // Set the configuration: force the configuration
    conf.set("zookeeper.znode.parent", "/hbase-unsecure");
    // create a connection using createConnection()
    Connection connection = ConnectionFactory.createConnection(conf);

    Configuration config = new Configuration();
    // configure hdfs
    config.set("fs.defaultFS", "hdfs://tcb-inspiron-5559:8020");
    // hdfs permissions
    config.set("dfs.permissions.enabled", "true");
    String[] files = new GenericOptionsParser(config, args).getRemainingArgs();

    Table table = connection.getTable(TableName.valueOf("hbaseexample"));
    conf.set("hbase.mapred.outputtable", "hbaseexample");

    Job job = Job.getInstance(conf, "HBASE_BULK_LOAD");
    job.setMapOutputKeyClass(ImmutableBytesWritable.class);
    job.setMapOutputValueClass(Put.class);
    job.setSpeculativeExecution(false);
    job.setReduceSpeculativeExecution(false);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(HFileOutputFormat2.class);
    job.setJarByClass(HBaseBulkLoad.class);
    job.setMapperClass(HBaseBulkLoad.BulkLoadMap.class);
    FileInputFormat.setInputPaths(job, inputPath);
    TextOutputFormat.setOutputPath(job, new Path(args[1]));

    RegionLocator regionLocator = connection.getRegionLocator(TableName.valueOf("hbaseexample"));
    try {
      HFileOutputFormat2.configureIncrementalLoad(job, table, regionLocator);
      System.exit(job.waitForCompletion(true) ? 0 : 1);
    } finally {
      table.close();
      connection.close();
    }
  }
}

And this is the exception:

Exception in thread "main" java.io.IOException: Mkdirs failed to create /user/tcb/hbase-staging (exists=false, cwd=file:/home/tcb/Documents/workspace/HbaseExampleOne)
  at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:455)
  at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
  at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
  at org.apache.hadoop.io.SequenceFile$Writer.<init>(SequenceFile.java:1135)
  at org.apache.hadoop.io.SequenceFile$RecordCompressWriter.<init>(SequenceFile.java:1441)
  at org.apache.hadoop.io.SequenceFile.createWriter(SequenceFile.java:275)
  at org.apache.hadoop.io.SequenceFile.createWriter(SequenceFile.java:297)
  at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.writePartitions(HFileOutputFormat2.java:335)
  at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configurePartitioner(HFileOutputFormat2.java:593)
  at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configureIncrementalLoad(HFileOutputFormat2.java:440)
  at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configureIncrementalLoad(HFileOutputFormat2.java:405)
  at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configureIncrementalLoad(HFileOutputFormat2.java:386)
  at com.hbase.example.HBaseBulkLoad.main(HBaseBulkLoad.java:77)

Can you help me, please? Thanks.

1 REPLY

Super Collaborator

Check that you have a /user/tcb directory on HDFS. Log in as the hdfs user and run the following commands:

hadoop fs -mkdir /user/tcb

hadoop fs -chown tcb /user/tcb
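
Also, the cwd=file:/home/tcb/... part of the stack trace suggests the job is using the local filesystem rather than HDFS: in the posted code, fs.defaultFS is set on a second Configuration object (config) that is never passed to the Job, so HFileOutputFormat2 tries to create its partition file under file://. A minimal sketch of the idea, assuming the NameNode address hdfs://tcb-inspiron-5559:8020 taken from the question's own (unused) config object:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.mapreduce.Job;

// Sketch only: set fs.defaultFS on the SAME Configuration the Job is built from,
// so the bulk-load staging/partition files are created on HDFS, not on file://
Configuration conf = HBaseConfiguration.create();
conf.set("hbase.master", "tcb-inspiron-5559:16000");       // from the question
conf.set("zookeeper.znode.parent", "/hbase-unsecure");     // from the question
conf.set("fs.defaultFS", "hdfs://tcb-inspiron-5559:8020"); // assumption: NameNode address from the question's unused "config"
Job job = Job.getInstance(conf, "HBASE_BULK_LOAD");

After that, you can verify with hadoop fs -ls /user/tcb that the staging directory is being created on HDFS.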