
Error when executing my first Hadoop program: Unable to load native-hadoop library ...

Hi All,

My objective is to do some practice with Hadoop and MapReduce. I downloaded and installed the Cloudera VM 5.4, and everything went well.

I opened the Eclipse instance available in the VM and wrote the Mapper class, the Reducer class, and the Driver class:

  • WordCountMapper.java
  • WordCountReducer.java
  • WordCount.java

The source code is provided below, and it does not show any errors.

But when I run WordCount from Eclipse, I get the following errors:

SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
15/11/14 15:44:33 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
15/11/14 15:44:34 INFO Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
15/11/14 15:44:34 INFO jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
15/11/14 15:44:35 WARN mapreduce.JobSubmitter: Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
15/11/14 15:44:35 WARN mapreduce.JobSubmitter: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
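
If I read the last two warnings right, they suggest implementing the Tool interface, running the job through ToolRunner, and setting the job jar (Job#setJar(String)). Is something like the sketch below what the ToolRunner warning is asking for? (Untested; WordCountTool is just a name I made up, and it reuses my mapper and reducer further down.)

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

// Sketch only: hypothetical Tool-based driver reusing WordCountMapper/WordCountReducer.
public class WordCountTool extends Configured implements Tool {

  @Override
  public int run(String[] args) throws Exception {
    // getConf() carries any generic options (-D, -files, ...) parsed by ToolRunner
    Job job = Job.getInstance(getConf(), "Word Count");
    job.setJarByClass(WordCountTool.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    job.setMapperClass(WordCountMapper.class);
    job.setReducerClass(WordCountReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    return job.waitForCompletion(true) ? 0 : 1;
  }

  public static void main(String[] args) throws Exception {
    // ToolRunner strips the generic options and then invokes run() with the rest
    System.exit(ToolRunner.run(new WordCountTool(), args));
  }
}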

I checked in the libraries, and slf4j-api.jar is there. (Though, following the link in the message, it sounds like the API jar alone is not enough and a binding such as slf4j-log4j12.jar also has to be on the classpath.)

Am I missing something along the way? What can I do to get the program to run?

Thanks

WordCountMapper.java:

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WordCountMapper
    extends Mapper<LongWritable, Text, Text, IntWritable> {

  private final static IntWritable one = new IntWritable(1);
  private Text word = new Text();

  @Override
  public void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    String line = value.toString();
    StringTokenizer itr = new StringTokenizer(line);

    while (itr.hasMoreTokens()) {
      // convert everything to lower case
      word.set(itr.nextToken().toLowerCase());
      // only emit tokens whose first character is a letter
      if (Character.isAlphabetic(word.toString().charAt(0))) {
        context.write(word, one);
      }
    }
  }
}

WordCountReducer.java:

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class WordCountReducer
    extends Reducer<Text, IntWritable, Text, IntWritable> {

  @Override
  public void reduce(Text key, Iterable<IntWritable> values, Context context)
      throws IOException, InterruptedException {
    // sum all the counts emitted for this word
    int sum = 0;
    for (IntWritable value : values) {
      sum += value.get();
    }
    context.write(key, new IntWritable(sum));
  }
}

WordCount.java:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCount {

  public static void main(String[] args) throws Exception {
    /*
    if (args.length != 2) {
      System.err.println("Usage: WordCount <input path> <output path>");
      System.exit(-1);
    }
    */
    // input and output paths are hardcoded for now instead of coming from args
    final String in = "in";
    final String out = "out";

    Job job = new Job();
    job.setJarByClass(WordCount.class);
    job.setJobName("Word Count");

    FileInputFormat.addInputPath(job, new Path(in));    // new Path(args[0])
    FileOutputFormat.setOutputPath(job, new Path(out)); // new Path(args[1])

    job.setMapperClass(WordCountMapper.class);
    job.setReducerClass(WordCountReducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
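
If the Tool sketch above is on the right track, my understanding is that it would be exported to a jar and launched with something like hadoop jar wordcount.jar WordCountTool in out (the jar name and paths are placeholders I made up), which would also set the job jar and so take care of the "No job jar file set" warning.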