/* package whatever; // don't place package name! */

import java.util.*;
import java.lang.*;
import java.io.*;

/* Name of the class has to be "Main" only if the class is public. */
class Ideone
{
	public static void main (String[] args) throws java.lang.Exception
	{
		// your code goes here
	}
}
Success #stdin #stdout 0.09s 54692KB
stdin
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.fs.Path;

public class WordCount
{
	public static class Map extends Mapper<LongWritable, Text, Text, IntWritable>
	{
		private final Text word = new Text();

		// Called once per input line: split it into tokens and emit (token, 1).
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException
		{
			String line = value.toString();
			StringTokenizer tokenizer = new StringTokenizer(line);
			while (tokenizer.hasMoreTokens())
			{
				word.set(tokenizer.nextToken());
				context.write(word, new IntWritable(1));
			}
		}
	}

	public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable>
	{
		// Called once per distinct word: sum the 1s emitted for it by the mappers.
		public void reduce(Text key, Iterable<IntWritable> values, Context context)
				throws IOException, InterruptedException
		{
			int sum = 0;
			for (IntWritable x : values)
			{
				sum += x.get();
			}
			context.write(key, new IntWritable(sum));
		}
	}

	public static void main(String[] args) throws Exception
	{
		Configuration conf = new Configuration();
		Job job = Job.getInstance(conf, "My Word count program");
		job.setJarByClass(WordCount.class);
		job.setMapperClass(Map.class);
		job.setReducerClass(Reduce.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(IntWritable.class);
		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);
		// Configuring the input/output paths from the filesystem into the job.
		FileInputFormat.addInputPath(job, new Path(args[0]));
		Path outputPath = new Path(args[1]);
		FileOutputFormat.setOutputPath(job, outputPath);
		// Deleting the output path from HDFS before the job runs so we don't have to delete it explicitly.
		outputPath.getFileSystem(conf).delete(outputPath, true);
		// Exit with status 0 on success, 1 if the job fails.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}
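To see what the map and reduce steps compute without running a cluster, here is a minimal stand-alone sketch of the same logic in plain Java (the class name WordCountDemo and the sample input lines are invented for illustration). It tokenizes each line exactly as the mapper does, and sums a count of 1 per token the way the reducer sums its grouped values.

import java.util.LinkedHashMap;
import java.util.StringTokenizer;

public class WordCountDemo
{
	public static void main(String[] args)
	{
		// Invented sample input standing in for the lines TextInputFormat would feed the mapper.
		String[] lines = { "the quick brown fox", "the lazy dog the fox" };

		// Map phase: tokenize each line, producing a (token, 1) pair per word.
		// Reduce phase: merge() sums those 1s per distinct word, as
		// Reducer.reduce() does over its grouped values.
		java.util.Map<String, Integer> counts = new LinkedHashMap<>();
		for (String line : lines)
		{
			StringTokenizer tokenizer = new StringTokenizer(line);
			while (tokenizer.hasMoreTokens())
			{
				counts.merge(tokenizer.nextToken(), 1, Integer::sum);
			}
		}

		// Print tab-separated (word, count) pairs, matching TextOutputFormat's default layout.
		counts.forEach((word, count) -> System.out.println(word + "\t" + count));
	}
}

On a real cluster the job above would instead be packaged into a jar and launched with hadoop jar, with the HDFS input and output paths passed as args[0] and args[1].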
stdout
Standard output is empty