Hadoop MapReduce Getting Started: Word Count


This example overrides the map method of the Mapper class and the reduce method of the Reducer class. I am simply sharing the code here; if you are interested, feel free to discuss it, and if you need detailed guidance you can message me privately.

package mapreduce;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Word-count driver: configures the job and submits it to the cluster.
public class WordCount {

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Job job = Job.getInstance();
        job.setJobName("WordCount");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(doMapper.class);
        job.setReducerClass(doReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Input file and output directory on HDFS; the output directory must not exist yet.
        Path in = new Path("hdfs://localhost:9000/mymapreduce1/in/buyer_favorite1");
        Path out = new Path("hdfs://localhost:9000/mymapreduce1/out");
        FileInputFormat.addInputPath(job, in);
        FileOutputFormat.setOutputPath(job, out);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    // Mapper: splits each input line on tab characters and emits (token, 1) for every token.
    public static class doMapper extends Mapper<Object, Text, Text, IntWritable> {
        private static final IntWritable count_one = new IntWritable(1);
        private final Text word = new Text();

        @Override
        protected void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            StringTokenizer st = new StringTokenizer(value.toString(), "\t");
            while (st.hasMoreTokens()) {
                word.set(st.nextToken());
                context.write(word, count_one);
            }
        }
    }

    // Reducer: sums the counts collected for each word and emits (word, total).
    public static class doReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        private final IntWritable result = new IntWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    }
}
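Two small refinements are common once this basic job runs, sketched below. First, because summing counts is associative and commutative, the reducer can also be registered as a combiner, so partial sums are computed on the map side and less data crosses the shuffle. Second, the input and output paths can come from the command line instead of being hard-coded. Job.setCombinerClass is standard Hadoop API; the class name WordCountDriver and the jar name in the run command are illustrative assumptions, not part of the original code.

package mapreduce;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Hypothetical alternative driver: reuses the mapper and reducer above, but takes
// the paths from args and runs the reducer as a combiner (safe here because
// summing is associative and commutative).
public class WordCountDriver {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance();
        job.setJobName("WordCount");
        job.setJarByClass(WordCountDriver.class);
        job.setMapperClass(WordCount.doMapper.class);
        job.setCombinerClass(WordCount.doReducer.class); // pre-aggregate map output before the shuffle
        job.setReducerClass(WordCount.doReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));   // input file or directory
        FileOutputFormat.setOutputPath(job, new Path(args[1])); // must not exist yet
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

Packaged into a jar, this variant would be launched along the lines of hadoop jar wordcount.jar mapreduce.WordCountDriver <input path> <output path>. As with the original, the output directory must not already exist, or Hadoop will refuse to start the job.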