Hadoop Self-Study Guide, Part 3: WordCount Explained (3)

This post shows how to optimize Hadoop's WordCount program with a combiner class: by merging the map phase's intermediate data, it reduces network transfer and improves reducer performance. MaxTemperatureWithCombiner is then used as a second example to illustrate how a combiner works.


1. Preface

In some cases the map function produces a large amount of intermediate data, much of it sharing the same key. To shrink the reducer's input and improve reducer performance, we often want an intermediate step that merges the map output before it leaves the map task. Setting a combiner class (via job.setCombinerClass) does exactly this, and can significantly reduce the amount of data sent over the network.

2. A CombinerClass for WordCount

The idea is simple: if one map task emits ("hello", 1) ten thousand times, there is no point in shipping all 10,000 pairs to the reducer; merging them into a single ("hello", 10000) on the map side is enough.
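To see that effect in isolation, here is a minimal plain-Java sketch of the local aggregation a combiner performs (no Hadoop required; the class name CombinerEffectDemo is illustrative only):

import java.util.HashMap;
import java.util.Map;

public class CombinerEffectDemo {
	public static void main(String[] args) {
		// Simulated map output: ("hello", 1) emitted 10,000 times
		Map<String, Integer> combined = new HashMap<>();
		for (int i = 0; i < 10000; i++) {
			combined.merge("hello", 1, Integer::sum); // local aggregation, like a combiner
		}
		// Only one pair would cross the network instead of 10,000
		System.out.println("hello -> " + combined.get("hello")); // hello -> 10000
	}
}

The full WordCount job, with the combiner wired in inside run(), looks like this: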

package hadoop.v3;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.hai.hdfs.utils.HDFSUtils;

/**
 * @author : chenhaipeng
 * @date : 2015-09-06 02:00:50
 */
public class WordCountNewAPI extends Configured implements Tool {
	
	public static class Map extends Mapper<LongWritable, Text, Text, IntWritable>{
		private final static IntWritable one = new IntWritable(1);
		private Text word = new Text("UTF-8");
		/* (non-Javadoc)
		 * @see org.apache.hadoop.mapreduce.Mapper#map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper.Context)
		 */
		@Override
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String line = value.toString();
			StringTokenizer tokenizer = new StringTokenizer(line);
			while(tokenizer.hasMoreTokens()){
				word.set(tokenizer.nextToken());
				context.write(word, one);
			}
		}
		
	}
	
	public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable>{
		/* (non-Javadoc)
		 * @see org.apache.hadoop.mapreduce.Reducer#reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer.Context)
		 */
		@Override
		public void reduce(Text key, Iterable<IntWritable> values,Context context) throws IOException, InterruptedException {
			int sum = 0; 
			for(IntWritable value : values){
				sum += value.get();
			}
			System.out.println("key----->"+key);
			context.write(key, new IntWritable(sum));
		}
		
	}

	public static void main(String[] args) throws Exception {
		int ret = ToolRunner.run(new WordCountNewAPI(), args);
		System.exit(ret);

	}
	
	public static void deletedir(String path){
		try {
			HDFSUtils.DeleteHDFSFile(path);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/* 
	 * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
	 */
	@Override
	public int run(String[] args) throws Exception {
		Job job = new Job(getConf());
		job.setJarByClass(WordCountNewAPI.class);
		job.setJobName("WordCountNewAPI");
		
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(IntWritable.class);
		
		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);
		
		job.setMapperClass(Map.class);
		job.setReducerClass(Reduce.class);
		
		job.setCombinerClass(Reduce.class); // reuse the reducer as the combiner: partial counts simply sum
		
		FileInputFormat.setInputPaths(job, new Path(args[0]));
		deletedir(args[1]); // the job fails if the output directory already exists
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		
		boolean success = job.waitForCompletion(true);
		return success ? 0 : 1;
		
	}

}
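Note that org.hai.hdfs.utils.HDFSUtils is the author's own helper class and is not shown in the post. A minimal sketch of what its DeleteHDFSFile method presumably does (an assumption, not the author's actual code):

package org.hai.hdfs.utils;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSUtils {
	// Presumed implementation: recursively delete the given path so the
	// job's output directory does not already exist when the job starts.
	public static void DeleteHDFSFile(String path) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		fs.delete(new Path(path), true); // true = delete recursively
	}
}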


Below is an example taken from Hadoop: The Definitive Guide.

3. MaxTemperatureWithCombiner

package hadoop.v3;

import java.io.IOException;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.hai.hdfs.utils.HDFSUtils;

/**
 * @author : chenhaipeng
 * @date : 2015-09-06 17:48:57
 */
public class MaxTemperatureWithCombiner extends Configured implements Tool {
	
	public static class MaxTemperatureMapper extends Mapper<LongWritable, Text, Text, IntWritable>{
		
		private static final int MISSING = 9999;

		/* (non-Javadoc)
		 * @see org.apache.hadoop.mapreduce.Mapper#map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper.Context)
		 */
		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String line  = value.toString();
			String year = line.substring(15, 19);
			int airTemperature;
			if(line.charAt(87) == '+'){	
				airTemperature = Integer.parseInt(line.substring(88,92));
			}else{
				airTemperature = Integer.parseInt(line.substring(87,92));
			}
			String quality = line.substring(92,93);
			if(airTemperature != MISSING && quality.matches("[01459]")){
				context.write(new Text(year), new IntWritable(airTemperature));
			}
		}
		
	}
	
	public static class MaxTemperatureReducer extends Reducer<Text, IntWritable, Text, IntWritable>{

		/* (non-Javadoc)
		 * @see org.apache.hadoop.mapreduce.Reducer#reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer.Context)
		 */
		@Override
		protected void reduce(Text key, Iterable<IntWritable> values,Context context) throws IOException, InterruptedException {
			int maxValue = Integer.MIN_VALUE;
			for(IntWritable value : values){
				maxValue = Math.max(maxValue, value.get());
			}
			context.write(key, new IntWritable(maxValue)); // key is already a Text; no copy needed
		}
	}

	public static void deletedir(String path){
		try {
			HDFSUtils.DeleteHDFSFile(path);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
	
	/* (non-Javadoc)
	 * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
	 */
	@Override
	public int run(String[] args) throws Exception {
		Job job = new Job(getConf());
		job.setJarByClass(MaxTemperatureWithCombiner.class);
		job.setJobName("MaxTemperatureWithCombiner");
		
		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);
		
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(IntWritable.class);
		
		job.setMapperClass(MaxTemperatureMapper.class);
		job.setCombinerClass(MaxTemperatureReducer.class);
		job.setReducerClass(MaxTemperatureReducer.class);
		
		FileInputFormat.addInputPath(job, new Path(args[0]));
		deletedir(args[1]);
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		
		boolean success = job.waitForCompletion(true);
		return success ? 0 : 1;
		
	}


	public static void main(String[] args) throws Exception {
		if(args.length != 2){
			System.err.println("需要两个输入参数,<input path> <output path>");
			System.exit(-1);
		}
		
		int ret = ToolRunner.run(new MaxTemperatureWithCombiner(), args);
		System.exit(ret);
	}

}
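One caveat worth keeping in mind: reusing the reducer as the combiner is only correct when the reduce operation is associative and commutative, as sum and max are. A small plain-Java check (the class name CombinerSafetyDemo is illustrative only):

public class CombinerSafetyDemo {
	public static void main(String[] args) {
		int[] temps = {12, -3, 25, 7};
		// Reducer applied to all values at once:
		int direct = Math.max(Math.max(Math.max(temps[0], temps[1]), temps[2]), temps[3]);
		// Combiner on two "map tasks", then reducer over the partial results:
		int partial1 = Math.max(temps[0], temps[1]);
		int partial2 = Math.max(temps[2], temps[3]);
		int viaCombiner = Math.max(partial1, partial2);
		System.out.println(direct == viaCombiner); // true: 25 either way
	}
}

By contrast, a reducer that computed a mean could not be reused this way, because mean(mean(a,b), mean(c,d)) is not mean(a,b,c,d) in general.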


Output: (screenshot of the job output in the original post; not preserved)