Submitting the job locally on Windows, for testing purposes.
package WordCount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.log4j.BasicConfigurator;

public class WinS {
    public static void main(String[] args) throws Exception {
        BasicConfigurator.configure(); // quickly set up a default Log4j configuration
        Configuration conf = new Configuration();
        // The job runs locally by default and picks up the Hadoop configuration
        // available on Windows, so the conf.set calls below can be omitted.
        // conf.set("fs.defaultFS", "file:///");
        // conf.set("mapreduce.framework.name", "local");
        Job job = Job.getInstance(conf);

        // The three classes: submitter (driver), mapper, and reducer
        job.setJarByClass(WinS.class);
        job.setMapperClass(CountMapper.class);
        job.setReducerClass(CountReducer.class);

        // Output key/value types of the mapper and the reducer
        job.setMapOutputKeyClass(Text.class);          // mapper
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);             // reducer
        job.setOutputValueClass(IntWritable.class);

        // Input directory; the output directory must not already exist
        FileInputFormat.setInputPaths(job, new Path("d:/input"));
        FileOutputFormat.setOutputPath(job, new Path("d:/output"));

        job.setNumReduceTasks(3);

        boolean res = job.waitForCompletion(true);
        System.exit(res ? 0 : 1);
    }
}
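The driver refers to CountMapper and CountReducer, which are not shown in this section. Below is a minimal sketch of what they would typically look like for word count, assuming the standard pattern of emitting (word, 1) in the mapper and summing the counts in the reducer; only the class names and the WordCount package are taken from the driver above, the rest is an assumption. Each class goes in its own file.

CountMapper.java:

package WordCount;

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Sketch (assumed implementation): emits (word, 1) for every
// whitespace-separated token in each input line.
public class CountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        for (String token : value.toString().split("\\s+")) {
            if (!token.isEmpty()) {
                word.set(token);
                context.write(word, ONE);
            }
        }
    }
}

CountReducer.java:

package WordCount;

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Sketch (assumed implementation): sums the counts for each word.
public class CountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    private final IntWritable result = new IntWritable();

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        result.set(sum);
        context.write(key, result);
    }
}

Because the driver sets three reduce tasks, a successful run writes the results to d:/output split across three files, part-r-00000 through part-r-00002.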