package com.ws.wordcount;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
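/**
 * Hand-rolled reduce phase of the HDFS word-count example: reads the map-phase
 * output files under /wordcount/mapout/ that belong to the given reduce id,
 * aggregates word counts in memory, and writes the totals to /wordcount/reduceout/.
 */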
public class WordCountReduce {

    public static void main(String[] args) throws IOException {
        // Run reduce task 0 by default.
        run("0");
    }

    public static void run(String reduceId) throws IOException {
        // Access HDFS as the root user.
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://dream1:9000");
        FileSystem fs = FileSystem.get(conf);

        // List the map-phase output files and make sure the reduce output directory exists.
        RemoteIterator<LocatedFileStatus> filemetas = fs.listFiles(new Path("/wordcount/mapout/"), false);
        if (!fs.exists(new Path("/wordcount/reduceout"))) {
            fs.mkdirs(new Path("/wordcount/reduceout/"));
        }

        HashMap<String, Integer> resultmap = new HashMap<>();
        String name;
        String line;

        // Only process the map output files assigned to this reduce partition:
        // the file name (without extension) must end with the reduce id.
        while (filemetas.hasNext()) {
            name = filemetas.next().getPath().getName();
            if (name.split("\\.")[0].endsWith(reduceId)) {
                try (FSDataInputStream in = fs.open(new Path("/wordcount/mapout/" + name));
                     BufferedReader br = new BufferedReader(new InputStreamReader(in))) {
                    while ((line = br.readLine()) != null) {
                        // Each line holds tokens separated by the \001 delimiter written by
                        // the map phase; count one occurrence per token.
                        for (String word : line.split("\001")) {
                            resultmap.put(word, resultmap.getOrDefault(word, 0) + 1);
                        }
                    }
                }
            }
        }

        // Write the aggregated counts for this partition; try-with-resources closes the
        // stream so the data is actually flushed to HDFS.
        try (FSDataOutputStream out = fs.create(new Path("/wordcount/reduceout/reduce" + reduceId + ".txt"))) {
            for (String key : resultmap.keySet()) {
                out.write((key + " has " + resultmap.get(key) + " occurrences\n").getBytes());
            }
        }
    }
}