需求二:上行流量倒序排序(递减排序)
FlowBeanSort:
import org.apache.hadoop.io.WritableComparable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
/**
 * Composite flow record used as the map-output KEY so that Hadoop's shuffle
 * sorts records by upstream flow in DESCENDING order.
 *
 * Note: fields are boxed {@link Integer}s; calling {@link #write} before all
 * four setters have been invoked will throw a NullPointerException on unboxing.
 */
public class FlowBeanSort implements WritableComparable<FlowBeanSort> {
    // Upstream flow (the sort key)
    private Integer upFlow;
    // Downstream flow
    private Integer downFlow;
    // Total upstream flow
    private Integer upCountFlow;
    // Total downstream flow
    private Integer downCountFlow;

    /** Tab-separated rendering used verbatim by TextOutputFormat. */
    @Override
    public String toString() {
        return upFlow+"\t"+downFlow+"\t"+upCountFlow+"\t"+downCountFlow;
    }

    public Integer getUpFlow() {
        return upFlow;
    }

    public void setUpFlow(Integer upFlow) {
        this.upFlow = upFlow;
    }

    public Integer getDownFlow() {
        return downFlow;
    }

    public void setDownFlow(Integer downFlow) {
        this.downFlow = downFlow;
    }

    public Integer getUpCountFlow() {
        return upCountFlow;
    }

    public void setUpCountFlow(Integer upCountFlow) {
        this.upCountFlow = upCountFlow;
    }

    public Integer getDownCountFlow() {
        return downCountFlow;
    }

    public void setDownCountFlow(Integer downCountFlow) {
        this.downCountFlow = downCountFlow;
    }

    /**
     * Orders beans by upstream flow, descending.
     *
     * Fixed: the original computed the ascending comparison and returned its
     * negation ({@code return -i;}). Negating a comparator result is an
     * anti-pattern — it silently breaks if the result is ever
     * {@code Integer.MIN_VALUE}. Comparing the arguments in reversed roles
     * expresses the descending order directly and safely.
     *
     * @param o the bean to compare against
     * @return negative if this bean's upFlow is larger (sorts first),
     *         zero on equal upFlow, positive otherwise
     */
    @Override
    public int compareTo(FlowBeanSort o) {
        return o.upFlow.compareTo(this.upFlow);
    }

    /**
     * Serializes the four fields. Field order MUST match {@link #readFields}.
     *
     * @param out sink supplied by the framework
     * @throws IOException on write failure
     */
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeInt(upFlow);
        out.writeInt(downFlow);
        out.writeInt(upCountFlow);
        out.writeInt(downCountFlow);
    }

    /**
     * Deserializes the four fields in the same order {@link #write} emitted them.
     *
     * @param in source supplied by the framework
     * @throws IOException on read failure
     */
    @Override
    public void readFields(DataInput in) throws IOException {
        this.upFlow = in.readInt();
        this.downFlow = in.readInt();
        this.upCountFlow = in.readInt();
        this.downCountFlow = in.readInt();
    }
}
FlowSortMapper:
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;
/**
 * Maps each input line to (FlowBeanSort, phoneNumber) so the shuffle sorts
 * records by upstream flow descending (see FlowBeanSort#compareTo).
 *
 * Expected tab-separated layout per line (assumed from the sample data —
 * confirm the source file really uses tabs, not spaces):
 *   phone \t upFlow \t downFlow \t upCountFlow \t downCountFlow
 */
public class FlowSortMapper extends Mapper<LongWritable,Text,FlowBeanSort,Text> {

    // Reused output value — standard Hadoop idiom; the framework serializes
    // the pair inside context.write(), so reuse across calls is safe.
    private final Text phone = new Text();

    /**
     * Fixed: the original threw ArrayIndexOutOfBoundsException on blank or
     * short lines and NumberFormatException on non-numeric fields, aborting
     * the whole job on a single bad record. Malformed records are now skipped.
     *
     * @param key     byte offset of the line (unused)
     * @param value   one input line
     * @param context framework sink for the (bean, phone) pair
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        //13480253104 3 3 180 180
        String line = value.toString();
        if (line.isEmpty()) {
            return; // skip blank lines
        }
        String[] split = line.split("\t");
        if (split.length < 5) {
            return; // malformed record — skip rather than kill the job
        }
        FlowBeanSort flowBeanSort = new FlowBeanSort();
        try {
            flowBeanSort.setUpFlow(Integer.parseInt(split[1]));
            flowBeanSort.setDownFlow(Integer.parseInt(split[2]));
            flowBeanSort.setUpCountFlow(Integer.parseInt(split[3]));
            flowBeanSort.setDownCountFlow(Integer.parseInt(split[4]));
        } catch (NumberFormatException ignored) {
            return; // non-numeric flow field — skip this record
        }
        phone.set(split[0]);
        context.write(flowBeanSort, phone);
    }
}
FlowSortReducer:
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;
/**
 * Pass-through reducer: emits one (bean, phone) line per phone number,
 * preserving the descending-by-upFlow key order produced by the shuffle.
 * Keys comparing equal (same upFlow) arrive grouped, so every phone in the
 * group is written out against the shared key.
 */
public class FlowSortReducer extends Reducer<FlowBeanSort,Text,FlowBeanSort,Text> {

    /**
     * @param key     the flow bean shared by all phones in this group
     * @param values  phone numbers whose beans compared equal to {@code key}
     * @param context framework sink for the output records
     */
    @Override
    protected void reduce(FlowBeanSort key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        for (Text phoneNumber : values) {
            context.write(key, phoneNumber);
        }
    }
}
FlowSortMain:
import cn.itcast.demo3.flowCount.FlowBean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * Driver for the descending-upFlow sort job.
 *
 * Improved: the original hard-coded both paths and silently ignored the
 * command-line arguments that ToolRunner passes in. Paths may now be given
 * as {@code args[0]} (input) and {@code args[1]} (output); the original
 * hard-coded paths remain the defaults, so existing invocations are unchanged.
 */
public class FlowSortMain extends Configured implements Tool {

    // Backward-compatible defaults — byte-identical to the original paths.
    private static final String DEFAULT_INPUT = "file:///F:\\流量统计\\out_sort_flow";
    private static final String DEFAULT_OUTPUT = "file:///F:\\upflow_sort_output";

    /**
     * Configures and runs the job.
     *
     * @param args optional [inputPath, outputPath]; defaults used when absent
     * @return 0 on success, 1 on failure
     * @throws Exception propagated from job submission
     */
    @Override
    public int run(String[] args) throws Exception {
        Job job = Job.getInstance(super.getConf(), "flowSort");

        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT;

        job.setInputFormatClass(TextInputFormat.class);
        TextInputFormat.addInputPath(job, new Path(inputPath));

        job.setMapperClass(FlowSortMapper.class);
        job.setMapOutputKeyClass(FlowBeanSort.class);
        job.setMapOutputValueClass(Text.class);

        job.setReducerClass(FlowSortReducer.class);
        job.setOutputKeyClass(FlowBeanSort.class);
        job.setOutputValueClass(Text.class);

        // A globally sorted result requires a single reduce task; pin it
        // explicitly rather than relying on the default of 1.
        job.setNumReduceTasks(1);

        job.setOutputFormatClass(TextOutputFormat.class);
        TextOutputFormat.setOutputPath(job, new Path(outputPath));

        boolean succeeded = job.waitForCompletion(true);
        return succeeded ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        int exitCode = ToolRunner.run(new Configuration(), new FlowSortMain(), args);
        System.exit(exitCode);
    }
}
数据输入:
13480253104 3 3 180 180
13502468823 57 102 7335 110349
13560439658 33 24 2034 5892
13600217502 18 138 1080 186852
13602846565 15 12 1938 2910
13660577991 24 9 6960 690
13719199419 4 0 240 0
13726230503 24 27 2481 24681
13760778710 2 2 120 120
13823070001 6 3 360 180
13826544101 4 0 264 0
13922314466 12 12 3008 3720
13925057413 69 63 11058 48243
13926251106 4 0 240 0
13926435656 2 4 132 1512
15013685858 28 27 3659 3538
15920133257 20 20 3156 2936
15989002119 3 3 1938 180
18211575961 15 12 1527 2106
18320173382 21 18 9531 2412
84138413 20 16 4116 1432
数据输出:
13925057413 69 63 11058 48243
13502468823 57 102 7335 110349
13560439658 33 24 2034 5892
15013685858 28 27 3659 3538
13726230503 24 27 2481 24681
13660577991 24 9 6960 690
18320173382 21 18 9531 2412
84138413 20 16 4116 1432
15920133257 20 20 3156 2936
13600217502 18 138 1080 186852
13602846565 15 12 1938 2910
18211575961 15 12 1527 2106
13922314466 12 12 3008 3720
13823070001 6 3 360 180
13826544101 4 0 264 0
13719199419 4 0 240 0
13926251106 4 0 240 0
15989002119 3 3 1938 180
13480253104 3 3 180 180
13926435656 2 4 132 1512
13760778710 2 2 120 120