1. What are serialization and deserialization
- Serialization: converting an object into a byte stream so that it can be transmitted over the network or written to disk for permanent storage.
- Deserialization: converting a byte stream back into an object (a minimal Java sketch of both follows this list).
- Serialization shows up in two areas of distributed data processing: interprocess communication and permanent storage.
- In Hadoop, communication between processes on different nodes is implemented through Remote Procedure Call (RPC).
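To make the two definitions concrete, here is a minimal sketch using plain Java serialization (no Hadoop involved); the Person class and the values are only illustrative:

import java.io.*;

// Illustrative class; implementing Serializable marks it for Java serialization
class Person implements Serializable {
    private static final long serialVersionUID = 1L;
    String name;
    long phone;
    Person(String name, long phone) { this.name = name; this.phone = phone; }
}

public class SerializationDemo {
    public static void main(String[] args) throws Exception {
        Person p = new Person("tom", 13726230503L);

        // Serialization: object -> byte stream (ready to send over the network or write to disk)
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(p);
        }
        byte[] bytes = bos.toByteArray();

        // Deserialization: byte stream -> object
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            Person copy = (Person) ois.readObject();
            System.out.println(copy.name + " " + copy.phone + ", serialized size = " + bytes.length + " bytes");
        }
    }
}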
2. Hadoop serialization
- Hadoop does not use Java's serialization; it uses its own serialization mechanism. With Hadoop's mechanism the user can reuse objects, which reduces the allocation and garbage collection of Java objects and improves application efficiency.
- Hadoop implements its serialization mechanism through the Writable interface. Writable provides no comparison capability, so it is combined with Java's Comparable interface into the WritableComparable interface (see the sketch after this list).
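A sketch of the Writable/WritableComparable contract (this TrafficKey class is only illustrative and is not used in the example below): write() and readFields() must handle the fields in the same order, and compareTo() supplies the ordering that MapReduce uses when the type serves as a key.

import org.apache.hadoop.io.WritableComparable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

// Illustrative key type that sorts by traffic in descending order
public class TrafficKey implements WritableComparable<TrafficKey> {
    private long traffic;

    public TrafficKey() { }                 // no-arg constructor is required by the framework
    public void set(long traffic) { this.traffic = traffic; }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(traffic);             // serialization
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        traffic = in.readLong();            // deserialization, same order as write()
    }

    @Override
    public int compareTo(TrafficKey other) {
        return Long.compare(other.traffic, this.traffic);   // descending by traffic
    }
}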
3. A Hadoop serialization example
- Requirement: read a traffic statistics file into MapReduce and, for each phone number, compute the total upstream traffic, the total downstream traffic, and the overall total, then write all of it to one output file (saved in object form).
- Link to the required file: https://pan.baidu.com/s/13dqp3rk_sgq-3pRqVQg0OQ extraction code: abu3
The traffic file content is shown below. The lines do not all have the same number of fields (the domain and category columns are optional), but the last three fields of every line are always the upstream traffic in bytes, the downstream traffic in bytes, and a status code; a parsing sketch follows the sample data.
1363157985066 13726230503 00-FD-07-A4-72-B8:CMCC 120.196.100.82 i02.c.aliimg.com 24 27 2481 24681 200
1363157995052 13826544101 5C-0E-8B-C7-F1-E0:CMCC 120.197.40.4 4 0 264 0 200
1363157991076 13926435656 20-10-7A-28-CC-0A:CMCC 120.196.100.99 2 4 132 1512 200
1363154400022 13926251106 5C-0E-8B-8B-B1-50:CMCC 120.197.40.4 4 0 240 0 200
1363157993044 18211575961 94-71-AC-CD-E6-18:CMCC-EASY 120.196.100.99 iface.qiyi.com 视频网站 15 12 1527 2106 200
1363157995074 84138413 5C-0E-8B-8C-E8-20:7DaysInn 120.197.40.4 122.72.52.12 20 16 4116 1432 200
1363157993055 13560439658 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 18 15 1116 954 200
1363157995033 15920133257 5C-0E-8B-C7-BA-20:CMCC 120.197.40.4 sug.so.360.cn 信息安全 20 20 3156 2936 200
1363157983019 13719199419 68-A1-B7-03-07-B1:CMCC-EASY 120.196.100.82 4 0 240 0 200
1363157984041 13660577991 5C-0E-8B-92-5C-20:CMCC-EASY 120.197.40.4 s19.cnzz.com 站点统计 24 9 6960 690 200
1363157973098 15013685858 5C-0E-8B-C7-F7-90:CMCC 120.197.40.4 rank.ie.sogou.com 搜索引擎 28 27 3659 3538 200
1363157986029 15989002119 E8-99-C4-4E-93-E0:CMCC-EASY 120.196.100.99 www.umeng.com 站点统计 3 3 1938 180 200
1363157992093 13560439658 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 15 9 918 4938 200
1363157986041 13480253104 5C-0E-8B-C7-FC-80:CMCC-EASY 120.197.40.4 3 3 180 180 200
1363157984040 13602846565 5C-0E-8B-8B-B6-00:CMCC 120.197.40.4 2052.flash2-http.qq.com 综合门户 15 12 1938 2910 200
1363157995093 13922314466 00-FD-07-A2-EC-BA:CMCC 120.196.100.82 img.qfc.cn 12 12 3008 3720 200
1363157982040 13502468823 5C-0A-5B-6A-0B-D4:CMCC-EASY 120.196.100.99 y0.ifengimg.com 综合门户 57 102 7335 110349 200
1363157986072 18320173382 84-25-DB-4F-10-1A:CMCC-EASY 120.196.100.99 input.shouji.sogou.com 搜索引擎 21 18 9531 2412 200
1363157990043 13925057413 00-1F-64-E1-E6-9A:CMCC 120.196.100.55 t3.baidu.com 搜索引擎 69 63 11058 48243 200
1363157988072 13760778710 00-FD-07-A4-7B-08:CMCC 120.196.100.82 2 2 120 120 200
1363157985066 13560436666 00-FD-07-A4-72-B8:CMCC 120.196.100.82 i02.c.aliimg.com 24 27 2481 24681 200
1363157993055 13560436666 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 18 15 1116 954 200
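A minimal sketch (the LineParseDemo class is hypothetical, not part of the original post) of how a single line is parsed; the Mapper below uses exactly this logic:

public class LineParseDemo {
    public static void main(String[] args) {
        String line = "1363157995052 13826544101 5C-0E-8B-C7-F1-E0:CMCC 120.197.40.4 4 0 264 0 200";
        String[] fields = line.split("\\s+");                          // split on any run of whitespace
        String phone = fields[1];                                      // second field: phone number
        long upFlow = Long.parseLong(fields[fields.length - 3]);       // third-from-last: upstream bytes
        long downFlow = Long.parseLong(fields[fields.length - 2]);     // second-from-last: downstream bytes
        System.out.println(phone + " up=" + upFlow + " down=" + downFlow);   // 13826544101 up=264 down=0
    }
}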
- 1) The FlowBean class, a custom value type that implements Writable:
package hadoop.hdfs.flowcount;
import org.apache.hadoop.io.Writable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
public class FlowBean implements Writable {
    private long upFlow;    // upstream traffic, in bytes
    private long downFlow;  // downstream traffic, in bytes
    private long sumFlow;   // total traffic, in bytes

    // Serialization: write the fields to the output stream
    @Override
    public void write(DataOutput dataOutput) throws IOException {
        dataOutput.writeLong(upFlow);
        dataOutput.writeLong(downFlow);
        dataOutput.writeLong(sumFlow);
    }

    // Deserialization: read the fields back in exactly the same order they were written
    @Override
    public void readFields(DataInput dataInput) throws IOException {
        this.upFlow = dataInput.readLong();
        this.downFlow = dataInput.readLong();
        this.sumFlow = dataInput.readLong();
    }

    // The no-arg constructor is required: the framework creates instances by reflection
    public FlowBean() {
    }

    public FlowBean(long upFlow, long downFlow, long sumFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = sumFlow;
    }

    public long getUpFlow() {
        return upFlow;
    }

    public void setUpFlow(long upFlow) {
        this.upFlow = upFlow;
    }

    public long getDownFlow() {
        return downFlow;
    }

    public void setDownFlow(long downFlow) {
        this.downFlow = downFlow;
    }

    public long getSumFlow() {
        return sumFlow;
    }

    public void setSumFlow(long sumFlow) {
        this.sumFlow = sumFlow;
    }

    // Convenience setter that fills all three fields in one call, so the object can be reused
    public void set(long upFlow, long downFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = upFlow + downFlow;
    }

    // toString() controls how the value appears in the job's text output
    @Override
    public String toString() {
        return "FlowBean{" +
                "upFlow=" + upFlow +
                ", downFlow=" + downFlow +
                ", sumFlow=" + sumFlow +
                '}';
    }
}
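A quick way to check that write() and readFields() are symmetric is to round-trip a FlowBean through an in-memory byte stream; a minimal sketch (the FlowBeanRoundTrip class is not part of the original post):

package hadoop.hdfs.flowcount;
import java.io.*;

public class FlowBeanRoundTrip {
    public static void main(String[] args) throws IOException {
        FlowBean in = new FlowBean();
        in.set(2481, 24681);                       // upFlow, downFlow; sumFlow is computed by set()

        // Serialize: FlowBean -> bytes (three longs = 24 bytes)
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        in.write(new DataOutputStream(bos));

        // Deserialize: bytes -> a fresh FlowBean
        FlowBean out = new FlowBean();
        out.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));

        System.out.println(out);   // FlowBean{upFlow=2481, downFlow=24681, sumFlow=27162}
    }
}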
- 2) The Mapper class, which extracts the phone number and the up/down traffic from each line:
package hadoop.hdfs.flowcount;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;
public class FlowMapper extends Mapper<LongWritable, Text, Text, FlowBean> {
    // Output key/value objects are created once and reused for every record
    Text k = new Text();
    FlowBean v = new FlowBean();

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        String[] fields = line.split("\\s+");                         // split on any run of whitespace
        String phoneNum = fields[1];                                   // second field: phone number
        long upFlow = Long.parseLong(fields[fields.length - 3]);       // third-from-last: upstream bytes
        long downFlow = Long.parseLong(fields[fields.length - 2]);     // second-from-last: downstream bytes
        k.set(phoneNum);
        v.set(upFlow, downFlow);
        context.write(k, v);
    }
}
- 3) The Reducer class, which sums the traffic of all records belonging to one phone number:
package hadoop.hdfs.flowcount;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;
public class FlowReducer extends Reducer<Text, FlowBean, Text, FlowBean> {
    FlowBean v = new FlowBean();   // reused output value

    @Override
    protected void reduce(Text key, Iterable<FlowBean> values, Context context) throws IOException, InterruptedException {
        long sum_upFlow = 0;
        long sum_downFlow = 0;
        // Sum the upstream and downstream traffic of all records for this phone number
        for (FlowBean flowBean : values) {
            sum_upFlow += flowBean.getUpFlow();
            sum_downFlow += flowBean.getDownFlow();
        }
        v.set(sum_upFlow, sum_downFlow);
        context.write(key, v);
    }
}
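One detail worth knowing: Hadoop reuses a single FlowBean instance for the values iterable and simply calls readFields() on it again for each record, which is the object-reuse behaviour mentioned in section 2. Reading the fields inside the loop, as above, is safe; keeping references to the objects themselves is not. A hypothetical variant of reduce(), only to illustrate the pitfall (it would also need java.util.ArrayList and java.util.List imports):

protected void reduce(Text key, Iterable<FlowBean> values, Context context) {
    List<FlowBean> kept = new ArrayList<>();
    for (FlowBean flowBean : values) {
        // WRONG: kept.add(flowBean);  -- every entry would point at the same reused object
        // RIGHT: copy the fields into a fresh object before storing it
        kept.add(new FlowBean(flowBean.getUpFlow(), flowBean.getDownFlow(), flowBean.getSumFlow()));
    }
    // ... use 'kept' ...
}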
- 4) The Driver class, which configures and submits the job:
package hadoop.hdfs.flowcount;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
public class FlowDriver {
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "flowcount");
        job.setJarByClass(FlowDriver.class);

        // Mapper and Reducer classes
        job.setMapperClass(FlowMapper.class);
        job.setReducerClass(FlowReducer.class);

        // Map output key/value types
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(FlowBean.class);

        // Final (reducer) output key/value types
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);

        // Local input file and output directory
        FileInputFormat.setInputPaths(job, new Path("F:\\sunyong\\Java\\codes\\javaToHdfs\\phone_data.txt"));
        FileOutputFormat.setOutputPath(job, new Path("flow"));

        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }
}
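The paths above are hard-coded for a local run, and the output directory must not already exist or the job will fail. To run the same job on a cluster, a common variation (a sketch; the jar name and HDFS paths are only examples) is to take both paths from the command line:

// In FlowDriver.main(), replace the two hard-coded paths with:
FileInputFormat.setInputPaths(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));

and then submit the job with:

hadoop jar flowcount.jar hadoop.hdfs.flowcount.FlowDriver /input/phone_data.txt /output/flow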
- 5) Run FlowDriver. When the job finishes successfully, the result is written to the flow output directory.
- 6) Open the result file (each line is a key/value pair: the key is the phone number and the value is the FlowBean object):
13480253104 FlowBean{upFlow=180, downFlow=180, sumFlow=360}
13502468823 FlowBean{upFlow=7335, downFlow=110349, sumFlow=117684}
13560436666 FlowBean{upFlow=3597, downFlow=25635, sumFlow=29232}
13560439658 FlowBean{upFlow=2034, downFlow=5892, sumFlow=7926}
13602846565 FlowBean{upFlow=1938, downFlow=2910, sumFlow=4848}
13660577991 FlowBean{upFlow=6960, downFlow=690, sumFlow=7650}
13719199419 FlowBean{upFlow=240, downFlow=0, sumFlow=240}
13726230503 FlowBean{upFlow=2481, downFlow=24681, sumFlow=27162}
13760778710 FlowBean{upFlow=120, downFlow=120, sumFlow=240}
13826544101 FlowBean{upFlow=264, downFlow=0, sumFlow=264}
13922314466 FlowBean{upFlow=3008, downFlow=3720, sumFlow=6728}
13925057413 FlowBean{upFlow=11058, downFlow=48243, sumFlow=59301}
13926251106 FlowBean{upFlow=240, downFlow=0, sumFlow=240}
13926435656 FlowBean{upFlow=132, downFlow=1512, sumFlow=1644}
15013685858 FlowBean{upFlow=3659, downFlow=3538, sumFlow=7197}
15920133257 FlowBean{upFlow=3156, downFlow=2936, sumFlow=6092}
15989002119 FlowBean{upFlow=1938, downFlow=180, sumFlow=2118}
18211575961 FlowBean{upFlow=1527, downFlow=2106, sumFlow=3633}
18320173382 FlowBean{upFlow=9531, downFlow=2412, sumFlow=11943}
84138413 FlowBean{upFlow=4116, downFlow=1432, sumFlow=5548}