1. Multi-Table Join: ReduceJoin in Practice
1.1 Create the order and product files
Create an order directory under resources and add the two files below.
order.txt
// id pid amount
1001 01 1
1002 02 2
1003 03 3
1004 01 4
1005 03 5
1006 02 6
product.txt
// pid pname
01 苹果
02 华为
03 小米
The expected output is the two tables joined on pid (in SQL terms: SELECT o.id, p.pname, o.amount FROM `order` o JOIN product p ON o.pid = p.pid):
// id pname amount
1001 苹果 1
1004 苹果 4
1002 华为 2
1006 华为 6
1003 小米 3
1005 小米 5
1.2 Create the OrderProductDto class
import lombok.Data;
import org.apache.hadoop.io.Writable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

// Lombok @Data generates the getters/setters that BeanUtils relies on in the reducer
@Data
public class OrderProductDto implements Writable {
    private String id = "";          // order ID
    private String pid = "";         // product ID
    private int amount = 0;          // order quantity
    private String productName = ""; // product name
    private String flag = "";        // source table marker: "order" or "product"

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(id);
        out.writeUTF(pid);
        out.writeInt(amount);
        out.writeUTF(productName);
        out.writeUTF(flag);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // Fields must be read back in exactly the order they were written
        this.id = in.readUTF();
        this.pid = in.readUTF();
        this.amount = in.readInt();
        this.productName = in.readUTF();
        this.flag = in.readUTF();
    }

    @Override
    public String toString() {
        // Matches the expected output format: id  pname  amount
        return id + "\t" + productName + "\t" + amount;
    }
}
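One thing worth emphasizing: readFields must read the fields in exactly the order write wrote them, or deserialization silently scrambles them. A quick way to convince yourself the pair is symmetric is a local round-trip through a byte array (a standalone sketch for illustration, not part of the job; the class name RoundTripCheck is my own):

import java.io.*;

public class RoundTripCheck {
    public static void main(String[] args) throws IOException {
        OrderProductDto dto = new OrderProductDto();
        dto.setId("1001");
        dto.setPid("01");
        dto.setAmount(1);
        dto.setFlag("order");

        // Serialize through the same DataOutput API Hadoop uses
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        dto.write(new DataOutputStream(bytes));

        // Read it back into a fresh instance
        OrderProductDto copy = new OrderProductDto();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy); // expect: 1001<TAB><TAB>1 (productName is still empty)
    }
}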
1.3 Create the OrderProductMapper class
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

public class OrderProductMapper extends Mapper<LongWritable, Text, Text, OrderProductDto> {
    private String filename;
    private Text keyOut = new Text();
    private OrderProductDto orderProductDto = new OrderProductDto();

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        // setup runs once per split, so resolve the source filename here
        FileSplit fileSplit = (FileSplit) context.getInputSplit();
        this.filename = fileSplit.getPath().getName();
    }

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        String[] split = line.split(" ");
        if (filename.contains("order")) {
            // order line: id pid amount
            // The join key is pid, so emit it as the map output key
            String id = split[0];
            String pid = split[1];
            String amount = split[2];
            keyOut.set(pid);
            orderProductDto.setId(id);
            orderProductDto.setPid(pid);
            orderProductDto.setAmount(Integer.parseInt(amount));
            orderProductDto.setFlag("order");
        } else if (filename.contains("product")) {
            // product line: pid pname
            String pid = split[0];
            String productName = split[1];
            keyOut.set(pid);
            orderProductDto.setPid(pid);
            orderProductDto.setProductName(productName);
            orderProductDto.setFlag("product");
        }
        context.write(keyOut, orderProductDto);
    }
}
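After the shuffle, every record that shares a pid lands in the same reduce call. For the sample data the grouped reduce input looks conceptually like this (an illustration, not runnable code; Hadoop does not guarantee the order of values within a key, which is exactly why the flag field is needed):

key "01" -> [ order(id=1001, amount=1), order(id=1004, amount=4), product(pname=苹果) ]
key "02" -> [ order(id=1002, amount=2), order(id=1006, amount=6), product(pname=华为) ]
key "03" -> [ order(id=1003, amount=3), order(id=1005, amount=5), product(pname=小米) ]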
1.4 Create the OrderProductReducer class
import org.apache.commons.beanutils.BeanUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;

public class OrderProductReducer extends Reducer<Text, OrderProductDto, OrderProductDto, NullWritable> {
    @Override
    protected void reduce(Text key, Iterable<OrderProductDto> values, Context context) throws IOException, InterruptedException {
        // All order records for this pid
        List<OrderProductDto> orderDtoList = new ArrayList<>();
        // The single product record for this pid
        OrderProductDto productDto = new OrderProductDto();
        for (OrderProductDto value : values) {
            if ("order".equals(value.getFlag())) {
                // Hadoop reuses the same value object on every iteration,
                // so deep-copy it before storing it in the list
                OrderProductDto temp = new OrderProductDto();
                try {
                    BeanUtils.copyProperties(temp, value);
                } catch (IllegalAccessException | InvocationTargetException e) {
                    throw new IOException("failed to copy order record", e);
                }
                orderDtoList.add(temp);
            } else if ("product".equals(value.getFlag())) {
                try {
                    BeanUtils.copyProperties(productDto, value);
                } catch (IllegalAccessException | InvocationTargetException e) {
                    throw new IOException("failed to copy product record", e);
                }
            }
        }
        // Fill in the product name on every order record and emit it
        for (OrderProductDto dto : orderDtoList) {
            dto.setProductName(productDto.getProductName());
            context.write(dto, NullWritable.get());
        }
    }
}
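The deep copy via BeanUtils is necessary because Hadoop reuses one OrderProductDto instance across the values iterator; without a copy, every entry in orderDtoList would end up pointing at the same object holding the last record's data. If you'd rather avoid reflection and its checked exceptions, a plain field-by-field copy does the same job (an alternative sketch, not the original code):

// Replaces the BeanUtils.copyProperties(temp, value) call
OrderProductDto temp = new OrderProductDto();
temp.setId(value.getId());
temp.setPid(value.getPid());
temp.setAmount(value.getAmount());
temp.setProductName(value.getProductName());
temp.setFlag(value.getFlag());
orderDtoList.add(temp);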
1.5 Create the OrderProductDriver class
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class OrderProductDriver {
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "order product");
        job.setJarByClass(OrderProductDriver.class);
        job.setMapperClass(OrderProductMapper.class);
        job.setReducerClass(OrderProductReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(OrderProductDto.class);
        job.setOutputKeyClass(OrderProductDto.class);
        job.setOutputValueClass(NullWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
Pass in the two program arguments and run it, NICE:
E:\Java\blogCode\hadoop\src\main\resources\order E:\Java\blogCode\hadoop\src\main\resources\order_ret

2. Multi-Table Join: MapJoin in Practice
In the previous section we performed the join on the Reduce side, but when the order table is very large, a reduce-side join clearly becomes a performance problem: every record has to be shuffled across the network, and the join work piles up in the reducers. We can use a MapJoin instead, which does the join in the Mapper and fits whenever one of the joined tables is small enough to hold in memory.
2.1 Create the OrderMapper class
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;

public class OrderMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
    private Map<String, String> productMap = new HashMap<>();
    private Text keyOut = new Text();

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        // Read the cached small table product.txt into an in-memory map
        URI[] cacheFiles = context.getCacheFiles();
        FSDataInputStream fsDataInputStream = FileSystem.get(context.getConfiguration()).open(new Path(cacheFiles[0]));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(fsDataInputStream, "UTF-8"));
        String line;
        // Each line looks like: 01 苹果
        while ((line = bufferedReader.readLine()) != null) {
            // Skip blank lines instead of stopping at the first one
            if (StringUtils.isBlank(line)) {
                continue;
            }
            String[] field = line.split(" ");
            productMap.put(field[0], field[1]); // pid -> pname
        }
        IOUtils.closeStream(bufferedReader);
    }

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        String[] fields = line.split(" ");
        // Each order line looks like: 1001 01 1
        // Look up the product name by pid and emit the joined record directly
        String productName = productMap.get(fields[1]);
        keyOut.set(fields[0] + "\t" + productName + "\t" + fields[2]);
        context.write(keyOut, NullWritable.get());
    }
}
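One caveat: if an order line references a pid that never appears in product.txt, productMap.get returns null and the literal string "null" is written to the output. A defensive variant of the lookup (a sketch; the "NA" placeholder is my own choice, not from the original):

// Fall back to a placeholder when the pid has no match in the small table
String productName = productMap.getOrDefault(fields[1], "NA");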
2.2 Create the OrderDriver class
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class OrderDriver {
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException, URISyntaxException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "order");
        job.setJarByClass(OrderDriver.class);
        job.setMapperClass(OrderMapper.class);
        // Map-only job: the join happens in the mapper, so no reduce phase is needed
        job.setNumReduceTasks(0);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);
        // Ship the small table to every mapper via the distributed cache
        job.addCacheFile(new URI("hadoop/src/main/resources/order/product.txt"));
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
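Note that args[0] must point at the order data only (for example order.txt itself), because product.txt is delivered through the distributed cache; if args[0] covered the whole order directory, the mapper would also try to parse the two-column product lines as three-column orders and fail on fields[2]. The arguments would look something like this (illustrative paths mirroring the ReduceJoin run above, not from the original post):

E:\Java\blogCode\hadoop\src\main\resources\order\order.txt E:\Java\blogCode\hadoop\src\main\resources\order_map_ret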
Run it and the result matches the expected output shown in section 1.1, NICE

Feel free to follow the official account 算法小生 to get in touch with me.