Case 2: finding common friends
Input, one line per user (user: friends):
A:B,C,D,F,E,O
B:A,C,E,K
C:F,A,D,I
D:A,E,F,L
E:B,C,D,M,L
F:A,B,C,D,E,O,M
G:A,C,D,E,F
H:A,C,D,E,O
I:A,O
J:B,O
K:A,C,D
L:D,E,F
M:E,F,G
O:A,H,I,J
Expected output:
A-B:C,E
A-C:F,D
A-D:E,F
...
The solution runs as two chained MapReduce jobs.
First job, map output:
key: a friend
value: the user whose list contains that friend (each "user:friends" input line is inverted into one (friend, user) pair per friend)
First job, reduce output:
key: the friend
value: the sorted, deduplicated list of users who all have that friend
(A short trace follows the sample output below.)
Sample output of the first MapReduce job:
A:B,C,D,F,G,H,I,K,O
B:A,E,F,J
C:A,B,E,F,G,H,K
...
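For example, the input line for user A traces through the first job as follows:

map input:   A:B,C,D,F,E,O
map output:  (B,A) (C,A) (D,A) (F,A) (E,A) (O,A)

On the reduce side, key B collects every user whose friend list contains B, namely A, E, F and J, which is exactly the sample line B:A,E,F,J above.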
Second job, map output:
key: a pair of users taken from the first job's user list (e.g. A-B)
value: the friend that pair shares
Second job, reduce output:
key: the user pair
value: the list of their common friends
Note on pair ordering: A-B and B-A denote the same pair but would shuffle to different keys. Because the first job's reducer stores users in a TreeSet, every user list arrives sorted, so the second mapper always forms each pair in lexicographic order: A-B is emitted, B-A never is, and no duplicate keys occur.
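The snippet below is a minimal standalone sketch (not part of either job; the class name PairDemo and the sample line are illustrative) of how the second mapper's nested loops turn one line of the first job's output into pair keys:

public class PairDemo {
    public static void main(String[] args) {
        // One line of the first job's output: "friend<TAB>sorted user list".
        String line = "A\tB C D F G H I K O";
        String[] strs = line.split("\\s+");  // ["A", "B", "C", ..., "O"]
        for (int i = 1; i <= strs.length - 2; i++) {
            for (int j = i + 1; j <= strs.length - 1; j++) {
                // The list is sorted, so strs[i] < strs[j]: B-C is printed,
                // C-B never is.
                System.out.println(strs[i] + "-" + strs[j] + "\t" + strs[0]);
            }
        }
    }
}

Running it prints the 36 pairs from B-C through K-O, each tagged with the shared friend A.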
Taak1.java
package tmp;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.TreeSet;

public class Taak1 {

    // Inverts each "user:friend,friend,..." line: for every friend, emit (friend, user).
    static class MyMapper extends Mapper<LongWritable, Text, Text, Text> {
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            if (line.isEmpty()) {
                return; // skip blank lines so charAt(0) cannot throw
            }
            // User names are single uppercase letters, so charAt(0) is the user.
            String user = "" + line.charAt(0);
            // Every uppercase letter after the colon is a friend; ':' and ',' are skipped.
            for (int i = 1; i < line.length(); i++) {
                char c = line.charAt(i);
                if (c >= 'A' && c <= 'Z') {
                    context.write(new Text("" + c), new Text(user));
                }
            }
        }
    }

    // For each friend, collects the users whose lists contain that friend.
    static class MyReducer extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // TreeSet deduplicates and sorts the users; the sorted order is what
            // lets the second job emit each pair in one canonical direction.
            TreeSet<String> tree = new TreeSet<>();
            for (Text v : values) {
                tree.add(v.toString());
            }
            StringBuilder str = new StringBuilder();
            for (String u : tree) {
                str.append(u).append(" ");
            }
            context.write(key, new Text(str.toString()));
        }
    }

    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "task1");
        job.setJarByClass(Taak1.class);
        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));  // raw "user:friends" file
        FileOutputFormat.setOutputPath(job, new Path(args[1])); // friend -> user-list lines
        boolean b = job.waitForCompletion(true);
        System.exit(b ? 0 : 1);
    }
}
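Assuming the classes are packaged into a jar (the name friends.jar and the HDFS paths below are placeholders, not names from the original), the first job is launched with:

hadoop jar friends.jar tmp.Taak1 /friends/input /friends/out1

The part-r-* files under /friends/out1 then contain the friend-to-user-list lines shown in the sample above.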
Task2.java
package tmp;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.Iterator;

public class Task2 {

    // Input lines come from the first job: "friend<TAB>user user user ...".
    // For every two users sharing that friend, emit (user1-user2, friend).
    static class MyMapper extends Mapper<LongWritable, Text, Text, Text> {
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString().trim();
            if (line.isEmpty()) {
                return;
            }
            // Split on any whitespace: TextOutputFormat separates key and value
            // with a tab, while the users themselves are space-separated.
            String[] strs = line.split("\\s+");
            // strs[0] is the friend; strs[1..length-1] are the users. The list
            // is already sorted, so each pair comes out in one canonical
            // direction (e.g. B-C, never C-B).
            for (int i = 1; i <= strs.length - 2; i++) {
                for (int j = i + 1; j <= strs.length - 1; j++) {
                    context.write(new Text(strs[i] + "-" + strs[j]), new Text(strs[0]));
                }
            }
        }
    }

    // Concatenates all friends shared by a pair of users.
    static class MyReducer extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // The leading ":" mimics the "A-B:C,E" shape of the expected output.
            StringBuilder keys = new StringBuilder(":");
            Iterator<Text> it = values.iterator();
            while (it.hasNext()) {
                keys.append(it.next()).append(" ");
            }
            context.write(key, new Text(keys.toString()));
        }
    }

    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "task2");
        job.setJarByClass(Task2.class);
        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));  // the first job's output dir
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        boolean b = job.waitForCompletion(true);
        System.exit(b ? 0 : 1);
    }
}
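The second job reads the first job's output directory, so the two runs chain through the intermediate path (same placeholder names as above):

hadoop jar friends.jar tmp.Task2 /friends/out1 /friends/out2

Each result line pairs the key with the shared friends, e.g. A-B<TAB>:C E (the friend order within a line is not guaranteed), matching the expected A-B:C,E output up to separators and ordering.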