Common Friends Example

This post walks through finding the common friends of every pair of users in a social network with two chained MapReduce jobs: the first job inverts the friend lists, mapping each friend to the set of users who list them; the second job emits every pair of those users together with that shared friend, then aggregates each pair's common friends in its reduce phase.


Case 2: find the common friends of every pair of users.
Input, one line per user (user:friends):
A:B,C,D,F,E,O
B:A,C,E,K
C:F,A,D,I
D:A,E,F,L
E:B,C,D,M,L
F:A,B,C,D,E,O,M
G:A,C,D,E,F
H:A,C,D,E,O
I:A,O
J:B,O
K:A,C,D
L:D,E,F
M:E,F,G
O:A,H,I,J

Expected output (pair:common friends):
A-B:C,E
A-C:F,D
A-D:E,F
...
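Here A-B:C,E means that C and E appear in both A's and B's friend lists; only direct friendships from the input count.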

Output of the first map:
key: a friend
value: one user who lists that friend

Output of the first reduce:
key: the friend
value: the list of all users who list that friend
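
For example, the input line A:B,C,D,F,E,O makes the first map emit the pairs (B, A), (C, A), (D, A), (F, A), (E, A), (O, A).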

Output of the first MapReduce job (each user list is de-duplicated and sorted):
A:B,C,D,F,G,H,I,K,O
B:A,E,F,J
C:A,B,E,F,G,H,K
......
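
As a check, friend A's list B,C,D,F,G,H,I,K,O is exactly the set of users whose input lines include A as a friend.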

Output of the second map:
key: a pair of users, e.g. A-B
value: the friend they share

Output of the second reduce:
key: the user pair
value: the list of their common friends

Note that A-B and B-A name the same pair. The first job's reduce sorts each user list (via a TreeSet), so the second map always builds the pair key in one consistent order, and the two spellings can never land in different reduce groups.
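
For example, the first job's line for friend C (C:A,B,E,F,G,H,K) makes the second map emit (A-B, C), (A-E, C), (A-F, C), and so on; the line for friend E contributes (A-B, E). The reduce for key A-B therefore collects C and E, matching the expected output A-B:C,E.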

Task1.java

package tmp;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
import java.util.TreeSet;

public class Task1 {
    // First job: invert the friend lists.
    // Input line format: "A:B,C,D,F,E,O" (a user, then their comma-separated friends).
    static class MyMapper extends Mapper<LongWritable, Text, Text, Text> {
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            // The user is the single character before the ':' (this dataset
            // uses one-letter names).
            String user = "" + line.charAt(0);
            for (int i = 1; i < line.length(); i++) {
                char c = line.charAt(i);
                // Every uppercase letter after the ':' is one of the user's friends;
                // emit (friend, user) so the reduce can group users by friend.
                if (c >= 'A' && c <= 'Z')
                    context.write(new Text("" + c), new Text(user));
            }
        }
    }

    static class MyReducer extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // TreeSet de-duplicates and sorts the users; the sorted order is what
            // lets the second job build each pair key in a consistent order.
            TreeSet<String> users = new TreeSet<>();
            for (Text v : values) {
                users.add(v.toString());
            }
            StringBuilder sb = new StringBuilder();
            for (String u : users) {
                sb.append(u).append(' ');
            }
            // Output line (via TextOutputFormat): friend<TAB>user1 user2 ...
            context.write(key, new Text(sb.toString().trim()));
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "task1");
        job.setJarByClass(Task1.class);
        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));   // raw friend lists
        FileOutputFormat.setOutputPath(job, new Path(args[1]));  // inverted lists
        boolean b = job.waitForCompletion(true);
        System.exit(b ? 0 : 1);
    }
}
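
The parser above relies on single-character user names, which holds for this dataset. A minimal sketch of a more general parse, assuming the same user:friend,friend,... line format but allowing multi-character names (the variable names are illustrative):

            // Hypothetical replacement for the body of MyMapper.map.
            String[] parts = value.toString().trim().split(":");
            if (parts.length == 2) {
                String user = parts[0];
                for (String friend : parts[1].split(",")) {
                    if (!friend.isEmpty())
                        context.write(new Text(friend), new Text(user)); // (friend, user)
                }
            }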

 

Task2.java

package tmp;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;

public class Task2 {
    // Second job: read the first job's output. TextOutputFormat wrote each
    // line as "friend<TAB>user1 user2 ..." with the users already sorted.
    static class MyMapper extends Mapper<LongWritable, Text, Text, Text> {
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Split the friend (the old key) from the user list on the tab.
            String[] parts = value.toString().trim().split("\t");
            if (parts.length < 2) return;
            String friend = parts[0];
            String[] users = parts[1].split(" ");
            // Emit (pair, friend) for every pair of users sharing this friend.
            // Because the list is sorted, users[i] < users[j], so each pair key
            // always comes out as "smaller-larger" (never both A-B and B-A).
            for (int i = 0; i < users.length - 1; i++) {
                for (int j = i + 1; j < users.length; j++) {
                    context.write(new Text(users[i] + "-" + users[j]), new Text(friend));
                }
            }
        }
    }


    static class MyReducer extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // Join all friends shared by this pair, e.g. key A-B -> "C,E".
            StringBuilder friends = new StringBuilder();
            for (Text v : values) {
                if (friends.length() > 0) friends.append(',');
                friends.append(v);
            }
            context.write(key, new Text(friends.toString()));
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        // Use ':' instead of the default tab between key and value so the
        // final output reads like the sample, e.g. "A-B:C,E".
        conf.set("mapreduce.output.textoutputformat.separator", ":");
        Job job = Job.getInstance(conf, "task2");
        job.setJarByClass(Task2.class);
        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));   // first job's output
        FileOutputFormat.setOutputPath(job, new Path(args[1]));  // final pair lists
        boolean b = job.waitForCompletion(true);
        System.exit(b ? 0 : 1);
    }
}
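
The two jobs must run back to back, with the first job's output directory fed to the second as input. A minimal sketch of a driver that chains them in one process (the class name FriendsDriver and the three path arguments are illustrative, not part of the original code):

package tmp;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class FriendsDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path input = new Path(args[0]);   // raw friend lists
        Path step1 = new Path(args[1]);   // intermediate: friend -> users
        Path step2 = new Path(args[2]);   // final: pair -> common friends

        // Job 1: invert the friend lists.
        Job job1 = Job.getInstance(conf, "task1");
        job1.setJarByClass(Task1.class);
        job1.setMapperClass(Task1.MyMapper.class);
        job1.setReducerClass(Task1.MyReducer.class);
        job1.setOutputKeyClass(Text.class);
        job1.setOutputValueClass(Text.class);
        FileInputFormat.setInputPaths(job1, input);
        FileOutputFormat.setOutputPath(job1, step1);
        if (!job1.waitForCompletion(true)) System.exit(1);

        // Job 2: pair up users and collect common friends.
        // Same separator tweak as in Task2.main, so output reads "A-B:C,E".
        conf.set("mapreduce.output.textoutputformat.separator", ":");
        Job job2 = Job.getInstance(conf, "task2");
        job2.setJarByClass(Task2.class);
        job2.setMapperClass(Task2.MyMapper.class);
        job2.setReducerClass(Task2.MyReducer.class);
        job2.setOutputKeyClass(Text.class);
        job2.setOutputValueClass(Text.class);
        FileInputFormat.setInputPaths(job2, step1);
        FileOutputFormat.setOutputPath(job2, step2);
        System.exit(job2.waitForCompletion(true) ? 0 : 1);
    }
}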

 
