/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.examples;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
public class Pagerank {
public static final double dampFactor = 0.85;
// Intended number of PageRank rounds; note that main() below runs only a single round.
public static final int iterations = 10;
public static class PagerankMapper
extends Mapper<LongWritable, Text, Text, DoubleWritable>{
@Override
public void map(LongWritable key, Text value, Context context
) throws IOException, InterruptedException {
String line = value.toString();
String[] urls = line.split("\t");
int urlSize = urls.length;
// urls[0] is the srcUrl; the remaining fields are its out-links.
double outLinkNum = urlSize - 1;
if (outLinkNum == 0) {
// Dangling node with no out-links: nothing to distribute, and dividing
// by zero below would emit Infinity.
return;
}
// Every source starts from the same hard-coded rank, because ranks from a
// previous round are never read back in (see the question below the code).
double srcPageRank = 0.25;
double pageRankPart = srcPageRank / outLinkNum;
for (int i = 1; i < urlSize; i++) {
context.write(new Text(urls[i]), new DoubleWritable(pageRankPart));
}
}
}
public static class PagerankReducer
extends Reducer<Text,DoubleWritable,Text,DoubleWritable> {
@Override
public void reduce(Text key, Iterable<DoubleWritable> values,
Context context) throws IOException, InterruptedException {
double pageRank = 0;
for (DoubleWritable value : values) {
pageRank += value.get();
}
// Simplified PageRank update: PR(u) = (1 - d) + d * (sum of incoming contributions).
pageRank = 1 - dampFactor + dampFactor * pageRank;
context.write(key, new DoubleWritable(pageRank));
}
}
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
if (otherArgs.length != 2) {
System.err.println("Usage: pagerank <in> <out>");
System.exit(2);
}
Job job = new Job(conf, "pagerank");
job.setJarByClass(Pagerank.class);
job.setMapperClass(PagerankMapper.class);
// Do not reuse the reducer as a combiner: its damping step
// (1 - d) + d * sum is not associative, so partial sums that already
// passed through it would be damped a second time in the reducer.
job.setReducerClass(PagerankReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(DoubleWritable.class);
FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
System.exit(job.waitForCompletion(true) ? 0 : 1);
}
}
The input data format is:
1\t2\t4    // fields are separated by \t: source vertex \t edge 1 \t edge 2 ...
One problem remains: once a round of iteration finishes, a vertex's out-edge information is not carried into the next round, because the mapper only emits rank contributions. How can this be improved? One option is sketched below.
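One way is to switch the value type to Text and make each round self-feeding: the mapper re-emits the vertex's adjacency list next to its rank contributions, and the reducer reattaches that list to the newly computed rank. A minimal sketch of two classes that could replace the ones above inside Pagerank, assuming an initial pass has already seeded each line with a starting rank; the "url \t rank \t link1 \t link2 ..." line layout and the "LINKS:" marker are illustrative assumptions, not part of the original code:

public static class IterMapper
extends Mapper<LongWritable, Text, Text, Text> {
@Override
public void map(LongWritable key, Text value, Context context
) throws IOException, InterruptedException {
String[] fields = value.toString().split("\t");
String url = fields[0];
double rank = Double.parseDouble(fields[1]);
int outLinkNum = fields.length - 2;
// Re-emit the adjacency list under the source URL so the reducer can
// attach it to the new rank; the "LINKS:" prefix distinguishes it from
// the plain numeric rank contributions.
StringBuilder links = new StringBuilder("LINKS:");
for (int i = 2; i < fields.length; i++) {
links.append('\t').append(fields[i]);
}
context.write(new Text(url), new Text(links.toString()));
if (outLinkNum == 0) {
return; // dangling node: nothing to distribute
}
double share = rank / outLinkNum;
for (int i = 2; i < fields.length; i++) {
context.write(new Text(fields[i]), new Text(Double.toString(share)));
}
}
}

public static class IterReducer
extends Reducer<Text, Text, Text, Text> {
@Override
public void reduce(Text key, Iterable<Text> values, Context context
) throws IOException, InterruptedException {
double sum = 0;
String links = "";
for (Text value : values) {
String v = value.toString();
if (v.startsWith("LINKS:")) {
links = v.substring("LINKS:".length()); // keep the edge list for the next round
} else {
sum += Double.parseDouble(v);
}
}
double pageRank = 1 - dampFactor + dampFactor * sum;
// TextOutputFormat writes "key \t value", so the output line becomes
// "url \t rank \t link1 ..." -- exactly the layout IterMapper expects.
context.write(key, new Text(pageRank + links));
}
}

For this variant the driver must also declare Text values, e.g. job.setOutputValueClass(Text.class) (and job.setMapOutputValueClass(Text.class) if map and reduce output types ever diverge).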
Batch test script:
for i in `seq 1 10` ; do hadoop jar hadoop-examples-1.2.1.jar pagerank /test/soc-LiveJournal_final.txt /output ; hadoop fs -rmr /output ; done
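Note that this loop reruns the same single-round job ten times over the same input, so it is useful for benchmarking rather than for converging ranks. With the self-feeding layout sketched above, the otherwise-unused iterations field could drive the chain inside main() instead; a minimal sketch, where the "_round_i" intermediate path naming is an illustrative choice:

String input = otherArgs[0];
for (int i = 0; i < iterations; i++) {
String output = otherArgs[1] + "_round_" + i; // hypothetical intermediate path
Job job = new Job(conf, "pagerank-round-" + i);
job.setJarByClass(Pagerank.class);
job.setMapperClass(IterMapper.class);
job.setReducerClass(IterReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Text.class);
FileInputFormat.addInputPath(job, new Path(input));
FileOutputFormat.setOutputPath(job, new Path(output));
if (!job.waitForCompletion(true)) {
System.exit(1);
}
input = output; // round i's output becomes round i+1's input
}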