package MapSort;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import java.io.IOException;
import java.util.ArrayList;
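/**
 * Driver for a MapReduce job over CSV records whose first field is an id and
 * whose third field is a voltage reading. The job computes the average voltage
 * across all records, re-emits records whose voltage is missing with that
 * average filled in, and writes only the first ten results. A single reducer
 * is used so the average is global.
 */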
public class MobileImpressionAnalysis {
    // Input and output paths are hardcoded for the node1 HDFS cluster.
    public static String inPath = "hdfs://node1:8020/in123";
    public static String outPath = "hdfs://node1:8020/out123";

    public static void main(String[] args)
            throws IOException, InterruptedException, ClassNotFoundException {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", "hdfs://node1:8020/");

        Job job = Job.getInstance(configuration, MobileImpressionAnalysis.class.getSimpleName());
        job.setJarByClass(MobileImpressionAnalysis.class);
        job.setMapperClass(AvgMap.class);
        job.setReducerClass(AvgReduce.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        // One reducer, so a single task sees every record and can compute
        // a global average in cleanup().
        job.setNumReduceTasks(1);

        FileInputFormat.setInputPaths(job, new Path(inPath));

        // Remove any previous output directory, or the job will fail to start.
        FileSystem fs = FileSystem.get(configuration);
        Path output = new Path(outPath);
        if (fs.exists(output)) {
            fs.delete(output, true); // recursive delete; the one-arg overload is deprecated
        }
        FileOutputFormat.setOutputPath(job, output);

        // TextInputFormat and TextOutputFormat are the defaults, so these are optional:
        // job.setInputFormatClass(TextInputFormat.class);
        // job.setOutputFormatClass(TextOutputFormat.class);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
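/**
 * Mapper: keys each CSV line by its first field (the record id) and passes
 * the full record through as the value.
 */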
class AvgMap extends Mapper<LongWritable, Text, Text, Text> {

    private final Text outKey = new Text(); // reused across calls to avoid per-record allocation

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // The first comma-separated field is the record id; use it as the key
        // so records arrive at the (single) reducer grouped by id.
        String[] fields = value.toString().split(",");
        outKey.set(fields[0]);
        context.write(outKey, value);
    }
}
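/**
 * Reducer: buffers every record in memory, computes the average of the third
 * field (voltage) over all records that have one, then re-emits the records
 * with missing voltages replaced by that average, stopping after ten records.
 * Buffering assumes the whole input fits in the reducer's heap.
 */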
class AvgReduce extends Reducer<Text, Text, Text, NullWritable> {

    // Every record seen by this reducer; filled in reduce() and processed
    // once all groups have been consumed, in cleanup().
    private final ArrayList<Text> records = new ArrayList<>();

    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        for (Text value : values) {
            // Hadoop reuses the Text instance across iterations, so store
            // a copy rather than the reference itself.
            records.add(new Text(value));
        }
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        // First pass: average the voltage (third field) over records that have one.
        double sum = 0;
        int count = 0;
        for (Text record : records) {
            // The -1 limit keeps trailing empty fields, so a record whose
            // last field is missing still splits to its full width.
            String[] fields = record.toString().split(",", -1);
            if (fields.length > 2 && !fields[2].trim().isEmpty()) {
                sum += Double.parseDouble(fields[2].trim());
                count++;
            }
        }
        double avg = count > 0 ? sum / count : 0;

        // Second pass: fill missing voltages with the average and emit the
        // first ten rewritten records.
        int printed = 0;
        for (Text record : records) {
            String[] fields = record.toString().split(",", -1);
            if (fields.length > 2 && fields[2].trim().isEmpty()) {
                fields[2] = String.format("%.2f", avg);
            }
            context.write(new Text(String.join(",", fields)), NullWritable.get());
            if (++printed >= 10) {
                break;
            }
        }
    }
}
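// Example invocation (the jar name here is hypothetical):
//   hadoop jar mobile-analysis.jar MapSort.MobileImpressionAnalysis
// Input is read from hdfs://node1:8020/in123; with one reducer the result
// appears as hdfs://node1:8020/out123/part-r-00000.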