Requirement:
We have a QQ friendship dataset in which the first column is a user's QQ nickname and the second column is a friend's QQ nickname. Write a MapReduce program that implements QQ friend recommendation: if A's friend is B and B's friend is C, then A and C share the common friend B, so C can be recommended to A and A to C. After producing the recommendations with MapReduce you will notice duplicates, such as recommending B to A and also A to B; continue the processing so that this duplication is avoided.
Data:
A,B
A,C
A,E
B,D
B,A
B,F
C,E
C,A
The common friends are, in turn, A, B, and C; that is, the required result takes the form A -> C, A -> D…
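The core of the deduplication requirement is to write every recommended pair in one canonical order, so that (A, B) and (B, A) collapse into the same record. A minimal sketch of that rule, with a hypothetical class and helper name used only for illustration:

public class PairOrder {
    // Put the lexicographically smaller nickname first, so that the pair
    // (A, B) and the pair (B, A) produce the same output line.
    static String canonical(String a, String b) {
        return a.compareTo(b) < 0 ? a + "\t" + b : b + "\t" + a;
    }

    public static void main(String[] args) {
        System.out.println(canonical("B", "A")); // A	B
        System.out.println(canonical("A", "B")); // A	B  (same pair, same line)
    }
}

The reducer below applies the same idea by sorting each friend list and pairing every name only with the names that follow it.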
The code is as follows:
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

public class QqFriendsMapper extends Mapper<LongWritable, Text, Text, Text> {
    private String spliter = ",";

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        // Read the field separator from the job configuration ("spliter");
        // fall back to "," when it is not set.
        spliter = context.getConfiguration().get("spliter", ",");
    }

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Each input line is "user,friend". Emit the relation in both directions
        // so that, for every person, the reducer receives everyone who is
        // connected to that person in the input.
        String[] split = value.toString().split(spliter);
        context.write(new Text(split[0]), new Text(split[1]));
        context.write(new Text(split[1]), new Text(split[0]));
    }
}
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;

public class QqFriendsReducer extends Reducer<Text, Text, Text, Text> {
    private final Text outkey = new Text();
    private final Text outvalue = new Text();

    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        // The key is the common friend; the values are all the people who have
        // that friend. A TreeSet removes duplicates and keeps the names sorted.
        TreeSet<String> friends = new TreeSet<>();
        for (Text value : values) {
            friends.add(value.toString());
        }
        if (friends.size() < 2) {
            return; // a single person under this key yields no recommendation
        }
        // Pair each name only with the names that follow it in sorted order,
        // so every pair is written exactly once (no A->B plus B->A duplication).
        List<String> list = new ArrayList<>(friends);
        for (int i = 0; i < list.size(); i++) {
            for (int j = i + 1; j < list.size(); j++) {
                outkey.set(list.get(i));
                outvalue.set(list.get(j));
                context.write(outkey, outvalue);
            }
        }
    }
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class QqFriendsDriver {
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        // Field separator read by the mapper in setup(); the mapper falls back
        // to "," when this property is not set.
        conf.set("spliter", ",");

        Job job = Job.getInstance(conf);
        job.setJarByClass(QqFriendsDriver.class);
        job.setMapperClass(QqFriendsMapper.class);
        job.setReducerClass(QqFriendsReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.setInputPaths(job, new Path("C:\\Users\\XR\\Desktop\\大数据开发\\hadoop\\作业\\qqfriends.txt"));
        FileOutputFormat.setOutputPath(job, new Path("C:\\Users\\XR\\Desktop\\大数据开发\\hadoop\\作业\\qqresult1"));

        boolean b = job.waitForCompletion(true);
        System.exit(b ? 0 : 1);
    }
}
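Before looking at the job output, the same logic can be replayed locally on the sample data as a sanity check. This is a plain-Java sketch, not part of the Hadoop job; the class name LocalCheck and the inlined sample lines are only for illustration:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.TreeSet;

public class LocalCheck {
    public static void main(String[] args) {
        // the sample records from the input file
        String[] lines = {"A,B", "A,C", "A,E", "B,D", "B,A", "B,F", "C,E", "C,A"};

        // person -> everyone connected to that person (both directions, duplicates collapse)
        Map<String, TreeSet<String>> groups = new TreeMap<>();
        for (String line : lines) {
            String[] p = line.split(",");
            groups.computeIfAbsent(p[0], k -> new TreeSet<>()).add(p[1]);
            groups.computeIfAbsent(p[1], k -> new TreeSet<>()).add(p[0]);
        }

        // every two people who share a common friend form one recommendation;
        // pairing each name only with the names after it avoids A->B plus B->A
        for (Map.Entry<String, TreeSet<String>> entry : groups.entrySet()) {
            List<String> list = new ArrayList<>(entry.getValue());
            for (int i = 0; i < list.size(); i++) {
                for (int j = i + 1; j < list.size(); j++) {
                    System.out.println(list.get(i) + "\t" + list.get(j));
                }
            }
        }
    }
}

Each group here plays the role of one reduce key (the common friend), and the sorted pairing guarantees that every recommendation appears in only one direction.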
The output (one line per recommended pair, each pair appearing only once) is:
B C
B E
C E
A D
A F
D F
A E
A C