這是某資訊APP公司的面試題,考察MapReduce的編程思想。
給定一個人脈關係的文件,從中找到二度人脈。比如給定如下的人脈關係,
A B C D E
B E F
C G
G H I J
應輸出
A F
A G
C H
C I
C J
這裏假設關係是單向的,比如通過第一行"A B C D E",我們認爲B是A的好友,但A不是B的好友(有點像單相思)。
方法是map階段輸出每人的前向結點和後向結點,前向和後向可以用1和0標記;reduce階段輸出每人的前向和後向即爲二度人脈。但要考慮一種特殊情況,兩人之間可能既是一度人脈又是二度人脈,比如上例中的A和E,此時按就近原則認爲二者是一度人脈,結果中需要把它過濾掉。代碼如下
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
public class TwoDegreeConnection {
public static class ConnectionMap extends Mapper<LongWritable, Text, Text, Text> {
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
String line = value.toString();
String ele[] = line.split(" ");
int i;
for (i=1; i<ele.length; i++) {
//前向和後向都要輸出
context.write(new Text(ele[0]), new Text(ele[i] + " " + String.valueOf(0)));
context.write(new Text(ele[i]), new Text(ele[0] + " " + String.valueOf(1)));
}
}
}
public static class ConnectionReduce extends Reducer<Text, Text, Text, Text> {
Set<String> oneDegree = new HashSet<String>(); //保存一度人脈關係,以便後面過濾結果
protected void setup(Reducer<Text, Text, Text, Text>.Context context) throws IOException {
Configuration conf = context.getConfiguration();
String input = conf.get("input");
FileSystem fs = FileSystem.get(conf);
FSDataInputStream dis = fs.open(new Path(input));
InputStreamReader isr = new InputStreamReader(dis, "utf-8");
BufferedReader br = new BufferedReader(isr);
String s = br.readLine(), sub[];
int i;
while (s != null) {
sub = s.split(" ");
for (i=1; i<sub.length; i++) {
oneDegree.add(sub[0] + sub[i]);
}
s = br.readLine();
}
dis.close();
}
public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
Set<String> set_start = new HashSet<String>();
Set<String> set_end = new HashSet<String>();
String s[];
for (Text val : values) {
s = val.toString().split(" ");
if (s[1].equals("0")) {
set_end.add(s[0]);
} else {
set_start.add(s[0]);
}
}
String start, end, tmp;
Iterator<String> it_start = set_start.iterator();
Iterator<String> it_end;
while (it_start.hasNext()) {
start = it_start.next();
it_end = set_end.iterator();
while (it_end.hasNext()) {
end = it_end.next();
tmp = start + end;
if (!oneDegree.contains(tmp)) {
context.write(new Text(start), new Text(end));
}
}
}
}
}
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
conf.setStrings("input", args[0]);
Job job = Job.getInstance(conf);
job.setJarByClass(TwoDegreeConnection.class);
job.setJobName("TwoDegreeConnection");
job.setMapperClass(ConnectionMap.class);
job.setReducerClass(ConnectionReduce.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(Text.class);
job.setInputFormatClass(TextInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
job.waitForCompletion(true);
}
}