MapReduce Example (2)

Original post: http://chenxiaoqiong.com/articles/mapreduce2/
After reading MapReduce Example (1), you should have a basic understanding of MapReduce. Try implementing the following example yourself; I believe you will get something out of it.

Requirements

Sort the numbers in the input file; the output file should contain each number's rank followed by the number itself.

Input file

1
999
24
12
45

Output file

1   1
2   12
3   24
4   45
5   999

Design approach

Readers familiar with the MapReduce workflow will quickly notice that sorting already happens inside it: the framework sorts map output keys during the shuffle. We can rely on IntWritable's ordering and have the map emit each number as its output key; the reducer then receives the keys in ascending order, loops over each key's value list, and writes a running line number as the rank with the input key as the value.
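
The design hinges on one fact: the shuffle orders map output keys with the key type's comparator, and IntWritable compares the wrapped int values. Here is a minimal standalone sketch of that ordering (the class name IntWritableOrderDemo is made up for illustration; it is not part of the job):

import org.apache.hadoop.io.IntWritable;

public class IntWritableOrderDemo {
    public static void main(String[] args) {
        // IntWritable implements WritableComparable, and compareTo()
        // compares the wrapped ints, so the shuffle sorts keys ascending.
        IntWritable small = new IntWritable(24);
        IntWritable large = new IntWritable(999);
        System.out.println(small.compareTo(large) < 0);  // true: 24 sorts before 999
    }
}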

Implementation

The code is available in my GitHub repository: https://github.com/chenxiaoqiong/sortMapReduce
Main code:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * <h1> MapReduce Example (2) </h1>
 * SortMapReduce: sorts the input numbers and writes them out with a rank.
 * Created by chenxiaoqiong on 2017/3/27, 2:14 PM.
 */
public class SortMapReduce extends Configured implements Tool {

    /**
     * map: parses one number per line and emits (number, 1); the framework
     * sorts these keys on their way to the reducer.
     */
    public static class SortMapper
            extends Mapper<LongWritable, Text, IntWritable, IntWritable> {

        private final static IntWritable ints = new IntWritable(1);
        private IntWritable keyword = new IntWritable();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString().trim();

            keyword.set(Integer.parseInt(line));

            // write(KEYOUT key, VALUEOUT value): the keys emitted here are
            // sorted by the framework during the shuffle phase, not by this call.
            context.write(keyword, ints);
        }
    }

    /**
     * reduce: keys arrive in sorted order; write (rank, number) once per
     * occurrence, so duplicate numbers each receive their own rank.
     */
    public static class SortReducer
            extends Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {

        private IntWritable linenum = new IntWritable(1);

        @Override
        protected void reduce(IntWritable key, Iterable<IntWritable> value, Context context)
                throws IOException, InterruptedException {

            for (IntWritable val : value) {
                context.write(linenum, key);
                linenum.set(linenum.get() + 1);
            }
        }
    }

    public int run(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // Get the configuration:
        Configuration conf = super.getConf();

        // Create the job:
        Job job = Job.getInstance(conf, this.getClass().getSimpleName());
        job.setJarByClass(SortMapReduce.class);

        // Wire the job up:
        // Input --> Map --> Reduce --> Output
        // Input:
        Path inPath = new Path(args[0]);
        FileInputFormat.addInputPath(job, inPath);
        // FileInputFormat turns the file into <byte offset, line contents> key/value pairs.

        // Map: set the Mapper class and the types of its output key and value:
        job.setMapperClass(SortMapper.class);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(IntWritable.class);

        // Reduce: set the Reducer class and the job's final output key/value
        // types (setOutputKeyClass / setOutputValueClass); these must match
        // what SortReducer actually emits:
        job.setReducerClass(SortReducer.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(IntWritable.class);

        // Output: set the output path:
        Path outPath = new Path(args[1]);
        FileOutputFormat.setOutputPath(job, outPath);

        // Submit the job and wait for completion:
        boolean isSuccess = job.waitForCompletion(true);
        return isSuccess ? 0 : 1;   // exit status 0 on success, 1 on failure
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        int status = ToolRunner.run(conf, new SortMapReduce(), args);
        System.exit(status);
    }
}
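
To try the job, one way is to package it and submit it with the hadoop CLI, passing the input and output paths as the two arguments (the jar and paths below are placeholders, and the class name may need its package prefix; note the output directory must not already exist):

hadoop jar sortMapReduce.jar SortMapReduce /path/to/input /path/to/output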

pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>hadoop</groupId>
    <artifactId>countMapReduce</artifactId>
    <version>1.0-SNAPSHOT</version>

    <repositories>
        <repository>
            <id>apache</id>
            <url>https://repo.maven.apache.org/maven2</url>
        </repository>
    </repositories>
    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.7.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.7.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.7.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-core</artifactId>
            <version>1.2.1</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <artifactId>maven-dependency-plugin</artifactId>
                <configuration>
                    <excludeTransitive>false</excludeTransitive>
                    <stripVersion>true</stripVersion>
                    <outputDirectory>./lib</outputDirectory>
                </configuration>

            </plugin>
        </plugins>
    </build>
</project>
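
With the maven-dependency-plugin configured as above, running the goal below should copy the project's dependency jars (transitive ones included, version numbers stripped from the file names) into ./lib:

mvn dependency:copy-dependencies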