MapReduce之基本數據類的排序

MapReduce之基本數據類的排序

0.思路

因在MapReduce中基本數據類型(如int)默認是升序排序的,因此我們只需要寫一個類繼承IntWritable.Comparator,重寫compare方法即可。

1.在pom.xml中添加hadoop依賴
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.7.3</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>2.7.3</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-common</artifactId>
<version>2.7.3</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>2.7.3</version>
</dependency>
2.在資源目錄(即resources)下添加log4j.properties文件,文件內容如下
log4j.rootLogger=FATAL, dest1
log4j.logger.dsaLogging=DEBUG, dsa
log4j.additivity.dsaLogging=false

log4j.appender.dest1=org.apache.log4j.ConsoleAppender
log4j.appender.dest1.layout=org.apache.log4j.PatternLayout
log4j.appender.dest1.layout.ConversionPattern=%-5p:%l: %m%n
log4j.appender.dest1.ImmediateFlush=true

log4j.appender.dsa=org.apache.log4j.RollingFileAppender
log4j.appender.dsa.File=./logs/dsa.log
log4j.appender.dsa.MaxFileSize=2000KB
# Previously MaxBackupIndex=2
log4j.appender.dsa.MaxBackupIndex=5
log4j.appender.dsa.layout=org.apache.log4j.PatternLayout
log4j.appender.dsa.layout.ConversionPattern=%l:%d: %m%n
3.編寫序列化類Employee
package com.sort.BasicSort;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * Hadoop Writable value type representing one employee record.
 *
 * Source CSV layout (see example below): empNo, empName, empJob, leaderNo,
 * hireDate, empSalary, empBonus, deptNo.
 *
 * IMPORTANT: write() and readFields() must serialize the fields in exactly
 * the same order; changing the order of either method breaks deserialization.
 */
public class Employee implements Writable {
    //7369,SMITH,CLERK,7902,1980/12/17,800,,20
    private IntWritable empNo;
    private Text empName;
    private Text empJob;
    private IntWritable leaderNo;
    private Text hireDate;
    private IntWritable empSalary;
    // Bonus is kept as Text (not IntWritable) because the CSV field may be empty.
    private Text empBonus;
    private IntWritable deptNo;

    /**
     * No-arg constructor required by Hadoop: the framework instantiates the
     * class reflectively and then populates it via readFields(), so every
     * field must already be a non-null Writable instance.
     */
    public Employee() {
        this.empNo = new IntWritable();
        this.empName = new Text("");
        this.empJob = new Text("");
        this.leaderNo = new IntWritable();
        this.hireDate = new Text("");
        this.empSalary =new IntWritable();
        this.empBonus = new Text("");
        this.deptNo = new IntWritable();
    }

    /** Convenience constructor wrapping plain Java values into Writables. */
    public Employee(int empNo, String empName, String empJob, int leaderNo,
                    String hireDate, int empSalary, String empBonus, int deptNo) {
        this.empNo = new IntWritable(empNo);
        this.empName = new Text(empName);
        this.empJob = new Text(empJob);
        this.leaderNo = new IntWritable(leaderNo);
        this.hireDate = new Text(hireDate);
        this.empSalary =new IntWritable(empSalary);
        this.empBonus = new Text(empBonus);
        this.deptNo = new IntWritable(deptNo);
    }

    @Override
    public void write(DataOutput out) throws IOException {
// Serialization: field order here must mirror readFields() exactly.
        this.empNo.write(out);
        this.empName.write(out);
        this.empJob.write(out);
        this.leaderNo.write(out);
        this.hireDate.write(out);
        this.empSalary.write(out);
        this.empBonus.write(out);
        this.deptNo.write(out);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // Deserialization: reads fields in the same order write() emits them.
        this.empNo.readFields(in);
        this.empName.readFields(in);
        this.empJob.readFields(in);
        this.leaderNo.readFields(in);
        this.hireDate.readFields(in);
        this.empSalary.readFields(in);
        this.empBonus.readFields(in);
        this.deptNo.readFields(in);
    }

    @Override
    public String toString() {
        return "Employee{" +
                "empNo=" + empNo +
                ", empName=" + empName +
                ", empJob=" + empJob +
                ", leaderNo=" + leaderNo +
                ", hireDate=" + hireDate +
                ", empSalary=" + empSalary +
                ", empBonus=" + empBonus +
                ", deptNo=" + deptNo +
                '}';
    }

    public IntWritable getEmpNo() {
        return empNo;
    }

    public void setEmpNo(IntWritable empNo) {
        this.empNo = empNo;
    }

    public Text getEmpName() {
        return empName;
    }

    public void setEmpName(Text empName) {
        this.empName = empName;
    }

    public Text getEmpJob() {
        return empJob;
    }

    public void setEmpJob(Text empJob) {
        this.empJob = empJob;
    }

    public IntWritable getLeaderNo() {
        return leaderNo;
    }

    public void setLeaderNo(IntWritable leaderNo) {
        this.leaderNo = leaderNo;
    }

    public Text getHireDate() {
        return hireDate;
    }

    public void setHireDate(Text hireDate) {
        this.hireDate = hireDate;
    }

    public IntWritable getEmpSalary() {
        return empSalary;
    }

    public void setEmpSalary(IntWritable empSalary) {
        this.empSalary = empSalary;
    }

    public Text getEmpBonus() {
        return empBonus;
    }

    public void setEmpBonus(Text empBonus) {
        this.empBonus = empBonus;
    }

    public IntWritable getDeptNo() {
        return deptNo;
    }

    public void setDeptNo(IntWritable deptNo) {
        this.deptNo = deptNo;
    }
}

4.編寫mapper類
package com.sort.BasicSort;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Parses one CSV employee record per input line and emits
 * (deptNo, Employee) so the reducer can aggregate per department.
 *
 * Input record shape: &lt;offset, "7369,SMITH,CLERK,7902,1980/12/17,800,,20"&gt;
 */
public class EmployeeMapper extends Mapper<LongWritable, Text, IntWritable, Employee> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // 1. Tokenize. Use limit -1 so trailing empty fields are preserved:
        //    plain split(",") silently drops trailing "" elements, which would
        //    shift/shorten the array if the last fields of a line are empty.
        String[] splits = value.toString().split(",", -1);

        // 2. Default missing fields to "0" so Integer.parseInt succeeds.
        //    (String.split never produces null elements, so only the empty
        //    string needs handling.)
        // Employees without a superior (e.g. the president) have an empty leaderNo.
        if (splits[3].isEmpty()) {
            splits[3] = "0";
        }
        // Employees without a bonus have an empty empBonus field.
        if (splits[6].isEmpty()) {
            splits[6] = "0";
        }

        Employee employee = getEmpInstance(splits);

        // 3. Key by department number so sorting/grouping happens per deptNo.
        context.write(employee.getDeptNo(), employee);
    }

    /** Builds an Employee from the 8 parsed CSV fields. */
    private Employee getEmpInstance(String[] splits) {
        return new Employee(
                Integer.parseInt(splits[0]), splits[1], splits[2],
                Integer.parseInt(splits[3]), splits[4], Integer.parseInt(splits[5]),
                splits[6], Integer.parseInt(splits[7])
        );
    }
}
5.編寫reducer類
package com.sort.BasicSort;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Sums total pay (salary + bonus) over all employees of one department and
 * emits (deptNo, totalPay).
 */
public class EmployeeReducer extends Reducer<IntWritable, Employee, IntWritable, IntWritable> {

    @Override
    protected void reduce(IntWritable key, Iterable<Employee> values, Context context)
            throws IOException, InterruptedException {
        // 1. Accumulate salary plus bonus for every employee in this department.
        int sum = 0;
        for (Employee e : values) {
            sum += e.getEmpSalary().get();
            Text bonus = e.getEmpBonus();
            // Bonus travels as Text; the mapper normalizes a missing bonus to
            // "0", but guard against an empty field anyway. parseInt avoids
            // the needless Integer boxing of Integer.valueOf.
            if (bonus.getLength() > 0) {
                sum += Integer.parseInt(bonus.toString());
            }
        }
        // 2. Emit the department total.
        context.write(key, new IntWritable(sum));
    }
}
6.編寫自定義比較器類NumComparator.java
package com.sort.BasicSort;

import org.apache.hadoop.io.IntWritable;

/**
 * Descending-order raw comparator for IntWritable keys.
 *
 * Reverses the default ascending comparison by swapping the two operands
 * rather than negating the result: negation is the classic comparator bug —
 * it is incorrect if the delegate ever returns Integer.MIN_VALUE, whereas
 * swapping the arguments is always a valid order reversal.
 */
public class NumComparator extends IntWritable.Comparator {
    @Override
    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
        // Swapped operands => descending order.
        return super.compare(b2, s2, l2, b1, s1, l1);
    }
}
7.編寫Driver類
package com.sort.BasicSort;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.util.Random;


/**
 * Driver that wires the mapper, reducer and descending key comparator into
 * one MapReduce job.
 *
 * Optional command-line arguments (backward compatible — the original
 * hard-coded local test paths remain the defaults):
 *   args[0] — input path  (default "D:\\emp.csv")
 *   args[1] — output dir  (default: a fresh random directory, see getOutputDir())
 */
public class EmployeeJob {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration());
        job.setJarByClass(EmployeeJob.class);

        job.setMapperClass(EmployeeMapper.class);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(Employee.class);

        job.setReducerClass(EmployeeReducer.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(IntWritable.class);

        // Sort map-output keys (deptNo) in descending order.
        job.setSortComparatorClass(NumComparator.class);

        // Accept paths from the command line; fall back to local test paths.
        String inputPath = args.length > 0 ? args[0] : "D:\\emp.csv";
        String outputPath = args.length > 1 ? args[1] : getOutputDir();
        FileInputFormat.setInputPaths(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        boolean result = job.waitForCompletion(true);

        System.out.println("result:" + result);
    }

    /**
     * Generates a unique local output directory so reruns never collide with
     * an existing output path (which would make the job fail).
     */
    public static String getOutputDir() {
        String prefix = "D:\\output\\";
        long time = System.currentTimeMillis();
        // Bound the suffix to non-negative values: the unbounded nextInt()
        // could return a negative number, yielding names like "result_..._-42".
        int random = new Random().nextInt(Integer.MAX_VALUE);
        return prefix + "result_" + time + "_" + random;
    }

}
8.運行

在這裏插入圖片描述
在這裏插入圖片描述

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章