KeyValueTextInputFormat in MapReduce

If a line contains the separator, the part before the separator becomes the key and the part after it becomes the value; if the line contains no separator, the entire line becomes the key and the value is empty.

KeyValueTextInputFormat is a good fit when every line of the input consists of two columns separated by a tab character.
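For example, given a hypothetical input file with the following lines (\t marks a tab):

hello\tworld
foo\tbar
nokey

KeyValueTextInputFormat would hand the mapper the pairs (hello, world), (foo, bar), and (nokey, ""); the last line has no tab, so the whole line becomes the key and the value is an empty Text.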

Code example:

package com.bigdata.hadoop.mapred;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class MyKeyValueTextInputFormatApp {
	
	private static final String INPUT_PATH = "hdfs://hadoop1:9000/dir1/hello";
	private static final String OUTPUT_PATH = "hdfs://hadoop1:9000/dir1/out";
	
	public static void main(String[] args) throws Exception {
		Configuration configuration = new Configuration();
		//the key/value separator defaults to \t
		configuration.set(KeyValueLineRecordReader.KEY_VALUE_SEPERATOR, "\t");
		Job job = Job.getInstance(configuration, MyKeyValueTextInputFormatApp.class.getSimpleName());
		
		//delete the output directory if it already exists, so the job can be rerun
		final FileSystem fileSystem = FileSystem.get(new URI(OUTPUT_PATH), configuration);
		fileSystem.delete(new Path(OUTPUT_PATH), true);
		
		job.setJarByClass(MyKeyValueTextInputFormatApp.class);
		
		FileInputFormat.setInputPaths(job, INPUT_PATH);
		//use KeyValueTextInputFormat to parse the input; the separator between key and value defaults to \t
		job.setInputFormatClass(KeyValueTextInputFormat.class);
		
		job.setMapperClass(MyMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(LongWritable.class);
		
		//map-only job: with zero reducers the map output is written directly
		job.setNumReduceTasks(0);
		
		FileOutputFormat.setOutputPath(job, new Path(OUTPUT_PATH));
		
		job.waitForCompletion(true);
	}
	//simple mapper: emit both the key and the value, each with a count of 1
	public static class MyMapper extends Mapper<Text, Text, Text, LongWritable>{
		@Override
		protected void map(Text key, Text value,
				Mapper<Text, Text, Text, LongWritable>.Context context)
				throws IOException, InterruptedException {
			context.write(new Text(key), new LongWritable(1));
			context.write(new Text(value), new LongWritable(1));
		}
	}
	
}
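A quick sketch of the result, assuming the hypothetical input above is stored at /dir1/hello: package the class into a jar and launch it with hadoop jar <your-jar> com.bigdata.hadoop.mapred.MyKeyValueTextInputFormatApp. Because there are no reducers, the map output lands directly in /dir1/out/part-m-00000 and would look roughly like this (TextOutputFormat separates key and value with a tab by default):

hello	1
world	1
foo	1
bar	1
nokey	1
	1

The last record comes from the line without a separator: its value is an empty Text, which the mapper still writes out.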

