Reading MySQL with MapReduce

The job below uses DBInputFormat to read the Hive metastore's VERSION table from MySQL and write the rows out as text to a local directory.

package com.jsptpd.mysqlintolocal;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Main {
	private static final String PATH_TO_CORE_SITE_XML = "D:\\workspace_scala1\\test1314\\conf\\core-site.xml";

	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		System.setProperty("HADOOP_USER_NAME", "hdfs");

		// Load the cluster configuration from a local copy of core-site.xml.
		Configuration conf = new Configuration();
		conf.addResource(new Path(PATH_TO_CORE_SITE_XML));

		// Register the JDBC driver, connection URL, user and password for DBInputFormat.
		DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
				"jdbc:mysql://xxx.xxx.xxxx.xxxx:3306/hive?useUnicode=true", "pig", "123456");

		Job job = Job.getInstance(conf);
		job.setJarByClass(Main.class);
		job.setJobName("Helloworld");

		job.setMapperClass(Db2hdfsMapper.class);
		job.setReducerClass(Db2hdfsReducer.class);

		// Both mapper and reducer emit <LongWritable, Text> pairs.
		job.setOutputKeyClass(LongWritable.class);
		job.setOutputValueClass(Text.class);

		// Read input records from MySQL instead of HDFS.
		job.setInputFormatClass(DBInputFormat.class);

		// Write the results to a local directory (file:// scheme).
		FileOutputFormat.setOutputPath(job, new Path("file:///D:/wjj1314"));

		// Deserialize each row of the VERSION table into a User record; only these columns are read.
		String[] fields = {"ver_id", "schema_version", "version_comment"};
		DBInputFormat.setInput(job, User.class, "VERSION", "", "", fields);

		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}

The above is the driver (main) class.
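
Before submitting the job, it can be worth verifying that the connection settings passed to DBConfiguration.configureDB actually reach the VERSION table. A minimal stand-alone check is sketched below (illustrative only; the class name ConnectionCheck is made up, and the host and credentials are the same placeholders used in the driver):

package com.jsptpd.mysqlintolocal;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Hypothetical helper: runs the same query DBInputFormat would issue for the configured fields.
public class ConnectionCheck {
	public static void main(String[] args) throws Exception {
		Class.forName("com.mysql.jdbc.Driver");
		try (Connection conn = DriverManager.getConnection(
				"jdbc:mysql://xxx.xxx.xxxx.xxxx:3306/hive?useUnicode=true", "pig", "123456");
			 Statement stmt = conn.createStatement();
			 ResultSet rs = stmt.executeQuery(
				"SELECT ver_id, schema_version, version_comment FROM VERSION")) {
			while (rs.next()) {
				// Same delimited layout the MapReduce job will produce.
				System.out.println(rs.getInt(1) + "|" + rs.getString(2) + "|" + rs.getString(3));
			}
		}
	}
}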

package com.jsptpd.mysqlintolocal;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

public class User implements DBWritable,WritableComparable<User>{
	private Integer ver_id;
	private String  schema_version;
	private String version_comment;
	public User() {
		
	}
	
	public User(Integer ver_id,String  schema_version,String version_comment) {
		this.ver_id = ver_id;
		this.schema_version = schema_version;
		this.version_comment = version_comment;
	}

	public Integer getVer_id() {
		return ver_id;
	}

	public void setVer_id(Integer ver_id) {
		this.ver_id = ver_id;
	}

	public String getSchema_version() {
		return schema_version;
	}

	public void setSchema_version(String schema_version) {
		this.schema_version = schema_version;
	}

	public String getVersion_comment() {
		return version_comment;
	}

	public void setVersion_comment(String version_comment) {
		this.version_comment = version_comment;
	}

	@Override
	public String toString() {
		return ver_id + "|" + schema_version + "|" + version_comment;
	}

	// Writable deserialization: fields must be read in the same order write() emits them.
	public void readFields(DataInput datainput) throws IOException {
		ver_id = datainput.readInt();
		schema_version = datainput.readUTF();
		version_comment = datainput.readUTF();
	}

	// Writable serialization, used when records move between map and reduce tasks.
	public void write(DataOutput dataoutput) throws IOException {
		dataoutput.writeInt(ver_id);
		dataoutput.writeUTF(schema_version);
		dataoutput.writeUTF(version_comment);
	}

	// DBWritable: populate the fields from one row of the result set;
	// columns arrive in the order given to DBInputFormat.setInput().
	public void readFields(ResultSet resultset) throws SQLException {
		ver_id = resultset.getInt(1);
		schema_version = resultset.getString(2);
		version_comment = resultset.getString(3);
	}

	// DBWritable: only used when writing back to the database (e.g. with DBOutputFormat);
	// not exercised by this read-only job.
	public void write(PreparedStatement preparestatement) throws SQLException {
		preparestatement.setInt(1, ver_id);
		preparestatement.setString(2, schema_version);
		preparestatement.setString(3, version_comment);
	}

	// Order records by ver_id when they are used as keys.
	public int compareTo(User o) {
		return this.ver_id.compareTo(o.getVer_id());
	}

}

The above is the record class: one instance represents one row of the VERSION table.
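
Note that the class implements two pairs of read/write methods: readFields(DataInput)/write(DataOutput) are Hadoop's Writable serialization between tasks, while readFields(ResultSet)/write(PreparedStatement) are the JDBC side used by DBInputFormat/DBOutputFormat. The sketch below (UserRoundTrip is a hypothetical test class and the sample values are made up) shows the Writable round trip and why both methods must handle the fields in the same order:

package com.jsptpd.mysqlintolocal;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

// Hypothetical check: serialize a User and read it back; the copy is correct only
// because readFields() consumes the fields in the order write() produced them.
public class UserRoundTrip {
	public static void main(String[] args) throws Exception {
		User original = new User(1, "1.1.0", "sample comment");

		ByteArrayOutputStream bytes = new ByteArrayOutputStream();
		original.write(new DataOutputStream(bytes));

		User copy = new User();
		copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

		System.out.println(copy); // prints 1|1.1.0|sample comment
	}
}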

package com.jsptpd.mysqlintolocal;
import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class Db2hdfsMapper extends Mapper<LongWritable, User, LongWritable, Text> {

	@Override
	protected void map(LongWritable key, User value, Context context)
			throws IOException, InterruptedException {
		// The incoming key is just the record offset supplied by DBInputFormat;
		// re-key the output on ver_id and emit the whole row as delimited text.
		context.write(new LongWritable(value.getVer_id()), new Text(value.toString()));
	}

}

The above is the map function.
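
One thing to watch: value.getVer_id() returns an Integer that is auto-unboxed into the LongWritable, so a NULL ver_id in the table would fail the task with a NullPointerException. If the column can be NULL, a guarded variant could look like this (a sketch, not part of the original job; SafeDb2hdfsMapper is a made-up name):

package com.jsptpd.mysqlintolocal;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical variant: skips rows whose ver_id is NULL instead of failing the task.
public class SafeDb2hdfsMapper extends Mapper<LongWritable, User, LongWritable, Text> {

	@Override
	protected void map(LongWritable key, User value, Context context)
			throws IOException, InterruptedException {
		if (value.getVer_id() == null) {
			context.getCounter("db2hdfs", "null_ver_id").increment(1); // track skipped rows
			return;
		}
		context.write(new LongWritable(value.getVer_id()), new Text(value.toString()));
	}

}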

package com.jsptpd.mysqlintolocal;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class Db2hdfsReducer extends Reducer<LongWritable, Text, LongWritable, Text> {

	@Override
	protected void reduce(LongWritable key, Iterable<Text> values, Context context)
			throws IOException, InterruptedException {
		// Identity pass-through: every row is written out unchanged, grouped by ver_id.
		for (Text value : values) {
			context.write(key, value);
		}
	}

}

The above is the reduce function.
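
The reducer is an identity pass-through; its only effect is that the output ends up grouped and sorted by ver_id. If that ordering is not needed, the job can be run map-only so the mapper's output is written straight to the output files. Assuming the driver above, the relevant lines would change roughly as follows (a fragment, not a complete driver):

		// Map-only variant: drop the reduce phase, mapper output goes directly to the output files.
		job.setMapperClass(Db2hdfsMapper.class);
		job.setNumReduceTasks(0);                  // no reducer, no shuffle/sort
		job.setOutputKeyClass(LongWritable.class); // now describes the mapper's output types
		job.setOutputValueClass(Text.class);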
