Serialization in Hadoop

1 Introduction

Serialization and deserialization are the conversions between structured objects and byte streams. They are used mainly in two areas: interprocess communication and persistent storage.

Requirements for a wire format
Hadoop uses RPC for communication between its nodes. The RPC protocol serializes each message into a binary byte stream and sends it to the remote node, which deserializes the byte stream back into the original message. RPC serialization should satisfy the following four points:
1. Compact: a compact format makes efficient use of network bandwidth.
2. Fast: interprocess communication forms the backbone of a distributed system, so serialization and deserialization must be fast enough not to become a bottleneck.
3. Extensible: protocols change over time; for example, if a new server adds a parameter to a call, old clients should continue to work.
4. Interoperable: clients written in different languages should be supported.

Requirements for a storage format
On the surface, a serialization framework might seem to need a different set of properties for persistent storage, but in fact the same four points apply:
1. Compact: the data takes up less space.
2. Fast: the data can be read and written quickly.
3. Extensible: data written in an old format can still be read.
4. Interoperable: the data can be read and written from multiple languages.

Hadoop's serialization format
Hadoop's own serialization format consists of the classes that implement the Writable interface. It delivers only the first two properties, compact and fast; it is not easy to extend, and it is not cross-language.
Let's start with the Writable interface itself, which defines two methods:

1. write the object's fields to a binary stream

2. read the object's fields from a binary stream

package org.apache.hadoop.io;

public interface Writable {
    void write(java.io.DataOutput out) throws java.io.IOException;
    void readFields(java.io.DataInput in) throws java.io.IOException;
}
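
The easiest way to get a feel for Writable is through one of Hadoop's predefined wrappers. IntWritable wraps a Java int: you set its value with set() or through the constructor, and read it back with get(). A minimal sketch (the class name IntWritableDemo is just for illustration):

import org.apache.hadoop.io.IntWritable;

public class IntWritableDemo {
	public static void main(String[] args) {
		// wrap a Java int in Hadoop's Writable wrapper
		IntWritable writable = new IntWritable();
		writable.set(163);
		// or, equivalently: new IntWritable(163)
		System.out.println(writable.get()); // prints 163
	}
}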

2 Examples

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.StringUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestWritable {
	byte[] bytes = null;
	@Before
	public void init() throws IOException {
		IntWritable writable = new IntWritable(163);
		bytes = serialize(writable);
	}

	@Test
	public void testSerialize() throws IOException {
		// the serialized form is a four-byte stream
		Assert.assertEquals(4, bytes.length);
		// the bytes are laid out in big-endian order: 163 == 0xa3
		Assert.assertEquals("000000a3", StringUtils.byteToHexString(bytes));
	}

	@Test
	public void testDeserialize() throws IOException {
		// read the data in bytes back into a new object via deserialization
		IntWritable newWritable = new IntWritable();
		deserialize(newWritable, bytes);
		// get() returns the original value, 163
		Assert.assertEquals(163, newWritable.get());
	}

	/**
	 * Serialize a Writable into a byte array.
	 * 
	 * @param writable the object to serialize
	 */
	public static byte[] serialize(Writable writable) throws IOException {
		ByteArrayOutputStream out = new ByteArrayOutputStream();
		DataOutputStream dataOut = new DataOutputStream(out);
		writable.write(dataOut);

		dataOut.close();
		return out.toByteArray();
	}

	/**
	 * Deserialize a byte array into a Writable.
	 * 
	 * @param writable the object that receives the deserialized data
	 * @param bytes the serialized data
	 */
	public static byte[] deserialize(Writable writable, byte[] bytes)
			throws IOException {
		ByteArrayInputStream in = new ByteArrayInputStream(bytes);
		DataInputStream dataIn = new DataInputStream(in);
		writable.readFields(dataIn);

		dataIn.close();
		return bytes;
	}
}
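
Comparison is central to MapReduce, where records are sorted by key. For this, Hadoop defines RawComparator, an extension of java.util.Comparator that can additionally compare records directly in their serialized form, without deserializing them first. WritableComparator is its general-purpose implementation. The following test exercises both the object-level and the byte-level compare: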

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparator;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestComparator {
	
	// orders keys by their value
	RawComparator<IntWritable> comparator;
	IntWritable w1;
	IntWritable w2;

	/**
	 * Get the comparator for IntWritable and initialize two IntWritables.
	 */
	@SuppressWarnings("unchecked")
	@Before
	public void init() {
		comparator = WritableComparator.get(IntWritable.class);
		w1 = new IntWritable(163);
		w2 = new IntWritable(76);
	}

	/**
	 * Compare the two objects directly.
	 */
	@Test
	public void testComparator() {
		Assert.assertTrue(comparator.compare(w1, w2) > 0);
	}

	/**
	 * Compare the serialized forms directly.
	 */
	@Test
	public void testCompare() throws IOException {
		byte[] b1 = serialize(w1);
		byte[] b2 = serialize(w2);
		Assert.assertTrue(comparator
				.compare(b1, 0, b1.length, b2, 0, b2.length) > 0);
	}

	/**
	 * Serialize an object implementing Writable into a byte array.
	 */
	public static byte[] serialize(Writable writable) throws IOException {
		ByteArrayOutputStream out = new ByteArrayOutputStream();
		DataOutputStream dataOut = new DataOutputStream(out);
		writable.write(dataOut);

		dataOut.close();
		return out.toByteArray();
	}
}
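
WritableComparator.get(IntWritable.class) returns the optimized comparator that IntWritable registers for itself: its byte-level compare() decodes the ints straight out of the buffers instead of instantiating and deserializing objects. A sketch of that pattern, modeled on IntWritable.Comparator in the Hadoop source (the class name IntRawComparator is just for illustration):

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.WritableComparator;

public class IntRawComparator extends WritableComparator {

	public IntRawComparator() {
		super(IntWritable.class);
	}

	// an IntWritable serializes to a four-byte big-endian int, so it
	// can be decoded in place with WritableComparator's static readInt
	@Override
	public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
		int v1 = readInt(b1, s1);
		int v2 = readInt(b2, s2);
		return (v1 < v2 ? -1 : (v1 == v2 ? 0 : 1));
	}
}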

3 Custom Writables
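To use your own type as a MapReduce key, implement WritableComparable, which combines Writable with java.lang.Comparable. InfoBean below carries an account name and three monetary fields: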

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.WritableComparable;

public class InfoBean implements WritableComparable<InfoBean> {

	private String account;
	private double income;
	private double expenses;
	private double surplus;

	public void set(String account, double income, double expenses) {
		this.account = account;
		this.income = income;
		this.expenses = expenses;
		this.surplus = income - expenses;
	}

	@Override
	public void write(DataOutput out) throws IOException {
		out.writeUTF(account);
		out.writeDouble(income);
		out.writeDouble(expenses);
		out.writeDouble(surplus);

	}

	@Override
	public void readFields(DataInput in) throws IOException {
		this.account = in.readUTF();
		this.income = in.readDouble();
		this.expenses = in.readDouble();
		this.surplus = in.readDouble();
	}

	@Override
	public int compareTo(InfoBean o) {
		if (this.income == o.getIncome()) {
			// equal incomes: order by expenses; Double.compare returns 0
			// when both fields match, keeping the compareTo contract
			return Double.compare(this.expenses, o.getExpenses());
		}
		return this.income > o.getIncome() ? 1 : -1;
	}

	@Override
	public String toString() {
		return income + "\t" + expenses + "\t" + surplus;
	}

	public String getAccount() {
		return account;
	}

	public void setAccount(String account) {
		this.account = account;
	}

	public double getIncome() {
		return income;
	}

	public void setIncome(double income) {
		this.income = income;
	}

	public double getExpenses() {
		return expenses;
	}

	public void setExpenses(double expenses) {
		this.expenses = expenses;
	}

	public double getSurplus() {
		return surplus;
	}

	public void setSurplus(double surplus) {
		this.surplus = surplus;
	}

}
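
The compareTo() above has to deserialize both objects before it can compare them. When a custom Writable serves as a MapReduce key, it is worth also registering a raw comparator that works on the serialized bytes, just like IntWritable's own comparator. Below is a sketch, assuming the wire layout produced by write() above: a writeUTF-encoded string (a 2-byte unsigned length prefix followed by the string bytes) and then three 8-byte doubles. The class name InfoBeanRawComparator is hypothetical:

import org.apache.hadoop.io.WritableComparator;

public class InfoBeanRawComparator extends WritableComparator {

	public InfoBeanRawComparator() {
		super(InfoBean.class);
	}

	@Override
	public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
		// skip the writeUTF-encoded account field: a 2-byte unsigned
		// length prefix followed by that many bytes
		int n1 = 2 + readUnsignedShort(b1, s1);
		int n2 = 2 + readUnsignedShort(b2, s2);
		// income is the first double after the account field
		double income1 = readDouble(b1, s1 + n1);
		double income2 = readDouble(b2, s2 + n2);
		if (income1 != income2) {
			return income1 > income2 ? 1 : -1;
		}
		// equal incomes: fall back to expenses, mirroring compareTo
		double expenses1 = readDouble(b1, s1 + n1 + 8);
		double expenses2 = readDouble(b2, s2 + n2 + 8);
		return Double.compare(expenses1, expenses2);
	}

	static {
		// register, so WritableComparator.get(InfoBean.class) returns
		// this comparator once the class has been loaded
		WritableComparator.define(InfoBean.class, new InfoBeanRawComparator());
	}
}

Unless such a comparator is registered, WritableComparator.get(InfoBean.class), as used in the test below, falls back to a default comparator that deserializes both records and delegates to compareTo().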

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparator;

public class TestInfoBean {

	public static void main(String[] args) throws IOException {

		// serialize
		InfoBean infoBean = new InfoBean();
		infoBean.set("abc", 100, 10);
		byte[] bytes = serialize(infoBean);
		System.out.println(bytes.length);

		// deserialize
		InfoBean infoBeanRes = new InfoBean();
		deserialize(infoBeanRes, bytes);
		System.out.println(infoBeanRes);

		// compare
		@SuppressWarnings("unchecked")
		RawComparator<InfoBean> comparator = WritableComparator
				.get(InfoBean.class);
		InfoBean infoBean1 = new InfoBean();
		infoBean1.set("abc", 110, 10);
		InfoBean infoBean2 = new InfoBean();
		infoBean2.set("abc", 100, 10);
		System.out.println(comparator.compare(infoBean1, infoBean2));
	}

	/**
	 * Serialize a Writable into a byte array.
	 * 
	 * @param writable the object to serialize
	 */
	public static byte[] serialize(Writable writable) throws IOException {
		ByteArrayOutputStream out = new ByteArrayOutputStream();
		DataOutputStream dataOut = new DataOutputStream(out);
		writable.write(dataOut);

		dataOut.close();
		return out.toByteArray();
	}

	/**
	 * Deserialize a byte array into a Writable.
	 * 
	 * @param writable the object that receives the deserialized data
	 * @param bytes the serialized data
	 */
	public static byte[] deserialize(Writable writable, byte[] bytes)
			throws IOException {
		ByteArrayInputStream in = new ByteArrayInputStream(bytes);
		DataInputStream dataIn = new DataInputStream(in);
		writable.readFields(dataIn);

		dataIn.close();
		return bytes;
	}
}
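
Running this should print 29 for the serialized length (writeUTF stores "abc" as a 2-byte length prefix plus 3 bytes, and the three doubles take 8 bytes each), then the deserialized bean's fields (100.0, 10.0, 90.0), and finally 1 from the comparison, since the first bean's income of 110 is greater than the second's 100.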

Original post: http://blog.csdn.net/lastsweetop/article/details/9193907

