package com.ycit.hbase.test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
/**
 * A MapReduce word-count job that reads its input from and writes its output to HBase.
 *
 * @author 江鵬飛
 *
 */
public class HBaseMr {
/**
 * Create the HBase configuration.
 */
static Configuration config = null;
static {
config = HBaseConfiguration.create();
config.set("hbase.zookeeper.quorum", "mini1,mini2,mini3");// ZooKeeper quorum addresses
config.set("hbase.zookeeper.property.clientPort", "2181");// ZooKeeper client port
}
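// Note: this class uses the older HTable/HBaseAdmin client API, as in the original
// example. On HBase 1.0+ the idiomatic entry point is a shared Connection, roughly
// like the sketch below (it would also need Connection/Admin/Table/TableName imports):
// Connection conn = ConnectionFactory.createConnection(config);
// Admin admin = conn.getAdmin();
// Table table = conn.getTable(TableName.valueOf(tableName));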
/**
 * Table information
 */
public static final String tableName = "word";// source table name
public static final String colf = "content";// column family
public static final String col = "info";// column qualifier
public static final String tableName2 = "stat";// output table name
/**
 * Initialize the table schemas and their seed data.
 */
public static void initTB() {
HTable table = null;
HBaseAdmin admin = null;
try {
admin = new HBaseAdmin(config);// table admin client
/* Drop each table only if it actually exists, so a missing table does not cause a TableNotFoundException */
if (admin.tableExists(tableName)) {
System.out.println("table " + tableName + " already exists, dropping it!");
admin.disableTable(tableName);
admin.deleteTable(tableName);
}
if (admin.tableExists(tableName2)) {
System.out.println("table " + tableName2 + " already exists, dropping it!");
admin.disableTable(tableName2);
admin.deleteTable(tableName2);
}
/* Create the tables */
HTableDescriptor desc = new HTableDescriptor(tableName);
HColumnDescriptor family = new HColumnDescriptor(colf);
desc.addFamily(family);
admin.createTable(desc);
HTableDescriptor desc2 = new HTableDescriptor(tableName2);
HColumnDescriptor family2 = new HColumnDescriptor(colf);
desc2.addFamily(family2);
admin.createTable(desc2);
/* Insert the sample data */
table = new HTable(config, tableName);
table.setAutoFlush(false);// buffer puts on the client instead of sending each one immediately
table.setWriteBufferSize(500);// client write buffer size in bytes (tiny; demo only)
List<Put> lp = new ArrayList<Put>();
Put p1 = new Put(Bytes.toBytes("1"));
p1.add(colf.getBytes(), col.getBytes(), ("The Apache Hadoop software library is a framework").getBytes());
lp.add(p1);
Put p2 = new Put(Bytes.toBytes("2"));
p2.add(colf.getBytes(), col.getBytes(),
("The common utilities that support the other Hadoop modules").getBytes());
lp.add(p2);
Put p3 = new Put(Bytes.toBytes("3"));
p3.add(colf.getBytes(), col.getBytes(), ("Hadoop by reading the documentation").getBytes());
lp.add(p3);
Put p4 = new Put(Bytes.toBytes("4"));
p4.add(colf.getBytes(), col.getBytes(), ("Hadoop from the release page").getBytes());
lp.add(p4);
Put p5 = new Put(Bytes.toBytes("5"));
p5.add(colf.getBytes(), col.getBytes(), ("Hadoop on the mailing list").getBytes());
lp.add(p5);
table.put(lp);// queue the puts in the client-side write buffer
table.flushCommits();// flush the buffered mutations to the region servers
lp.clear();
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (table != null) {
table.close();
}
if (admin != null) {
admin.close();// release the admin client's resources as well
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
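// After initTB() the word table holds five rows keyed "1".."5", each storing one
// sentence in content:info; the stat table exists but stays empty until the job runs.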
/**
 * MyMapper extends TableMapper&lt;Text, IntWritable&gt;, where Text is the output
 * key type and IntWritable is the output value type.
 */
public static class MyMapper extends TableMapper<Text, IntWritable> {
private static IntWritable one = new IntWritable(1);
private static Text word = new Text();
@Override
// Input types: key is the row key; value is the Result holding one row
protected void map(ImmutableBytesWritable key, Result value, Context context)
throws IOException, InterruptedException {
// Read the value of colf:col from the current row
String words = Bytes.toString(value.getValue(Bytes.toBytes(colf), Bytes.toBytes(col)));// the table has only one column family, so the cell can be read directly
// Split the sentence on spaces
String[] itr = words.split(" ");
// Emit (word, 1) for every token
for (int i = 0; i < itr.length; i++) {
word.set(itr[i]);
context.write(word, one);
}
}
}
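// For row "1" ("The Apache Hadoop software library is a framework") the mapper
// emits (The,1) (Apache,1) (Hadoop,1) (software,1) (library,1) (is,1) (a,1)
// (framework,1); the shuffle then groups these pairs by word for the reducer.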
/**
 * MyReducer extends TableReducer&lt;Text, IntWritable, ImmutableBytesWritable&gt;,
 * where Text is the input key type, IntWritable is the input value type, and
 * ImmutableBytesWritable is the output type, i.e. the type of the row key.
 */
public static class MyReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
@Override
protected void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException {
// Sum the counts emitted by the mapper
int sum = 0;
for (IntWritable val : values) {// accumulate
sum += val.get();
}
// Create a Put whose row key is the word itself
Put put = new Put(Bytes.toBytes(key.toString()));
// Store the count as a string in colf:col
put.add(Bytes.toBytes(colf), Bytes.toBytes(col), Bytes.toBytes(String.valueOf(sum)));
// Write to HBase: emit the row key together with the Put
context.write(new ImmutableBytesWritable(Bytes.toBytes(key.toString())), put);
}
}
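// With the seed data above, "Hadoop" appears once in each of the five rows, so
// the reducer writes row "Hadoop" with content:info = "5" into the stat table.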
public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
/*
 * config.set("df.default.name", "hdfs://mini1:9000/");// default HDFS path
 * config.set("hadoop.job.ugi", "hadoop,hadoop");// user name, group
 * config.set("mapred.job.tracker", "mini1:9001");// JobTracker address
 */
// Initialize the tables and seed data
initTB();
// Create the job
Job job = Job.getInstance(config, "HBaseMr");// Job.getInstance replaces the deprecated Job constructor
job.setJarByClass(HBaseMr.class);// main class
// Create the scan
Scan scan = new Scan();
// Restrict the scan to the single column the mapper needs
scan.addColumn(Bytes.toBytes(colf), Bytes.toBytes(col));
// Configure the HBase-reading mapper: table name, scan, mapper class, and the mapper's output key/value types
TableMapReduceUtil.initTableMapperJob(tableName, scan, MyMapper.class, Text.class, IntWritable.class, job);
// Configure the HBase-writing reducer: output table name, reducer class, job
TableMapReduceUtil.initTableReducerJob(tableName2, MyReducer.class, job);
System.exit(job.waitForCompletion(true) ? 0 : 1);
}
}
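/*
 * A possible way to run the job (assuming the class is packaged into a jar named
 * hbase-mr.jar with the HBase client libraries on the classpath):
 *
 *   hadoop jar hbase-mr.jar com.ycit.hbase.test.HBaseMr
 *
 * Afterwards, `scan 'stat'` in the hbase shell should show one row per distinct
 * word, with its count stored in content:info.
 */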