package com.sncfi.hbase
import org.apache.hadoop.hbase.{HBaseConfiguration, HColumnDescriptor, HTableDescriptor, TableName}
import org.apache.hadoop.hbase.client.{Admin, Connection, ConnectionFactory, Delete, Get, HBaseAdmin, Put, Result, ResultScanner, Scan, Table}
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.control.NonFatal
/**
 * Created by admin on 2016/12/14.
 * A standalone HBase example in the style of the Spark samples.
 * Note: the helper methods are defined before `main`, which uses them.
 */
object HBaseLocalTest {

  /**
   * Creates the table with column families "artitle" and "author" if it
   * does not already exist.
   *
   * @param connection shared, thread-safe HBase connection (owned by the caller)
   * @param tablename  name of the table to create
   */
  def createHTable(connection: Connection, tablename: String): Unit = {
    // Admin is the HBase schema manager; it must be closed after use
    // (the original leaked it).
    val admin = connection.getAdmin
    try {
      val tableName = TableName.valueOf(tablename)
      if (!admin.tableExists(tableName)) {
        val tableDescriptor = new HTableDescriptor(tableName)
        // Column family 1: artitle
        tableDescriptor.addFamily(new HColumnDescriptor("artitle".getBytes()))
        // Column family 2: author
        tableDescriptor.addFamily(new HColumnDescriptor("author".getBytes()))
        admin.createTable(tableDescriptor)
        println("create done.")
      }
    } finally {
      admin.close()
    }
  }

  /**
   * Disables and deletes the table if it exists. A table must be disabled
   * before HBase allows it to be deleted.
   */
  def deleteHTable(connection: Connection, tablename: String): Unit = {
    val admin = connection.getAdmin
    try {
      val tableName = TableName.valueOf(tablename)
      if (admin.tableExists(tableName)) {
        admin.disableTable(tableName)
        admin.deleteTable(tableName)
      }
    } finally {
      admin.close()
    }
  }

  /**
   * Puts a single cell. Re-running with the same key/family/column
   * overwrites the previous value (a new version is written).
   *
   * @param key row key of the cell to write
   */
  def insertHTable(connection: Connection, tablename: String, family: String, column: String, key: String, value: String): Unit = {
    val table = connection.getTable(TableName.valueOf(tablename))
    try {
      // Build the Put for the given row key, then attach family/column/value.
      val p = new Put(key.getBytes)
      p.addColumn(family.getBytes, column.getBytes, value.getBytes())
      table.put(p)
    } finally {
      // The original had a bare `try` with no `finally`, leaking the handle.
      table.close()
    }
  }

  /** Fetches one cell by row key and prints its value. */
  def getAResult(connection: Connection, tablename: String, family: String, column: String, key: String): Unit = {
    var table: Table = null
    try {
      table = connection.getTable(TableName.valueOf(tablename))
      val g = new Get(key.getBytes())
      val result = table.get(g)
      // getValue returns null when the cell is absent; Bytes.toString(null)
      // then yields null, which is printed as-is.
      val value = Bytes.toString(result.getValue(family.getBytes(), column.getBytes()))
      println("key:" + value)
    } finally {
      if (table != null) table.close()
    }
  }

  /** Deletes a single cell (family:column) of the given row. */
  def deleteRecord(connection: Connection, tablename: String, family: String, column: String, key: String): Unit = {
    var table: Table = null
    try {
      table = connection.getTable(TableName.valueOf(tablename))
      val d = new Delete(key.getBytes())
      d.addColumn(family.getBytes(), column.getBytes())
      table.delete(d)
      println("delete record done.")
    } finally {
      if (table != null) table.close()
    }
  }

  /** Scans the table, restricted to family:column, printing each row and value. */
  def scanRecord(connection: Connection, tablename: String, family: String, column: String): Unit = {
    var table: Table = null
    var scanner: ResultScanner = null
    try {
      table = connection.getTable(TableName.valueOf(tablename))
      val s = new Scan()
      s.addColumn(family.getBytes(), column.getBytes())
      scanner = table.getScanner(s)
      println("scan...for...")
      var result: Result = scanner.next()
      while (result != null) {
        println("Found row:" + result)
        println("Found value: " + Bytes.toString(result.getValue(family.getBytes(), column.getBytes())))
        result = scanner.next()
      }
    } finally {
      // Close scanner before table, and null-guard both: the original called
      // scanner.close() unconditionally (NPE if getScanner threw) and its
      // misleading indentation only guarded table.close().
      if (scanner != null) scanner.close()
      if (table != null) table.close()
    }
  }

  /**
   * Demo entry point: configures ZooKeeper, opens a Connection (a heavyweight,
   * thread-safe object — the single entry point for HBase operations),
   * creates the "blog" table, inserts a few cells, and scans them back.
   */
  def main(args: Array[String]): Unit = {
    // val sparkConf = new SparkConf().setAppName("HBaseTest")
    // A SparkContext would drive Spark's parallel execution; not needed here.
    // val sc = new SparkContext(sparkConf)
    // Factory-method style configuration.
    val conf = HBaseConfiguration.create
    val tablename = "blog"
    conf.set("hbase.zookeeper.property.clientPort", "2181")
    conf.set("zookeeper.znode.parent", "/hbase-unsecure")
    conf.set("hbase.zookeeper.quorum", "hadoop36.newqd.com,hadoop37.newqd.com,hadoop38.newqd.com")
    // conf.set("hbase.zookeeper.quorum", "hadoop1.snnu.edu.cn,hadoop3.snnu.edu.cn")
    conf.set(TableInputFormat.INPUT_TABLE, tablename)
    val connection = ConnectionFactory.createConnection(conf)
    try {
      // Create-table test.
      createHTable(connection, "blog")
      // Insert data; re-running overwrites the existing cells.
      insertHTable(connection, "blog", "artitle", "engish", "002", "c++ for me")
      insertHTable(connection, "blog", "artitle", "engish", "003", "python for me")
      insertHTable(connection, "blog", "artitle", "chinese", "002", "C++ for china")
      // Delete-record test.
      // deleteRecord(connection, "blog", "artitle", "chinese", "002")
      // Scan the table.
      scanRecord(connection, "blog", "artitle", "engish")
      // Delete-table test.
      // deleteHTable(connection, "blog")
    } catch {
      // Report recoverable failures; fatal errors (OOM etc.) still propagate.
      case NonFatal(e) => e.printStackTrace()
    } finally {
      // The original's bare outer `try` had no finally, so the connection
      // leaked on failure.
      connection.close()
      // sc.stop
    }
  }
}
// Source: blog post "Scala操作HBase" (trailing comment-section boilerplate
// from the web page converted to a comment so the file compiles).