Writing and Using a UDTF in Hive (Complete Demo)

Existing field names and sample values:

pos 0-1-2
oreq 125_126-127_128-129_130 
sreq 125_126-127_128-129_130
sres 125-127-129_130
sans 125-127-129

The goal is to split each row into multiple rows on the hyphen (-) delimiter, keeping the five fields aligned by position.
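With the sample values above, each field splits into three segments on "-", so one input row expands to three output rows:

pos  oreq     sreq     sres     sans
0    125_126  125_126  125      125
1    127_128  127_128  127      127
2    129_130  129_130  129_130  129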

1. Implementation


import com.google.common.collect.Lists;
import org.apache.directory.api.util.Strings;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

import java.util.List;

/**
 * This UDTF pairs five fields on a shared "-" delimiter and turns one
 * input row into multiple output rows, each with multiple columns.
 *
 * For example, given
 * pos       0-1-2
 * srequest  125_126-127_128-129_130
 * sresponse 125_126-127_128-129_130
 *
 * the result is:
 * 0 125_126 125_126
 * 1 127_128 127_128
 * 2 129_130 129_130
 */
public class get_pos_udtf extends GenericUDTF {

    // initialize declares the input and output schema; the input arrives as an array of ObjectInspector
    @Override
    public StructObjectInspector initialize(ObjectInspector[] args){
        // output column names
        List<String> colName = Lists.newLinkedList();
        colName.add("pos");
        colName.add("oreq");
        colName.add("sreq");
        colName.add("sres");
        colName.add("sans");

        // output column types
        List<ObjectInspector> resType = Lists.newLinkedList();
        resType.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        resType.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        resType.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        resType.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        resType.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);

        // return the column names together with their types
        return ObjectInspectorFactory.getStandardStructObjectInspector(colName, resType);
    }

    // process handles one input record and emits any number of result records
    // objects - the column values of the current input record
    @Override
    public void process(Object[] objects) throws HiveException {
        // if pos is missing or empty, emit a single all-null row
        if (objects[0] == null || Strings.isEmpty(objects[0].toString())) {
            String[] obj = {null, null, null, null, null};
            forward(obj);
            return;
        }

        // pos
        String[] arr1 = objects[0].toString().split("-");

        // oreq
        String[] arr2 = null;
        if (objects[1] != null && Strings.isNotEmpty(objects[1].toString())) {
            arr2 = objects[1].toString().split("-");
        }

        // sreq
        String[] arr3 = null;
        if (objects[2] != null && Strings.isNotEmpty(objects[2].toString())) {
            arr3 = objects[2].toString().split("-");
        }

        // sres
        String[] arr4 = null;
        if (objects[3] != null && Strings.isNotEmpty(objects[3].toString())) {
            arr4 = objects[3].toString().split("-");
        }

        // sans
        String[] arr5 = null;
        if (objects[4] != null && Strings.isNotEmpty(objects[4].toString())) {
            arr5 = objects[4].toString().split("-");
        }

        for (int i = 0; i < arr1.length; i++) {
            // {pos, oreq, sreq, sres, sans}; any column with no segment at index i stays null
            String[] obj = {null,null,null,null,null};
            obj[0] = arr1[i];
            if(arr2 != null && arr2.length > i) {
                obj[1] = arr2[i];
            }
            if(arr3 != null && arr3.length > i) {
                obj[2] = arr3[i];
            }
            if(arr4 != null && arr4.length > i) {
                obj[3] = arr4[i];
            }
            if(arr5 != null && arr5.length > i) {
                obj[4] = arr5[i];
            }
            forward(obj);
        }
    }

    @Override
    public void close() throws HiveException {
        // nothing to clean up
    }
}
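
Before packaging, the class can be smoke-tested locally by wiring a stub Collector into it and calling process() with the demo values. The sketch below is a minimal, hypothetical driver (the class name GetPosUdtfTest is made up, and it assumes it lives in the same package as get_pos_udtf); setCollector and Collector are the standard GenericUDTF hooks, and initialize() can be skipped here because process() only reads the raw Object[]:

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.Collector;

import java.util.Arrays;

public class GetPosUdtfTest {
    public static void main(String[] args) throws HiveException {
        get_pos_udtf udtf = new get_pos_udtf();
        // print each forwarded row instead of handing it to Hive
        udtf.setCollector(new Collector() {
            @Override
            public void collect(Object input) {
                System.out.println(Arrays.toString((Object[]) input));
            }
        });
        // the five demo values from the top of this post
        udtf.process(new Object[]{
                "0-1-2",
                "125_126-127_128-129_130",
                "125_126-127_128-129_130",
                "125-127-129_130",
                "125-127-129"
        });
        // expected output:
        // [0, 125_126, 125_126, 125, 125]
        // [1, 127_128, 127_128, 127, 127]
        // [2, 129_130, 129_130, 129_130, 129]
    }
}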

2. Usage

Package the code above into a jar, then register the function in HQL and call it:


 

add jar /home/hadoop/udf.jar;
create temporary function get_pos_map as 'report.get_pos_udtf';
select ....... from table
        lateral view get_pos_map(pos, srequest, srequest, sresponse, sanswer) t as pos1, oreq, sreq, sres, sans;
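
Note that Hive also lets a UDTF stand alone in the SELECT list, e.g. select get_pos_map(pos, srequest, srequest, sresponse, sanswer) from table, but in that form no other expressions may appear alongside it; LATERAL VIEW, as used above, is what joins the generated rows back to the other columns of the source row.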

 
