(7) Building the Data Warehouse: DWD Layer Event Table

[hadoop@hadoop102 /opt/soft1]$ ll /opt/module/hive/ | grep hivefunction-1.0-SNAPSHOT.jar
-rw-r--r--. 1 hadoop hadoop 5528 Feb 10 02:19 hivefunction-1.0-SNAPSHOT.jar

hive (gmall)> add jar /opt/module/hive/hivefunction-1.0-SNAPSHOT.jar;
Added [/opt/module/hive/hivefunction-1.0-SNAPSHOT.jar] to class path
Added resources: [/opt/module/hive/hivefunction-1.0-SNAPSHOT.jar]

4.2 DWD Layer Event Table Data Parsing
4.2.1 Create the Base Detail Table

The detail table stores the detail-level data converted from the raw tables in the ODS layer.
1) Create the event log base detail table
hive (gmall)>
drop table if exists dwd_base_event_log;
CREATE EXTERNAL TABLE dwd_base_event_log(
mid_id string,
user_id string,
version_code string,
version_name string,
lang string,
source string,
os string,
area string,
model string,
brand string,
sdk_version string,
gmail string,
height_width string,
app_time string,
network string,
lng string,
lat string,
event_name string,
event_json string,
server_time string)
PARTITIONED BY (dt string)
stored as parquet
location '/warehouse/gmall/dwd/dwd_base_event_log/';
2) Note: event_name and event_json hold the event name and the full event JSON, respectively. Here the one-to-many structure of a raw log line is split out into one row per event. To flatten the raw logs we need a custom UDF and a custom UDTF; sketches are given in 4.2.2 and 4.2.3 below.
4.2.2 Custom UDF (Parsing Common Fields)
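The original implementation is not reproduced here; the following is a minimal sketch of what such a common-field UDF can look like. It assumes each raw line has the form server_time|json, that the common fields sit under a cm key, and that the event array sits under an et key; these are assumptions about the log format, not confirmed by this section. Only the class name com.atguigu.udf.BaseFieldUDF and the call shape base_analizer(line, 'mid,uid,...') come from the statements below. The return value is one tab-separated string whose indices line up with the split(...)[0..18] accesses in 4.2.4.

package com.atguigu.udf;

import org.apache.hadoop.hive.ql.exec.UDF;
import org.json.JSONException;
import org.json.JSONObject;

public class BaseFieldUDF extends UDF {

    /**
     * @param line           raw log line, assumed format "server_time|json"
     * @param jsonkeysString comma-separated common-field keys, e.g. "mid,uid,vc,..."
     * @return tab-separated common fields + event array + server time, or "" if malformed
     */
    public String evaluate(String line, String jsonkeysString) {
        String[] jsonkeys = jsonkeysString.split(",");
        String[] logContents = line.split("\\|");

        // Malformed lines return "", which the WHERE clause in 4.2.4 filters out
        if (logContents.length != 2 || logContents[1].trim().isEmpty()) {
            return "";
        }

        StringBuilder sb = new StringBuilder();
        try {
            JSONObject jsonObject = new JSONObject(logContents[1]);
            // Common fields are assumed to sit under the "cm" key
            JSONObject base = jsonObject.getJSONObject("cm");
            for (String jsonkey : jsonkeys) {
                sb.append(base.optString(jsonkey.trim())).append("\t");
            }
            // Index 17 in the outer query: the "et" event array, as one JSON string
            sb.append(jsonObject.has("et") ? jsonObject.getJSONArray("et").toString() : "").append("\t");
            // Index 18: the server time taken from before the "|"
            sb.append(logContents[0]);
        } catch (JSONException e) {
            return "";
        }
        return sb.toString();
    }
}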

4.2.3 Custom UDTF (Parsing Event-Specific Fields)
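Again a sketch only, under the same assumptions: a GenericUDTF that takes the et event array emitted by the UDF and forwards one (event_name, event_json) row per array element, which is exactly what the lateral view flat_analizer(ops) in 4.2.4 consumes. That each event object carries its name under an en key is an assumption about the log format.

package com.atguigu.udtf;

import java.util.ArrayList;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.json.JSONArray;
import org.json.JSONException;

public class EventJsonUDTF extends GenericUDTF {

    // Declare the two output columns consumed by the lateral view:
    // event_name and event_json, both strings
    @Override
    public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
        ArrayList<String> fieldNames = new ArrayList<>();
        ArrayList<ObjectInspector> fieldOIs = new ArrayList<>();
        fieldNames.add("event_name");
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        fieldNames.add("event_json");
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    }

    // Input: the JSON event array produced by the UDF.
    // Emits one (event_name, event_json) row per element, turning the
    // one-to-many raw log into flat rows.
    @Override
    public void process(Object[] objects) throws HiveException {
        if (objects[0] == null) {
            return;
        }
        String input = objects[0].toString();
        if (input.isEmpty()) {
            return;
        }
        try {
            JSONArray ja = new JSONArray(input);
            for (int i = 0; i < ja.length(); i++) {
                String[] result = new String[2];
                // Each event object is assumed to carry its name under "en"
                result[0] = ja.getJSONObject(i).optString("en");
                result[1] = ja.getJSONObject(i).toString();
                forward(result);
            }
        } catch (JSONException e) {
            // Skip rows whose event array fails to parse
        }
    }

    @Override
    public void close() throws HiveException {
    }
}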

2) Package the project into hivefunction-1.0-SNAPSHOT.jar (for example, with mvn clean package)

3) Upload hivefunction-1.0-SNAPSHOT.jar to /opt/module/hive/ on hadoop102
4) Add the jar to Hive's classpath
hive (gmall)> add jar /opt/module/hive/hivefunction-1.0-SNAPSHOT.jar;
5) Create temporary functions bound to the compiled Java classes
hive (gmall)>
create temporary function base_analizer as 'com.atguigu.udf.BaseFieldUDF';
create temporary function flat_analizer as 'com.atguigu.udtf.EventJsonUDTF';
4.2.4 Parse the Event Log Base Detail Table

1) Parse the event logs into the base detail table
hive (gmall)>
set hive.exec.dynamic.partition.mode=nonstrict;

insert overwrite table dwd_base_event_log
PARTITION (dt='2019-02-10')
select
mid_id,
user_id,
version_code,
version_name,
lang,
source,
os,
area,
model,
brand,
sdk_version,
gmail,
height_width,
app_time,
network,
lng,
lat,
event_name,
event_json,
server_time
from
(
select
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[0] as mid_id,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[1] as user_id,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[2] as version_code,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[3] as version_name,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[4] as lang,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[5] as source,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[6] as os,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[7] as area,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[8] as model,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[9] as brand,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[10] as sdk_version,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[11] as gmail,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[12] as height_width,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[13] as app_time,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[14] as network,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[15] as lng,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[16] as lat,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[17] as ops,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[18] as server_time
from ods_event_log where dt='2019-02-10' and base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la')<>''
) sdk_log lateral view flat_analizer(ops) tmp_k as event_name, event_json;
2) Test
hive (gmall)> select * from dwd_base_event_log limit 2;

4.2.5 DWD Layer Data Parsing Script

1) Create the script in the /home/atguigu/bin directory on hadoop102
[atguigu@hadoop102 bin]$ vim dwd_base_log.sh
Write the following content into the script:
#!/bin/bash

# Define variables for easy modification

APP=gmall
hive=/opt/module/hive/bin/hive

# If a date argument was given, use it; otherwise use the day before today

if [ -n "$1" ]; then
do_date=$1
else
do_date=$(date -d "-1 day" +%F)
fi

sql="
add jar /opt/module/hive/hivefunction-1.0-SNAPSHOT.jar;

create temporary function base_analizer as 'com.atguigu.udf.BaseFieldUDF';
create temporary function flat_analizer as 'com.atguigu.udtf.EventJsonUDTF';

set hive.exec.dynamic.partition.mode=nonstrict;

insert overwrite table "APP".dwdbaseeventlogPARTITION(dt=APP".dwd_base_event_log PARTITION (dt='do_date’)
select
mid_id,
user_id,
version_code,
version_name,
lang,
source,
os,
area,
model,
brand,
sdk_version,
gmail,
height_width,
app_time,
network,
lng,
lat,
event_name,
event_json,
server_time
from
(
select
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[0] as mid_id,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[1] as user_id,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[2] as version_code,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[3] as version_name,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[4] as lang,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[5] as source,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[6] as os,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[7] as area,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[8] as model,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[9] as brand,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[10] as sdk_version,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[11] as gmail,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[12] as height_width,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[13] as app_time,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[14] as network,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[15] as lng,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[16] as lat,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[17] as ops,
split(base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la'),'\t')[18] as server_time
from "$APP".ods_event_log where dt='$do_date' and base_analizer(line,'mid,uid,vc,vn,l,sr,os,ar,md,ba,sv,g,hw,t,nw,ln,la')<>''
) sdk_log lateral view flat_analizer(ops) tmp_k as event_name, event_json;
"

hivee"hive -e "sql"
2) Grant execute permission on the script
[atguigu@hadoop102 bin]$ chmod 777 dwd_base_log.sh
3) Run the script
[atguigu@hadoop102 module]$ dwd_base_log.sh 2019-02-11
4) Query the imported results
hive (gmall)>
select * from dwd_base_event_log where dt='2019-02-11' limit 2;
5) Script execution time
In production, this script typically runs daily between 00:30 and 1:00 a.m.
