
3.4.6 DWD Layer Data Import Script

1) Create the script dwd_db.sh in the /home/atguigu/bin directory
[atguigu@hadoop102 bin]$ vim dwd_db.sh
Enter the following content in the script:
#!/bin/bash

# Define variables for easy modification

APP=gmall
hive=/opt/module/hive/bin/hive

# If a date argument is supplied, use it; otherwise use the day before the current date

if [ -n "$1" ] ;then
do_date=$1
else
do_date=`date -d "-1 day" +%F`
fi

sql="

set hive.exec.dynamic.partition.mode=nonstrict;

insert overwrite table "$APP".dwd_order_info partition(dt)
select * from "$APP".ods_order_info
where dt='$do_date' and id is not null;

insert overwrite table "$APP".dwd_order_detail partition(dt)
select * from "$APP".ods_order_detail
where dt='$do_date' and id is not null;

insert overwrite table "$APP".dwd_user_info partition(dt)
select * from "$APP".ods_user_info
where dt='$do_date' and id is not null;

insert overwrite table "$APP".dwd_payment_info partition(dt)
select * from "$APP".ods_payment_info
where dt='$do_date' and id is not null;

insert overwrite table "$APP".dwd_sku_info partition(dt)
select sku.id, sku.spu_id, sku.price, sku.sku_name, sku.sku_desc, sku.weight,
    sku.tm_id, sku.category3_id, c2.id category2_id, c1.id category1_id,
    c3.name category3_name, c2.name category2_name, c1.name category1_name,
    sku.create_time, sku.dt
from "$APP".ods_sku_info sku
join "$APP".ods_base_category3 c3 on sku.category3_id=c3.id
join "$APP".ods_base_category2 c2 on c3.category2_id=c2.id
join "$APP".ods_base_category1 c1 on c2.category1_id=c1.id
where sku.dt='$do_date' and c2.dt='$do_date' and c3.dt='$do_date' and c1.dt='$do_date'
and sku.id is not null;
"

hivee"hive -e "sql"

2) Add execute permission to the script
[atguigu@hadoop102 bin]$ chmod 777 dwd_db.sh
3) Run the script to import the data
[atguigu@hadoop102 bin]$ dwd_db.sh 2019-02-10
[atguigu@hadoop102 bin]$ dwd_db.sh 2019-02-11
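
If several days need to be backfilled, the script can simply be called once per day. A small sketch (not part of the original steps), assuming GNU date and that dwd_db.sh in /home/atguigu/bin is on the PATH:

#!/bin/bash
# Backfill sketch: run dwd_db.sh for every day in a date range (inclusive)
start_date=2019-02-10
end_date=2019-02-11
do_date=$start_date
while [[ ! "$do_date" > "$end_date" ]]; do
    dwd_db.sh "$do_date"
    do_date=$(date -d "$do_date +1 day" +%F)
done
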
4) Check the imported data
hive (gmall)>
select * from dwd_sku_info where dt='2019-02-10' limit 2;
select * from dwd_sku_info where dt='2019-02-11' limit 2;
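
Beyond spot-checking a couple of rows, a per-partition row count (an extra check, not in the original steps) confirms that both days were loaded, for example:

hive (gmall)>
select dt, count(*) from dwd_order_info where dt in ('2019-02-10','2019-02-11') group by dt;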
