6.3 ADS层
6.3.1 建表语句
hive (gmall)>
-- ADS repurchase-rate table: one row per (brand, level-1 category, month).
-- Fixed: curly quotes replaced with ASCII single quotes (required by HiveQL),
-- trailing spaces removed from comment literals, and `if exists` added so the
-- script is idempotent on first run.
drop table if exists ads_sale_tm_category1_stat_mn;
create external table ads_sale_tm_category1_stat_mn
(
    tm_id                 string        comment '品牌id',
    category1_id          string        comment '1级品类id',
    category1_name        string        comment '1级品类名称',
    buycount              bigint        comment '购买人数',
    buy_twice_last        bigint        comment '两次以上购买人数',
    buy_twice_last_ratio  decimal(10,2) comment '单次复购率',
    buy_3times_last       bigint        comment '三次以上购买人数',
    buy_3times_last_ratio decimal(10,2) comment '多次复购率',
    stat_mn               string        comment '统计月份',
    stat_date             string        comment '统计日期'
) comment '复购率统计'
row format delimited fields terminated by '\t'
location '/warehouse/gmall/ads/ads_sale_tm_category1_stat_mn/'
;
6.3.2 数据导入
1)数据导入
hive (gmall)>
-- Monthly repurchase-rate load for 2019-02.
-- A user is a buyer when their monthly order_count >= 1, a repeat buyer at
-- >= 2, a multi-repeat buyer at >= 3; ratios are repeat buyers / buyers.
-- Fixed: curly quotes replaced with ASCII quotes (they made the date literals
-- invalid HiveQL) and camelCase aliases renamed to match the snake_case
-- target columns declared in the DDL.
insert into table ads_sale_tm_category1_stat_mn
select
    mn.sku_tm_id,
    mn.sku_category1_id,
    mn.sku_category1_name,
    sum(if(mn.order_count >= 1, 1, 0)) buycount,
    sum(if(mn.order_count >= 2, 1, 0)) buy_twice_last,
    sum(if(mn.order_count >= 2, 1, 0)) / sum(if(mn.order_count >= 1, 1, 0)) buy_twice_last_ratio,
    sum(if(mn.order_count >= 3, 1, 0)) buy_3times_last,
    sum(if(mn.order_count >= 3, 1, 0)) / sum(if(mn.order_count >= 1, 1, 0)) buy_3times_last_ratio,
    date_format('2019-02-10', 'yyyy-MM') stat_mn,
    '2019-02-10' stat_date
from
(
    -- total orders per user within each (brand, level-1 category) this month
    select
        sd.sku_tm_id,
        sd.sku_category1_id,
        sd.sku_category1_name,
        user_id,
        sum(order_count) order_count
    from dws_sale_detail_daycount sd
    where date_format(dt, 'yyyy-MM') = date_format('2019-02-10', 'yyyy-MM')
    group by sd.sku_tm_id, sd.sku_category1_id, user_id, sd.sku_category1_name
) mn
group by mn.sku_tm_id, mn.sku_category1_id, mn.sku_category1_name
;
2)查询导入数据
hive (gmall)>select * from ads_sale_tm_category1_stat_mn;
6.3.3 数据导入脚本
1)在/home/atguigu/bin目录下创建脚本ads_sale.sh
[atguigu@hadoop102 bin]$ vim ads_sale.sh
#!/bin/bash
# ads_sale.sh [yyyy-mm-dd]
# Loads monthly repurchase-rate stats into ads_sale_tm_category1_stat_mn.
# Fixed: restored the '#' markers on the two comment lines, replaced curly
# quotes with ASCII quotes, repaired the broken command substitution for the
# default date, reconstructed the truncated SQL string (lines were garbled)
# from the standalone query in 6.3.2, and replaced the dangling `sql"` with
# the intended `$hive -e "$sql"` invocation.

# 定义变量方便修改 (variables kept here so the script is easy to repoint)
APP=gmall
hive=/opt/module/hive/bin/hive

# 如果是输入的日期按照取输入日期;如果没输入日期取当前时间的前一天
# (use the date passed as $1; otherwise default to yesterday)
if [ -n "$1" ]; then
    do_date=$1
else
    do_date=$(date -d "-1 day" +%F)
fi

sql="
set hive.exec.dynamic.partition.mode=nonstrict;
insert into table ${APP}.ads_sale_tm_category1_stat_mn
select
    mn.sku_tm_id,
    mn.sku_category1_id,
    mn.sku_category1_name,
    sum(if(mn.order_count>=1,1,0)) buycount,
    sum(if(mn.order_count>=2,1,0)) buy_twice_last,
    sum(if(mn.order_count>=2,1,0))/sum(if(mn.order_count>=1,1,0)) buy_twice_last_ratio,
    sum(if(mn.order_count>=3,1,0)) buy_3times_last,
    sum(if(mn.order_count>=3,1,0))/sum(if(mn.order_count>=1,1,0)) buy_3times_last_ratio,
    date_format('$do_date','yyyy-MM') stat_mn,
    '$do_date' stat_date
from
(
    select
        od.sku_tm_id,
        od.sku_category1_id,
        od.sku_category1_name,
        user_id,
        sum(order_count) order_count
    from ${APP}.dws_sale_detail_daycount od
    where date_format(dt,'yyyy-MM')=date_format('$do_date','yyyy-MM')
    group by od.sku_tm_id, od.sku_category1_id, user_id, od.sku_category1_name
) mn
group by mn.sku_tm_id, mn.sku_category1_id, mn.sku_category1_name;
"
$hive -e "$sql"
2)增加脚本执行权限
[atguigu@hadoop102 bin]$ chmod +x ads_sale.sh
3)执行脚本导入数据
[atguigu@hadoop102 bin]$ ads_sale.sh 2019-02-11
4)查看导入数据
hive (gmall)>
select * from ads_sale_tm_category1_stat_mn limit 2;