#!/bin/bash
set -euo pipefail

#######################################
# Print the first day of the month preceding a reference date.
# Arguments: $1 - reference date as YYYY-MM-DD (optional; defaults to today)
# Outputs:   YYYY-MM-01 of the previous month (e.g. 2024-03-15 -> 2024-02-01)
# Requires:  GNU date (-d relative date strings)
#######################################
last_month_first_day() {
  local ref=${1:-}
  [[ -n "$ref" ]] || ref=$(date +%Y-%m-%d)
  # Pin to day 01 before stepping back, so "last month" never skips a
  # short month (e.g. Mar 31 -> Feb, not Mar 31 -> Mar 3).
  date -d "${ref%-*}-01 last month" +%Y-%m-01
}

# First day of last month, used as the lower bound of the Hive query below.
Last_Month_DATE=$(last_month_first_day)

# Run the monthly clue aggregation (DWD -> DWM) via the Hive CLI.
# HIVE_HOME must point at the Hive install directory; the CLI binary is
# bin/hive inside it. (The original invoked ${HIVE_HOME} itself as the
# command, which executes a directory and always fails.)
# ${Last_Month_DATE} is expanded by bash before the SQL reaches Hive.
"${HIVE_HOME:?HIVE_HOME must be set to the Hive install directory}/bin/hive" -e "
--分区
SET hive.exec.dynamic.partition=true;
SET hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
--hive压缩
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
--写入表时压缩生效
set hive.exec.orc.compression.strategy=COMPRESSION;
--分桶
set hive.enforce.bucketing=true;
set hive.enforce.sorting=true;
set hive.optimize.bucketmapjoin = true;
set hive.auto.convert.sortmerge.join=true;
set hive.auto.convert.sortmerge.join.noconditionaltask=true;
--并行执行
set hive.exec.parallel=true;
set hive.exec.parallel.thread.number=16;

insert into itcast_dwm.itcast_clue_dwm partition (yearinfo)
select count(id) as clue_nums,
       origin_type_stat,
       for_new_user,
       hourinfo,
       dayinfo,
       monthinfo,
       yearinfo
from itcast_dwd.itcast_clue_dwd
where customer_relationship_id not in (
    select customer_relationship_first_id
    from itcast_ods.customer_appeal 
    where appeal_status = 1 and customer_relationship_first_id != 0)
    and concat_ws('-',yearinfo,monthinfo,dayinfo) >= '${Last_Month_DATE}'
group by yearinfo, monthinfo, dayinfo, hourinfo, origin_type_stat, for_new_user;
"
