#!/bin/bash

###################### Script change history START ##################################
# Author: Zhang Deyin  Created: 2025-07-21  Purpose: initial (first-time) load of ODS-layer data into the DWD layer
# Modifier: xxx	  Modified: xxxx-xx-xx  Notes: xxxxxx

###################### Script change history END #################################

## Common parameter setup
## 1. Date variables
current_dt=$(date +"%Y-%m-%d")
# alternative spelling: current_dt=`date +"%Y-%m-%d"`
# NOTE(review): captured once at startup — every later log line reuses this
# same timestamp, even for loads that finish much later.
current_time=$(date +"%Y-%m-%d %H:%M:%S")
# yesterday's date
# NOTE(review): out_dt appears unused in this file — confirm before removing.
out_dt=$(date -d "-1 day" +"%Y-%m-%d")  

# 2. Hive JDBC connection string (HiveServer2)
HIVE_URL='jdbc:hive2://192.168.10.20:10000'
# Ways to run Hive SQL from the shell: first generation uses `hive -e 'SQL'`;
# second generation uses `beeline -u '<jdbc url>' -e 'SQL'` — recommended.

# 3. Database / table name variables
# For multi-table jobs only the database name may be needed.
# NOTE(review): database_name/table_name appear unused below — confirm.
database_name=xls_dwd_ps061614
table_name=dim_district_ps14

# 4. Other candidates (jar path, HDFS _SUCCESS path) are not needed here.
# Dimension tables (8 total), full overwrite each run.
echo "维度表 共8张表"
# ====== full overwrite =======
echo "1.区域字典表 dim_district_ps14"
# Build the SQL text with a here-doc inside command substitution.
sql_dim_district_ps14=$(
cat <<EOF
    INSERT overwrite TABLE xls_dwd_ps061614.dim_district_ps14
    select *
    from xls_ods_ps061614.ods_district_ps14
    WHERE code IS NOT NULL AND name IS NOT NULL;
EOF
)

# Test beeline's exit status directly in the `if` (a separate [ $? -eq 0 ]
# check breaks silently if any line is ever inserted in between).
if beeline -u "${HIVE_URL}" -e "${sql_dim_district_ps14}"
then
    echo "dim_district_ps14 表更新成功"
    # Log with a fresh timestamp — the script-start current_time would be stale.
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_district_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "dim_district_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_district_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "2.时间维度表 dim_date_ps14"
# Full overwrite of the date dimension from ODS.
sql_dim_date_ps14=$(
cat <<EOF
   INSERT overwrite TABLE xls_dwd_ps061614.dim_date_ps14
   select * from xls_ods_ps061614.ods_date_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_dim_date_ps14}"
then
    echo "dim_date_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_date_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "dim_date_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_date_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi
#  ========== SCD2 (zipper) tables — first-time load ==========
echo "3.店铺表 dim_store_ps14"
# Dynamic-partition + compression settings, then the load SQL.
sql_dim_store_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
set hive.exec.orc.compression.strategy=COMPRESSION;

INSERT overwrite TABLE xls_dwd_ps061614.dim_store_ps14 PARTITION (start_date)
select 
	id,
	user_id,
	store_avatar,
	address_info,
	name,
	store_phone,
	province_id,
	city_id,
	area_id,
	mb_title_img,
	store_description,
	notice,
	is_pay_bond,
	trade_area_id,
	delivery_method,
	origin_price,
	free_price,
	store_type,
	store_label,
	search_key,
	end_time,
	start_time,
	operating_status,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	state,
	idcard,
	deposit_amount,
	delivery_config_id,
	aip_user_id,
	search_name,
	automatic_order,
	is_primary,
	parent_store_id,
	'9999-12-31' end_date,
	dt as start_date
from xls_ods_ps061614.ods_store_ps14;

EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_dim_store_ps14}"
then
    echo "dim_store_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_store_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "dim_store_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_store_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "4.商圈表 dim_trade_area_ps14"
# SCD2 first-time load: every row gets end_date '9999-12-31', start_date = dt.
sql_dim_trade_area_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;

INSERT overwrite TABLE xls_dwd_ps061614.dim_trade_area_ps14 PARTITION(start_date)
SELECT 
	id,
	user_id,
	user_allinpay_id,
	trade_avatar,
	name,
	notice,
	distric_province_id,
	distric_city_id,
	distric_area_id,
	address,
	radius,
	mb_title_img,
	deposit_amount,
	hava_deposit,
	state,
	search_key,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_trade_area_ps14;

EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_dim_trade_area_ps14}"
then
    echo "dim_trade_area_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_trade_area_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "dim_trade_area_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_trade_area_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "5.地址信息表 dim_location_ps14"
# SCD2 first-time load of the location dimension.
sql_dim_location_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.dim_location_ps14 PARTITION(start_date)
SELECT
	id,
	type,
	correlation_id,
	address,
	latitude,
	longitude,
	street_number,
	street,
	district,
	city,
	province,
	business,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	adcode,
	'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_location_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_dim_location_ps14}"
then
    echo "dim_location_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_location_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "dim_location_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_location_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi


echo "6.商品SKU表 dim_goods_ps14"
# BUG FIX: the SQL was assigned to sql_dim_store_ps14 (a copy/paste slip that
# also clobbered the store section's variable) while beeline executed the
# never-set sql_dim_goods_ps14 — so this table was never actually loaded.
sql_dim_goods_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.dim_goods_ps14 PARTITION(start_date)
SELECT
	id,
	store_id,
	class_id,
	store_class_id,
	brand_id,
	goods_name,
	goods_specification,
	search_name,
	goods_sort,
	goods_market_price,
	goods_price,
	goods_promotion_price,
	goods_storage,
	goods_limit_num,
	goods_unit,
	goods_state,
	goods_verify,
	activity_type,
	discount,
	seckill_begin_time,
	seckill_end_time,
	seckill_total_pay_num,
	seckill_total_num,
	seckill_price,
	top_it,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_goods_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_dim_goods_ps14}"
then
    echo "dim_goods_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_goods_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "dim_goods_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_goods_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "7.商品分类表 dim_goods_class_ps14"
# SCD2 first-time load of the goods-class dimension.
sql_dim_goods_class_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.dim_goods_class_ps14 PARTITION(start_date)
SELECT
	id,
	store_id,
	class_id,
	name,
	parent_id,
	level,
	is_parent_node,
	background_img,
	img,
	keywords,
	title,
	sort,
	note,
	url,
	is_use,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_goods_class_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_dim_goods_class_ps14}"
then
    echo "dim_goods_class_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_goods_class_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "dim_goods_class_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_goods_class_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "8.品牌表 dim_brand_ps14"
# SCD2 first-time load of the brand dimension.
sql_dim_brand_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.dim_brand_ps14 PARTITION(start_date)
SELECT
	id,
	store_id,
	brand_pt_id,
	brand_name,
	brand_image,
	initial,
	sort,
	is_use,
	goods_state,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_brand_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_dim_brand_ps14}"
then
    echo "dim_brand_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_brand_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "dim_brand_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  dim_brand_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "===事实表"
echo "===事实增量表"
# ======= incremental load (partitioned by event date) ==========
echo "9.订单评价表 fact_goods_evaluation_ps14"
sql_fact_goods_evaluation_ps14=$(
cat <<EOF
--分区
SET hive.exec.dynamic.partition=true;
SET hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
--hive压缩
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
--写入时压缩生效
set hive.exec.orc.compression.strategy=COMPRESSION;
-- 导入的sql语句
INSERT overwrite TABLE xls_dwd_ps061614.fact_goods_evaluation_ps14 PARTITION(dt)
select
   id,
   user_id,
   store_id,
   order_id,
   geval_scores,
   geval_scores_speed,
   geval_scores_service,
   geval_isanony,
   create_user,
   create_time,
   update_user,
   update_time,
   is_valid,
   substr(create_time, 1, 10) as dt  -- 只需要年月日部分, 所以只截取前10位
from xls_ods_ps061614.ods_goods_evaluation_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_goods_evaluation_ps14}"
then
    echo "fact_goods_evaluation_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_goods_evaluation_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_goods_evaluation_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_goods_evaluation_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "10.用户登录记录表 fact_user_login_ps14"
# Incremental load partitioned by the login date (first 10 chars of login_time).
sql_fact_user_login_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_user_login_ps14 PARTITION(dt)
select
	id,
	login_user,
	login_type,
	client_id,
	login_time,
	login_ip,
	logout_time,
	SUBSTRING(login_time, 1, 10) as dt
FROM xls_ods_ps061614.ods_user_login_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_user_login_ps14}"
then
    echo "fact_user_login_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_user_login_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_user_login_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_user_login_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi


echo "11.订单组支付表 fact_order_pay_ps14"
# SCD2 first-time load of the order-group payment fact.
sql_fact_order_pay_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_order_pay_ps14 PARTITION (start_date)
SELECT
	id
	,group_id
	,order_pay_amount
	,create_date
	,create_user
	,create_time
	,update_user
	,update_time
	,is_valid,
	'9999-12-31' AS end_date,
     dt AS start_date
FROM xls_ods_ps061614.ods_order_pay_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_order_pay_ps14}"
then
    echo "fact_order_pay_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_order_pay_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_order_pay_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_order_pay_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

######## fact SCD2 (zipper) tables ####################
echo "事实 拉链表"
echo "12.订单事实表 fact_shop_order_ps14"
sql_fact_shop_order_ps14=$(
cat <<EOF
-- 分区
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
-- hive压缩
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
-- 写入时压缩生效
set hive.exec.orc.compression.strategy=COMPRESSION;
-- 执行的sql语句
 INSERT OVERWRITE TABLE xls_dwd_ps061614.fact_shop_order_ps14 PARTITION (start_date)
   -- PARTITION (start_date) 因为是分区表在导入数据时需要将分区字段也写上
  SELECT id,
       order_num,
       buyer_id,
       store_id,
       -- order_from 的值 替换成对应的来源'是来自于app还是小程序,或者pc 1.安卓; 2.ios; 3.小程序H5 ; 4.PC'
       CASE order_from
           WHEN 1 THEN
               'android'
           WHEN 2 THEN
               'ios'
           WHEN 3 THEN
               'miniapp'
           WHEN 4 THEN
               'pcweb'
           ELSE
               'others'
           END      AS order_from,
       order_state,
       create_date,
       finnshed_time,
       is_settlement,
       is_delete,
       evaluation_state,
       way,
       is_stock_up,
       create_user,
       create_time,
       update_user,
       update_time,
       is_valid,
       '9999-12-31' AS end_date,
       dt AS start_date -- 生效时间是数据所在分区, 分区字段在select*里面写在最后, 在插入时系统会默认将select后最后一个字段作为分区字段的值
FROM xls_ods_ps061614.ods_shop_order_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_shop_order_ps14}"
then
    echo "fact_shop_order_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_shop_order_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_shop_order_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_shop_order_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "13.订单详情表 fact_shop_order_address_detail_ps14"
# SCD2 first-time load of the order address/detail fact.
sql_fact_shop_order_address_detail_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_shop_order_address_detail_ps14 PARTITION (start_date)
SELECT 
	id,
	order_amount,
	discount_amount,
	goods_amount,
	is_delivery,
	buyer_notes,
	pay_time,
	receive_time,
	delivery_begin_time,
	arrive_store_time,
	arrive_time,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_shop_order_address_detail_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_shop_order_address_detail_ps14}"
then
    echo "fact_shop_order_address_detail_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_shop_order_address_detail_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_shop_order_address_detail_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_shop_order_address_detail_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi


echo "14.订单结算表 fact_order_settle_ps14"
# SCD2 first-time load of the order settlement fact.
sql_fact_order_settle_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE  xls_dwd_ps061614.fact_order_settle_ps14 PARTITION (start_date)
SELECT
	id
	,order_id
	,settlement_create_date
	,settlement_amount
	,dispatcher_user_id
	,dispatcher_money
	,circle_master_user_id
	,circle_master_money
	,plat_fee
	,store_money
	,status
	,note
	,settle_time
	,create_user
	,create_time
	,update_user
	,update_time
	,is_valid
	,first_commission_user_id
	,first_commission_money
	,second_commission_user_id
	,second_commission_money
	,'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_order_settle_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_order_settle_ps14}"
then
    echo "fact_order_settle_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_order_settle_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_order_settle_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_order_settle_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi


echo "15.退款订单表 fact_refund_order_ps14"
# SCD2 first-time load of the refund-order fact.
sql_fact_refund_order_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_refund_order_ps14 PARTITION (start_date)
SELECT
	id
	,order_id
	,apply_date
	,modify_date
	,refund_reason
	,refund_amount
	,refund_state
	,refuse_refund_reason
	,refund_goods_type
	,refund_shipping_fee
	,create_user
	,create_time
	,update_user
	,update_time
	,is_valid
	,'9999-12-31' end_date
	,dt as start_date
FROM xls_ods_ps061614.ods_refund_order_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_refund_order_ps14}"
then
    echo "fact_refund_order_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_refund_order_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_refund_order_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_refund_order_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "16.订单组表 fact_shop_order_group_ps14"
# SCD2 first-time load of the order-group fact.
sql_fact_shop_order_group_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_shop_order_group_ps14 PARTITION (start_date)
SELECT
	id,
	order_id,
	group_id,
	is_pay,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_shop_order_group_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_shop_order_group_ps14}"
then
    echo "fact_shop_order_group_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_shop_order_group_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_shop_order_group_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_shop_order_group_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "17.订单商品快照表 fact_shop_order_goods_details_ps14"
# SCD2 first-time load of the order goods snapshot fact.
sql_fact_shop_order_goods_details_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_shop_order_goods_details_ps14 PARTITION (start_date)
SELECT
	id,
	order_id,
	shop_store_id,
	buyer_id,
	goods_id,
	buy_num,
	goods_price,
	total_price,
	goods_name,
	goods_image,
	goods_specification,
	goods_weight,
	goods_unit,
	goods_type,
	refund_order_id,
	goods_brokerage,
	is_refund,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM
xls_ods_ps061614.ods_shop_order_goods_details_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_shop_order_goods_details_ps14}"
then
    echo "fact_shop_order_goods_details_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_shop_order_goods_details_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_shop_order_goods_details_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_shop_order_goods_details_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi


echo "18.购物车表 fact_shop_cart_ps14"
# SCD2 first-time load of the shopping-cart fact.
sql_fact_shop_cart_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_shop_cart_ps14 PARTITION (start_date)
SELECT
	id,
	shop_store_id,
	buyer_id,
	goods_id,
	buy_num,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM
xls_ods_ps061614.ods_shop_cart_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_shop_cart_ps14}"
then
    echo "fact_shop_cart_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_shop_cart_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_shop_cart_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_shop_cart_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

echo "19.收藏店铺记录表 fact_store_collect_ps14"
# SCD2 first-time load of the store-collect fact.
sql_fact_store_collect_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_store_collect_ps14 PARTITION (start_date)
SELECT
	id,
	user_id,
	store_id,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_store_collect_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_store_collect_ps14}"
then
    echo "fact_store_collect_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_store_collect_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_store_collect_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_store_collect_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi


echo "20.商品收藏表 fact_goods_collect_ps14"
# SCD2 first-time load of the goods-collect fact.
sql_fact_goods_collect_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_goods_collect_ps14 PARTITION (start_date)
SELECT
	id,
	user_id,
	goods_id,
	store_id,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_goods_collect_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_goods_collect_ps14}"
then
    echo "fact_goods_collect_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_goods_collect_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_goods_collect_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_goods_collect_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi


echo "21.配送表 fact_order_delievery_ps14"
# NOTE(review): the target table is fact_order_delievery_item_ps14 while the
# echo/log label says fact_order_delievery_ps14 — confirm which name is intended.
sql_fact_order_delievery_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_order_delievery_item_ps14 PARTITION(start_date)
select
   id,
   shop_order_id,
   refund_order_id,
   dispatcher_order_type,
   shop_store_id,
   buyer_id,
   circle_master_user_id,
   dispatcher_user_id,
   dispatcher_order_state,
   order_goods_num,
   delivery_fee,
   distance,
   dispatcher_code,
   receiver_name,
   receiver_phone,
   sender_name,
   sender_phone,
   create_user,
   create_time,
   update_user,
   update_time,
   is_valid,
   '9999-12-31' end_date,
   substr(create_time, 1, 10) as start_date
FROM xls_ods_ps061614.ods_order_delievery_item_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_order_delievery_ps14}"
then
    echo "fact_order_delievery_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_order_delievery_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_order_delievery_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_order_delievery_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi




echo "22.商品评价表 fact_goods_evaluation_detail_ps14"
# SCD2 first-time load of the goods-evaluation detail fact.
sql_fact_goods_evaluation_detail_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_goods_evaluation_detail_ps14 PARTITION(start_date)
select 
	id,
	user_id,
	store_id,
	goods_id,
	order_id,
	order_goods_id,
	geval_scores_goods,
	geval_content,
	geval_content_superaddition,
	geval_addtime,
	geval_addtime_superaddition,
	geval_state,
	geval_remark,
	revert_state,
	geval_explain,
	geval_explain_superaddition,
	geval_explaintime,
	geval_explaintime_superaddition,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
from xls_ods_ps061614.ods_goods_evaluation_detail_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_goods_evaluation_detail_ps14}"
then
    echo "fact_goods_evaluation_detail_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_goods_evaluation_detail_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_goods_evaluation_detail_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_goods_evaluation_detail_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi





echo "23.交易记录表 fact_trade_record_ps14"
# SCD2 first-time load of the trade-record fact.
sql_fact_trade_record_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
 INSERT overwrite TABLE xls_dwd_ps061614.fact_trade_record_ps14 PARTITION (start_date)
SELECT
	id,
	external_trade_no,
	relation_id,
	trade_type,
	status,
	finnshed_time,
	fail_reason,
	payment_type,
	trade_before_balance,
	trade_true_amount,
	trade_after_balance,
	note,
	user_card,
	user_id,
	aip_user_id,
	create_user,
	create_time,
	update_user,
	update_time,
	is_valid,
	'9999-12-31' end_date,
	dt as start_date
FROM xls_ods_ps061614.ods_trade_record_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_trade_record_ps14}"
then
    echo "fact_trade_record_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_trade_record_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_trade_record_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_trade_record_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi



echo "24.拒绝原因表 fact_refuse_reason_ps14"
# SCD2 first-time load of the refuse-reason fact.
sql_fact_refuse_reason_ps14=$(
cat <<EOF
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.exec.max.dynamic.partitions.pernode=10000;
set hive.exec.max.dynamic.partitions=100000;
set hive.exec.max.created.files=150000;
set hive.exec.compress.intermediate=true;
set hive.exec.compress.output=true;
INSERT overwrite TABLE xls_dwd_ps061614.fact_refuse_reason_ps14 PARTITION (start_date)
    select
	  id ,
	  type,
	  refuse_reason,
	  create_user,
	  create_time,
	  update_user,
	  update_time,
	  is_valid,
	 '9999-12-31' end_date,
	 dt as start_date
    from xls_ods_ps061614.ods_refuse_reason_ps14;
EOF
)

# Check beeline's exit status directly; log with a fresh timestamp
# (the script-start current_time would be stale by now).
if beeline -u "${HIVE_URL}" -e "${sql_fact_refuse_reason_ps14}"
then
    echo "fact_refuse_reason_ps14 表更新成功"
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_refuse_reason_ps14 表更新成功" >> /home/ps061614/ps14/dwd.log
else
    echo "fact_refuse_reason_ps14 表更新失败" >&2
    echo "$(date +"%Y-%m-%d %H:%M:%S")  fact_refuse_reason_ps14 表更新失败" >> /home/ps061614/ps14/dwd.log
fi

