#!/bin/bash

#
# @date: 2021-10-10
# @author: deep as the sea
# @organization: doitedu
# @desc: Simulates pulling the user registration table from the business database
#        into the dim.user_info dimension table, one partition per day
#


export HIVE_HOME=/opt/apps/hive-3.1.2

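# Target partition date: defaults to yesterday relative to the run date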
dt=$(date -d'-1 day' +%Y-%m-%d)

# An explicit date may be passed as the first argument to reload a specific day
if [ -n "$1" ]; then
  echo "A date argument was supplied; data for the specified date will be loaded"
  dt=$1
fi

# Date of the previously loaded partition (one day before dt), used to look up the current max user_id
pre_dt=$(date -d"${dt} -1 day" +%Y-%m-%d)

# Execution engine for the Hive jobs: defaults to local, can be overridden by the second argument
master=local
if [ -n "$2" ]; then
  master=$2
  echo "The job will run in ${master} mode"
fi


# Current max user_id in the previous partition (0 if it is empty), so new ids continue the sequence;
# the greps strip Hive CLI log lines from the captured output, leaving only the number
max_uid=$(${HIVE_HOME}/bin/hive -e "set mapreduce.framework.name=${master};add jar /opt/apps/hive-3.1.2/hcatalog/share/hcatalog/hive-hcatalog-core-3.1.2.jar;select nvl(max(user_id),0) from dim.user_info where dt='${pre_dt}'" | grep -v INFO | grep -v WARN | grep -v LOG4J | grep -v ERROR)


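# Load statement: one new row per distinct non-empty account in the day's ODS log,
# with user_ids assigned by continuing the sequence from max_uid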
sql="
set mapreduce.framework.name=${master};
add jar /opt/apps/hive-3.1.2/hcatalog/share/hcatalog/hive-hcatalog-core-3.1.2.jar;
INSERT INTO TABLE dim.user_info PARTITION(dt='${dt}')
SELECT
row_number() over() +${max_uid} as user_id,
account
FROM ods.mall_app_log_dtl
WHERE dt='${dt}' AND account is not null AND trim(account) != '' 
GROUP BY account
"

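# Submit the load statement through the Hive CLI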
${HIVE_HOME}/bin/hive -e "${sql}"


# Notify by email and exit according to the Hive job's exit status
if [ $? -eq 0 ]; then
  echo "Job succeeded: incremental pull of the user_info business table"
  echo "Target partition: dim.user_info partition(dt='${dt}')"
  echo "Job succeeded: incremental pull of the user_info business table; target partition: dim.user_info partition(dt='${dt}')" | mail -s '多易集团 Mall Business Data Dept: HIVE job success notification' 83544844@qq.com
  exit 0
else
  echo "Job failed: incremental pull of the user_info business table"
  echo "Target partition: dim.user_info partition(dt='${dt}')"
  echo "Job failed: incremental pull of the user_info business table; target partition: dim.user_info partition(dt='${dt}')" | mail -s '多易集团 Mall Business Data Dept: HIVE job failure notification' 83544844@qq.com
  exit 1
fi
