#!/bin/bash

#
# @date: 2021-10-10
# @author: deep as the sea
# @organization: doitedu
# @desc: ETL from the app-log ODS table to the DWD table, with GUID mapping
#
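# Usage: an optional date (yyyy-MM-dd) may be passed as the first argument;
#        with no argument, yesterday's data is processed.
#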


export HIVE_HOME=/opt/apps/hive-3.1.2
export SPARK_HOME=/opt/apps/spark-3.1.1-bin-hadoop2.7/
export JAVA_HOME=/opt/apps/jdk1.8.0_191/
export HADOOP_HOME=/opt/apps/hadoop-3.1.1/

# Default to yesterday's date; an explicit date passed as $1 overrides it.
dt=$(date -d'-1 day' +%Y-%m-%d)

if [ -n "$1" ]; then
  echo "Script run with an explicit date argument; data for $1 will be loaded"
  dt="$1"
fi
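
# Submit the ETL job to YARN in cluster mode. hive-site.xml is shipped with the
# job (--files) so it picks up the Hive configuration; resources are minimal
# (one executor, one core each, a single shuffle partition).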


${SPARK_HOME}/bin/spark-submit \
--master yarn \
--deploy-mode cluster \
--class cn.doitedu.dw_etl.ApplogOds2DwdGuid \
--name "app log ODS-to-DWD GUID mapping ETL" \
--conf spark.sql.shuffle.partitions=1 \
--files ${HIVE_HOME}/conf/hive-site.xml \
--driver-memory 2G \
--driver-cores 1 \
--executor-memory 2G \
--executor-cores 1 \
--num-executors 1 \
--verbose \
--queue default \
/root/dw_etl.jar "${dt}"
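
# The exit status of spark-submit is checked below to choose the notification
# e-mail and this script's own exit code.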


if [ $? -eq 0 ]; then
  echo "Job succeeded: app log ODS-to-DWD GUID mapping ETL completed"
  echo "Results written to dwd.mall_app_event_dtl"
  echo "Job succeeded: app log ODS-to-DWD GUID mapping ETL completed; results written to dwd.mall_app_event_dtl" | mail -s 'doitedu group, mall business data dept: HIVE job success notification' 83544844@qq.com
  exit 0
else
  echo "Job failed: app log ODS-to-DWD GUID mapping ETL failed"
  echo "Target table: dwd.mall_app_event_dtl"
  echo "Job failed: app log ODS-to-DWD GUID mapping ETL failed; target table dwd.mall_app_event_dtl" | mail -s 'doitedu group, mall business data dept: HIVE job failure notification' 83544844@qq.com
  # Exit non-zero so any calling scheduler registers the failure.
  exit 1
fi