#!/bin/bash

#
#  @author: deep as the sea
#  @qq: 83544844
#  @wx: doit_edu
#  @date: 2021-09-21
#  @des: ods层app端日志表加工到dwd事件明细表
#
#

# Client tool homes for the Hadoop/Hive/Spark installations used below.
export HIVE_HOME=/opt/apps/hive
export HADOOP_HOME=/opt/apps/hadoop
export SPARK_HOME=/opt/apps/spark

# Target partition date (yyyy-MM-dd).
# Defaults to yesterday for the regular T+1 batch run; an explicit date may
# be passed as the first script argument to re-run a historical partition.
dt=$(date -d'-1 day' +%Y-%m-%d)

if [ $# -gt 0 ]; then
  dt="$1"
fi

echo "INFO:  ods层app端日志表加工到dwd事件明细表 计算任务开始，ods表:ods.mall_app_action_log(${dt}) => dwd.mall_app_evt_dtl(${dt})"

# Run the ODS->DWD ETL Spark job on YARN in cluster mode.
# Job args: $1 = partition date (yyyy-MM-dd), $2 = HDFS path of the
# ip2region lookup database used for IP geo-enrichment.
# NOTE(review): the jar path is relative, so this script must be launched
# from the directory containing spark-etl-1.0.jar — confirm with scheduler setup.
"${SPARK_HOME}/bin/spark-submit" \
  --master yarn \
  --deploy-mode cluster \
  --class cn.doitedu.datayi.etl.AppLogOdsDwdEtl \
  --conf spark.sql.shuffle.partitions=2 \
  --driver-memory 2G \
  --driver-cores 1 \
  --executor-memory 2G \
  --executor-cores 1 \
  --num-executors 2 \
  --queue default \
  ./spark-etl-1.0.jar "${dt}" /cachefiles/ip2region.db

# Capture the spark-submit exit status immediately so later commands
# cannot clobber $?, then mail a success/failure notice to the on-call address.
rc=$?
if [ "$rc" -eq 0 ]; then
  echo "dw task succeeded: ods层app端日志表加工到dwd事件明细表计算任务成功，ods表:ods.mall_app_action_log(${dt}) => dwd.mall_app_evt_dtl(${dt})" | mail -s '数易平台任务运行成功通知' 83544844@qq.com
else
  echo "dw task failed: ods层app端日志表加工到dwd事件明细表计算任务失败，ods表:ods.mall_app_action_log(${dt}) => dwd.mall_app_evt_dtl(${dt})" | mail -s '数易平台任务运行失败通知' 83544844@qq.com
fi
