#!/usr/bin/env bash
#
# Submit the ads_e_mz_ave7662_dhs Spark job for one partition date,
# then register that partition in the Hive metastore.
#
# Usage: $0 <ds>
#   ds - partition date value (written as ds='<ds>' in Hive)

# Abort immediately if any step fails — without this, a failed
# spark-submit would still register the (empty) Hive partition below.
set -euo pipefail

# Partition date argument; fail fast with a usage message if missing/empty.
ds=${1:?usage: $0 <ds> (partition date is required)}

# Absolute directory containing this script, so it works from any cwd.
shell_home="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# The application jar lives in the sibling spark-dql directory.
cd "${shell_home}/../spark-dql" || exit 1

# Run the Spark job that produces the partition's data.
spark-submit --master yarn --deploy-mode client \
  --class com.dyj.ads.ads_e_mz_ave7662_dhs \
  spark-fuyou.jar "${ds}"

# Register the new partition (idempotent via IF NOT EXISTS).
hive -e "alter table bigdata03_ads.ads_e_mz_ave7662_dhs add if not exists partition (ds='${ds}');"

# Return to the script directory (cosmetic: cwd does not affect the caller).
cd "${shell_home}"
