#!/bin/bash
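# Incremental ODS load: clears and re-registers the target partition, runs the
# DataX delta extraction for that day, then merges the delta into the full
# table with Spark SQL.
# Argument: pt - partition date in yyyyMMdd format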
pt=$1
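# Assumed guard (not in the original script): fail fast when the partition
# date argument is missing, since every later step depends on it.
if [ -z "${pt}" ]; then
    echo "Usage: $0 <pt, e.g. 20240101>" >&2
    exit 1
fi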
# The day before the target partition date, also formatted as yyyyMMdd
yesterday=$(date -d "${pt} -1 day" +%Y%m%d)
source /etc/profile
# cd to the directory this script lives in so the relative ../datax_delta and ../merge paths resolve
shell_home="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "${shell_home}"

# Remove any existing data for this partition so re-runs do not duplicate rows
hdfs dfs -rm -r -f /daas/shujia/ods/{ods_delta_table_name}/pt=${pt}/*
# Register the partition in the metastore if it does not exist yet
spark-sql -e "alter table ods.{ods_delta_table_name} add if not exists partition(pt=${pt})"
# Run the DataX delta (incremental) collection job, passing pt and yesterday as job variables
datax.py -p"-Dpt=${pt} -Dyesterday=${yesterday}" ../datax_delta/datax_{ods_delta_table_name}.json
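# Assumed safeguard (not in the original script): skip the merge if the DataX
# job exited with a non-zero status, so a failed extraction is not merged.
if [ $? -ne 0 ]; then
    echo "DataX delta job failed for pt=${pt}, aborting before merge" >&2
    exit 1
fi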
# Run the delta-to-full merge SQL; -d defines substitution variables used inside the SQL file
spark-sql \
    --master local[2] \
    --conf spark.sql.shuffle.partitions=2 \
    -d pt=${pt} \
    -d yesterday=${yesterday} \
    -f ../merge/merge_{ods_table_name}.sql
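# Assumed (not in the original script): propagate the merge result so a
# scheduler can distinguish success from failure by exit code.
rc=$?
if [ ${rc} -ne 0 ]; then
    echo "Merge SQL failed for pt=${pt}" >&2
fi
exit ${rc}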

