#!/bin/bash


:<<EOF


  指定 日期( 20231102 )   环境(dev , prod )....
  示例  本地 bash   /home/sky/Desktop/shell/chapter13-project_data_platform/load_data_platform_application.sh  -t  2024-06-07  -e  local_dev
      测试 bash   /home/sky/Desktop/shell/chapter13-project_data_platform/load_data_platform_application.sh   -t  2023-04-10  -e   dev
      生产 bash   /home/sky/Desktop/shell/chapter13-project_data_platform/load_data_platform_application.sh   -t  2023-04-10  -e   prod
          bash   /home/sky/Desktop/shell/chapter13-project_data_platform/load_data_platform_application.sh   -t  2024-06-07  -e   local_dev -f
EOF



# Resolve the script's own directory and work from there, so the relative
# paths to the sourced helper scripts are stable regardless of invocation cwd.
readonly BASE_DIR="$(dirname "$0")"
cd "$BASE_DIR" || exit


#---------------------------   Import helper scripts  ------------------------------#
LOG4S="$BASE_DIR/LOG4S.sh"                            # logging helpers (LOG4S_debug / info / warn)
source "$LOG4S"
LOCK="$BASE_DIR/LOCK.sh"                              # run lock — prevents concurrent runs of this script
source "$LOCK"                                        # lock first, before anything else is imported
FILE_CONFIGURATION="$BASE_DIR/FILE_CONFIGURATION.sh"  # files, paths, tables, configuration
source "$FILE_CONFIGURATION"
LOAD_CSV_FUNCTION="$BASE_DIR/LOAD_CSV_FUNCTION.sh"    # CSV loader used after configuration is read
source "$LOAD_CSV_FUNCTION"



#---------------------------   Lock configuration  ---------------------------------#
# NOTE: the lock name must never change — a running instance holding the old
# name would no longer be detected and the lock would be ineffective.
readonly LOCK_NAME='data_platform_load_lock'
LOCK "$BASE_DIR" "$LOCK_NAME"


#---------------------------   Logger configuration  -------------------------------#
readonly LOGGER_LEVEL=4   # debug for now; lower to 3 (info) once the job is stable
LOG4S_config "$LOGGER_LEVEL"



#---------------------------   Project defaults  -----------------------------------#
readonly DATA_DIR="/opt/trx-data-importer/data/dataPlatform"  # root folder of the data files
export TZ=Asia/Shanghai             # pin the timezone so `date` arithmetic is deterministic
shopt -s expand_aliases             # aliases are off in non-interactive shells by default
if [[ "$(uname)" == "Darwin" ]]; then
  alias date='gdate'                # macOS: use GNU date so `date -d` works
fi
DATE=$(date -d "yesterday" +%Y-%m-%d)  # default import date: yesterday, unless -t overrides
BRANCH="gx"                            # TODO: branch selection not implemented yet, reserved
ENV="dev"                              # default environment
FORCE_IMPORT="NO"                      # TODO: force import not implemented yet, reserved
FORCE_IMPORT_ARG=""                    # TODO: force import not implemented yet, reserved
DRY_RUN_ARG=""                         # TODO: dry-run (script check/debug) not implemented, reserved




#---------------------------   Command-line options  -------------------------------#
# -b branch (reserved)   -e environment (dev|prod|local_dev)   -t date (yyyy-MM-dd)
# -f force import (reserved)   -D dry run (reserved)
#
# BUG FIX: the optstring used to be "e:t:bfD" — `b` had no ':' even though the
# handler reads $OPTARG, so `-b gx` silently set BRANCH to an empty string.
while getopts "b:e:t:fD" opt; do
  case ${opt} in
    b )
      BRANCH="$OPTARG"   # branch parameter, reserved
      ;;
    e )
      ENV="$OPTARG"      # environment parameter
      ;;
    t )
      DATE="$OPTARG"     # date parameter
      ;;
    f )
      FORCE_IMPORT="YES"
      FORCE_IMPORT_ARG="--force"
      ;;
    D )
      DRY_RUN_ARG="--dryRun"
      echo "DRYRUN: DRY_RUN_ARG=$DRY_RUN_ARG"
      ;;
    * )
      # getopts already printed its own diagnostic; the old message wrongly
      # claimed every invalid option "requires an argument".
      echo "Invalid option: $opt" 1>&2
      ;;
  esac
done
shift $((OPTIND - 1))

#if [ "${BRANCH}" == "" ]; then
#  LOG4S_warn "BRANCH is required. valid options are: gx, gz, hn  ;   If not ,   default  is   gx"
#  BRANCH="gx"
#fi

LOG4S_debug  "指定环境是$ENV "

# Fall back to dev when -e was passed an empty value (or ENV was cleared).
if [[ "${ENV}" == "" ]]; then
  LOG4S_warn "ENV   is required. valid options are: dev, prod ;   If not ,   default  is  dev "
  ENV="dev"
fi



# Derive the date components used to build the data path from DATE (yyyy-MM-dd).
# BUG FIX: the debug call used to pass a trailing $log_file_path argument, but
# that variable is only assigned much later (at the log-file setup), so it
# always expanded to nothing here — the stray argument has been removed.
LOG4S_debug  " 导入的日期是    ==> ${DATE}  "
year_month=$(date -d "$DATE" +%Y%m)   #  yyyy-MM-dd  ==>  yyyyMM
year=$(date -d "$DATE" +%Y)           #  yyyy-MM-dd  ==>  yyyy
month=$(date -d "$DATE" +%m)          #  yyyy-MM-dd  ==>  MM
day=$(date -d "$DATE" +%d)            #  yyyy-MM-dd  ==>  dd



#---------------------------   Start processing data  ------------------------------#
# Example data file:
# /opt/trx-data-importer/data/dataPlatform/prod/202412/26/nanningCIB/21121517703386Z_2024-12-26_mer.csv


PARENT_PATH="$DATA_DIR/$ENV/$year_month/$day"

# Quote the path (SC2086); e.g. /opt/trx-data-importer/data/dataPlatform/prod/202412/26
cd "${PARENT_PATH}" || exit

echo  " force import ====>  $FORCE_IMPORT "

# Pick the MySQL client config file matching the target environment;
# anything other than prod/dev falls back to the local_dev config.
case "${ENV}" in
  prod) config_file="${CONFIG_FILE_FOLDER}/mysql_client_prod.conf" ;;
  dev)  config_file="${CONFIG_FILE_FOLDER}/mysql_client_dev.conf" ;;
  *)    config_file="${CONFIG_FILE_FOLDER}/mysql_client_local_dev.conf" ;;
esac

# Ensure the import log file exists and is writable; create its parent
# directory on the first run of a given ENV/DATE combination.
log_file_path="${LOG_FILE_FOLDER}/${ENV}/${DATE}/import_mer.log"
if [ -w "$log_file_path" ]; then
    echo  "log_file_path ==>  ${log_file_path}"
else
    # BUG FIX: this used to mkdir "${LOG_FILE_FOLDER}/var/dataPlatform/${ENV}/${DATE}",
    # which is NOT the parent directory of log_file_path, so the touch below
    # always failed on the first run.
    mkdir -p "${LOG_FILE_FOLDER}/${ENV}/${DATE}"
    echo  " creating  log_file_path ==>  ${log_file_path}"
    touch "${log_file_path}"
fi



#---------------------------   De-duplication guard  -------------------------------#
# Count how many rows already exist for DATE; if any do, delete them first so
# re-running the same day does not produce duplicates.
count_sql="select count(*)  as  before_import_count  from   ${LOAD_TABLE}  where sync_date between '${DATE} 00:00:00' AND '${DATE} 23:59:59'   "
LOG4S_info  "count_sql ==> ${count_sql}"       "$log_file_path"
data_res=$(mysql --defaults-file="$config_file" -e "$count_sql")
data_num=${data_res/before_import_count/""}   # strip the resultSet column header from the output
data_num=$(echo $data_num | xargs)            # trim whitespace around the number
LOG4S_info "${DATE} 重复的数据量是  ${data_num} "      "$log_file_path"
if [[ "${data_num}" ==  "0" ]]; then

    LOG4S_debug "准备 导入${LOAD_TABLE}"        "$log_file_path"
else
    # Pre-clean: remove that day's rows, then reset auto_increment so the
    # re-imported rows get compact ids.
    delete_sql="delete from    ${LOAD_TABLE}   where sync_date between '${DATE} 00:00:00' AND '${DATE} 23:59:59'   "
    LOG4S_warn  "delete_sql ==> ${delete_sql}"      "$log_file_path"
    mysql --defaults-file="$config_file" -e "$delete_sql"
    alter_sql="alter table  ${LOAD_TABLE}    auto_increment=1"
    LOG4S_warn  "alter_sql ==> ${alter_sql}"
    mysql --defaults-file="$config_file" -e "$alter_sql"
fi




# Per-bank import: for each configured bank, enter its folder, locate the day's
# merchant CSV and load it via LOAD_CSV_FUNCTION, tagging every row with the
# sync date and the bank names.
for ((i = 0; i < ${#BANK_NAME_ARRAY[@]}; i++)); do
  echo "正在处理的   bank_name  is  ${BANK_NAME_ARRAY[$i]} "
  echo "first_bank_name is  ${FIRST_BANK_NAME[$i]} "
  # echo "bank_orgid is  ${BANK_ORGID_ARRAY[$i]} "  #  TODO  reserved, not used yet
  # BUG FIX: the cd used to be unchecked — a missing bank folder meant the rest
  # of the iteration ran in the wrong directory. Skip such banks instead.
  cd "${BANK_NAME_ARRAY[$i]}" || { echo "missing bank folder: ${BANK_NAME_ARRAY[$i]}, skipped" 1>&2; continue; }
  echo -e  "\n\n-------------------------------------------------------------------------------------"
  # Locate the merchant CSV with a glob instead of parsing `ls` output (SC2010).
  shopt -s nullglob
  mer_files=( *mer.csv )
  shopt -u nullglob
  file_absolute_path="${PARENT_PATH}/${BANK_NAME_ARRAY[$i]}/${mer_files[0]}"
  LOG4S_info "处理的文件 ==> $file_absolute_path     行数==>$(wc -l "$file_absolute_path")"
  # sync_date / belong_bank / first_belong_bank ride along as extra columns;
  # the assignment string must not contain spaces around '=' or ','.
  extend_part="sync_date='${DATE}',belong_bank='${BANK_NAME_ARRAY[$i]}',first_belong_bank='${FIRST_BANK_NAME[$i]}' "
  LOAD_CSV_FUNCTION "${file_absolute_path}" "${LOAD_TABLE}" "${TABLE_COLUMNS}" "${DATE}" "${ENV}" "${extend_part}"
  cd "${PARENT_PATH}" || exit   # back to the day's directory for the next bank
done

#---------------------------   Completion notification  ----------------------------#
msg="数经平台银行数据导入到MariaDB完成, ENV=$ENV,  DATE=$DATE"
# BUG FIX: $msg contains spaces; unquoted it was word-split into many separate
# arguments for LOG4S_warn and the console tool. Quote it so it stays one arg.
LOG4S_warn "${msg}"
touch "all_file_synced.txt"   # marker file signalling that all files are synced
# NOTE(review): --environment is hard-coded to prod here even when ENV=dev — confirm intended.
/opt/trx-data-importer/bin/console send --branch=gx --environment=prod "${msg}"

