#!/bin/bash
#
# Upload hive result archives (sjjx_cs_*.tgz) from DATADIR to the remote
# host REMOTE_ADDR, verify each transfer by MD5 checksum, rotate old local
# copies, then trigger the remote import script.

# Remote host that receives the hive data.
readonly REMOTE_ADDR=172.18.180.228

# Destination directory on the remote host.
readonly REMOTEDIR=/home/work/changsha_project/hive_data
# Local directory holding the hive result archives produced upstream.
readonly DATADIR=/data/dfs/dn/1/ware/bigdata/data/OUTPUT/ZSTP/base_data

#HIVE_DATA_DIR=/mnt/changsha/all_json_data_tmp

# Enter the hive result directory. Fall back to the current directory if
# DATADIR is somehow empty so the glob below simply matches nothing instead
# of scanning an unintended location.
cd "${DATADIR:-$PWD}" || exit 1

# Upload every hive result archive, verify the MD5 after transfer, and
# retry up to 4 times on checksum mismatch.
for DT in sjjx_cs_*.tgz
do
  # No nullglob: when nothing matches, DT is the literal pattern — skip it.
  [ -e "${DT}" ] || continue

  # Local MD5 of the archive (first field of md5sum output).
  cs_md5=$(md5sum "${DT}" | awk '{print $1}')

  try_cnt=0
  while [ "${try_cnt}" -lt 4 ]
  do
       echo "开始传输数据: ${DT}"
       # Remove any stale remote copy first, then upload.
       # NOTE(review): deleting before every retry defeats --append/--partial
       # resume; kept as-is because a failed attempt may have left a corrupt
       # remote file that --append would only extend — confirm before changing.
       ssh "work@${REMOTE_ADDR}" "rm -rf ${REMOTEDIR}/${DT}"
       rsync --append --partial --progress -e "ssh -p22" "${DT}" "work@${REMOTE_ADDR}:${REMOTEDIR}/"
       echo "开始计算MD5: ${DT}"
       # Hash the file on the remote host itself. The previous form
       # (`ssh cat file | md5sum`) streamed the whole archive back through
       # ssh a second time just to hash it locally.
       result=$(ssh "work@${REMOTE_ADDR}" "md5sum ${REMOTEDIR}/${DT}" | awk '{print $1}')
       if [ "$cs_md5" != "$result" ]; then
           echo "error md5 不一致: ${DT}  result = ${result} cs_md5 = ${cs_md5}"
           try_cnt=$((try_cnt + 1))
       else
           echo " ${DT} md5一致: result = ${result} cs_md5 = ${cs_md5}"
           # Archive name without the .tgz extension.
           var=${DT%.*}
           # Record the verified checksum next to the archive on the remote side.
           ssh "work@${REMOTE_ADDR}" "echo ${cs_md5} > ${REMOTEDIR}/${var}.md5"
           break
       fi
  done

  # Only rotate the local archive after a verified upload; after 4 failed
  # attempts try_cnt == 4 and the file is kept for the next run.
  if [ "${try_cnt}" -lt 4 ]; then
      rm -rf "${DT}_bak"
      mv "${DT}" "${DT}_bak"
      echo "数据传输并删除成功: ${DT} "
  fi
done

echo "上传本地服务端 hive数据完成.."

# Purge transferred archives (rotated to *.tgz_bak above) that are 10 to 17
# days old. Added by 游丰 2018-11-05.
# NOTE(review): `date -d "N days ago"` is GNU date syntax — not portable to
# BSD/macOS date.
for ((i = 10; i <= 17; i++))
do
  DT="sjjx_cs_$(date -d "$i days ago" +%Y%m%d)"
  if [ -f "${DATADIR}/${DT}.tgz_bak" ]; then
    # Target is a regular file (checked with -f), so -f is sufficient;
    # -r (recursive) was unnecessary.
    rm -f "${DATADIR}/${DT}.tgz_bak"
    echo "删除历史数据: ${DT}.tgz_bak"
  fi

done

echo "删除本地服务端 hive历史数据完成.."


ssh work@${REMOTE_ADDR} sh ${REMOTEDIR}/upload_hive_data_to_changsha.sh

echo "hive 远程过程调用完成..."