#!/bin/bash
# Path to the hadoop CLI binary used for all HDFS operations below.
m_hadoop=/opt/hadoop-3.1.1.3.1.4.0-315/bin/hadoop
# HDFS path prefix under which successfully extracted backup files live.
pre_hdfs_path=/user/quantum_social/dataBackup/underlyingData/chengdu/das/product/download/success
# Example data-source layout: /bool/20230111/ec/jd.com
# Path components: vendor / platform / data source / date; s_s_d = start time, s_e_d = end time
# (component order differs between hdfs and sftp paths — see check_dir)

# Script start time (human-readable + epoch seconds, for the summary at the end).
startTime=$(date +%Y%m%d-%H:%M:%S)
startTime_s=$(date +%s)

check_dir() {
  # Derive the HDFS source path and the local download directory from the
  # global $file (set by the caller before each invocation), then make sure
  # the local directory exists.
  #
  # Globals read:  file, pre_hdfs_path
  # Globals set:   c_dir, arr, s_v, s_d, s_p, s_s, s_f,
  #                base_dir, dataBackup_file, download_local_file
  c_dir=$(pwd)'/area_govern'
  # Example inputs:
  #   hdfs://6.master.adh:8020/user/quantum_social/services/das/product/sina/20230626/weibo/weibo.com/data_..._20230626002746.zip
  #   sftp:/bool/regulation/20230626/ec/jd.com/data_1687773009_9.zip
  # Split the path on '/' into words — the word-splitting here is
  # intentional, so the expansion must stay unquoted.
  arr=(${file//\// })
  if [[ $file = hdfs* ]]; then
    echo "Start with hdfs"
    # Components picked by fixed position; e.g. "sina" "20230626" "weibo"
    # "weibo.com" "<file>" for the hdfs example above.
    s_v=${arr[7]}
    s_d=${arr[8]}
    s_p=${arr[9]}
    s_s=${arr[10]}
    s_f=${arr[11]}
  else
    echo "Start with sftp"
    # e.g. "bool" "20230626" "ec" "jd.com" "<file>" for the sftp example.
    s_v=${arr[1]}
    s_d=${arr[3]}
    s_p=${arr[4]}
    s_s=${arr[5]}
    s_f=${arr[6]}
  fi
  base_dir=$s_v/$s_d/$s_p/$s_s/
  dataBackup_file=$pre_hdfs_path/$base_dir/$s_f
  download_local_file=$c_dir/$base_dir
  # Create the local target directory if it does not exist yet.
  # Quoted expansions and [[ ]] so paths with spaces/globs are safe.
  if [[ ! -d "$download_local_file" ]]; then
    echo "创建目录: $download_local_file"
    mkdir -p -- "$download_local_file"
  fi
}

downloadHandle() {
  # Download $dataBackup_file from HDFS into $download_local_file.
  #
  # Globals read: m_hadoop, dataBackup_file, download_local_file
  #
  # Bug fix: the original wrapped the hadoop call in $( ... ), which
  # captured its stdout and then tried to *execute* that output as a
  # command. Run the command directly, with expansions quoted.
  echo "下载文件："
  "$m_hadoop" fs -get "$dataBackup_file" "$download_local_file"
}
# --- main: loop over the list file and download each entry -----------------
# $1: path to a text file listing one source path (hdfs://... or sftp:...)
#     per line.
download_file=$1
# Count lines without a useless `cat` (command substitution strips the
# trailing newline, so the value is the bare number).
need_down_files=$(wc -l < "$download_file")
echo "需要下载文件：$need_down_files"
# Read the list in the current shell (no `cat |` subshell). IFS= and -r
# preserve leading/trailing whitespace and backslashes; the `|| [ -n ... ]`
# clause still processes a final line that lacks a trailing newline.
while IFS= read -r line || [ -n "$line" ]; do
  file=$line
  check_dir
  downloadHandle
done < "$download_file"

# End time and total elapsed wall-clock seconds.
endTime=$(date +%Y%m%d-%H:%M:%S)
endTime_s=$(date +%s)
sumTime=$((endTime_s - startTime_s))
echo "$startTime ---> $endTime" "Total:$sumTime seconds"