#!/bin/bash
# Manual data-correction script for one settlement date of btms.student.
# Flow: delete the date's rows from MySQL -> refresh the corrected source
# files from hadoop2 -> rebuild the Hive partition -> export the corrected
# rows back to MySQL via Sqoop -> drop the temporary Hive table.
#
# Uses bash (`source` is a bashism), so the shebang must not be /bin/sh.

source /root/.bashrc

# Abort on any failed command, unset variable, or failed pipeline stage:
# every step below depends on the previous one having succeeded.
set -euo pipefail

# Settlement date (Hive partition value) whose data is being corrected.
V_DATE=20180123

# Log directory — NOTE(review): not referenced anywhere in this script;
# kept for compatibility in case external tooling reads it.
V_LOGS_PATH=/root/shell/logs

# Remote (FTP) directory on hadoop2 holding the corrected source files.
V_SOURCE_DIRECTORY=/root/testData/hive

# Local staging directory on this Hadoop node.
# (Renamed from v_TARGET_DIRECTORY for consistency with the other V_* names.)
V_TARGET_DIRECTORY=/root/testData/hive

echo "开始执行手动修整数据脚本"$V_DATE

echo "第一步：删除mysql指定日期数据"

# NOTE(review): a plaintext password on the command line is visible in `ps`
# and shell history — prefer a ~/.my.cnf credentials file or --defaults-file.
mysql -uroot -p123 -e "delete from btms.student where settle_date = $V_DATE"

# Remove the stale local copy for this date.
# Fixed: the original expanded "$v_TARGET_DIRECTORY/V_DATE/*" (missing '$'),
# so the old data was never deleted. ${var:?} aborts the script instead of
# letting an empty variable turn this into "rm -rf /*".
rm -rf "${V_TARGET_DIRECTORY:?}/${V_DATE:?}"/*
# Pull the corrected files from hadoop2 into the staging directory.
scp -r "hadoop2:$V_SOURCE_DIRECTORY/$V_DATE"/* "$V_TARGET_DIRECTORY/$V_DATE"

echo "第二步：开始执行hive语句"

# Rebuild the partition from the refreshed files, then snapshot it into a
# temporary per-date table that Sqoop can export from.
hive -e "
create table if not exists student (id int,name string) partitioned by (settle_date string) row format delimited fields terminated by '#' lines terminated by '\n' stored as textfile;
ALTER TABLE student DROP IF EXISTS PARTITION(settle_date = $V_DATE);
load data local inpath '$V_TARGET_DIRECTORY/$V_DATE/student_$V_DATE' into table student partition (settle_date= $V_DATE);
create table student_$V_DATE as select * from student where settle_date = $V_DATE
"

# (Step numbers renumbered: the original jumped 一/二/四/五, which misled
# anyone reading the logs.)
echo "第三步：hive ETL语句执行完毕,准备执行sqoop语句。。。"

# \001 is Hive's default field delimiter for the CTAS table created above.
sqoop export --connect jdbc:mysql://hadoop1/btms --username root --password 123 --table student  --export-dir "/user/hive/warehouse/student_$V_DATE"  --input-fields-terminated-by '\001'

echo "第四步：sqoop执行完毕,开始清理临时hive表"

hive -e "drop table if exists student_$V_DATE"

echo "手动修整数据脚本执行完毕"