#!/bin/bash
# Daily automated data-load pipeline:
#   1. scp the day's files from host hadoop2 into a local staging directory
#   2. load them into a Hive table partition (settle_date = YYYYMMDD)
#   3. materialize a per-day table and export it to MySQL via Sqoop
#   4. drop the temporary per-day Hive table
#
# Requires: /root/.bashrc to provide the hive/sqoop environment
#           (JAVA_HOME, HADOOP_HOME, PATH, ...).

# Abort on any failed step so we never export stale/partial data,
# and fail on unset variables and mid-pipeline errors.
set -euo pipefail

# Pull in the cluster environment needed by hive and sqoop.
# ('source' is a bashism — hence the #!/bin/bash shebang above.)
source /root/.bashrc

# Settlement date for this run, used as partition key and path suffix.
V_DATE=$(date +%Y%m%d)
readonly V_DATE

readonly V_LOGS_PATH=/root/shell/logs

# Source directory on the remote FTP host (hadoop2).
readonly V_SOURCE_DIRECTORY=/root/testData/hive

# Local staging directory on this hadoop node.
# (Renamed from v_TARGET_DIRECTORY for consistent UPPER_SNAKE naming.)
readonly V_TARGET_DIRECTORY=/root/testData/hive

echo "开始执行自动数据脚本${V_DATE}"

# scp fails when copying multiple files into a missing directory,
# so create the day's staging directory first.
mkdir -p "${V_TARGET_DIRECTORY}/${V_DATE}"

# Quote the remote path so the '*' glob is expanded on hadoop2,
# not by the local shell.
scp -r "hadoop2:${V_SOURCE_DIRECTORY}/${V_DATE}/*" "${V_TARGET_DIRECTORY}/${V_DATE}/"

echo "第一步：开始执行hive语句"

# settle_date is a string partition column, so its values must be
# quoted SQL string literals ('$V_DATE'), not bare numbers.
hive -e "
 create table if not exists student (id int, name string)
   partitioned by (settle_date string)
   row format delimited fields terminated by '#'
   lines terminated by '\n'
   stored as textfile;
 load data local inpath '${V_TARGET_DIRECTORY}/${V_DATE}/student_${V_DATE}'
   into table student partition (settle_date = '${V_DATE}');
 create table student_${V_DATE} as
   select * from student where settle_date = '${V_DATE}'"

echo "第二步：hive ETL语句执行完毕,准备执行sqoop语句。。。"

# NOTE(security): the MySQL password is passed in plaintext on the command
# line and is visible in 'ps' output and shell history — prefer
# --password-file or -P (interactive prompt) in production.
sqoop export \
  --connect jdbc:mysql://hadoop1/btms \
  --username root \
  --password 123 \
  --table student \
  --export-dir "/user/hive/warehouse/student_${V_DATE}" \
  --input-fields-terminated-by '\001'

echo "第三步：sqoop执行完毕,开始清理临时hive表"

# Drop the per-day staging table now that the export succeeded.
hive -e "drop table if exists student_${V_DATE}"

echo "手动修整数据脚本执行完毕"