#!/bin/bash
### gx created
### 2018-06-28

# Load shared environment variables (HDFS paths, YARD_NAME, PROTOCOLS, ...)
# from the env file that sits next to this script.
# NOTE: shebang is bash (not /bin/sh) because this script relies on bash
# arrays ("${PROTOCOLS[@]}") further down.
ENV_FILE=shell_env.sh
# Resolve the script's own directory; '&&' ensures a failed cd does not
# silently fall through to pwd of the caller's directory.
CUR_PATH=$(cd "$(dirname "$0")" && pwd)
. "$CUR_PATH/$ENV_FILE"

# Abort if the staging root already exists on HDFS: re-running the
# initialisation over an existing layout could clobber data.
# Test the command's exit status directly instead of the original
# backtick form, which executed the command's (empty) stdout as a
# command and only then inspected $?.
if hadoop fs -test -e "$STAGE_IMPORT"; then
        echo "目录已存在，请检查是否需要重新初始化"
        exit 1
fi

# Create the Flume landing directory on HDFS and hand ownership to the
# flume user so the agent can write into it. Expansions quoted so paths
# containing spaces or glob characters are passed through intact.
hadoop fs -mkdir -p "${FLUME_SOURCE}"
hadoop fs -chown -R flume "${FLUME_SOURCE}"

# For each protocol: create the local log path plus the HDFS import
# (external-table) and backup (pre-conversion) directories, then grant
# the expected owners. Array and variable expansions are quoted so
# entries survive word-splitting and globbing (SC2068/SC2086).
for protocol in "${PROTOCOLS[@]}"; do
	# Local log path for this protocol.
	mkdir -p "${BASE_LOG_PATH}/${protocol}"
	# Grant it to hdfs:hdfs; options placed before operands for
	# portability (the original 'chown hdfs:hdfs -R path' ordering is
	# GNU-specific).
	chown -R hdfs:hdfs "${BASE_LOG_PATH}/${protocol}"
	# HDFS data directory backing the external table.
	hadoop fs -mkdir -p "${STAGE_IMPORT}_${protocol}"
	# HDFS backup directory holding data prior to conversion.
	hadoop fs -mkdir -p "${STAGE_BACKUP}_${protocol}"
	# Flume writes into both directories.
	hadoop fs -chown -R flume "${STAGE_IMPORT}_${protocol}"
	hadoop fs -chown -R flume "${STAGE_BACKUP}_${protocol}"
done

# Create the Hive database.
hive -hivevar hive_db="${YARD_NAME}" -hivevar create_time="${create_time}" -hivevar data_path="${BASE_STAGE}" -f "$CUR_PATH/create_table_scripts/create_database.hql"

echo "执行hive建表脚本"
# Run every table-creation script. Use a shell glob instead of parsing
# `ls` output (SC2045) so filenames are never word-split; the -e guard
# skips the literal pattern when no script matches.
for f in "$CUR_PATH"/create_table_scripts/hive_create_table_*.hql; do
   [ -e "$f" ] || continue
   hive -hivevar hive_db="${YARD_NAME}" -hivevar create_time="${create_time}" -hivevar data_path="${BASE_STAGE}" -f "$f"
done
# Final sanity output: list the created tables and the HDFS staging root.
hive -hivevar hive_db="${YARD_NAME}" -hivevar create_time="${create_time}" -hivevar data_path="${BASE_STAGE}" -f "$CUR_PATH/create_table_scripts/show_tables.hql"
echo "初始化完成"
hadoop fs -ls "$BASE_STAGE"
