#!/bin/bash
# bash (not plain sh) is required: this file defines array variables
# (V_SCP_HOST, V_KYLIN_CUBE) further down, which POSIX sh does not support.
###############################################################################
##  Author    : xuezhoyi
##  Name      : edw_parameter.sh
##  Functions : Initialize all variables and parameters
##  Purpose   : 
##  Revisions or Comments
##  VER        DATE        AUTHOR           DESCRIPTION
##---------  ----------  ---------------  ------------------------------------ 
##  1.0      2017-05-22  xuezhouyi        1. CREATED THIS SHELL.
##  2.0      2018-11-11  薛周毅           1. 所有参数集合
##  2.1      2019-02-12  陈贰浩           1. 增加db2数据库参数
###############################################################################

# ---------------------------------------------------------------------------
# Common directory layout shared by every EDW script that sources this file.
# ---------------------------------------------------------------------------
V_HOME="/home/hadoop/dic"
V_SHELL_HOME="${V_HOME}/shell"   # executable worker scripts
V_SHELL_PARM="${V_HOME}/parm"    # parameter files (see V_PARM_FILE)
V_SHELL_PROC="${V_HOME}/proc"    # proc working directory
V_SHELL_SECU="${V_HOME}/secu"    # credential files (see V_DB_USER/V_DB_PSWD)
V_SHELL_LOGS="${V_HOME}/logs"    # run logs
V_SHELL_DATA="${V_HOME}/data"    # exported/imported data files
V_SHELL_DDL="${V_HOME}/ddl"      # DDL scripts

# ---------------------------------------------------------------------------
# Task status codes used by the scheduler scripts.
# ---------------------------------------------------------------------------
WAIT=1   # WAIT
RUN=2    # RUN
SYNC=3   # SYNC (export/import)
PROC=4   # PROC
FAIL=5   # FAIL
DONE=6   # DONE
# Progress-tracking ("speed") files for the sync and proc stages.
SYNC_SPEED_FILE="${V_SHELL_HOME}/SYNC.SPEED"
PROC_SPEED_FILE="${V_SHELL_HOME}/PROC.SPEED"

# ---------------------------------------------------------------------------
# Runtime tuning parameters.
# ---------------------------------------------------------------------------
V_PARALLEL_PROC_NO=9    # max procs run in parallel
V_PARALLEL_SYNC_NO=13   # max exporters/importers run in parallel

# Master parameter file.
V_PARM_FILE="${V_SHELL_PARM}/edw_parm.txt"

# Timestamp and date components, captured once at startup.
V_TIME_STAMP=$(date +%Y%m%d%H%M%S)
V_CURR_Y=$(date +%Y)
V_CURR_M=$(date +%m)
V_CURR_D=$(date +%d)

# Seconds to wait between polls (load data / parm file / same group).
V_WAIT_TIME=10

# Progress-bar state: counter and the bar string built so far.
V_I=0
V_BAR=''

# ---------------------------------------------------------------------------
# Paths of the EDW worker scripts.
# ---------------------------------------------------------------------------
V_REFRESH_P="${V_SHELL_HOME}/edw_parm_refresh.sh"     # parameter refresh
V_RUN_PROCS="${V_SHELL_HOME}/edw_proc_launcher.sh"    # proc launcher
V_RUN_DB="${V_SHELL_HOME}/edw_db_executor.sh"         # DB executor
V_RUN_HIVQL="${V_SHELL_HOME}/edw_hivql_executor.sh"   # HiveQL executor
V_EXPT_DATA="${V_SHELL_HOME}/edw_data_exporter.sh"    # data exporter
V_IMPT_DATA="${V_SHELL_HOME}/edw_data_importer.sh"    # data importer

# ---------------------------------------------------------------------------
# Database connection parameters.
# V_DB_TYPE: only "db2" or "mysql" may be chosen.
# ---------------------------------------------------------------------------
V_DB_TYPE="db2"
V_DB_HOST="DWPBCGX"
V_DB_PORT="50000"
# User/password are *paths to files* under the secu directory, keyed by DB
# type, so no credential is hardcoded here.
V_DB_USER="${V_SHELL_SECU}/${V_DB_TYPE}_query_user"
V_DB_PSWD="${V_SHELL_SECU}/${V_DB_TYPE}_query_pswd"

# Field delimiter for exported data files (the two characters '\' 't';
# expansion to a real tab is presumably done by the consumer).
V_DELIMITER="\t"

# ---------------------------------------------------------------------------
# Kettle (Pentaho Data Integration) installation and job directories.
# ---------------------------------------------------------------------------
V_KETTLE_HOME="/home/hadoop/pentaho/kettle/data-integration"
V_KETTLE_JOB="/home/hadoop/pentaho/Jobs"

# Hive client configuration directory.
V_HIVE_CONF="/home/hadoop/dwhdp/core/hive/conf"

# Hosts used for sample-data download (bash array).
V_SCP_HOST=("11.72.192.235" "11.72.192.236")


# Kylin cube settings.
V_KYLIN_MINUSTIME=$(( 8 * 60 * 60 * 1000 ))   # 8 hours, in milliseconds
V_KYLIN_CUBE=(CUBE_DEPB CUBE_LOAN)            # cubes to build (bash array)
V_KYLIN_LINK="11.72.192.245"

# Create the working directories if they do not already exist.
# mkdir -p is idempotent, so the previous per-directory `[[ ! -d ]]` tests
# (a bash-only construct under this script's sh shebang) are unnecessary;
# a single POSIX-compatible loop replaces the six copy-pasted blocks.
ensure_dir() {
    # $1 - directory path to create; empty/unset paths are skipped so a
    #      misconfigured variable cannot trigger `mkdir ''` errors.
    [ -n "$1" ] && mkdir -p -- "$1"
}
for V_DIR in "${V_SHELL_PARM}" "${V_SHELL_PROC}" "${V_SHELL_DDL}" \
             "${V_SHELL_SECU}" "${V_SHELL_LOGS}" "${V_SHELL_DATA}"; do
    ensure_dir "${V_DIR}"
done
