#!/bin/bash
###############################################################################
##  Author    : xuezhoyi
##  Name      : edw_parameter.sh
##  Functions : Initialize all variables and parameters
##  Purpose   : Initialize all variables and parameters
##  Revisions or Comments
##  VER        DATE        AUTHOR           DESCRIPTION
##---------  ----------  ---------------  ------------------------------------ 
##  1.0      2017-05-22  xuezhouyi        1. CREATED THIS SHELL.
###############################################################################
# NOTE(review): shebang changed from /bin/sh to /bin/bash — the file uses
# bash-only features ([[ ]] tests and the V_KYLIN_CUBE array) that are not
# guaranteed under a POSIX /bin/sh.

# Common parameter
#source ~/.bash_profile
#export LD_LIBRARY_PATH=/tools/oracle/instantclient_11_2
#export PATH=$PATH:$LD_LIBRARY_PATH
#export NLS_LANG=AMERICAN_AMERICA.AL32UTF8

# Common path — all project directories hang off V_HOME
V_HOME=/home/hadoop/dev
V_SHELL_HOME=${V_HOME}/shell
V_SHELL_PARM=${V_HOME}/parm
V_SHELL_PROC=${V_HOME}/proc
V_SHELL_SECU=${V_HOME}/secu
V_SHELL_LOGS=${V_HOME}/logs
V_SHELL_DATA=${V_HOME}/data

# Parallel run number (max concurrent jobs for the parallel import/export scripts)
V_PARALLEL_NUM=20

# Script parameter
V_TIME_STAMP=$(date +%Y%m%d%H%M%S)   # run timestamp, e.g. 20170522143000
V_DATE=$(date +%Y%m%d)               # run date, e.g. 20170522
V_WAIT_TIME=180                      # wait interval in seconds
V_WAIT_TIME_MIN=5                    # minimum wait interval
V_MAX_COUNT=24                       # maximum retry/poll count

# Execute shell — paths to the worker scripts invoked by the schedulers
V_REFRESH_P="${V_SHELL_HOME}/edw_parm_refresh.sh"
V_REFRESH_P_1331="${V_SHELL_HOME}/edw_parm_refresh_1331.sh"
V_RUN_PROCS="${V_SHELL_HOME}/edw_proc_launcher.sh"
V_RUN_MYSQL="${V_SHELL_HOME}/edw_mysql_executor.sh"
V_RUN_HIVQL="${V_SHELL_HOME}/edw_hivql_executor.sh"
V_EXPT_DATA="${V_SHELL_HOME}/edw_data_exporter.sh"
V_IMPT_DATA="${V_SHELL_HOME}/edw_data_importer.sh"
V_PARALL_EXPT_DATA="${V_SHELL_HOME}/edw_parallel_data_exporter.sh"
V_PARALL_IMPT_DATA="${V_SHELL_HOME}/edw_parallel_data_importer.sh"

# HBase: load data from HDFS via Phoenix
V_PHOENIX_LOAD_DATA="${V_SHELL_HOME}/Phoenix_load_data.sh"
V_RUN_PHOENIX_SQL="${V_SHELL_HOME}/edw_phoenix_executor.sh"


# MySQL connection parameter — user/password are read from files under secu/,
# not stored inline, so credentials stay out of this script
V_MYSQL_HOST="11.28.4.220"
V_MYSQL_USER="${V_SHELL_SECU}/mysql_query_user"
V_MYSQL_PSWD="${V_SHELL_SECU}/mysql_query_pswd"

# Export file delimiter (literal backslash-t string, expanded by the consumers)
V_DELIMITER="\t"

# Kettle (Pentaho Data Integration) install and job locations
V_KETTLE_HOME=/home/hadoop/pentaho/kettle/data-integration
V_KETTLE_JOB=/home/hadoop/pentaho/Jobs

# Kylin cube
V_KYLIN_MINUSTIME=$((8*60*60*1000))  # 8 hours in milliseconds (timezone offset)
V_KYLIN_CUBE=(xue_cube)              # bash array of cube names
V_KYLIN_LINK="192.168.100.222"

# Create the working folders if they do not exist.
# mkdir -p is idempotent (no error for existing directories), so the previous
# per-directory "[[ ! -d ... ]]" guards were redundant and have been removed.
mkdir -p "${V_SHELL_PARM}" "${V_SHELL_PROC}" "${V_SHELL_SECU}" \
         "${V_SHELL_LOGS}" "${V_SHELL_DATA}"
