#!/bin/bash
# Author: 
# Func: 
# Driver script: collects raw user2feed/user2resource inputs for one day
# and runs the feed-content recommendation preprocessing job.

set -e -x

curdir=$(dirname "$0")
# In production, "conf" dir should be in parallel with working dir which contains this running script.
export QITAN_CONF_PATH=${QITAN_CONF_PATH:-"$curdir/../conf"}
source "$QITAN_CONF_PATH/user_rec.conf"
# In production, "logutil.sh" file should be in the same working dir as current running script is.
# logutil.sh provides level_name, is_hfile_exist and is_run used below.
source "$curdir/logutil.sh"

# get data path variables

hdfs_temp=${HDFS_TEMP_PATH:-"temp"}
ur_hdfs_data_path=${USER_REC_HDFS_DATA_PATH:-'user_rec/data'}
ur_hdfs_done_path=${USER_REC_HDFS_DONE_PATH:-'user_rec/done'}
# BUG FIX: the two feed paths previously read QITAN_USER_RESOURCE_* (the same
# env vars as the resource paths below), a copy-paste error that made feed
# and resource paths collide whenever those vars were set. The feed paths
# now read their own QITAN_USER_FEED_* overrides; defaults are unchanged.
raw_user_feed_hdfs_data_path=${QITAN_USER_FEED_DATA_PATH:-"/data/SDP/raw/qitan_doc/user2feed/data"}
raw_user_feed_hdfs_done_path=${QITAN_USER_FEED_DONE_PATH:-"/data/SDP/raw/qitan_doc/user2feed/done"}
raw_user_resource_hdfs_data_path=${QITAN_USER_RESOURCE_DATA_PATH:-"/data/SDP/raw/qitan_doc/user2resource/data"}
raw_user_resource_hdfs_done_path=${QITAN_USER_RESOURCE_DONE_PATH:-"/data/SDP/raw/qitan_doc/user2resource/done"}

# set parameters
# NOTE(review): the run day is hard-coded, presumably for testing — the
# production line below it is commented out; confirm before deploying.
now_day='2012-11-09'
#now_day=$(date +%Y-%m-%d)

# Split the run day into year/month/day with a single date call.
read -r year_info month_info day_info <<<"$(date -d "$now_day" '+%Y %m %d')"

# The previous day, both whole and as components.
yes_day=$(date -d "$now_day -1 day" '+%Y-%m-%d')
read -r yes_day_year yes_day_month yes_day_day <<<"$(date -d "$yes_day" '+%Y %m %d')"

queue_name=${QUEUE_NAME:-'temp'}

# from run_xxx.abc to xxx
# level_name comes from logutil.sh; quote "$0" in case the script path
# contains spaces.
now_level_name=$(level_name "$0")

# define dependences
dep_level_name1='qitan_user_feed_data'
dep_level_name2='qitan_user_resource_data'
dep_level_name3=$(level_name run_get_friend_list.sh)
# How many days of raw feed/resource data to look back over.
lookup_days=${USER_REC_RESOURCE_LOOKUP_DAYS:-'7'}
dep_input1="" # raw user2feed, accumulated by the lookup loop below
dep_input2="" # raw user2resource, accumulated by the lookup loop below
dep_input3=${ur_hdfs_data_path}/${dep_level_name3}/${year_info}/${month_info}/${now_day}
# Scan the last $lookup_days days and collect every raw input that exists
# on HDFS into comma-separated lists (consumed as -f/-r args below).
for ((i = 1; i <= lookup_days; i++)); do
    # All three components of the day under inspection, from one date call.
    read -r cur_year cur_month cur_day <<<"$(date -d "$now_day -$i days" '+%Y %m %d')"
    cur_in1="${raw_user_feed_hdfs_data_path}/${cur_year}/${cur_month}/${cur_day}/${cur_year}${cur_month}${cur_day}*.txt"
    cur_in2="${raw_user_resource_hdfs_data_path}/${cur_year}/${cur_month}/${cur_day}/${cur_year}${cur_month}${cur_day}*.txt"
    # is_hfile_exist (logutil.sh) echoes 1 when the HDFS path matches.
    # Quote the patterns so the local shell never glob-expands the *.txt.
    exist1=$(is_hfile_exist "$cur_in1")
    exist2=$(is_hfile_exist "$cur_in2")
    if [ "$exist1" == "1" ]; then
        if [ -z "$dep_input1" ]; then
            dep_input1=$cur_in1
        else
            dep_input1="$dep_input1,$cur_in1"
        fi
    fi
    if [ "$exist2" == "1" ]; then
        if [ -z "$dep_input2" ]; then
            dep_input2=$cur_in2
        else
            dep_input2="$dep_input2,$cur_in2"
        fi
    fi
done
dep_done_path1="${raw_user_feed_hdfs_done_path}/${yes_day_year}/${yes_day_month}/${yes_day_year}${yes_day_month}${yes_day_day}22.done"
dep_done_path2="${raw_user_resource_hdfs_done_path}/${yes_day_year}/${yes_day_month}/${yes_day_year}${yes_day_month}${yes_day_day}22.done"
dep_done_path3="${ur_hdfs_done_path}/${dep_level_name3}/${year_info}/${month_info}/${now_day}.done"

declare -a dep_level_array=($dep_level_name1 $dep_level_name2 $dep_level_name3) 
declare -a dep_done_array=($dep_done_path1 $dep_done_path2 $dep_done_path3)

# defind data for current job
output=${ur_hdfs_data_path}/${now_level_name}/${year_info}/${month_info}/${now_day}
outdone=${ur_hdfs_done_path}/${now_level_name}/${year_info}/${month_info}/${now_day}.done
pid_path=${USER_REC_LOCAL_PLOG_PATH:-"$curdir/../logs_pid"}
pid_file=$pid_path/$now_level_name

####################################################################
#
#    test whether this script is executed or not
#
####################################################################
# Single-instance guard: if a pid file exists and is_run (logutil.sh)
# reports that pid as alive, another run is in progress — exit quietly.
if [ -e "$pid_file" ]
then
    running=$(is_run "$pid_file")
    if [ "$running" = "1" ]
    then
        echo "$now_level_name is running"
        date
        exit 0
    fi
    # Stale pid file left by a dead run; remove it and take over.
    rm -f "$pid_file"
fi
pid=$$
mkdir -p "$pid_path"
echo "$pid" > "$pid_file"

####################################################################
#
#    test whether this job is done or not
#
####################################################################
# Idempotence guard: skip the whole job when today's done marker is
# already on HDFS (is_hfile_exist echoes 1 when the path exists).
exist=$(is_hfile_exist "$outdone")
if [ "$exist" == "1" ]; then
    echo "$now_level_name in ${now_day} has been done successfully......";
    exit 0;
else
    echo "$now_level_name in ${now_day} has not been done yet, now continuing......";
fi

####################################################################
#
#    test whether required data are ready
#
####################################################################
# Every dependency must have published its done marker; otherwise give
# up now and let the next scheduled run retry.
for((i=0;i<${#dep_done_array[@]};i++))
do
    done_path=${dep_done_array[$i]}
    lev_name=${dep_level_array[$i]}
    exist=$(is_hfile_exist "$done_path")
    if [ "$exist" == "0" ]; then
        echo "dep job $lev_name is not ready!"
        exit 1
    fi
done

##########################################################################
#
# preprocess raw user relation data
#
##########################################################################
# BUG FIX: the original ran the job and tested $? in a separate statement;
# with "set -e" a failing job aborted the script before the test, so the
# error branch was dead code. Testing the command directly in "if" keeps
# set -e from firing and makes the failure path reachable. Arguments are
# quoted so the comma-separated *.txt patterns are passed through verbatim
# instead of being glob-expanded by the local shell.
if "$curdir/get_feedcontent_recommendation_stream.sh" -f "$dep_input1" -r "$dep_input2" -d "$dep_input3" -o "$output"; then
    echo "OK in $now_level_name in $now_day"
    # Publish the done marker so reruns and downstream jobs see success.
    hadoop fs -touchz "$outdone"
else
    echo "Error in $now_level_name in $now_day"
    exit 1
fi

exit 0

