#!/usr/bin/env bash
# Import the most recent graph image dumps into ArangoDB, driven by
# HDFS marker files (PROCESSING_LOCK / ARANGO_IMPORTED).
#
# NOTE(review): 'set -x' traces every command, which leaks the hardcoded
# ArangoDB password used below into the trace output and logs.  Prefer
# reading credentials from a config file and dropping the hardcoded value.
set -x

# Local app / staging paths and HDFS source location.
WORK_DIR=/home/dig/airflow/apps/graph-builder
HDFS_PATH="/user/graph_builder/data/lz-whole"
IMAGE_DUMP_PATH=/home/dig/graph_data/image_data

# ArangoDB connection settings.
#ARANGO_HOST="120.76.198.171"
ARANGO_HOST="192.168.1.20"
ARANGO_PORT="18530"
ARANGO_DB="graphdata_20181009"

# mkdir -p is a no-op when the directory already exists, so the previous
# explicit '[ ! -d ]' guard was redundant; quoting protects the path.
mkdir -p "${IMAGE_DUMP_PATH}"

#######################################
# Import one dump file into an ArangoDB collection via arangoimp.
# Globals:   ARANGO_HOST, ARANGO_PORT, ARANGO_DB, WORK_DIR (read)
# Arguments: $1 - path of the data file to import
#            $2 - target collection name
#            $3 - collection kind label (DOC/EDGE), used only in the
#                 failure message
# Outputs:   echoes the "updated/replaced" summary line (empty on failure);
#            full arangoimp output goes to $WORK_DIR/log/arangoimp-$2.log
# Returns:   0 when the summary line was found, 1 otherwise
#######################################
function import_impl() {
    local file="$1"
    local collection="$2"
    local log="$WORK_DIR/log/arangoimp-${collection}.log"

    # NOTE(review): the password is passed on the command line while the
    # script runs under 'set -x', so it is visible in traces and 'ps'.
    # Redirect both streams straight to the log; the former
    # '2>&1 | cat > log' spawned a useless 'cat'.
    /home/dig/airflow/apps/graph-builder/bin/arangoimp \
        --server.endpoint "http+tcp://${ARANGO_HOST}:${ARANGO_PORT}" \
        --server.database "${ARANGO_DB}" \
        --server.request-timeout 12000 \
        --server.username haizhi \
        --server.password 'Haizhi!300680' \
        --threads 8 \
        --file "$file" \
        --collection "$collection" \
        --on-duplicate "update" > "$log" 2>&1

    # arangoimp prints an "updated/replaced: ..." summary on success; its
    # absence from the log means the import failed.
    local ret
    ret=$(grep "^updated/replaced" "$log")
    echo "$ret"
    if [[ -z "$ret" ]]; then
        echo "arangoimp import ${collection}-$3 failed"
        return 1
    fi
}

#######################################
# Import every dumped collection under ./image/ of the current directory.
# Directory names look like "<collection>_txt"; a name starting with an
# uppercase letter is labeled DOC, anything else EDGE (label is only used
# in log messages).  Each part file is retried up to 4 times; after 4
# consecutive failures the remaining parts of that collection are skipped.
# Globals:   uses import_impl (and its globals)
# Outputs:   progress/diagnostics to stdout
#######################################
function import_all() {
    cat ./source_version

    local dirs dir name kind part try_cnt
    # 'ls -rt' is kept deliberately: the dump directories must be imported
    # in modification-time order, which a plain glob cannot provide.
    dirs=$(ls -rt ./image/ | grep "_txt$")
    for dir in $dirs; do
        # Collection name is everything before the first "_txt".
        name=${dir%%_txt*}
        if [[ "$name" =~ ^[A-Z] ]]; then
            kind='DOC'
        else
            kind='EDGE'
        fi
        echo "$kind", "$name"

        # Glob the part files directly instead of parsing 'ls' output.
        for part in ./image/"$dir"/part*; do
            [[ -e "$part" ]] || continue
            try_cnt=0
            while (( try_cnt < 4 )); do
                if import_impl "$part" "$name" "$kind"; then
                    break
                fi
                echo "import $part failed"
                echo "try $try_cnt"
                try_cnt=$((try_cnt + 1))
            done
            if (( try_cnt >= 4 )); then
                # Give up on this collection's remaining parts.
                # (The original had an unreachable 'exit -1' after this
                # 'break'; if the intent was to abort the whole script,
                # swap the two — TODO confirm with the owner.)
                break
            fi
        done
    done
}

# Main driver: take the 5 most recent "parsed*" image versions on HDFS and
# import each one that (a) is not currently being produced (no
# PROCESSING_LOCK), (b) has a matching local dump directory, and (c) has
# not already been imported (no ARANGO_IMPORTED marker).
versions_path=$(hadoop fs -ls "${HDFS_PATH}"/parsed* | grep image | awk '{print $NF}' | sort | tail -n 5)
for vp in $versions_path; do
    echo "$vp"
    processing=$(hadoop fs -ls "$vp" | grep PROCESSING_LOCK)
    imported=$(hadoop fs -ls "$vp" | grep ARANGO_IMPORTED)
    # Version id is the 6th '/'-separated component of the HDFS path.
    version=$(echo "$vp" | awk -F'/' '{print $6}')
    echo "$version"
    if [[ -z "$processing" && -d "${IMAGE_DUMP_PATH}/data_${version}" && -z "$imported" ]]; then
        # Bail out on cd failure — otherwise import_all would run from the
        # previous working directory and mis-mark the wrong data.
        cd "${IMAGE_DUMP_PATH}/data_${version}" || continue
        import_all
        # Record completion both on HDFS (for other jobs) and locally.
        touch ARANGO_IMPORTED
        hadoop fs -put ./ARANGO_IMPORTED "${vp}/"
        #rm -rf ${IMAGE_DUMP_PATH}/data_${version}
        touch "${IMAGE_DUMP_PATH}/data_${version}/imped"
    fi
done

