source /etc/profile
set -e

# Resolve the directory containing this script to an absolute path.
# (Modernized: backticks replaced with $( ), which nests and reads cleanly.)
bin=$(dirname "$0")
bin=$(cd "$bin" && pwd)

# An action argument is mandatory; print usage and bail out otherwise.
if [ -z "$1" ]; then
  echo "bash ${bin}/data-engine.sh [start|stop|status] [data-XXX|data-XXXX.YYY-server-rust]?"
  exit 1
fi

action=$1
selector=$2

# Accumulates the "<category>/<component>" ids selected below.
components=()

# Detect the host OS; used later to pick the matching vector binary.
# NOTE(review): OS stays unset on any other platform — confirm only Linux/macOS are supported.
unameOut=$(uname -s)
case "${unameOut}" in
    Linux*)     OS=linux;;
    Darwin*)    OS=macos;;
esac

################################
#  data-api
################################
# A component is included when: no selector was given (deploy everything),
# the selector names the whole "data-api" group, or it names this exact
# component. Deprecated/ambiguous `[ ... -o ... ]` replaced with `[[ ... || ... ]]`.

# 1. data-api.api-proxy
if [[ -z "$selector" || "$selector" = "data-api" || "$selector" = "api-proxy.postgrest-server-shell" ]]; then components+=("data-api/api-proxy.postgrest-server-shell"); fi
if [[ -z "$selector" || "$selector" = "data-api" || "$selector" = "api-proxy.hdfs-minio-server-shell" ]]; then components+=("data-api/api-proxy.hdfs-minio-server-shell"); fi
if [[ -z "$selector" || "$selector" = "data-api" || "$selector" = "api-proxy.metabase-server-shell" ]]; then components+=("data-api/api-proxy.metabase-server-shell"); fi

# 2. data-api.api-infra
if [[ -z "$selector" || "$selector" = "data-api" || "$selector" = "api-infra.signature-server-rust" ]]; then components+=("data-api/api-infra.signature-server-rust"); fi
if [[ -z "$selector" || "$selector" = "data-api" || "$selector" = "api-infra.meta-server-java" ]]; then components+=("data-api/api-infra.meta-server-java"); fi


# 3. data-api.api-rpc (disabled)
# if [[ -z "$selector" || "$selector" = "data-api" || "$selector" = "api-rpc.query-server-rust" ]]; then components+=("data-api/api-rpc.query-server-rust"); fi


# 0. data-api.api-gateway
if [[ -z "$selector" || "$selector" = "data-api" || "$selector" = "api-gateway.push-gateway-rust" ]]; then components+=("data-api/api-gateway.push-gateway-rust"); fi
if [[ -z "$selector" || "$selector" = "data-api" || "$selector" = "api-gateway.api-gateway-rust" ]]; then components+=("data-api/api-gateway.api-gateway-rust"); fi

################################
#  data-exchange
################################
# Selection rule mirrors the data-api section, with one extra level: a
# selector may name the project ("exchange-agent.push-agent-shell") to get
# every branch of it, or a single branch ("...push-agent-shell.minio").
# Deprecated `[ ... -o ... ]` replaced with `[[ ... || ... ]]`.

# 1. data-exchange.exchange-agent
if [[ -z "$selector" || "$selector" = "data-exchange" || "$selector" = "exchange-agent.push-agent-shell" || "$selector" = "exchange-agent.push-agent-shell.kafka_staging_avro" ]]; then
    components+=("data-exchange/exchange-agent.push-agent-shell.kafka_staging_avro")
fi
if [[ -z "$selector" || "$selector" = "data-exchange" || "$selector" = "exchange-agent.push-agent-shell" || "$selector" = "exchange-agent.push-agent-shell.kafka_staging_json" ]]; then
    components+=("data-exchange/exchange-agent.push-agent-shell.kafka_staging_json")
fi
if [[ -z "$selector" || "$selector" = "data-exchange" || "$selector" = "exchange-agent.push-agent-shell" || "$selector" = "exchange-agent.push-agent-shell.minio" ]]; then
    components+=("data-exchange/exchange-agent.push-agent-shell.minio")
fi

# 2. data-exchange.exchange-gate
if [[ -z "$selector" || "$selector" = "data-exchange" || "$selector" = "exchange-gate.push-gate-java" || "$selector" = "exchange-gate.push-gate-java.streaming" ]]; then
    components+=("data-exchange/exchange-gate.push-gate-java.streaming")
fi
if [[ -z "$selector" || "$selector" = "data-exchange" || "$selector" = "exchange-gate.push-gate-java" || "$selector" = "exchange-gate.push-gate-java.batch" ]]; then
    components+=("data-exchange/exchange-gate.push-gate-java.batch")
fi
if [[ -z "$selector" || "$selector" = "data-exchange" || "$selector" = "exchange-gate.aliyun-gate-rust" ]]; then components+=("data-exchange/exchange-gate.aliyun-gate-rust"); fi

################################
#  data-pipeline
################################
# Flink ETL branches are "<mode>:<job-id>" (mode is streaming or batch);
# selecting "pipeline-streaming.flink-etls" picks every enabled branch.
# Deprecated `[ ... -o ... ]` replaced with `[[ ... || ... ]]`.

# 1. data-pipeline.pipeline_online
#if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-online.online-server-rust" ]]; then components+=("data-pipeline/pipeline-online.online-server-rust"); fi
if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-online.online-server-java" ]]; then components+=("data-pipeline/pipeline-online.online-server-java"); fi
if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-online.enum-server-java" || "$selector" = "pipeline-online.enum-server-java.dim" ]]; then
    components+=("data-pipeline/pipeline-online.enum-server-java.dim")
fi
# if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-online.enum-server-java" || "$selector" = "pipeline-online.enum-server-java.menu:dashboard" ]]; then
#     components+=("data-pipeline/pipeline-online.enum-server-java.menu:dashboard")
# fi

# 2. data-pipeline.pipeline_bridge

# 3. data-pipeline.pipeline_streaming
if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-streaming.flink-etls" || "$selector" = "pipeline-streaming.flink-etls.streaming:dc_event" ]]; then
    components+=("data-pipeline/pipeline-streaming.flink-etls.streaming:dc_event")
fi
if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-streaming.flink-etls" || "$selector" = "pipeline-streaming.flink-etls.streaming:ip_dt_dim" ]]; then
    components+=("data-pipeline/pipeline-streaming.flink-etls.streaming:ip_dt_dim")
fi
# if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-streaming.flink-etls" || "$selector" = "pipeline-streaming.flink-etls.streaming:dashboard" ]]; then
#     components+=("data-pipeline/pipeline-streaming.flink-etls.streaming:dashboard")
# fi

if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-streaming.flink-etls" || "$selector" = "pipeline-streaming.flink-etls.batch:dc_event_cold_start" ]]; then
    components+=("data-pipeline/pipeline-streaming.flink-etls.batch:dc_event_cold_start")
fi
if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-streaming.flink-etls" || "$selector" = "pipeline-streaming.flink-etls.batch:dc_event_hot_start" ]]; then
    components+=("data-pipeline/pipeline-streaming.flink-etls.batch:dc_event_hot_start")
fi
if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-streaming.flink-etls" || "$selector" = "pipeline-streaming.flink-etls.batch:dc_event_use_end" ]]; then
    components+=("data-pipeline/pipeline-streaming.flink-etls.batch:dc_event_use_end")
fi
if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-streaming.flink-etls" || "$selector" = "pipeline-streaming.flink-etls.batch:dc_event_click" ]]; then
    components+=("data-pipeline/pipeline-streaming.flink-etls.batch:dc_event_click")
fi
if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-streaming.flink-etls" || "$selector" = "pipeline-streaming.flink-etls.batch:dc_event_view_page" ]]; then
    components+=("data-pipeline/pipeline-streaming.flink-etls.batch:dc_event_view_page")
fi
if [[ -z "$selector" || "$selector" = "data-pipeline" || "$selector" = "pipeline-streaming.flink-etls" || "$selector" = "pipeline-streaming.flink-etls.batch:dc_event_custom" ]]; then
    components+=("data-pipeline/pipeline-streaming.flink-etls.batch:dc_event_custom")
fi




################################
#  MAIN
################################
# Abort when the selector matched nothing. The original tested "$components",
# which only inspects element 0 of the array; checking the element count is
# explicit and equivalent here (entries are never empty strings).
if [ "${#components[@]}" -eq 0 ]; then
    echo "NO TARGET MATCH, PLEASE CHECK THE NAME!"
    exit 1
fi


. ${bin}/meta-lib-static/env/.env
STATIC_DIR=$ACCESS_STATIC_DIR
export ACCESS_LOG_LEVEL ACCESS_EXTERNAL_DP_ELK_NODE ACCESS_ENV
API_SERVER_NODES=$(echo "${COORDINATOR_GATEWAY_HOSTS}" | sed "s/,/:${COORDINATOR_GATEWAY_HOSTS},/g"):${COORDINATOR_GATEWAY_PORT}

# ==============================================================================
# MAIN dispatch: start | start-foreground | stop | status | smoke
#
# Component id format is "<category>/<group>.<project>[.<branch>]", split on '.':
#   CATEGORY  = field 1    e.g. "data-pipeline/pipeline-streaming"
#   PROJECT   = field 2    e.g. "flink-etls"
#   COMPONENT = fields 1-2 (artifact path relative to ${bin})
#   BRANCH    = field 3    e.g. "streaming:dc_event" ("<mode>:<job-id>" for flink)
#
# Flink components (CATEGORY == data-pipeline/pipeline-streaming) run on YARN;
# their batch jobs are scheduled through the user's crontab. Every other
# component runs as a per-user systemd service whose output is piped through
# the vector agent to ELK.
# ==============================================================================
if [ "$action" = "start" ]; then
    for each_component in  "${components[@]}"; do
        CATEGORY=$(echo "${each_component}" | cut -d. -f1)
        PROJECT=$(echo "${each_component}" | cut -d. -f2)
        COMPONENT=$(echo "${each_component}" | cut -d. -f1-2)
        BRANCH=$(echo "${each_component}" | cut -d. -f3)
        # PID of any live process whose command line matches "<component>*<branch>"
        # (the 2>/dev/null applies to awk only; harmless).
        PID=$(ps -ef | grep  "${COMPONENT}.*$BRANCH" | grep -v grep | awk '{print $2}' 2> /dev/null)
        if [ "$CATEGORY" == "data-pipeline/pipeline-streaming" ]; then
            # Flink job: BRANCH is "<mode>:<job-id>", mode is batch or streaming.
            MODE=$(echo $BRANCH | cut -d: -f1)
            SID=$(echo $BRANCH | cut -d: -f2)
            each_component_short=$(echo $each_component | cut -d/ -f2)
            if [ "$MODE" == "batch" ]; then
                # Batch job "start" == install a crontab line that re-invokes this
                # script in start-foreground mode with the _DT_ date placeholder.
                CRON_CMD=$(crontab -l | grep "${each_component}" | cut -d' ' -f9-)
                if [ "$CRON_CMD" = "" ]; then
                    crontab -l | cat > /tmp/larluo.start.cron
                    # Cron schedule comes from env var PIPELINE_CRON_<JOBID upcased>,
                    # read via indirect expansion ${!CRON_ENV}.
                    CRON_ENV=PIPELINE_CRON_$(echo $SID | tr '[a-z]' '[A-Z]')
                    echo "${!CRON_ENV}  bash ${bin}/data-engine.sh start-foreground ${each_component_short} _DT_" >> /tmp/larluo.start.cron
                    crontab /tmp/larluo.start.cron
                    rm -rf /tmp/larluo.start.cron
                else
                    echo " [*] ${each_component} -> already running"
                fi
            else
                # Streaming job: submit only when no YARN application with this
                # name is already listed.
                APP_NAME=${ACCESS_ENV}.${PROJECT}.${BRANCH}
                YID=$(yarn application -list | grep ${APP_NAME} | cut -f1)
                if [ "$YID" = "" ]; then 
                    bash ${bin}/data-engine.sh start-foreground ${each_component_short}
                else
                    echo " [*] ${each_component} -> already running"
                fi
            fi
        else
            # Non-flink component: wrap it in a per-user systemd service.
            if [ "$PID" = "" ]; then
              echo " [*] start ${each_component}..."
              # Launcher language is the last '-'-separated token of the project
              # name (e.g. "meta-server-java" -> java).
              XLANG=$(echo "${each_component}" | cut -d. -f2 | awk -F- '{print $NF}')
              if [ "$XLANG" = "rust" ]; then
                  CMD="RUST_LOG=$ACCESS_LOG_LEVEL ${bin}/${COMPONENT} $BRANCH"
              elif [ "$XLANG" = "java" ]; then
                  CMD="java -cp .:meta-lib-static/properties:${bin}/${COMPONENT}.jar App $BRANCH"
              elif [ "$XLANG" = "shell" ]; then
                  CMD="bash ${bin}/${COMPONENT}.sh $BRANCH"
              fi
              export COMPONENT=${each_component}

              # nohup bash -c "$CMD 2>&1 | ${STATIC_DIR}/$OS/vector-0.12.1 -c meta-lib-static/toml/elk.toml" 2>&1 > /dev/null &

              each_component_short=$(echo $each_component | cut -d/ -f2)
              # Full service command: run CMD in ${bin}, pipe output through the
              # vector log shipper. '&' is escaped so sed (below) keeps it literal.
              SERVICE_CMD="/bin/bash -c 'cd $bin \&\& HADOOP_HOME=$HADOOP_HOME HADOOP_CONF_DIR=$HADOOP_CONF_DIR $CMD 2>\&1 | ACCESS_ENV=$ACCESS_ENV COMPONENT=$COMPONENT ACCESS_EXTERNAL_DP_ELK_NODE=$ACCESS_EXTERNAL_DP_ELK_NODE ${STATIC_DIR}/$OS/vector-0.12.1 -c ${bin}/meta-lib-static/toml/elk.toml'"
              mkdir -p ~/.config/systemd/user
              # Render the unit template ({{SERVICE_CMD}} placeholder) and install it.
              cat ${bin}/meta-lib-static/systemd.user/default.service.hbr | sed "s~{{SERVICE_CMD}}~$SERVICE_CMD~" | tee ~/.config/systemd/user/${each_component_short}.service
              systemctl --user enable ${each_component_short} --now
            else
              echo " [*] ${each_component} -> already running"
            fi
        fi
    done
elif [ "$action" = "start-foreground" ]; then
        # Foreground mode: run exactly one component in this shell. Used directly
        # and re-entered from the "start" branch (cron / streaming submit).
        export ACCESS_LOG_LEVEL=INFO
        # NOTE(review): assumes the selector matched exactly one component —
        # "${components[@]}" is flattened into a single string here.
        each_component="${components[@]}"
        CATEGORY=$(echo "${each_component}" | cut -d. -f1)
        PROJECT=$(echo "${each_component}" | cut -d. -f2)
        COMPONENT=$(echo "${each_component}" | cut -d. -f1-2)
        BRANCH=$(echo "${each_component}" | cut -d. -f3)

        if [ "$CATEGORY" == "data-pipeline/pipeline-streaming" ]; then
            MODE=$(echo $BRANCH | cut -d: -f1) 
            if [ "$MODE" == "batch" ]; then
                # Batch jobs need a date argument: $3 = start date (or the _DT_
                # placeholder meaning "yesterday"), optional $4 = end date.
                if [ "$3" == "" ]; then
                    # NOTE(review): message typo "FOREGROND" left as-is (runtime string).
                    echo "DATE PARAMETER IS MISSING FOR FOREGROND MODE!"
                    exit 0
                fi

                START_DT=$3
                DT_RNG=$3_$4
                if [ "$3" == "_DT_" ]; then
                    START_DT=$(date --date="yesterday" '+%Y%m%d')
                    DT_RNG=$START_DT
                fi
                # Date range is part of the app name so reruns are distinguishable.
                APP_NAME=${ACCESS_ENV}.${PROJECT}.${BRANCH}_${DT_RNG}
            else
                APP_NAME=${ACCESS_ENV}.${PROJECT}.${BRANCH}
            fi
            export HADOOP_CLASSPATH=$(hadoop classpath)
            # Stage flink runtime libs on HDFS once (yarn.provided.lib.dirs below).
            hdfs dfs -mkdir -p /user/hive/${ACCESS_ENV}/jars/lib
            if ! hdfs dfs -test -e /user/hive/${ACCESS_ENV}/jars/lib/flink-dist*; then
                hdfs dfs -put $FLINK_HOME/lib/* /user/hive/${ACCESS_ENV}/jars/lib
            fi
            # Upload freshly-built deps/job jars if present locally, then remove
            # the local copies so a later run does not re-upload stale artifacts.
            if [ -e ${bin}/data-pipeline/pipeline-streaming.flink-deps.jar ]; then
              hdfs dfs -rm -f /user/hive/${ACCESS_ENV}/jars/lib/pipeline-streaming.flink-deps.jar
              hdfs dfs -put ${bin}/data-pipeline/pipeline-streaming.flink-deps.jar /user/hive/${ACCESS_ENV}/jars/lib/pipeline-streaming.flink-deps.jar
              rm -rf ${bin}/data-pipeline/pipeline-streaming.flink-deps.jar
            fi
            if [ -e ${bin}/${COMPONENT}.jar ]; then
              hdfs dfs -rm -f /user/hive/${ACCESS_ENV}/jars/${PROJECT}.jar
              hdfs dfs -put ${bin}/${COMPONENT}.jar /user/hive/${ACCESS_ENV}/jars/${PROJECT}.jar
              rm -rf ${bin}/${COMPONENT}.jar
            fi
            # Submit to YARN in application mode; entry class App receives the
            # branch, the API server node list, and the date arguments.
            HADOOP_USER_NAME=hadoop flink run-application -t yarn-application \
                    -Dyarn.application.name=${APP_NAME} \
                    -Dyarn.provided.lib.dirs=/user/hive/${ACCESS_ENV}/jars/lib \
                    -c App hdfs:///user/hive/${ACCESS_ENV}/jars/${PROJECT}.jar $BRANCH $API_SERVER_NODES $START_DT $4
            # Batch only: poll until the YARN app disappears, then refresh hive
            # partitions for the table named by the job id.
            if [ "$MODE" == "batch" ]; then
            	while true; do
            	  YID=$(yarn application -list | grep ${APP_NAME} | cut -f1)
            	  if [ "$YID" = "" ]; then
            	      TABLE_NAME=$(echo ${BRANCH} | cut -d: -f2-)
            	      echo msck repair table data_pipeline_${ACCESS_ENV}.${TABLE_NAME}
            	      hive -e "msck repair table data_pipeline_${ACCESS_ENV}.${TABLE_NAME}"
            	      break
            	  else
            	      echo "wait job to finish [10s]..."
            	      sleep 10
            	  fi
            	done
            fi
        else
            # Non-flink component: exec the launcher directly in the foreground.
            PID=$(ps -ef | grep  "$COMPONENT.*$BRANCH" | grep -v grep | awk '{print $2}')
            if [ "$PID" = "" ]; then
              echo " [*] start ${each_component}..."
              XLANG=$(echo "${each_component}" | cut -d. -f2 | awk -F- '{print $NF}')
              if [ "$XLANG" = "rust" ]; then
                  RUST_LOG=$ACCESS_LOG_LEVEL ${bin}/${COMPONENT} $BRANCH
              elif [ "$XLANG" = "java" ]; then
                  java -cp .:meta-lib-static/properties:${bin}/${COMPONENT}.jar App $BRANCH
              elif [ "$XLANG" = "shell" ]; then
                  bash ${bin}/${COMPONENT}.sh $BRANCH
              fi
            else
              echo " [*] ${each_component} -> already running"
            fi
        fi
elif [ "$action" = "stop" ]; then
    # Stop is best-effort: disable -e so one failing component does not abort
    # the loop over the rest.
    set +e
    for each_component in  "${components[@]}"; do
        PROJECT=$(echo "${each_component}" | cut -d. -f2)
        CATEGORY=$(echo "${each_component}" | cut -d. -f1)
        COMPONENT=$(echo "${each_component}" | cut -d. -f1-2)
        BRANCH=$(echo "${each_component}" | cut -d. -f3)

        if [ "$CATEGORY" == "data-pipeline/pipeline-streaming" ]; then
            MODE=$(echo $BRANCH | cut -d: -f1) 

            if [ "$MODE" = "batch" ]; then
                # Batch: remove the crontab entry installed by "start".
                each_component_short=$(echo $each_component | cut -d/ -f2)
                CRON_CMD=$(crontab -l | grep "${each_component_short}" | cut -d' ' -f9-)
                if [ "$CRON_CMD" = "" ]; then
                    echo " [*] ${each_component} -> already stop"
                else
                    echo " [*] ${each_component} -> killing"
                    crontab -l | grep -v ${each_component_short} | cat > /tmp/larluo.killing.cron
                    crontab /tmp/larluo.killing.cron
                    rm -rf /tmp/larluo.killing.cron
                fi
            else
                # Streaming: kill the matching YARN application, if any.
                APP_NAME=${ACCESS_ENV}.${PROJECT}.${BRANCH}
                YID=$(yarn application -list | grep ${APP_NAME} | cut -f1)
                if [ "$YID" = "" ]; then
                    echo " [*] ${each_component} -> already stop"
                else
                    echo " [*] ${each_component} -> killing"
                    yarn application -kill ${YID}
                fi
            fi
        else
            # Non-flink: tear down the per-user systemd service.
            each_component_short=$(echo $each_component | cut -d/ -f2)
            systemctl --user disable ${each_component_short} --now
        fi
    done
    set -e
elif [ "$action" = "status" ]; then
    # Status mirrors the stop-branch detection logic, read-only:
    # crontab entry (batch), YARN app (streaming), or live process (other).
    for each_component in  "${components[@]}"; do
        PROJECT=$(echo "${each_component}" | cut -d. -f2)
        CATEGORY=$(echo "${each_component}" | cut -d. -f1)
        COMPONENT=$(echo "${each_component}" | cut -d. -f1-2)
        BRANCH=$(echo "${each_component}" | cut -d. -f3)

        if [ "$CATEGORY" == "data-pipeline/pipeline-streaming" ]; then
            MODE=$(echo $BRANCH | cut -d: -f1) 

            if [ "$MODE" = "batch" ]; then
                each_component_short=$(echo $each_component | cut -d/ -f2)
                CRON_CMD=$(crontab -l | grep "${each_component_short}" | cut -d' ' -f9-)
                if [ "$CRON_CMD" = "" ]; then
                    echo " [ ] ${each_component} -> stop"
                else
                    echo " [*] ${each_component} -> running"
                fi
            else
                APP_NAME=${ACCESS_ENV}.${PROJECT}.${BRANCH}
                YID=$(yarn application -list | grep ${APP_NAME} | cut -f1)
                if [ "$YID" = "" ]; then
                    echo " [ ] ${each_component} -> stop"
                else
                    echo " [*] ${each_component} -> running"
                fi
            fi
        else
            PID=$(ps -ef | grep  "$COMPONENT.*$BRANCH" | grep -v grep | awk '{print $2}')
            if [ "$PID" = "" ]; then
                echo " [ ] ${each_component} -> stop"
            else
                echo " [*] ${each_component} -> running"
            fi
        fi
    done
elif [ "$action" = "smoke" ]; then
    # Placeholder: "smoke" is accepted but currently a no-op.
    :
fi

