#!/bin/bash
source "${LMD_BASE_INSTALL_SCRIPT_DIR}/global/alias.sh"
source "${LMD_BASE_INSTALL_SCRIPT_DIR}/global/conda_run.sh"
source "${LMD_BASE_INSTALL_SCRIPT_DIR}/global/conda_and_pip_settings.sh"
source "${LMD_BASE_INSTALL_SCRIPT_DIR}/global/print_sys_info.sh"

# Identity of this app inside the LMD install tree.
APP_INSTALL_NAME="chatterbox"
APP_TARGET_DIR_NAME="chatterbox"

# Absolute app directory; exported so child processes (python app) can read it.
export CURRENT_APP_PATH="$LMD_APPS_DIR/$APP_TARGET_DIR_NAME"
echo "App Dir: ${CURRENT_APP_PATH}"

# Abort early if the app dir is missing — everything below (file patching,
# launching) assumes cwd is the app directory.
cd "$CURRENT_APP_PATH" || { echo "cannot cd to $CURRENT_APP_PATH" >&2; exit 1; }

# Route Hugging Face traffic through the configured mirror, but only when the
# user has not already set an explicit endpoint.
if [ -n "$HF_MIRROR" ] && [ -z "$HF_ENDPOINT" ]; then
  export HF_ENDPOINT="$HF_MIRROR"
fi

# Keep Gradio's scratch files inside the app directory.
export GRADIO_TEMP_DIR="${CURRENT_APP_PATH}/temp"
# Local per-app virtualenv used by `conda run -p` at launch time.
CONDA_PREFIX="./venv"


# Hide every CUDA device when the user explicitly disabled GPU usage.
if [ "$LMD_LAUNCH_APP_OPT_USE_GPU" = "false" ]; then
  export CUDA_VISIBLE_DEVICES="-1"
fi

echo "CUDA_VISIBLE_DEVICES $CUDA_VISIBLE_DEVICES"

# Fall back to the plain TTS UI when no module was requested by the launcher.
if [ -z "$LMD_LAUNCH_APP_MODULE" ]; then
  echo "set default LMD_LAUNCH_APP_MODULE: gradio_tts_app"
  LMD_LAUNCH_APP_MODULE="gradio_tts_app"
else
  echo "LMD_LAUNCH_APP_MODULE $LMD_LAUNCH_APP_MODULE"
fi

# If the chatterbox weights were pre-downloaded into the modelscope-staged HF
# home, point the HF stack at that cache and run fully offline.
transformers_cache_dir="${CURRENT_APP_PATH}/tmp_hf_home_by_modelscope"
detect_model_dir="${transformers_cache_dir}/hub/models--ResembleAI--chatterbox"
if [ -d "$detect_model_dir" ]; then
  echo "found hf models in $transformers_cache_dir"
  export HF_HOME="$transformers_cache_dir"
  export TRANSFORMERS_CACHE="${transformers_cache_dir}/hub"
  export TRANSFORMERS_OFFLINE=1
  export HF_HUB_DISABLE_SYMLINKS=1
fi

# Derive the entry script from the selected module
# (e.g. gradio_tts_app -> gradio_tts_app.py). INIT_APP_FILE keeps the
# original name; APP_FILE may later be redirected to a patched temp copy.
INIT_APP_FILE="${LMD_LAUNCH_APP_MODULE}.py"
APP_FILE="$INIT_APP_FILE"

# macOS only (detected via SystemVersion.plist): on Apple Silicon, patch the
# app to use the Metal (mps) backend. Intel Macs run the app unmodified.
if [ -f /System/Library/CoreServices/SystemVersion.plist ]; then
  ARCH=$(uname -m)
  if [ "$ARCH" = "x86_64" ]; then
    echo "Intel-based Mac."
  else
    echo "Apple Silicon (M-series) Mac. Change device to mps"
    # Patch a throwaway copy (temp_*.py) so the original app file stays pristine.
    cp "$APP_FILE" "temp_${APP_FILE}"
    # BSD sed in-place edit (-i '' is macOS syntax): switch the device
    # fallback from cpu to mps.
    sed -i '' 's|else "cpu"|else "mps"|g' "temp_${APP_FILE}"

    # Stage the torch patch and the wrapper entry point next to the app.
    cp "$LMD_BASE_INSTALL_SCRIPT_DIR/apps/$APP_INSTALL_NAME/patch_torch.py" "$CURRENT_APP_PATH"
    cp "$LMD_BASE_INSTALL_SCRIPT_DIR/apps/$APP_INSTALL_NAME/lmd_run.py" "$CURRENT_APP_PATH"

    # Launch the patched copy; the proxy-rewrite step below keys off the
    # same temp_ naming, so keep it in sync if this changes.
    APP_FILE="temp_${APP_FILE}"
    if [ "$LMD_LAUNCH_APP_MODULE" = "multilingual_app" ]; then
      # change torch load default behavior to enable mps DEVICE.
      # (lmd_run.py presumably applies patch_torch before importing the app
      #  — confirm against lmd_run.py itself.)
      APP_FILE="lmd_run.py"
    fi
  fi
fi


# When serving multilingual_app behind a proxy (GITHUB_PROXY set), rewrite the
# demo-sample URL prefix inside a temp copy of the app so sample downloads go
# through the mirror instead of storage.googleapis.com.
if [ "$LMD_LAUNCH_APP_MODULE" = "multilingual_app" ] && [ -n "$GITHUB_PROXY" ]; then
  # Reuse the temp copy if the macOS patch step already created it;
  # otherwise make one so the original app file stays untouched.
  if [ -f "temp_${INIT_APP_FILE}" ]; then
      echo "temp file exist. dont copy."
    else
      echo "temp file does not exist. copy file."
      cp "$INIT_APP_FILE" "temp_${INIT_APP_FILE}" || exit 1
  fi

  # Swap the Google Storage sample prefix for the mirror host inside the copy.
  conda_run_cmd python "${LMD_BASE_INSTALL_SCRIPT_DIR}/global/replace_str.py" "temp_${INIT_APP_FILE}" "https://storage.googleapis.com/chatterbox-demo-samples/" "https://seemts.com/pkgs/app-files/chatterbox/chatterbox-demo-samples/"
  # lmd_run.py (the mps wrapper entry point) takes precedence when selected.
  if [ "$APP_FILE" != "lmd_run.py" ]; then
    APP_FILE="temp_${INIT_APP_FILE}"
  fi
fi



# pkuseg (Chinese segmentation) model cache, kept inside the app directory.
export PKUSEG_HOME="${CURRENT_APP_PATH}/pkuseg_home"
# Launch inside the local venv; --no-capture-output streams app logs live.
# Quote the prefix/file so paths containing spaces don't word-split (SC2086).
conda run -p "$CONDA_PREFIX" --no-capture-output python "$APP_FILE"
