#!/usr/bin/env zsh
# twitrip — rip every image posted by a Twitter user, by indexing posts with
# twint and resolving/downloading media through a Nitter mirror.
# Usage: twitrip <username>
#
# -u: abort on unset parameters; -o pipefail: a pipeline fails when any stage
# fails. NOTE(review): no -e, so individual failed commands do not abort.
set -u -o pipefail

#==============================================================================
# Initialization Table
#==============================================================================

# URL scheme used to build the Nitter base URL below.
_proto_scheme="https"
# Nitter (Twitter front-end) instance used for media resolution/download.
_nitter_host="${_proto_scheme}://nitter.snopyta.org"

#==============================================================================
# Error Table
#==============================================================================

_check_deps()
{
  # Verify that every external tool this script shells out to is on PATH.
  # Delegates to __err_dep_path (which prints and exits) on the first miss.
  #
  # Fixes vs. original: the loop read `deps`/`dep_ct` while the variables
  # were named `_deps`/`_dep_ct`, so no dependency was ever checked (and
  # `${#deps[@]}` tripped `set -u`); `declare -i local` declared a variable
  # literally named `local`; `which` output parsing replaced by the
  # portable `command -v` existence check.
  local _dep
  local -a _deps=(curl twint wget)

  for _dep in "${_deps[@]}"; do
    command -v "${_dep}" >/dev/null 2>&1 || __err_dep_path "${_dep}"
  done
}

_err_username()
{
  # Print a username-related error and abort the script.
  # $1 - error code: 1 = illegal character in username, anything else = usage.
  #
  # Fixes vs. original: diagnostics now go to stderr; bare `exit` exited
  # with status 0 on an error path — now exits 1; `declare -i local`
  # (which declared a variable named `local`) replaced with `local -i`.
  local -i _err_code="${1}"

  case "${_err_code}" in
    1) echo "[ERR.] Illegal character in username string." >&2 ;;
    *) echo "[ERR.] usage: twitrip <username>" >&2 ;;
  esac

  exit 1
}
      
__err_dep_path()
{
  # Report a missing dependency and abort.
  # $1 - name of the executable that was not found on PATH.
  #
  # Fixes vs. original: message now goes to stderr; bare `exit` returned
  # status 0 even though this is an error path — now exits 1.
  echo "[ERR.] ${1} not found. Ensure it is installed and in your PATH." >&2
  exit 1
}


#==============================================================================
# Executive Table
#==============================================================================
      
_directory_init()
{
  # Create the per-user download directory and publish its path in the
  # global $_user_dir for the download loop.
  echo "[${USERNAME}] Creating directory"

  _user_dir="${HOME}/rips/twitter/${USERNAME}"
  mkdir -p "${_user_dir}"
}

__unshorten()
{
  # Resolve the pic.twitter.com short link held in the global $_short_url
  # by driving the wheregoes.com redirect tracer, then rewrite the final
  # twitter.com URL onto the Nitter mirror. Result lands in the global
  # $_media_url.
  #
  # Pipeline: POST the short URL to the tracer, keep the result table row,
  # extract URL-looking tokens, strip trailing "<br" markup, drop the
  # input URL itself, keep the last (final) hop, swap the host.
  # NOTE(review): the sed target is hard-coded to nitter.snopyta.org
  # rather than derived from $_nitter_host — keep the two in sync.
  echo "[${USERNAME}] Resolving URL"
  
  _media_url=$(curl 'https://wheregoes.com/retracer.php' 2>/dev/null \
                -H  'Content-Type: application/x-www-form-urlencoded' \
                --data-raw "traceme=${_short_url}&url=" \
                | grep tracecontent \
                | grep -Eo 'http[^ ]{1,}' \
                | sed 's|<br||g' \
                | grep -v "${_short_url}" \
                | tail -n 1 \
                | sed 's|twitter.com|nitter.snopyta.org|g')
}

_get_img_posts()
{
  # Scrape $USERNAME's timeline with twint and collect every
  # pic.twitter.com short link into the global array $_img_posts.
  # NOTE(review): relies on unquoted word-splitting of the command
  # substitution to build the array; safe here only because URLs contain
  # no whitespace.
  echo "[${USERNAME}] Building post index"
                  
  _img_posts=($(twint -u "${USERNAME}" --images 2>/dev/null \
                 | grep -Eo 'pic.twitter.com[^ ]{1,}'))
}

_iterative_download()
{
  # For each short link in the global $_img_posts: resolve it to a Nitter
  # media page (__unshorten), scrape the page for original-resolution image
  # paths, and wget each one into $_user_dir with a
  # <user>_<statusid>_<param> filename.
  #
  # NOTE(review): `declare local X` inside a zsh function declares a
  # variable literally named `local` in addition to X (zsh `declare` is
  # already function-local); _username is also declared twice below.
  declare local    _username
  declare local    _img_url
  declare local    _img_param
  declare local    _img_filename
  declare -a local _media_post_array
  declare -i local _status_no
  declare local    _username

  # zsh range with a variable bound; zsh arrays are 1-indexed.
  for i in {1..${#_img_posts[@]}}; do
    _short_url="${_img_posts[${i}]}"
  
    # Sets $_media_url from $_short_url.
    __unshorten
  
    # Status id: the path segment immediately after ".../status/".
    _status_no=$(cut -d '/' -f1 <<< ${_media_url##*status/})
    # Username: first path segment after the Nitter host.
    _username=$(cut -d '/' -f1 <<<  ${_media_url##$_nitter_host/})
    # Collect "/...orig" media paths from the Nitter page.
    # NOTE(review): the stray `/dev/null` after `-qO-` is passed to wget
    # as a second URL argument — presumably unintended; verify.
    _media_post_array=($(wget $_media_url -qO- /dev/null \
                          | grep -Eo '/[^ ]{1,}orig'))

    echo "[${_username}] post:${_status_no} [${i}/${#_img_posts[@]}]"

    for j in {1..${#_media_post_array[@]}}; do
      _img_url="${_nitter_host}${_media_post_array[${j}]}"
      # Nested zsh expansion: strip everything up to the last '%2F' or '/',
      # then drop a trailing '.jpg'.
      _img_param="${${_img_url##*(%2F|/)}%.jpg}"
      # Drop any URL-encoded query string ('%3F' == '?').
      _img_param=$(awk -F '%3F' '{print $1}' <<< "${_img_param}")
      _img_filename="${_username}_${_status_no}_${_img_param}"

      echo "[${_status_no}] Downloading image [${j}/${#_media_post_array[@]}]"
                  
      # NOTE(review): -O together with -nc — wget ignores -nc when -O is
      # given, so existing files are re-downloaded/overwritten; confirm.
      wget -U mozilla $_img_url \
        -O "${_user_dir}"/"${_img_filename}" \
        -nc 2>/dev/null
    done 
  done
}
                  
                  
                  
###############################################################################
# Main Function
###############################################################################

_main()
{
  # Validate the username argument, then run the pipeline:
  # dependency check -> directory setup -> post index -> downloads.
  #
  # Fixes vs. original: `${1}` with zero arguments trips `set -u` before
  # the usage message can print — use `${1-}`; `err_username` was a typo
  # for `_err_username` (undefined command).
  [[ -z "${1-}" ]] && _err_username 2 || :

  # Reject anything but alphanumerics, then record the target user.
  # NOTE(review): Twitter usernames may also contain '_', which this
  # check rejects — confirm whether that is intentional.
  [[ "${1}" =~ [^a-zA-Z0-9] ]] && _err_username 1 || USERNAME="${1}"

  _check_deps
  _directory_init
  _get_img_posts
  _iterative_download
}
                  
                  
                  
###############################################################################
# Main Call
###############################################################################

# Entry point. $USERNAME is set inside _main, so the farewell below only
# expands cleanly (under set -u) after a successful run.
_main "$@"
                  
echo "[${USERNAME}] Exiting"
