#!/bin/bash
# Multi-site download helper.
# Usage: [sock_pxy=host:port] ./script [host:port]
# Pops URLs from the $todl queue, dispatches each to a same-named
# per-site handler, and records results in $dled / $errs.

# Proxy may come from the environment or from $1 (environment wins).
[ -n "$sock_pxy" ] || sock_pxy=$1
# Base curl command. Kept as a flat string on purpose: every handler
# expands it unquoted as `$curl ...`. -g disables URL globbing so the
# bracket/brace characters in links survive; -A sets the user agent.
curl="curl -g -A Mozilla "${sock_pxy:+"--socks5 "$sock_pxy}

# Screen offsets for the captcha viewer (feh) so parallel instances,
# distinguished by their proxy port, tile instead of overlapping.
xoff=0
yoff=0
if [ -n "$sock_pxy" ]; then
  ID=$((${sock_pxy##*:}-5000))   # instance index derived from proxy port
  xoff=$(((ID / 10) * 360))
  yoff=$(((ID % 10) * 88))
fi
#todl="urls.p"
todl="urls.to"    # queue of URLs still to download
dled="dled.txt"   # URLs downloaded successfully
errs="err.txt"    # URLs that failed (or proxy diagnostics)
#2>>$errs

# Hosts we know how to handle; each token names a handler function below.
sites="megaupload core.tora down.tora u.115 rapidshare hotfile duckload fileserve netload"
# Print the first token of $sites that occurs as a substring of URL $1.
# Prints nothing (and still returns 0) when no known site matches.
get_site() {
  local candidate
  for candidate in $sites; do
    if [[ $1 == *"$candidate"* ]]; then
      echo "$candidate"
      return
    fi
  done
}

# Extract the single-quoted value of the JavaScript variable named $1
# from the page stored in the global $logf (one value per matching line).
get_v() {
  grep "var $1" <<< "$logf" | grep -o "'[^']*'" | tr -d "'"
}

# Download a rapidshare.com link via their download_v1 API.
#   $1 - URL shaped http://rapidshare.com/files/<fileid>/<filename>
# Honors the server-mandated wait, then fetches with the issued dlauth token.
rapidshare() {
  local FILEID FILENAME PAGE RSHOST DLAUTH WTIME BASE_APIURL BASEURL
  # URL split on "/": field 5 is the file id, field 6 the filename.
  read FILEID FILENAME < <(echo "$1" | awk -F"/" '{print $5, $6}')
  BASE_APIURL="https://api.rapidshare.com/cgi-bin/rsapi.cgi?sub=download_v1&fileid=${FILEID}&filename=${FILENAME}"
  PAGE=$($curl -s "$BASE_APIURL")
  # The API reports errors in-band; echo the matching line and give up.
  echo "$PAGE" | grep "ERROR" && return 1

  # Expected reply line: "DL:host,dlauth,waittime[,...]"
  read RSHOST DLAUTH WTIME < \
    <(echo "$PAGE" | grep "^DL:" | cut -d":" -f2- | awk -F"," '{print $1, $2, $3}')
  [ -n "$RSHOST" ] && [ -n "$DLAUTH" ] && [ -n "$WTIME" ] ||
    { echo "bad page: $PAGE"; return 1; }
  echo sleep "$WTIME"
  sleep "$WTIME"
  BASEURL="http://$RSHOST/cgi-bin/rsapi.cgi?sub=download_v1"
  $curl -# -o "$FILENAME" "$BASEURL&fileid=$FILEID&filename=$FILENAME&dlauth=$DLAUTH"
}

# Download a megaupload.com link ($1). Folder links (?f=) are not
# downloaded: their member files are appended to the queue instead.
megaupload() {
  if [[ $1 == *"com/?f="* ]]; then
    echo "MU folder $1"
    # Folder XML lists the member files; queue every http link found.
    $curl -s "http://www.megaupload.com/xml/folderfiles.php?folderid=${1##*=}" | grep -o 'http[^"]*' >> "$todl"
    return
  fi
  logf=$($curl -s "$1")
  link=$(echo "$logf" | grep downloadlink | grep -o 'href="[^"]*' | cut -d'"' -f 2)
  [ -z "$link" ] && { echo no link found; return 1; }
  sleep 46   # site-imposed countdown before the link becomes valid
  if [[ $link == *"&#"* ]]; then
    # Filename carries HTML numeric entities; decode them to UTF-8.
    # NOTE(review): python2-only snippet (print statement, unichr) — confirm
    # a python2 interpreter is what `python` resolves to here.
    fname=$(python -c "import re; print re.sub('&#\w+;', lambda t: unichr(int(t.group(0)[2:-1])), '''${link##*/}''').encode('utf8')")
    $curl -C - -# "$link" -o "$fname"
  else
    $curl -C - -# "$link" -O
  fi
}

# Download a u.115 (115.com) link given in $1.
# Strategy: collect every candidate mirror link on the page, then
#   1) hand them all to aria2c as mirrors of one file,
#   2) fall back to the mirror whose URL contains "tel",
#   3) fall back to the first mirror that answers a HEAD request with a
#      Content-Disposition header (i.e. actually serves the file).
u.115() {
  logf=`$curl -s "$1"`
  # every href carrying a key1= parameter is a download mirror
  links=`echo "$logf" | grep 'key1=' | grep -o 'href=\"http[^"]*' | cut -d '"' -f 2`
  fname=`get_v file_name`
  # fix for v4.u.115
  # (newer page layout: fall back to the <title> text before the '|')
  [ -z "$fname" ] && fname=`echo "$logf" | grep -m 1 "<title>" | cut -d'|' -f 1 | cut -d'>' -f 2`
  # TAB-separated URIs on one aria2c input line are mirrors of the same file
  echo $links | tr ' ' '\t' | aria2c -i - -o "$fname" && return
  link=`echo "$links"|grep tel`
  [ -n "$link" ] && { $curl -# -C - "$link" -o "$fname"; return; }
  while read link; do
    # a mirror that sends Content-Disposition is actually serving the file
    $curl -s -I "$link" | grep "Content-Disposition"
    if [ $? -eq 0 ]; then
      $curl -# -C - "$link" -o "$fname"
      return
    fi
  done <<< "$links"
  return 1
}

# Download a down.tora link: the real URL is everything after the last '=',
# and the output filename is URL-unquoted, then transcoded GBK -> UTF-8.
down.tora() {
  fname=${1##*/}
  # NOTE(review): python2-only snippet (print statement, urllib.unquote) —
  # confirm a python2 interpreter is available as `python`.
  wget -c "${1##*=}" -O "$(python -c "import urllib; print urllib.unquote('''$fname''').decode('gbk').encode('utf8')")"
}

# Double-URL-encode '=', '+' and '/' in $1 and print the result
# ('=' -> %253D etc.: the '%' of the normal escape is itself escaped,
# as required by the core.tora path-style download URLs).
enc() {
  local out=$1
  out=${out//=/%253D}
  out=${out//+/%252B}
  out=${out//\//%252F}
  printf '%s\n' "$out"
}

# Download a core.tora link ($1): scrape the page variables that make up
# the per-session download path, wait the mandated countdown, then fetch.
core.tora() {
  logf=$($curl -s "$1")   # global on purpose: get_v parses $logf
  fname=$(echo "$logf" | grep 'span id="name"' | grep -o ">[^<]*<" | tr -d "><")
  timer=$(get_v cpc)                # countdown seconds before download works
  contextPath=$(get_v contextPath)
  xph=$(get_v xph)
  cph=$(get_v cph)
  nph=$(get_v nph)
  cpp=$(get_v cpp)
  npu=$(get_v npu)
  # old query-string form, kept for reference:
  #newurl=$contextPath"/download/downloadAction.do?method=download&cph="`enc $cph`"&xph="$xph"&nph="`enc $nph`"&cpp="`enc $cpp`"&npu="`enc $npu`
  newurl="$contextPath/download/mc/$cph/$xph/$nph/$cpp/$npu/"
  echo wait "$timer"
  sleep "$timer"
  $curl -# -C - -o "$fname" -L "$newurl"
}

# Solve a reCAPTCHA for the site whose key alias is $1 (du/hf/fs):
# fetch the challenge, display the captcha image with feh at this
# instance's screen offset, read the user's answer from stdin, and
# print "<challenge-id> <answer>" on stdout (prompt goes to stderr).
recap() {
  local du=6LfGxb4SAAAAAE8mO-WYX1YP8W3SvNFNLpwYygdF #duckload
  local hf=6LfRJwkAAAAAAGmA3mAiAcAsRsWvfkBijaZWEvkD #hotfile
  local fs=6LdSvrkSAAAAAOIwNj-IY-Q-p90hQrLinRIpZBPi #fileserve

  rcu="http://www.google.com/recaptcha/api/noscript?k=${!1}"   # ${!1}: local named by $1
  rcid=$($curl -s "$rcu" | grep recaptcha_challenge_field | grep -o 'value="[^"]*' | cut -d'"' -f 2)
  imgfn=logs/${sock_pxy##*:}_${rcid:6:8}.jpg
  $curl -s "http://www.google.com/recaptcha/api/image?c=$rcid" -o "$imgfn"
  feh -Zg 360x68+$xoff+$yoff "$imgfn" &   # viewer in background so we can prompt
  echo -n "enter $imgfn: " >&2            # stderr: stdout must carry only the result
  read -r res
  kill $!                                 # close the viewer once answered
  rm -f -- "$imgfn"
  echo "$rcid" "$res"
}

# Download a hotfile.com link ($1); list pages are expanded into the queue.
# Single files require a captcha: up to 3 attempts via recap.
hotfile() {
  if [[ $1 == *"com/list/"* ]]; then
    echo "Hotfile list $1"
    $curl -s "$1" | grep -o 'http://hotfile.com/dl/[^"]*' >> "$todl"
    return
  fi
  TRY=0
  while [ $TRY -lt 3 ]; do
    read rcid res < <(recap hf)
    # encode every space in the captcha answer, not just the first
    data="action=checkcaptcha&recaptcha_challenge_field=$rcid&recaptcha_response_field=${res// /+}"
    link=$($curl -s -d "$data" "$1" | grep -o 'http://hotfile.com/get/[^"]*')
    [ -n "$link" ] && { $curl -# -L "$link" -O; return; }
    ((TRY++))
  done
}

# Download a fileserve.com link ($1); list pages are expanded into the queue.
# The flow mirrors the site's JS: session cookie, captcha check, wait timer,
# then a POST that streams the file.
fileserve() {
  if [[ $1 == *"com/list/"* ]]; then
    echo "Fileserve list $1"
    $curl -s "$1" | grep -o '/file/[^"]*' | while read -r link; do
      echo "http://fileserve.com$link" >> "$todl"
    done
    return
  fi
  cookie=$($curl -s -I "$1" | grep -o "PHPSESSID[^;]*")
  echo "$cookie"
  fileid=$(echo "$1" | cut -d'/' -f 5)
  fname=$(echo "$1" | cut -d'/' -f 6)
  # URL without a trailing filename: scrape it from the page's <h1>
  [ -z "$fname" ] && fname=$($curl -s -b "$cookie" "$1" | grep -o "<h1>[^<]*" | cut -d'>' -f 2)
  read rcid res < <(recap fs)
  # encode every space in the captcha answer, not just the first
  data="recaptcha_challenge_field=$rcid&recaptcha_response_field=${res// /+}&recaptcha_shortencode_field=$fileid"
  $curl -s -b "$cookie" -d "$data" "http://www.fileserve.com/checkReCaptcha.php"
  timer=$($curl -s -b "$cookie" -d "downloadLink=wait" "$1")
  sleep "${timer:1:2}"   # reply embeds wait seconds at offset 1 — TODO confirm format
  sleep 2
  $curl -s -b "$cookie" -d "downloadLink=show" "$1"
  $curl -# -b "$cookie" -d "download=normal" -L -e "$1" "$1" -o "$fname"
}

# Placeholder handler for netload links — logs the site name, downloads nothing.
netload() {
  printf '%s\n' netload
}

# Download a duckload.com link ($1) by replaying its two hidden-form hops,
# sleeping 30s between them as the site requires. When the payload is a
# .txt link list, any megaupload links inside it are appended to the queue.
duckload() {
  cookie=$($curl -s -I "$1" | grep -o "PHPSESSID[^;]*")
  logf=$($curl -s -b "$cookie" "$1" | grep -A 1 'type="hidden"')
  # fold quoted tokens 2..4 (field name, value, next name) into "name=value&name"
  data=$(echo "$logf" | grep -o '"[^"]*"' | sed -n -e '2,4 s/"//gp' | sed -e 'N;s/\n/=/' -e 'N;s/\n/\&/')
  sleep 30
  logf=$($curl -s -b "$cookie" -d "$data=" "$1" | grep -A 1 'type="hidden"')
  data=$(echo "$logf" | grep -o '"[^"]*"' | sed -n -e '2,4 s/"//gp' | sed -e 'N;s/\n/=/' -e 'N;s/\n/\&/')
  sleep 30
  $curl -L -v -b "$cookie" -d "$data=" "$1" -O
  [[ ${1##*.} == txt ]] && grep megaupload.com "${1##*/}" >> "$todl"
}

# Succeed iff the configured SOCKS proxy has a listening local port.
# With no proxy configured, always succeed (direct connection assumed OK).
pxylive() {
  [ -z "$sock_pxy" ] && return
  # -F: match host:port as a fixed string, not a regex ('.' must be literal)
  netstat -tln | grep -qF -- "$sock_pxy"
}

# SIGTERM/SIGINT handler: record the in-flight URL (if any) as failed,
# then exit non-zero.
clean_up() {
  if [ -n "$url" ]; then
    echo "$url" >> $errs
  fi
  exit 1
}

trap clean_up SIGTERM SIGINT

# Main loop: pop one URL at a time off the shared queue (under flock so
# several instances can cooperate), dispatch it to its site handler, and
# record the outcome in $dled or $errs.
while true; do
  url=$(head -n 1 "$todl")
  [ -z "$url" ] && { sleep 60; continue; }
  ! pxylive && { echo "Proxy Error: $sock_pxy" >> "$errs"; exit 1; }
  # atomically re-read and remove the first queue line
  url=$(flock "$todl" bash -c "head -n 1 $todl;sed -i 1d $todl")
  dlsite=$(get_site "$url")
  # guard against an empty dlsite: the original would have executed
  # "$url" itself as a command; unknown sites now just go to $errs
  if [ -n "$dlsite" ] && $dlsite "$url"; then
    echo "$url" >> "$dled"
  else
    echo "$url" >> "$errs"
  fi
  url=''
  sleep 10
done
