#!/bin/bash
#
# Author:   Joshua Chen <iesugrace@gmail.com>
# Date:     2017-08-02 01:04:57
# Location: Shenzhen
# Desc: fetch web page from lagou.com
#

# 1. 每次從代理IP 地址池中取出一個狀態爲活躍的IP。
# 2. 每組5次請求一次性用curl 完成，然後把該代理IP的活躍時間點後移61秒。
# 3. 如果請求結果是302，則稍等20秒再重試失敗的url，
#    因爲有可能別人也在使用同一個代理IP訪問拉勾。
# 4. 如果請求超時，則稍等5秒再重試，3次失敗後把代理標記爲不可用。
# 5. 如果IP 地址池無可用IP 地址，則稍等5秒重試。

#
# Set the 'next' and 'stop' from arguments,
# if a pending task exists, continue it from the stop point.
#
# Globals written: start, stop, next
# Globals read:    task_file, next_id_file
# Arguments: $1 - first page id, $2 - amount of pages
#            (both required when no pending task exists)
# Exits: 0 when a pending task is already finished,
#        1 on usage / validation errors.
#
init() {
    if test -f "$task_file"
    then
        # Resume the pending task: line 1 is start, line 2 is stop.
        start=$(head -n1 "$task_file")
        stop=$(tail -n1 "$task_file")

        if test -f "$next_id_file"
        then
            next=$(<"$next_id_file")
        else
            next=$start
        fi

        if test "$next" -gt "$stop"
        then
            echo "task finished"
            set_task_finished
            exit 0
        fi
    else
        if test $# -ne 2
        then
            # both arguments are mandatory, so no brackets in the usage
            echo "usage: $(basename "$0") start amount" >&2
            exit 1
        fi
        # anchor both ends so values like '12abc' are rejected
        if ! grep -qE '^[0-9]+$' <<< "$1"
        then
            echo "invalid value of argument 'start': $1" >&2
            exit 1
        fi
        if ! grep -qE '^[0-9]+$' <<< "$2"
        then
            echo "invalid value of argument 'amount': $2" >&2
            exit 1
        fi
        start=$1
        stop=$((start+$2-1))
        next=$start
        set_task "$start" "$stop" "$next"
    fi
}

#
# Record the task info to files.
# $task_file gets two lines: start id and stop id;
# $next_id_file gets the next id to fetch.
#
set_task() {
    # truncate first ('>') so a stale task file cannot corrupt the record
    echo "$1" > "$task_file"
    echo "$2" >> "$task_file"
    echo "$3" > "$next_id_file"
}

#
# Set the task status to finished by removing the
# task record and the next-id checkpoint files.
#
set_task_finished() {
    rm -f -- "$task_file" "$next_id_file"
}

#
# Echo the start id of the next group to fetch (empty when the
# id list is exhausted) and checkpoint the one after it in
# $next_id_file for resumption.
#
# NOTE: the original version incremented before echoing, which
# skipped the very first group (init sets next to the first
# unfetched id, but the first call returned next+group_size).
# State is tracked through $next_id_file because this function
# runs in a $( ) subshell, so assignments to 'next' are lost.
#
get_next_group() {
    local cur
    if test -f "$next_id_file"
    then
        cur=$(<"$next_id_file")
    else
        cur=$next
    fi
    if test "$cur" -le "$stop"
    then
        echo "$cur"
        echo "$((cur + group_size))" > "$next_id_file"
    fi
}

#
# Generate a curl config file ('url =' / 'output =' pairs)
# for the ids from $1 through $2, write to standard output.
# Reads globals: url_prefix, save_prefix.
#
make_curl_config() {
    local id
    for (( id = $1; id <= $2; id++ ))
    do
        printf 'url = "%s/%s.html"\n' "$url_prefix" "$id"
        printf 'output = "%s/%s.html"\n' "$save_prefix" "$id"
    done
}

#
# Fetch the pages $1..$2 with curl through proxy $3.
# Retry up to $proxy_try times, pausing $timeout seconds
# between attempts (spec item 4 at the top of the file);
# after all attempts fail, mark the proxy as unusable.
# Returns 0 on success, 1 when the proxy was disabled.
#
fetch() {
    #debug
    log "actually fetch from $1 to $2"

    local first=$1 last=$2 proxy=$3 i status=
    for i in $(seq "$proxy_try")
    do
        if curl -m"$curl_maxtime" -s -x "$proxy" -K <(make_curl_config "$first" "$last")
        then
            status=1
            break
        fi
        # wait a bit before retrying with the same proxy,
        # but not after the final attempt
        test "$i" -lt "$proxy_try" && sleep "$timeout"
    done

    #
    # Failed, mark the proxy as unusable
    #
    if test -z "$status"
    then
        disable_proxy "$proxy"
        return 1
    else
        return 0
    fi
}

#
# Print the ids of the failed downloads in $1..$2, one per line.
# A failed download is a file of exactly 242 bytes (the server's
# error page); files missing entirely count as failures too.
# (Replaces the previous eval/ls-parsing approach, which is
# fragile and used an unescaped '.' in its sed pattern.)
#
find_failure() {
    local first=$1 last=$2 id file
    for id in $(seq "$first" "$last")
    do
        file="$save_prefix/$id.html"
        if ! test -f "$file"
        then
            # never downloaded at all
            echo "$id"
        elif test "$(wc -c < "$file")" -eq 242
        then
            # downloaded, but it is the 242-byte error page
            echo "$id"
        fi
    done
}

#
# Log a message to syslog under the 'Spider' tag,
# facility/priority local0.info.
#
log() {
    local tag=Spider priority=local0.info
    logger -t "$tag" -p "$priority" "$*"
}

#
# Fetch a group of web pages starting at id $1.
# Keeps trying fresh proxies until the batch succeeds, then
# retries individual failed ids; finally pushes the proxy's
# next-available time forward so it can be reused later.
# Note: 'proxy' is intentionally NOT local — the retry loop
# below and set_proxy_time at the end share it.
#
fetch_group() {
    local next last id
    next=$1
    last=$((next + group_size - 1))
    # clamp the group's last id to the overall stop id
    test "$last" -gt "$stop" && last=$stop

    log "start to fetch from $next to $last"

    #
    # A proxy may fail, so fetch in a loop:
    # if one proxy fails, try again with a new one.
    #
    while true
    do
        proxy=$(get_proxy)  # block when no proxy is usable
        fetch $next $last $proxy && break
        #debug
        log "failed to fetch via $proxy"
    done

    # try to fetch again if any url in the group failed
    while read id
    do
        #debug
        log "retry on id: $id"

        while true
        do
            fetch $id $id $proxy && break
            #debug
            log "failed to fetch via $proxy"

            # the current proxy was disabled by fetch(); get a new one
            proxy=$(get_proxy)
        done
    done < <(find_failure $next $last)

    # update the proxy next-available time
    set_proxy_time "$proxy"
}

#
# Get one usable proxy from the pool, when no proxy
# is usable, sleep for some time and then retry.
# A proxy can not be used simultaneously by multiple
# clients, so we set a flag for the proxy we chose.
#
# Proxy pool record structure: ip:port active-time status
#
# Prints the chosen proxy address on stdout; loops (sleeping
# $timeout seconds per round) until one becomes available.
#
get_proxy() {
    #debug
    log "start to get a proxy"

    local now sql lock_fd
    # serialize pool access across processes via flock on $lock_file;
    # {lock_fd} lets bash pick a free file descriptor number
    exec {lock_fd}>"$lock_file"
    while true
    do
        flock $lock_fd
        now=$(date +%s)
        # pick any READY proxy whose next-available time has passed
        sql="select address from $proxy_table "
        sql+="where status = $ST_READY and time <= $now limit 1"
        proxy=$(sqlite3 $proxy_db "$sql")
        if test -n "$proxy"
        then
            # flag it busy while still holding the lock, so no
            # other client can grab the same proxy
            occupy_proxy "$proxy"
            #debug
            log "got proxy $proxy"
            echo "$proxy"
            # closing the fd also releases the flock
            exec {lock_fd}>&-
            break
        fi
        # nothing available: release the lock, wait, then retry
        flock -u $lock_fd
        sleep $timeout
    done
}

#
# Flag a proxy as busy (status ST_BUSY), meaning it's being used.
# (The old header said "to zero", but zero is ST_DISABLED.)
# Caller is expected to hold the pool lock.
# Arguments: $1 - proxy address (ip:port)
#
occupy_proxy() {
    #debug
    log "occupy proxy $1"

    local proxy=$1 sql
    sql="update $proxy_table set status = $ST_BUSY where address = '$proxy'"
    sqlite3 "$proxy_db" "$sql"
}

#
# Mark a proxy as unusable (status ST_DISABLED) in the pool,
# holding the pool lock while updating.
# Arguments: $1 - proxy address (ip:port)
#
disable_proxy() {
    #debug
    log "disable proxy $1"

    local proxy=$1 sql lock_fd
    exec {lock_fd}>"$lock_file"
    flock $lock_fd
    sql="update $proxy_table set status = $ST_DISABLED where address = '$proxy'"
    sqlite3 "$proxy_db" "$sql"
    # closing the fd releases the flock
    exec {lock_fd}>&-
}

#
# Set the usable time of the proxy to a future time
# ($interval seconds from now) and reset the status to
# usable (ST_READY), holding the pool lock while updating.
# Arguments: $1 - proxy address (ip:port)
#
set_proxy_time() {
    #debug
    log "set time for proxy $1"

    local proxy=$1 time sql lock_fd
    time=$(date +%s)
    time=$((time + interval))
    exec {lock_fd}>"$lock_file"
    flock $lock_fd
    sql="update $proxy_table set time=$time, status=$ST_READY where address = '$proxy'"
    sqlite3 "$proxy_db" "$sql"
    # closing the fd releases the flock
    exec {lock_fd}>&-
}

group_size=5                       # pages fetched per curl batch (spec item 2)
interval=61                        # seconds before a proxy may be reused (spec item 2)
curl_maxtime=3                     # curl per-request timeout (-m), seconds
timeout=5                          # wait between retries / empty-pool polls, seconds
sleep_302=20                       # planned wait after a 302 (spec item 3) — currently unused
proxy_try=3                        # attempts per proxy before disabling it (spec item 4)
basedir=$(dirname $0)
task_file="$basedir/.task"         # two lines: start id, stop id
next_id_file="$basedir/.next"      # checkpoint: id of the next group to fetch
proxy_db="/tmp/proxy.db"           # sqlite3 database holding the proxy pool
proxy_table="proxy"                # table: address, time, status
lock_file="/tmp/proxy_mod_lock"    # flock target serializing pool updates
url_prefix="https://www.lagou.com/jobs"
save_prefix="html"                 # directory the fetched pages are saved into
# proxy status values stored in the pool
ST_DISABLED=0
ST_READY=1
ST_BUSY=2

mkdir -p "$save_prefix"

#
# Initialize the 'next' and 'stop' variables from the
# command-line arguments ($1 = start id, $2 = amount).
# A pending task recorded in $task_file may continue.
#
init "$@"

#
# Work until the job id list is exhausted.
# An 'if' (not '&& break') guarantees we leave the loop
# even if set_task_finished fails, avoiding a spin with an
# empty 'next'.
#
while true
do
    next=$(get_next_group)  # empty if list exhausted
    if test -z "$next"
    then
        set_task_finished
        break
    fi
    fetch_group "$next"
done
