#!/bin/sh -e
# $Id$
# feedtime.sh  (c) ALord 05/12/2008 GPL v3

# Backend Script to download nzbs
#
# Requires a list of tv programs in a file called feedtime.list
# Format contains program name with words separated by dots. Wildcard is '*' eg.
# The.Mentalist.S01*xvid
# The.Mentalist.S01*720
#
# Include the <dot>series where applicable. eg .S03 
# This ensures the right season is downloaded and also helps prevent mismatches.
# eg 'Life' may match a number of programs.
# 
# Mar 1 2010
# Removed bintube.
# Added parsing of <enclosure url="xxx"
# Added nzbindex

# Epoch seconds at script start; substituted for @UNIXTIME@ in site url rewrites.
UNIXTIME=`date +%s`

# Resolve the directory this script lives in (dirname is missing on NMT busybox).
d=`echo $0 | sed 's/[^/]*$//'` # dirname not on nmt
d=`cd "$d" ; pwd`

FEEDTIME_HOME="$d"

#Thanks to user Jasch for pointers for feedtime c200 tweaks
# Defaults for a generic linux host; overridden below when a known device
# firmware marker file is found.
arch=unknown

group=$USER # assume user has a personal gid
owner=$USER:$group
crondir="/etc"
wget_bin=wget

#Note the busybox has wget in /share/bin but NMT has full gnu wget in /bin  but its buggy
#I've bundled latest gnu wget but its big!
pid=$$

# path2 gets prepended to PATH (with a trailing ':') for device-specific bins.
path2=
if [ -f /mnt/syb8634/MIN_FIRMWARE_VER ] ; then

    # First-generation NMT (nmt100) firmware marker.
    arch=nmt100
    owner="nmt:nmt"
    crondir="/etc"
    path2=/mnt/syb8634/bin:
    wget_bin=/bin/wget #dont use busybox wget

else
    if [ -f /nmt/apps/MIN_FIRMWARE_VER ] ; then

        # Second-generation NMT (nmt200) firmware marker.
        arch=nmt200
        owner="nmt:nmt"
        crondir="/etc/cron"
        path2=/nmt/apps/bin:
        wget_bin=/bin/wget #dont use busybox wget
    else
	# D-Link DNS-323 NAS running the ffp add-on package tree.
	if [ -d /ffp/bin ] ; then
	   arch=dns323
           owner="nobody:501"
           path2=/ffp/bin:
	fi
    fi

fi

is_dns323() {
    # True when platform detection identified a D-Link DNS-323 NAS.
    case "$arch" in
        dns323) return 0 ;;
        *)      return 1 ;;
    esac
}
   
is_nmt() {
    # True when platform detection identified either NMT generation.
    case "$arch" in
        nmt100|nmt200) return 0 ;;
        *)             return 1 ;;
    esac
}

TMPDIR="$FEEDTIME_HOME/tmp" #/tmp has root only permissions on some NMT
if [ ! -d $TMPDIR ] ; then
    # World-writable so the web GUI (running as another user) can share it.
    mkdir -p $TMPDIR && chown $owner $TMPDIR && chmod 777 $TMPDIR 
fi

# Prepend device-specific bin dirs (path2 already ends in ':').
export PATH="$path2$PATH" 
if is_nmt ; then
    # Bundled tool replacements for the limited NMT busybox userland.
    export PATH="$FEEDTIME_HOME/bin:$FEEDTIME_HOME/bin/$arch:$PATH"
fi

AWK=awk

appname="feedtime"

# File-name prefix for per-item "already seen" marker files.
flagPrefix="flag."

# Field names for one '|'-separated line of the feeds db (feeds2).
rss_fields="rss_id rss_name rss_enabled rss_url rss_priority"

# Field names for one '|'-separated line of the sites db (sites2).
site_fields="site_id site_name site_enabled site_user site_password site_loginurl site_loginpost site_logouturl site_sed"

FEEDTIME_DATADIR="$FEEDTIME_HOME/data"
mkdir -p "$FEEDTIME_DATADIR"

LOG_FILE="$FEEDTIME_DATADIR/$appname.out"
HISTORY_FILE="$FEEDTIME_DATADIR/$appname.history"

# Where fetched nzbs are written; may be rewritten by SET_DOWNLOAD_DIR below.
download_dir="$TMPDIR"

#echo "FEEDTIME_HOME=$FEEDTIME_HOME"

config_file="$FEEDTIME_DATADIR/$appname.cfg"
# Feed name used for one-off downloads requested from the GUI.
gui_feedname="gui"

FIND_FILE() {
    # Search each directory argument for file $1 and print the first full
    # path that exists.  Prints "UNSET" (plus a note on stderr) when no
    # directory has it.  Always returns 0 so `sh -e` callers are not aborted.
    target="$1"
    shift
    for dir in "$@" ; do
        if [ -f "$dir/$target" ] ; then
            echo "$dir/$target"
            return 0
        fi
    done
    echo "Cant find $target" 1>&2
    echo UNSET
    return 0
}


bb() {
    # Wrapper for commands whose native nmt100 versions are broken
    # (find's mtime switch silently fails there): route through busybox
    # on nmt100, run directly everywhere else.
    if [ "$arch" = nmt100 ] ; then
        "busybox" "$@"
    else
        "$@"
    fi
}

# For normal linux we would just look on the path. For NMT nzbget can be in one of two
# places , neither of which are necessarily on the PATH.
# $path is intentionally unquoted below so each PATH entry becomes a separate
# argument to FIND_FILE.
path="`echo $PATH | sed 's/:/ /g'`"
unpak_nzbget_bin="`FIND_FILE nzbget $path /share/Apps/NZBget/bin /usr/local/bin`"
unpak_nzbget_conf="`FIND_FILE nzbget.conf /share/Apps/NZBget/.nzbget /share/.nzbget $HOME/.nzbget /etc /usr/etc /usr/local/etc /opt/etc`"

NZBGET() {
    # Run nzbget with the discovered binary and config, passing extra args
    # through.  stdout is discarded; on failure replay stderr minus the
    # harmless 'no version' warning.
    if ! "$unpak_nzbget_bin" -c "$unpak_nzbget_conf" "$@" >"$FEEDTIME_HOME/nzbget.out" 2>"$FEEDTIME_HOME/nzbget.err" ; then
        grep -v 'no version' "$FEEDTIME_HOME/nzbget.err"
    fi
    rm -f "$FEEDTIME_HOME/nzbget.out" "$FEEDTIME_HOME/nzbget.err"
}

nzbget_remove_nzb() {

    # Delete a queued item (matched by name) from nzbget's download queue.
    # $1 = nzb file name/path; the trailing ".nzb" is stripped for matching.
    b=`bname "$1" ".nzb"`

    # Extract the "first-last" id range from `nzbget -L G` group listing.
    # NOTE(review): assumes listing lines look like "[N-M] name ..." - verify
    # against the installed nzbget version.
    ids="`"$unpak_nzbget_bin" -c "$unpak_nzbget_conf" -L G 2>/dev/null | sed -rn "s/^.([0-9]+-[0-9]+)\] $b .*/\1/p"`"
    #LOG "`"$unpak_nzbget_bin" -c "$unpak_nzbget_conf" -L G 2>/dev/null `"
    case "$ids" in
        *-*)
            # Only act when a real id range was extracted.
            "$unpak_nzbget_bin" -c "$unpak_nzbget_conf" -E D "$ids" 2>/dev/null
            LOG "deleted ids [$ids]"
            ;;
    esac
}

INIT_FILE() {
    # First-run seeding: copy "$1.example" to "$1" unless "$1" already exists.
    [ -f "$1" ] && return 0
    cp "$1.example" "$1"
    chown $owner "$1"
}

SET_DOWNLOAD_DIR() {
    # Replace the CHANGE_TO_NZB_DOWNLOAD_DIR placeholder in the config with
    # the first existing candidate location:
    #  1. Zebedee (may manage the entire job in the future)
    #  2. the new nzbget location
    #  3. the original nzbget location
    grep -q CHANGE_TO_NZB_DOWNLOAD_DIR "$config_file" || return 0
    for candidate in /share/Apps/zebedee /share/Apps/NZBget/.nzbget /share/.nzbget ; do
        if [ -d "$candidate/nzb/." ] ; then
            sed -i "s|CHANGE_TO_NZB_DOWNLOAD_DIR|$candidate/nzb|" "$config_file"
            return
        fi
    done
}

# Seed the config and the feed/site databases from their .example templates
# on first run, then resolve the download-dir placeholder.
INIT_FILE "$config_file"

feed_db="$FEEDTIME_DATADIR/feeds2"
site_db="$FEEDTIME_DATADIR/sites2"

INIT_FILE "$feed_db"
INIT_FILE "$site_db"

SET_DOWNLOAD_DIR

# Load configuration: options.sh GET emits shell assignments (the option_*
# variables used throughout) which are evaluated into this shell.
eval `(cd $FEEDTIME_HOME ; ./options.sh GET feedtime.cfg.help data/feedtime.cfg)`

FLAGDIR() {
    # Ensure the per-feed flag directory exists and print its path.
    # $1 = feed id.  Always invoked via command substitution.
    dir="$FEEDTIME_DATADIR/feed_$1"
    mkdir -p "$dir"
    chown -R "$owner" "$dir"
    echo "$dir"
}

# Flag-directory ids for flags not tied to a single feed.
# NOTE(review): neither variable is referenced elsewhere in this file -
# presumably consumed by companion scripts; verify before removing.
pat1flag="LIST"
globalflag="GLOBAL"

# -----------------------------------------------------------------

LOG() {
    # Append the joined arguments as one line to the scanner log.
    echo "$@" >> "$LOG_FILE"
}
logtime() {
    # Timestamped log line, then rotate the log if it has grown too large.
    #LOG "`date +%a,\ %e\ %b,\ %H:%M` $@"
    LOG "`date +%a,%e.%b,%H:%M:%S` $@"
    roll_log "$LOG_FILE"
}

HISTORY() {
    # Record a user-visible event: written (timestamped) to both the log and
    # the history file, then rotate the history if oversized.
    #d="`date +%a,\ %e\ %b,\ %H:%M`"
    d="`date +%a,%e.%b,%H:%M:%S`"
    LOG "$d: $@"
    echo "$d: $@" >> "$HISTORY_FILE"
    roll_log "$HISTORY_FILE"
}

roll_log() {
    # Rotate "$1" to "$1.1" once it exceeds 500k, starting a fresh file
    # (echo leaves a single blank line).  Uses bb because nmt100's native
    # find silently ignores size/mtime switches.
    case "`bb find "$1" -size +500k 2>/dev/null`" in
        ?*)
            mv "$1" "$1.1"
            echo > "$1"
            ;;
    esac
}



#$1=patternfile $2=Prefix used by awk)
SHOW_PATTERNS() {
    # Print every non-comment line of pattern file $1 prefixed with "$2:".
    sed -e '/^#/ d' -e "s/^/$2:/" $1
    echo # guard against a missing end-of-line on the last pattern
}

insert_line_feeds() {
    # Break "><" runs so each XML tag sits on its own line; RSS bodies often
    # arrive as one long line and the awk matcher parses line-wise.
    awk '{ gsub(/></, ">\n<"); print; }'
}

# not all busybox have basename
bname() {
    # basename replacement (not all busybox builds ship basename):
    # strip any directory prefix from $1, then strip the suffix $2.
    # $2 is treated as an extended regex by sed -r, and callers rely on that.
    echo "$1" | sed -r "s,.*/,,;s/$2\$//"
}

err_file_link() {
    # Emit an HTML anchor labelled $1 pointing at ./err/<basename of $2>,
    # for the links written into the history file.
    printf '<a href="./err/%s">%s</a>\n' "`bname "$2"`" "$1"
}

# <stdin> = RSS FEED
scan_rss() {
    # Download one RSS feed, run the awk matcher over it, and act on each
    # matched item (download in LIVE/TEST, mark-as-seen in SKIP).
    # $1=url  $2=mode  $3=feed id  $4=priority  $5=feed name

    #LOG "scan_rss:$@"
    url="$1"
    mode="$2"    # TEST|LIVE|SKIP
    feedid="$3"
    priority="$4"
    feedname="$5"

    flagDir=`FLAGDIR "$feedid"`

    #ls -l "$@"
    tmp="$TMPDIR/$appname.$feedname.rss"
    nzbs="$TMPDIR/$appname.$feedname.nzbs"

    # One tag per line so the awk script can parse the feed line-wise.
    WGET "$url" | insert_line_feeds  > "$tmp"

    # Keep non-RSS responses (login/error pages) in err/ for inspection.
    if ! grep -qi '<rss' "$tmp" ; then
        mv "$tmp" "$tmp.html"
        mv "$tmp.html" "$bad_nzb_dir" && chown -R "$owner" "$bad_nzb_dir"
        HISTORY "[$feedname] <a href="'"'"$url"'"'">url</a> does not contain RSS data. [ `err_file_link "view bad rss" "$tmp.html"` ]"
        return 1
    fi

    #Ids are not tracked numerically but now stored in the file system. 
    #This fixes situation where ids arrive out of order, and allows use of feeds that
    #use alphanumeric ids.
    #using files for this is a little yucky, but actually quite good for a ram constrained device.
    #otherwise we would have to track indexes in memory.
    #Could have put them all in a file, but then you need to do lots of greps etc.


    logtime "begin scan"
    # The matcher writes one "cmd|id|link|file" line per item to $nzbs.
    # NOTE(review): exact command set comes from bin/scan_rss.awk, which is
    # not visible here - only FETCH is handled specially below.
$AWK -f "$FEEDTIME_HOME/bin/scan_rss.awk" \
    g_mode=$mode \
    g_live_match_limit="$option_liveMatchLimit" \
    g_group_by="$option_group_by" \
    g_nzb_dir="$download_dir" \
    g_nzbs="$nzbs" \
    feedid="$feedid" \
    g_feedname="$feedname" \
    g_data_dir="$FEEDTIME_DATADIR" \
    rsstext="$tmp" /dev/null

    #Now process the list of NZBs created by the awk script
    #This is outside the main awk script because the WGET needs
    #to attempt to use gzip and fall back to cat.
    #With full gnu tools it would have been simpler than busybox 
    if [ -f "$nzbs" ] ; then 
        cat "$nzbs" | while IFS=\| read cmd id link file ; do

            case "$cmd" in
                FETCH)
                    # Per-item marker file: existence means "already handled".
                    flag="$flagDir/$flagPrefix$id"
                    case "$mode" in
                    LIVE)
                        thread $option_concurrent_nzb download $mode "$link" "$file" "$priority" "$feedname" "$flag"
                        sleep 1
                        ;;
                    SKIP)
                        # Mark as seen without downloading.
                        if [ ! -f "$flag" ] ; then
                            echo "$mode|$link|$file"
                            touch $flag
                        else
                            echo "Already skipped|$link|$file"
                        fi
                        ;;
                    TEST)
                        thread $option_concurrent_nzb download $mode "$link" "$file" "$priority" "$feedname" "$flag" 
                        sleep 1
                        ;;
                    esac
                    ;;
                *)
                    # Anything else is informational output from the matcher.
                    echo "$cmd"
                    ;;
            esac
        done
    fi

    #Delete feed flags older than n days
    bb find "$flagDir" -type f -mtime +21 -name flag.\* | sed 's/^/rm "/;s/$/"/' | sh 
    rm -f "$tmp" "$nzbs"
    logtime "end scan"
}


# $1 = TEST or LIVE
GETNZB() {
    # Front end for GETNZB2: send all scan output to the log, then - when
    # running as root (e.g. from a root cron) - fix ownership of everything
    # that was created so the GUI user can still read/write it.
    GETNZB2 "$@" >> "$LOG_FILE" 2>&1
    if id | grep -q root ; then
        chown -R $owner "$FEEDTIME_HOME"
    fi
}


CHECK_STREAMS() {
    # Scan all feeds, but only while the service "active" marker file exists
    # (created by 'start', removed by 'stop').
    [ -f "$FEEDTIME_DATADIR/$appname.active" ] || return 0
    GETNZB LIVE "*"
}

#-------------------------------------------------------------------------------
thread() {
    # Run a command either in the background (concurrency setting == 1)
    # or inline/serialised (any other value).  $1 = concurrency flag.
    concurrency="$1" ; shift
    case "$concurrency" in
        1) "$@" & ;;
        *) "$@" ;;
    esac
}

#$1 = Mode TEST/LIVE
#$2 = id filter eg * or bar seperated list of ids.
GETNZB2() {
    # Scan every feed in the feed db whose id matches the $2 filter
    # ("*" for all, or an egrep alternation like "3|7").  In LIVE mode only
    # enabled feeds are processed; TEST/SKIP process every match.
    mode="$1"
    logtime "Getting nzbs : $1 mode "

    # Each feed_db line is "id|name|enabled|url|priority" (see rss_fields).
    while IFS=\| read $rss_fields ; do

        if [ "$2" = "*" ] || echo "$rss_id" | egrep -q "^($2)$" ; then

            if [ "$rss_enabled" = 1 -o "$mode" != LIVE ] ; then

               thread $option_concurrent_rss process_rss "$mode" "$rss_id" "$rss_name" "$rss_url" "$rss_priority"

            fi

        fi

    done < "$feed_db"

    # Housekeeping: drop week-old debug copies of bad nzbs.
    bb find "$bad_nzb_dir" -type f -mtime +7 -name \*.nzb\* | sed 's/^/rm "/;s/$/"/' | sh 

}

process_rss() {
    # Handle one feed: log in to its site (when a site definition exists)
    # and then scan the feed.  Scan failures are tolerated so the remaining
    # feeds still run under `sh -e`.
    mode="$1"
    rss_id="$2"
    rss_name="$3"
    rss_url="$4"
    rss_priority="$5"

    logtime "BEGIN feed:$rss_name nzb:$option_concurrent_nzb rss:$option_concurrent_rss"

    if site_login "$rss_url" ; then
        scan_rss "$rss_url" "$mode" "$rss_id" "$rss_priority" "$rss_name" || true
        #site_logout "$rss_url" 
        #LOG "-"
    fi
}

# Extract domain from url
get_site_name() {
    # Reduce a url to its bare two-part domain, e.g.
    # "http://rss.example.com:8080/feed" -> "example.com".
    # Steps: drop the protocol, drop port/path, keep the last two dot parts.
    echo "$1" | sed -r 's,^[a-z]+://,,; s,[:/].*,,; s/.*\.([a-z]+\.[a-z]+)$/\1/'
}

#Add '\' to regular expression metacharacters in a string.
#resulting string can be passed to grep,awk or sed -r (not plain sed)
#Required so we can search for the string whilst using regular expressions.
# eg grep "^$string$". this will fail if string contains '[].* etc.
re_escape() {
    # Backslash-escape every regex metacharacter (anything that is not a
    # word character) so the result can be embedded safely in grep/awk/sed -r
    # expressions.  '&' in the replacement is the matched character.
    echo "$1" | sed 's/[^a-zA-Z0-9_]/\\&/g'
}

site_tmp() {
    # Per-site, per-run scratch-file prefix: <TMPDIR>/<domain>.<pid>
    echo "$TMPDIR/`get_site_name "$1"`.$pid"
}

cookie_file() {
    # Cookie-jar path for a url's site; shared by login, WGET and logout.
    echo "`site_tmp "$1"`.cookie"
}


# $1 = rss url
site_login() {
    # Look the feed url's domain up in the site db and log in when a matching,
    # enabled definition exists.  Returns login_site2's status; returns 0 for
    # disabled or unknown sites (feeds without logins are normal).
    site_name_in="`get_site_name "$1"`"
    while IFS=\| read  $site_fields ; do
        if [ "$site_name" = "$site_name_in" ] ; then
           if [ "$site_enabled" = 0 ] ; then

               logtime "[$site_name] Site Definition ignored"
               return 0
           fi
           login_site2 "$site_name" "$site_user" "$site_password" "$site_loginurl" "$site_loginpost" 
           return $?
        fi
    done < "$site_db"
    logtime "No site definition for [$site_name_in]"
}


# $1 = rss url
site_logout() {
    # Counterpart of site_login: log out of the feed url's site when a
    # definition exists; no-op (via logtime) otherwise.
    site_name_in="`get_site_name "$1"`"
    while IFS=\| read  $site_fields ; do
        if [ "$site_name" = "$site_name_in" ] ; then
            logout_site2 "$site_name" "$site_logouturl" 
            return $?
        fi
    done < "$site_db"
    logtime "No site definition for [$site_name_in]"
}
# $1 = rss url
site_transform() {
    # Apply the matching site's sed expression (site_sed field) to a url,
    # expanding @UNIXTIME@ to the script start time.  Echoes the transformed
    # url; echoes the original url unchanged when there is no site definition
    # or when the configured sed expression fails.
    site_name_in="`get_site_name "$1"`"
    while IFS=\| read  $site_fields ; do
    
        if [ "$site_name" = "$site_name_in" ] ; then

            #logtime "transformer[$site_sed]"
            if echo "$1" | sed -r "$site_sed;s/@UNIXTIME@/$UNIXTIME/g" ; then
                return $?
            else
                logtime "Error with sed -r '$site_sed'"
                echo "$1"
                # BUGFIX: must return here. Previously control fell through
                # to the final echo as well, so callers capturing the output
                # received the url twice (two lines) after a bad site_sed.
                return 0
            fi
        fi
    done < "$site_db"
    echo "$1" # unchanged
}

# $1 = site name
# $2 = username
# $3 = password
# $4 = login url
# $5 = login post
login_site2() {

    site="$1" ; shift
    u="$1" ; shift
    p="$(re_escape "$1")" ; shift # escaped: substituted into a sed replacement below
    url="$1" ; shift
    post="$1" ; shift

    login_ret=0

    # Sites without a login url or user need no login - succeed silently.
    if [ -n "$url" -a -n "$u" ] ; then

        x="`site_tmp $url`"
        cookie_file="$x.cookie"

        wget_err="$x.err"
        wget_out="$x.out"

        # Fill the @USER@/@USERNAME@ and @PASS@/@PASSWORD@ placeholders in the
        # configured post template with the real credentials.
        post="$( echo "$post" |  sed -r "s/@USER(|NAME)@/$u/g;s/@PASS(|WO?RD)@/$p/g" )"

        rm -f "$cookie_file"
        if $wget_bin --no-check-certificate --keep-session-cookies --save-cookies "$cookie_file" --post-data "$post" "$url" -O - >"$wget_out" 2>"$wget_err" ; then
            # Heuristic success check: a wget cookie-file line with "FALSE"
            # followed by a non-zero expiry indicates a cookie was set.
            # NOTE(review): assumes the site always sets a persistent cookie
            # on successful login - verify per site.
            if grep -q 'FALSE.[1-9]' "$cookie_file" ; then
                logtime "Login $site OK"
                #LOG "Cookies:`cat "$cookie_file"`"
            else
                logtime "Login $site Failed - BEGIN REPORT"
                LOG "Out:`cat "$wget_out"`"
                LOG "Err:`cat "$wget_err"`"
                if [ -f "$cookie_file" ] ; then LOG "Cookies:`grep 'FALSE.[1-9]' "$cookie_file" | WCL `" ; fi
                logtime "Login $site Failed - END REPORT"
                login_ret=1
            fi
        fi
        rm -f "$wget_out" "$wget_err"

    fi
    return $login_ret
}

logout_site2() {
    # Log out of a site by fetching its logout url with the saved cookies,
    # then delete the cookie jar.  $1 = site name, $2 = logout url (may be
    # empty, in which case nothing happens).
    site="$1" ; shift
    url="$1" ; shift

    if [ -n "$url" ] ; then

        x="`site_tmp $site`"
        cookie_file="$x.cookie"

        wget_tmp="$x.tmp"
        wget_err="$x.err"
        wget_out="$x.out"

        if $wget_bin --no-check-certificate --keep-session-cookies --load-cookies "$cookie_file"  "$url" -O - > "$wget_out" 2>"$wget_err" ; then
            #logtime "Logout $site OK"
            #LOG "Cookies:`cat "$cookie_file"`"
            true

        else
            # Logout failure is non-fatal; just record the details.
            logtime "Logout $site messages"
            LOG "Out:`cat "$wget_out"`"
            LOG "Err:`cat "$wget_err"`"
            if [ -f "$cookie_file" ] ; then LOG "Cookies:`grep 'FALSE.[1-9]' "$cookie_file" | WCL `" ; fi
        fi
        rm -f "$cookie_file" "$wget_out" "$wget_err"

    fi
}


SHOWHISTORY() {
    # Dump the rotated then the current history; fall back to a friendly
    # message when both are missing/empty.
    if [ ! -s "$HISTORY_FILE.1" ] && [ ! -s "$HISTORY_FILE" ] ; then
        echo "No History"
    else
        cat "$HISTORY_FILE.1" "$HISTORY_FILE" 2>/dev/null || true
    fi
}


SHOWLOG() {
    # Print the current log, or a placeholder when it has no content.
    if [ ! -s "$LOG_FILE" ] ; then
        echo "Log file empty"
    else
        cat "$LOG_FILE"
    fi
}


CLEARLOG() {
    # Truncate the log (leaving one blank line) and sweep per-run temp files.
    echo > "$LOG_FILE"
    # glob deliberately unquoted so it expands
    rm -f -- $TMPDIR/feedtime.[0-9]*
}

CLEARHISTORY() {
    # Blank both the current and the rotated history file
    # (echo leaves a single empty line, same convention as roll_log).
    for history_f in "$HISTORY_FILE" "$HISTORY_FILE.1" ; do
        echo > "$history_f"
    done
}



#invoke wget and deal with compressed content
# $1=url
WGET() {
    # Fetch $1 with the site's saved login cookies, requesting gzip to save
    # bandwidth, and write the decoded body to stdout.  Returns non-zero
    # (after logging the wget output) on fetch failure.
    wget_ret=0

    cookie_file="`cookie_file "$1"`"

    # Unique temp prefix: pid + 1s-resolution counter + sanitised url.
    x="$TMPDIR/feedtime.$pid.`counter`.`echo "$1" | sed -r 's/[^a-zA-Z0-9.]+//g'`"
    wget_tmp="$x.tmp"
    wget_err="$x.err"
    wget_out="$x.out"

    echo > $wget_tmp

    # Referer = url up to its path; some indexers refuse requests without one.
    referer=`echo "$1" |  sed -r 's,([^/])/[^/].*,\1,'`

    wget_opts="--referer=$referer --no-check-certificate -q -t 2 -O $wget_tmp --user-agent='Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.1.1) Gecko/20090715 Firefox/3.5.1' --header='Accept-Encoding: gzip' --load-cookies $cookie_file"

    #logtime "wget [$1]"
    # eval is needed so the single-quoted --user-agent/--header values in
    # wget_opts survive word splitting.
    if eval "$wget_bin -S $wget_opts '$1' " > "$wget_out" 2>"$wget_err" ; then
        #logtime "unzip [$1]"
        # The server may answer gzip, zip or plain regardless of the
        # Accept-Encoding header: try each decoder and fall back to cat.
        if gunzip -c "$wget_tmp" 2>/dev/null || gzip -d -f -c  "$wget_tmp" 2>/dev/null ; then
           #LOG "[$1] gzipped"
           true
       else
           if unzip -p "$wget_tmp" 2>/dev/null ; then
              #LOG "[$1] zipped"
              true
           else
              cat "$wget_tmp"
           fi
        fi
        #LOG "Cookies:`cat "$cookie_file"`"
        #LOG "$wget_bin -S $wget_opts '$1'"
    else
        LOG "Error getting $1 : $wget_bin $wget_opts"
        LOG "Out:`cat "$wget_out"`"
        LOG "Err:`cat "$wget_err"`"
        if [ -f "$cookie_file" ] ; then LOG "Cookies:`grep 'FALSE.[1-9]' "$cookie_file" | WCL `" ; fi
        wget_ret=1
    fi
    rm -f "$wget_tmp" "$wget_out" "$wget_err"
    #logtime "endwget [$1]"
    return $wget_ret
}

update_nzbget_weekday_schedule() {
    # (Re)install the Mon-Fri (cron days 1-5) nzbget pause/unpause cron job.
    update_nzbget_schedule "$appname.nzbget_weekday" "1-5" $option_nzbget_weekday_pause_hour $option_nzbget_weekday_unpause_hour
}

update_nzbget_weekend_schedule() {
    # (Re)install the Sat/Sun (cron days 0,6) nzbget pause/unpause cron job.
    update_nzbget_schedule "$appname.nzbget_weekend" "0,6" $option_nzbget_weekend_pause_hour $option_nzbget_weekend_unpause_hour
}

#$1 cronid 
#$2 crondays
#$3 stop time
#$4 start time
update_nzbget_schedule() {
    echo "Update: $@"
    # A "-" for either hour means the schedule is disabled; the *-* glob on
    # the concatenated pair catches that, so the cron entry is removed.
    case "$3$4" in
        *-*) 
        echo "Removing $1.stop start"
        cron_del "$1"
         ;;
        *)
        # Fire at both the pause and the unpause hour; the invoked
        # check_nzbget_schedule decides which action applies.
        echo "Adding $1.stop start"
        cron_add "$1" "0 $3,$4 * * $2 $FEEDTIME_HOME/$appname.sh check_nzbget_schedule" 
         ;;
    esac
}

cron_add() {
    # Install (or replace) a crontab entry tagged with "#{id}" so it can be
    # located and removed later.  $1 = id, remaining args = crontab line.
    # NMT firmwares manage cron differently, so delegate to install.sh there.
    if is_nmt ; then
        "$FEEDTIME_HOME/install.sh" NMT_CRON_ADD nmt "$@"
    else
        id="$1" ; shift
        echo "adding cron id [$id]"
        # Filter out any previous entry with this tag, append the new one.
        ( crontab -l | fgrep -v "#{$id}" || true ; echo "$@ #{$id}" ) | crontab -
    fi
}

cron_del() {
    # Remove the crontab entry previously installed by cron_add for id $1.
    if is_nmt ; then
        "$FEEDTIME_HOME/install.sh" NMT_CRON_DEL nmt "$@"
    else
        id="$1" ; shift
        echo "deleting cron id [$id]"
        ( crontab -l  | grep -v "#{$id}$" || true ) | crontab -
    fi
}

update_nzbget_check_frequency() {
    echo "Update check frequency: $@"
    # 0 disables the periodic check; any other value runs it every N minutes.
    case "$option_nzbget_check_frequency" in
        0)
         cron_del "$appname.nzbget_check"
         ;;
        *)
         cron_add "$appname.nzbget_check" "*/$option_nzbget_check_frequency * * * * $FEEDTIME_HOME/$appname.sh check_nzbget_schedule" 
         ;;
     esac
}


check_nzbget_schedule() {
    # Pause nzbget inside the configured peak window and unpause outside it,
    # using the weekday or weekend hours as appropriate.
    h=`date +%H`
    # date +%u: 1=Mon .. 7=Sun, so >= 6 means Saturday or Sunday.
    if [ `date +%u` -ge 6 ] ; then
        peak_start="$option_nzbget_weekend_pause_hour"
        peak_stop="$option_nzbget_weekend_unpause_hour"
    else
        peak_start="$option_nzbget_weekday_pause_hour"
        peak_stop="$option_nzbget_weekday_unpause_hour"
    fi
    # logtime "$peak_start $h $peak_stop"
    # Unpause hour 0 = midnight: treat the window as running to hour 23.
    if [ "$peak_stop" = 0 ] ; then
        peak_stop=23
    fi
    case "$peak_start$peak_stop" in 
      *-*)
            # "-" marks a disabled schedule (see update_nzbget_schedule).
            logtime "no nzbget schedule defined for this time"
            ;; 
      *)
        if [ "$peak_start" -le "$h" -a "$h" -lt "$peak_stop" ] ; then
            logtime "pausing nzbget"
            NZBGET -P
        else
            logtime "unpausing nzbget"
            NZBGET -U
        fi
    esac
}

UPDATE_SCHEDULE() {
    # Install the main scan cron job.  Spaces are stripped from the hours
    # list so "1, 2" style values survive embedding in the crontab line.
    option_schedule_hrs=`echo "$option_schedule_hrs" | sed 's/  *//g'` 
    cron_add $appname "$option_schedule_mins $option_schedule_hrs * * * $FEEDTIME_HOME/$appname.sh cron" 
}

WCL() {
    # Count input lines (like `wc -l`, but awk's NR also counts a final
    # line that lacks a trailing newline).
    awk 'END { print NR; }'
}

# Check if a file should be blacklisted according to patterns in the blacklist file.
blacklisted() {
    # Return 0 (blacklisted) when any non-comment pattern from
    # data/blacklist matches the nzb file's contents; else return 1.
    # $1 = feed name (for the history message), $2 = nzb file path.
    feedname="$1"
    file="$2"
    while IFS= read pattern ; do
        case "$pattern" in
            # BUGFIX: also skip empty lines - previously a blank line in the
            # blacklist file reached `grep -q "" file`, which matches every
            # file and so blacklisted every nzb.
            ''|\#*) ;;
            *)
            if grep -q "$pattern" "$file" ; then
                HISTORY "$feedname:SKIPPED nzbfile|$link|$base blacklisted by [$pattern]"
                return 0
            fi
            ;;
        esac
    done < "$FEEDTIME_HOME/data/blacklist"
    return 1
}

# Float comparison 
float_test() {
    # Evaluate a floating-point comparison via awk, e.g.
    # float_test "1.5 > 1.2"; exit status reflects the result.
    awk 'BEGIN { exit ( !( '"$1"' ) ); }' < /dev/null
}
float_val() {
    # Print the value of a floating-point awk expression, e.g. float_val "1/4".
    awk 'BEGIN { print ( '"$1"' ); }' < /dev/null
}

# Generate a different number to use for unique file names.
# This could be more robust but the downloads are forced to be 
# 1 sec appart. This helps prevent overload and also allows 
# use of this simple counter. It only needs to be unique if 
# processing two NZBs with the same name.
counter() {
    # Single digit 0-9 derived from the current epoch second.  Downloads are
    # deliberately spaced >= 1s apart, so this is unique enough to
    # disambiguate temp files for two NZBs with the same name.
    # BUGFIX: the old awk version called strftime(systime(),"%s") with its
    # arguments reversed (format comes first) and only worked by accident; it
    # also relied on the gawk-only systime/strftime functions.  Plain POSIX
    # shell arithmetic on `date +%s` is portable and equivalent.
    echo $(( `date +%s` % 10 ))
}

# $1 = file
get_score() {
    # Score a (lower-cased) nzb file name against the comma-separated
    # option_group_priority list, e.g. "720p:10,proper:1,immerse:-1":
    # each listed word found in the name adds its weight.  Prints the total.
    echo "$1" | awk '
    {
        $0 = tolower($0);
        split(tolower(g_group_priority),pri,","); # eg 720p:10,proper:1,repack:1,immerse:-1

        score=0;
        for(i in pri) {
            split(pri[i],p,":"); # eg 720p:20
            # \< \> are GNU awk word boundaries: match whole words only.
            if (match($0,"\\<"p[1]"\\>")) score += p[2];
        }
        print score;
    }
    ' g_group_priority="$option_group_priority"
}

# $1 = flag file
# $2 = nzb file
set_flagfile() {
    # Record which nzb a flag file refers to: first line of the flag file
    # is the nzb path.  $1 = flag file, $2 = nzb file.
    printf '%s\n' "$2" > "$1"
    chown "$owner" "$1"
}

# best effort to wait on a lock
# best effort to wait on a lock
lock() {
    # Acquire a best-effort lock using atomic mkdir of "$1.lock", waiting up
    # to ~60s for a holder to release it.  If the lock never frees, fall
    # through and continue anyway (a stale lock must not wedge the scanner).
    if [ -n "$1" ] ; then
        for i in 0 1 2 3 4 5 ; do
            for j in 0 1 2 3 4 5 6 7 8 9  ; do
                if [  -d "$1.lock" ] ; then
                    sleep 1
                else
                    # 2>/dev/null: a losing race makes mkdir complain on
                    # stderr; we just retry.  (The old code redirected stdout,
                    # which silenced nothing.)
                    if mkdir "$1.lock" 2> /dev/null ; then
                        # BUGFIX: return immediately on acquisition.  The old
                        # `break` only left the inner loop, so the outer loop
                        # kept running against our own lock and slept for a
                        # further ~50 seconds on every successful lock.
                        return 0
                    fi
                fi
            done
        done
    fi
    # if we get here - just continue any way
}

unlock() {
    # Release a lock taken by lock(); harmless when no lock is held or the
    # name is empty.
    [ -z "$1" ] || rm -fr "$1.lock"
}

download() {
    # Serialise downloads that share a flag file (same feed item), then run
    # the real worker.  Worker failures are swallowed so one bad nzb does
    # not abort the whole scan under `sh -e`.
    mode="$1"
    link="$2"
    nzb_file="$3"
    priority="$4"
    feedname="$5"
    flagfile="$6" # may be empty
    [ -z "$flagfile" ] || lock "$flagfile"

    download2 "$@" || true

    [ -z "$flagfile" ] || unlock "$flagfile"
    echo end download
}

download2() {
    # Worker for download(): fetch one nzb, validate it, and queue it in
    # nzbget.  Args: $1=mode TEST|LIVE  $2=link  $3=base nzb path
    # $4=priority  $5=feed name  $6=flag file (may be blank).

    mode="$1"
    link="$2"
    # counter() adds a 1s-resolution digit so same-named nzbs get unique files.
    nzb_file="$3.`counter`.nzb"
    priority="$4"
    feedname="$5"
    flagfile="$6" #can be blank

    # Quality score of this release name (see get_score / option_group_priority).
    score="`get_score "$nzb_file"`"

    #logtime "Site input [$link]"
    link="`site_transform "$link"`"
    #logtime "Site transformed to [$link]"

    # Canonical name with the counter suffix stripped; used to compare
    # against whatever a previous run recorded in the flag file.
    base="$TMPDIR/`bname "$nzb_file" ".nzb[.0-9]+.nzb"`"

    ## race condition - if two downloads with same flag occur at the same time. They might both download.
    ## need to add more locking around this.

    supercede=0

    # check no flag file
    if [ -n "$flagfile" ] ; then
       case "$mode" in
           LIVE)
           if [ -f "$flagfile" ] ; then

               # make sure the file stays around as long as its getting hits.
               touch "$flagfile"

               if [ -s "$flagfile" ] ; then
                   # First line of the flag file is the previously queued nzb.
                   old_nzb_file=`awk 'NR==1' "$flagfile"`
                   if [ -n "$old_nzb_file" ] ; then
                       old_base="$TMPDIR/`bname "$old_nzb_file" "\.nzb[.0-9]+.nzb"`"
                       if [ "$base" = "$old_base" ] ; then
                           # Exact same release already handled - nothing to do.
                           return 0
                       else
                           # Different release of the same item: keep whichever
                           # scores higher; remember skipped ones in the flag
                           # file so they are only reported once.
                           old_score="`get_score "$old_nzb_file"`"
                           if [ "$old_score" -ge "$score" ] ; then
                               if ! fgrep -q "@$old_base@" "$flagfile" ; then
                                   echo "@$old_base@" >> "$flagfile"
                                   HISTORY "$feedname:skipped|$link|$base | ($score) <= ($old_score)"
                               else
                                   #LOG "$feedname:already skipped|$link|$base | ($score) <= ($old_score)"
                                   true
                               fi

                               return 0
                           else
                               # new nzb is better
                               supercede=1
                           fi
                       fi
                   fi
               else
                   # old version of feedtime - flagfile is empty
                   supercede=1
               fi


           else
               # First sighting: claim the item before downloading.
               set_flagfile "$flagfile" "$nzb_file"
           fi
           ;;
       esac
    fi

    logtime "downloading `bname "$flagfile"`"
    rm -f "$nzb_file"


    WGET "$link" > "$nzb_file" 

    if grep -iql '<nzb' "$nzb_file" ; then
        # For compound zip files just filter out the xml (nzbmatrix include the nfo too)
        # note sed -i doesnt work here on all platforms
        sed -rn "/<\?xml/,/<\/nzb>/ p" "$nzb_file" > "$nzb_file.tmp" && mv "$nzb_file.tmp" "$nzb_file"
    fi

    # Append a provenance comment to the nzb.
    # NOTE(review): GNU sed parses "-ir" as -i with backup suffix "r" (leaving
    # "<file>r" backups), not as -i -r; probably meant `sed -r -i` - verify.
    sed -ir "$ a <!-- Feedtime: $Id$ $link -->" "$nzb_file"

    #WGET "$link" "$compress" > "$nzb_file" 
    #WGET "$link" "$compress" > "$nzb_file" 


    # Validation phase: first failed check wins and sets errmsg.
    errmsg=
    quality=
    silent=0
    keep_bad_file=0
    clear_flagfile=0

    if [ ! -f "$nzb_file" ] ; then
        errmsg="FAILED"
    fi

    if [ -z "$errmsg" ] ; then

       nzb_info_file_count=0;
       nzb_info_par_count=0;

       # nzb_info.sh emits shell assignments describing the nzb's contents.
       quality="`$FEEDTIME_HOME/bin/nzb_info.sh "$nzb_file"`"
       eval "$quality"
       # These are set by the eval bin/nzb_info.sh 
       #nzb_info_file_count=0;
       #nzb_info_sample_count=0;
       #nzb_info_par_count=0;
       #nzb_info_size=0;
       #nzb_info_par_size=0;
       #nzb_info_data_segments_expected=0;
       #nzb_info_data_segments_found=0;
       #nzb_info_data_segments_missing=0;
       #nzb_info_data_missing_percent=0;

       #nzb_info_par_segments_expected=0;
       #nzb_info_par_segments_found=0;
       #nzb_info_par_segments_missing=0;
       #nzb_info_par_missing_percent=0;

       #nzb_info_par_percent=0;

       if [ $nzb_info_file_count = 0 -a $nzb_info_par_count = 0 ] ; then
           # No files at all - probably an error page; keep it for debugging.
           errmsg="bad nzbfile"
           keep_bad_file=1
           clear_flagfile=1
       fi

    fi

    # Content checks are skipped for explicit GUI-requested downloads.
    if [ "$feedname" != "$gui_feedname" ] ; then

       if [ -z "$errmsg" ] && blacklisted "$feedname" "$nzb_file" ; then
           
           errmsg="blacklisted"
          
       fi

       if [ -z "$errmsg" -a "$nzb_info_sample_count" -ge "$nzb_info_file_count" ] ; then

            errmsg="sample";
       fi

       if [ -z "$errmsg" ] ; then



           if [ $nzb_info_data_segments_missing != 0 ] ; then

              errmsg="missing $nzb_info_data_segments_missing data segments"
              # Try to download again later
              clear_flagfile=1

            fi

            max_size_mb=$option_max_size_gb

            # "-" means no size limit.
            if [ "$max_size_mb" = "-" ] ; then
               max_size_mb=999999
            else
               max_size_mb="${option_max_size_gb}999" # * 1024 but mulitplication nor bc/dc not available on all embedded devices.
            fi

            if  [ -z "$errmsg" ] && float_test "$nzb_info_size > $max_size_mb" ; then

                errmsg="too big ${nzb_info_size}Mb > ${option_max_size_gb}Gb"
            fi

            if [ -z "$errmsg" ] &&  float_test "$nzb_info_par_percent < $option_par_percent" ; then

                errmsg="par_percent $nzb_info_par_percent < $option_par_percent"

            fi
        fi
    fi

    if [ -z "$errmsg" ] ; then

       okmsg=

       # Queue in nzbget for LIVE scans and for GUI one-off downloads.
       if [ $mode = LIVE -o "$feedname" = "$gui_feedname" ] ; then


           if [ $mode = LIVE -a $supercede = 1 ] ; then
               # Better release found: drop the previously queued one.
               nzbget_remove_nzb "$old_nzb_file"
               set_flagfile "$flagfile" "$nzb_file"
               HISTORY "$feedname:removed|$old_nzb_file from queue"
           fi

          # priority 0 = append normally, 1 = append + start immediately (-T).
          case "$priority" in

              0) NZBGET -A "$nzb_file"
                ;;
              1)
                NZBGET -A "$nzb_file" -T
                ;;
              *) true ;;

          esac
          okmsg="ok"
       else
          if [ $mode = TEST ] ; then
              okmsg="test"
          fi
       fi

       cp "$nzb_file" "$bad_nzb_dir" && chown -R "$owner" "$bad_nzb_dir" # this line just for debugging
       rm -f "$nzb_file"

       if [ -n "$okmsg" ] ; then
         HISTORY "$feedname:$okmsg|$link|$base | $nzb_info_size Mb $nzb_info_par_percent% pars ($score)."
       fi
    else
       # Failure path: optionally keep the bad file for the "view bad nzb"
       # link, and report to the history.
       keepmsg=
       if [ $keep_bad_file = 1 ] ; then
           mv "$nzb_file" "$nzb_file.html"
           mv "$nzb_file.html" "$bad_nzb_dir" && chown -R "$owner" "$bad_nzb_dir"
           keepmsg=" [ `err_file_link "view bad nzb" "$nzb_file.html"` ]"
       else
           rm "$nzb_file"
       fi
        if [ "$silent" = 0 -o $mode = TEST ] ; then
           HISTORY "$feedname: failed $errmsg |$link|$base $keepmsg"
           if [ -n "$quality" -a $mode = TEST ] ; then
               LOG "$quality"
           fi
       fi
    fi
    # Transient failures clear the flag so a later scan retries the item.
    if [ -n "$flagfile" -a "$clear_flagfile" = 1 -a "$mode" = LIVE ] ; then
        rm -f "$flagfile"
    fi
}


#####################################################################
# MAIN PROGRAM
#####################################################################



# NOTE(review): GMODE is assigned here but never read in this file -
# presumably consumed by a sourced/companion script; verify.
GMODE=LIVE
if [ "${1:-}" = "test" ] ; then
    GMODE=TEST
else
    if [ "${1:-}" = "showlog" ] ; then
        GMODE=${2:-}
    fi
fi

# Directory where bad/debug nzb and rss files are kept; linked to from the
# history via err_file_link.
bad_nzb_dir="$FEEDTIME_HOME/err"
mkdir -p "$bad_nzb_dir"
chown -R $owner "$bad_nzb_dir"

#logtime "$0 $@"

# Command dispatch: the first argument selects the action.
case "$1" in 
    debug)
        # Run an arbitrary function of this script, e.g. ./feedtime.sh debug WGET url
        shift;
        "$@"
        ;;
	test)
        # Dry run: scan feeds but do not queue anything in nzbget.
        CLEARLOG
        shift
        GETNZB TEST "$@"
        ;;
    cron)
        # Periodic entry point installed by UPDATE_SCHEDULE.
        CLEARLOG
        CHECK_STREAMS
        ;;
        
    start)
        # Enable the service (create the active marker) and scan immediately.
        CLEARLOG
        touch "$FEEDTIME_DATADIR/$appname.active"
        logtime "Service Started"
        GETNZB LIVE "*"
        ;;

    reboot|install)
        # Boot/installation hook: refresh all cron entries and scan once.
        if is_nmt ; then
            #daylight savings bug
            pflash set dst `pflash get daylight_sav`
            ln -sf "$FEEDTIME_HOME" "/opt/sybhttpd/default/."
        fi
        CLEARLOG
        UPDATE_SCHEDULE
        update_nzbget_weekday_schedule
        update_nzbget_weekend_schedule
        update_nzbget_check_frequency
        check_nzbget_schedule
        CHECK_STREAMS


        ;;

    update_schedule)
        CLEARLOG
        UPDATE_SCHEDULE ;;

    update_nzbget_schedule)
        CLEARLOG
        update_nzbget_weekday_schedule
        update_nzbget_weekend_schedule
        update_nzbget_check_frequency
        ;;

    check_nzbget_schedule)
        check_nzbget_schedule
        ;;

    stop)
        # Disable the service by removing the active marker file.
        CLEARLOG
        rm -f "$FEEDTIME_DATADIR/$appname.active" 
        logtime "Service Stopped"
        ;;

    force)
        # Like 'once' but with FORCEFEED exported for the matcher.
        shift
        export FORCEFEED=1
        GETNZB LIVE "$@"
        ;;

    once)
        # Single LIVE scan of the given feed id filter.
        shift
        GETNZB LIVE "$@"
        ;;

    skip)
        # Mark everything currently on the feed(s) as seen without downloading.
        shift
        GETNZB SKIP "$@"
        ;;

    now)
        # Immediate full scan, then show the resulting log.
        shift
        GETNZB LIVE "*"
        SHOWLOG 
        ;;

    show|showlog) SHOWLOG ;;

    clear|clearlog) CLEARLOG ;;

    showhistory) SHOWHISTORY ;;

    clearhistory) CLEARHISTORY ;;

    set)
        # set <option> <value>: persist the option, then refresh whichever
        # cron schedule depends on it.
        $FEEDTIME_HOME/options.sh SET "$config_file" "$2" "$3"
        case "$2" in
            schedule_hrs|schedule_mins)
                eval option_$2="\$3"
                UPDATE_SCHEDULE
                ;;
            nzbget_weekday*hour)
                eval option_$2="\$3"
                update_nzbget_weekday_schedule
                ;;
            nzbget_weekend*hour)
                eval option_$2="\$3"
                update_nzbget_weekend_schedule
                ;;
            nzbget_check_frequency)
                eval option_$2="\$3"
                update_nzbget_check_frequency
        esac
        ;;

    download)
        # download <url> <file>: one-off download (used by the GUI feed name).
        if site_login "$2" ; then

            download LIVE "$2" "$download_dir/$3" 1 "$gui_feedname" "" 
            
            echo end download logout...

           site_logout "$2" 
        fi
        ;;
    show_config)
        # Dump all option_* variables loaded from the config.
        set | grep ^option_
        ;;
	*)
		echo usage "$0 start|stop|now|skip|test|showlog|pause|resume|update_schedule|update_nzbget_schedule|show_config"
        echo "'start'   : Start the $appname scanner "
        echo "'stop'    : Stop the $appname scanner "
        echo "'test'    : Start in test mode - nzbs not downloaded"
        echo "'now'     : Force an immediate scan of the rss feed"
        echo "'showlog' : Display results of last scan"
        echo "'clearlog': Clear log file"
        echo "'skip'    : Skip over all nzbs on the feed."
        echo "update_schedule : Update when cronjob runs according to cfg file"
        echo "update_nzbget_schedule: Update when nzbget is paused according to cfg file"
        echo "check_nzbget_schedule: Set nzbget activity according to current time - called from cron"
        echo ""
        echo "start and test optionally take number of pages for initial scan of the rss feeed"
        ;;
esac



# vi:shiftwidth=4:tabstop=4:expandtab
