#!/usr/bin/env python

"""
Call wget with specific options and output a file containing a list
of filtered URLs (see filter_urls.py for details on filtering).

Example call to wget:
$ wget -r -l 1 -nd -nv -o output.txt --reject .css,.gif,.jpg,.js,.txt \
--spider http://illinois.edu/

After the wget output file has been produced and the main function
calls filter_urls, it saves the returned list of URLs to the file
specified as outfile, overwriting the original wget output file.

To save the original wget output, set the debug variable to True.
"""

import optparse
import subprocess
import os, sys

from utils import StartingURL
from filter_urls import filter_urls

# Module-level option parser; the individual options are registered in main().
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage)
# When True, the raw wget log is preserved (renamed to <outfile>.wget) and
# the filtered URLs are echoed to stdout before being saved.
debug = False

def call_wget(options, s_url):
    """
    Call wget utilizing command-line options and StartingURL object.
    """
    reject_list = '.css,.gif,.jpg,.js,.txt';

    # construct command
    wget = ['wget']
    wget.extend(['-r', '-l', options.depth])

    if options.span == True:
        wget.extend(['--span-hosts', '--domains=%s' % s_url.next_level_domain])

    # current FAE settings
    wget.extend(['--tries=3', '--timeout=30', '--waitretry=3'])
    wget.extend(['-e', 'robots=off'])

    wget.extend(['-nd', '-nv', '-o', options.outfile])
    wget.extend(['--reject', reject_list])
    wget.extend(['--spider', options.url])

    print "Calling: %s" % ' '.join(wget)
    subprocess.call(wget)

def save_wget_output(outfile):
    """Preserve the raw wget log by renaming it with a '.wget' suffix."""
    backup_name = '%s.wget' % outfile
    os.rename(outfile, backup_name)

def display_urls(urls):
    for url in urls:
        print url[0], url[1]

    print 'length: ', len(urls)

def save_urls(urls, outfile):
    print "Saving filtered URLs to file %s" % outfile
    f = open(outfile, 'w')
    for url in urls:
        f.write(url[0] + '\n')

    f.close()

def main():
    parser.add_option("-d", "--depth", dest="depth",
                      help="wget recursion depth: (must be 1 or 2)", metavar="<int>")

    parser.add_option("-s", "--span-hosts", dest="span", action="store_true", default="False",
                      help="span hosts (next level subdomains)")

    parser.add_option("-o", "--outfile", dest="outfile",
                      help="wget output file (list of URLs)", metavar="<file>")

    parser.add_option("-u", "--url", dest="url",
                      help="wget starting URL", metavar="<url>")

    (options, args) = parser.parse_args()

    # check options and args
    if (options.depth != '1' and options.depth != '2'):
        parser.print_help()
        sys.exit()

    if os.path.exists(options.outfile):
        print 'Output file %s already exists! Exiting...' % options.outfile
        sys.exit()

    # get the raw list of URLs from wget
    starting_url = StartingURL(options.url)
    call_wget(options, starting_url)

    # filter the list of URLs based on user-specified parameters
    urls = filter_urls(options.outfile, starting_url, options.span)

    if debug:
        save_wget_output(options.outfile)
        display_urls(urls)

    # overwrite the wget output file with list of filtered URLs
    save_urls(urls, options.outfile)

if __name__ == "__main__":
    main()
