#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import sys
import urllib2
import socket
import httplib
import re
from proxycfg import *
from verifyproxy import *
import time

def getListPage():
    """Yield (pageUrl, needproxy) for every index page of every candidate site.

    Each entry of candidate_sites supplies a URL template containing the
    placeholder "(*)", the zero-pad width for the page number, an inclusive
    (startpage, endpage) range, and a flag saying whether fetching the page
    requires going through a proxy.
    """
    for (templateUrl, numWidth, (startpage, endpage)), needproxy in candidate_sites:
        fmt = "%%0%dd" % numWidth  # e.g. numWidth=3 -> "%03d"
        page = startpage
        while page <= endpage:  # endpage is inclusive
            yield templateUrl.replace("(*)", fmt % page), needproxy
            page += 1

def getProxy(pageUrl, needProxy):
    """Fetch one proxy-list page and yield the working proxies found on it.

    The site-specific parser module is looked up in the "siteparser" package
    by the page's host name (dots replaced with underscores).  Every
    (host, port, protocol) candidate the parser extracts is checked with
    verify(); only candidates that do not report "fail" are yielded as
    (host, port, verify_result, elapsed_seconds).

    pageUrl   -- http:// URL of the list page to fetch
    needProxy -- passed through to the parser's getPage (whether fetching
                 this site itself requires a proxy)
    """
    # Strip an optional leading "www." from the host.  The dot must be
    # escaped: an unescaped "." would also match e.g. "wwwx", mangling the
    # derived parser-module name.
    pattern = re.compile(r"^http://(www\.)?(?P<sitehost>[^/]+)(/.*)?$", flags=re.I)
    match = pattern.match(pageUrl)
    if match is None:
        # Unrecognized URL shape: no parser module can be chosen, so yield
        # nothing instead of crashing with a NameError on `sitehost`.
        return
    sitehost = match.group("sitehost").replace('.', '_')

    # Dynamic import of siteparser.<sitehost>; fromlist makes __import__
    # return the submodule itself rather than the top-level package.
    parsermodule = __import__("siteparser." + sitehost, fromlist = ["info", "parsepage"])

    pagedata = parsermodule.getPage(pageUrl, needProxy)
    if pagedata is None:
        return

    for host, port, protocol in parsermodule.parse(pagedata):
        starttime = time.time()
        vr = verify(host = host, port = port, protocol = protocol)
        endtime = time.time()
        if vr != "fail":
            yield host, port, vr, endtime - starttime



if __name__ == "__main__":
    # Crawl every configured list page, verify each proxy found, and write
    # the working ones to the file named by the first CLI argument as a
    # Python module that defines `proxylist`.
    outfile = sys.argv[1]
    with open(outfile, "w") as out:
        out.write("# -*- coding: UTF-8 -*-\nproxylist = [\n")
        for url, gfwed in getListPage():
            for host, port, protocol, speed in getProxy(url, gfwed):
                # One tuple per working proxy; flush so partial results
                # survive an interrupted run.
                out.write("    ('%s', '%s', %s, %s),\n"
                        % (protocol, host, port, speed))
                out.flush()
        out.write("]\n")

