#!/usr/bin/env python
# -*- coding: utf-8 -*-

import re
import time
import random
from BeautifulSoup import BeautifulSoup

from crawlerhttp import UrlSummary, crawle

# Page(s) scraped for candidate "ip:port" HTTP proxies.
urlsProxy = ["http://proxy.ipcn.org/proxylist.html"]
#urlsProxy = ["http://www.proxycn.com/html_proxy/http-1.html"]
# Probe target used to decide whether a candidate proxy actually works.
desSite = 'http://www.360buy.com'

class ChoiceProxy(object):
    proxyList = []
    def __init__(self):
        pass
    def __new__(cls):
        if '_inst' not in vars(cls):
            cls.__initProxyList()
            cls._inst = super(ChoiceProxy, cls).__new__(cls)
        return cls._inst 
    
    @classmethod  
    def __initProxyList(cls):
        ipcnProxyPageResult = crawle(urlsProxy[0])
        if ipcnProxyPageResult.code == 200:
            #soup = BeautifulSoup(ipcnProxyPageResult.content)
            #proxyContents = soup.find('pre').contents[0]
            p = re.compile(r'(\d+\.\d+\.\d+\.\d+:[0-9]+)')
            for proxyIp in p.findall(ipcnProxyPageResult.content):
                if(cls.__testProxy(proxyIp)):
                    print proxyIp
                    cls.proxyList.append(proxyIp)
        
    @classmethod
    def __testProxy(cls, proxy):
        proxyDicts = {'http':proxy}
        start = time.time()
        result = crawle(desSite, proxy = proxyDicts)
        end = time.time()
        estime = end - start
        print proxy, estime
        if result.code != 200 or estime > 10:
            return False
        return True
    
    @staticmethod
    def choice():
        if len(ChoiceProxy.proxyList) == 0:
            return None
        return random.choice(ChoiceProxy.proxyList)
    
def choiceHttpProxy():
    """Return a proxy mapping {'http': 'ip:port'} suitable for crawle(),
    or None when no working proxy is known.

    Bug fix: the original unconditionally wrapped ChoiceProxy.choice(),
    so an empty proxy list produced the bogus mapping {'http': None},
    which callers would then try to use as a proxy address.
    """
    proxy = ChoiceProxy.choice()
    if proxy is None:
        return None
    return {'http': proxy}

if __name__ == '__main__':
    for i in range(10):
        print ChoiceProxy().choice()
        