from scrapyProject.spiders.proxy.proxyManage import proxyManageForMySQL
from collections import defaultdict
import random

class proxyPool():
    """Singleton in-memory pool of proxies, grouped by URL scheme.

    Backed by ``proxyManageForMySQL``; proxies are cached as
    ``{'http': [...], 'https': [...]}`` and refreshed on demand.
    """

    _instance = None

    def __new__(cls, *args, **kw):
        # Classic lazy singleton. BUGFIX: do not forward *args/**kw to
        # object.__new__ -- in Python 3 that raises TypeError whenever
        # the constructor is ever given arguments.
        if not cls._instance:
            cls._instance = super(proxyPool, cls).__new__(cls)
        return cls._instance

    def __init__(self):
        # BUGFIX: __init__ runs on EVERY proxyPool() call even though
        # __new__ always returns the same instance; without this guard
        # each call would recreate the DB manager and wipe the cache.
        if getattr(self, "_initialized", False):
            return
        self._initialized = True
        self.manage = proxyManageForMySQL()
        # Proxies keyed by lowercase scheme: {'http': [...], 'https': [...]}
        self.proxies = defaultdict(list)

    def _refresh(self):
        """Discard the cache and reload all valid proxies from storage."""
        self.proxies.clear()
        for proxyData in self.manage.getValidData():
            self.proxies[proxyData["scheme"].lower()].append(proxyData["proxy"])

    def updateData(self, isNeedCheck=True):
        """Refresh the pool from storage.

        When ``isNeedCheck`` is True, the refresh is skipped if every
        scheme bucket is non-empty and the manager reports that no
        update is needed; passing False forces a reload.
        """
        # An empty pool -- or any drained scheme bucket -- always forces
        # a reload regardless of what the manager says.
        if not self.proxies or any(not plist for plist in self.proxies.values()):
            isNeedCheck = False
        if isNeedCheck and not self.manage.isNeedUpdate():
            return
        self._refresh()

    def remove_proxy(self, proxy):
        """Drop a dead proxy from every scheme bucket.

        If removing it drains the whole pool, force an unconditional
        reload so the pool never stays empty.
        """
        for scheme in self.proxies:
            if proxy in self.proxies[scheme]:
                self.proxies[scheme].remove(proxy)
        if sum(len(plist) for plist in self.proxies.values()) == 0:
            self.updateData(False)

    def getData(self):
        """Return the scheme -> proxy-list mapping, refreshing when stale.

        BUGFIX: the cached pool is returned only when NO update is
        needed; the original condition was inverted and served stale
        data exactly when the manager requested a refresh (the mirror
        check in updateData shows the intended logic).
        """
        if not self.manage.isNeedUpdate() and len(self.proxies) > 0:
            return self.proxies
        self._refresh()
        return self.proxies

    def get_random_proxy(self, scheme):
        """Return a random proxy for *scheme* (e.g. ``'http'``).

        Refreshes the pool first if it is stale or empty. Raises
        IndexError when no proxy is available for the scheme -- the same
        exception random.choice would raise on an empty list (the
        original used ``assert``, which is stripped under ``-O``).
        """
        self.updateData()
        candidates = self.proxies[scheme]
        if not candidates:
            raise IndexError("no proxy available for scheme: %r" % (scheme,))
        return random.choice(candidates)