# coding:utf-8
from gevent import monkey
monkey.patch_all()

import sys
import time
import gevent

from multiprocessing import Queue, Process, Value

from config import THREADNUM, UPDATE_TIME, MINNUM, MAX_CHECK_CONCURRENT_PER_PROCESS, MAX_DOWNLOAD_CONCURRENT
from db.DataStore import store_data, sqlhelper
from validator.Validator import validator, getMyIP, detect_from_db
from spider import ProxyGet
'''
Crawler logic for the proxy pool: re-validate proxies already stored in the
database and, when the pool runs low, crawl the configured sources for more.
'''


def startProxyCrawl(queue, db_proxy_num, myip):
    """Process entry point: build a ProxyCrawl and run its main loop forever."""
    ProxyCrawl(queue, db_proxy_num, myip).run()


class ProxyCrawl(object):
    """Periodic proxy-pool maintainer.

    Re-validates every proxy currently in the database; when the number of
    live proxies drops below MINNUM, fans out over all spider sources in
    ProxyGet.Source to refill the pool, pushing fresh proxies onto a
    multiprocessing queue for the validator process.
    """

    # Kept so external code reading ProxyCrawl.proxies still finds a set.
    # NOTE(review): the original relied on this single class-level set being
    # shared by every instance — a latent cross-instance bug. __init__ now
    # shadows it with a per-instance set.
    proxies = set()

    def __init__(self, queue, db_proxy_num, myip):
        self.queue = queue                # multiprocessing.Queue consumed by the validator
        self.db_proxy_num = db_proxy_num  # shared Value, updated with the live-proxy count
        self.myip = myip                  # this host's public IP, passed to detect_from_db
        # Per-instance set of 'ip:port' strings seen this round (shadows the
        # class attribute so instances never share mutable state).
        self.proxies = set()

    def run(self):
        """Main loop: validate DB proxies, crawl if below MINNUM, sleep,
        repeat. Never returns."""
        while True:
            self.proxies.clear()
            sys.stdout.write('IPProxyPool----->>>>>>>>beginning' + "\r\n")
            sys.stdout.flush()

            # Re-check every stored proxy with bounded greenlet concurrency;
            # detect_from_db adds the survivors to self.proxies.
            spawns = []
            for proxy in sqlhelper.select():
                spawns.append(gevent.spawn(detect_from_db, self.myip, proxy, self.proxies))
                if len(spawns) >= MAX_CHECK_CONCURRENT_PER_PROCESS:
                    gevent.joinall(spawns)
                    spawns = []
            gevent.joinall(spawns)  # drain the final partial batch

            self.db_proxy_num.value = len(self.proxies)
            strs = 'IPProxyPool----->>>>>>>>db exists ip:%d' % len(self.proxies)

            if len(self.proxies) < MINNUM:
                strs += '\r\nIPProxyPool----->>>>>>>>now ip num < MINNUM,start crawling...'
                sys.stdout.write(strs + "\r\n")
                sys.stdout.flush()

                # One greenlet per spider source, bounded concurrency.
                spawns = []
                for fun in [getattr(ProxyGet, ss) for ss in ProxyGet.Source]:
                    spawns.append(gevent.spawn(self.crawl, fun))
                    if len(spawns) >= MAX_DOWNLOAD_CONCURRENT:
                        gevent.joinall(spawns)
                        spawns = []
                gevent.joinall(spawns)

                # Only the new status line is printed (the original appended to
                # strs and re-printed the earlier lines); 'flinsh' typo fixed.
                sys.stdout.write('IPProxyPool----->>>>>>>>now ip %s,crawling finish...' % len(self.proxies) + "\r\n")
                sys.stdout.flush()
            else:
                strs += '\r\nIPProxyPool----->>>>>>>>now ip num meet the requirement,wait UPDATE_TIME...'
                sys.stdout.write(strs + "\r\n")
                sys.stdout.flush()

            time.sleep(UPDATE_TIME)

    def crawl(self, fun):
        """Run one spider source and enqueue every proxy not already seen.

        fun: spider module exposing get_proxy() -> iterable of
             {'ip': ..., 'port': ...} dicts, or None on failure.
        """
        proxylist = fun.get_proxy()
        if proxylist is None:
            return
        for proxy in proxylist:
            proxy_str = '%s:%s' % (proxy['ip'], proxy['port'])
            if proxy_str in self.proxies:
                continue  # already enqueued this round
            self.proxies.add(proxy_str)
            # Deliberate poll-and-sleep instead of a blocking put():
            # multiprocessing.Queue.put would block the whole OS thread and
            # stall every greenlet in this process, while the monkey-patched
            # time.sleep yields to the gevent hub.
            while self.queue.full():
                time.sleep(0.1)
            self.queue.put(proxy)


if __name__ == "__main__":
    pass