#!/usr/bin/python3
# -*- coding: utf-8 -*-
import json
import time
import requests
import configparser

from retrying import retry
from sunnspidertools import SunnnThreadPool

class SunnnThreadPool(SunnnThreadPool):
    """Proxy-fed spider worker pool.

    Reads its settings from ``cofing.ini``, keeps a queue of proxies
    refreshed on a fixed interval, and runs the (not yet implemented)
    core crawl worker on the inherited thread pool.

    NOTE(review): the class deliberately shadows the imported base class
    of the same name; kept as-is because ``__main__`` instantiates
    ``SunnnThreadPool`` directly.
    """

    def __init__(self):
        """Load configuration and initialise the inherited thread pool."""
        self.cf = configparser.ConfigParser()
        # NOTE(review): 'cofing.ini' looks like a typo for 'config.ini',
        # but the file on disk may genuinely use this name — confirm
        # before renaming; kept byte-identical here.
        self.cf.read('cofing.ini')

        self.proxyUrl = self.cf.get('proxy', 'proxyUrl')
        # getint(): ConfigParser.get() returns str, but these values are
        # numeric (queue size / worker count / seconds) and the base-pool
        # constructor and time.sleep expect numbers.
        self.proxyMaxSize = self.cf.getint('proxy', 'proxyMaxSize')
        self.getProxyInterval = self.cf.getint('proxy', 'getProxyInterval')

        self.maxWorkers = self.cf.getint('system', 'maxWorkers')

        self.logFileName = self.cf.get('log', 'logFileName')

        super().__init__(self.maxWorkers, self.proxyMaxSize, self.logFileName)

    @retry(stop_max_attempt_number=5, wait_fixed=2000)
    def getProxyWorker(self):
        """Fetch one batch of proxies and load them into the pool.

        The endpoint is expected to return one proxy per CRLF-terminated
        line. Raises Warning when fewer than 5 proxies come back so the
        ``retry`` decorator re-runs the fetch (up to 5 attempts, 2 s apart).
        """
        try:
            resp = requests.get(self.proxyUrl, timeout=(3, 5))
            # Split the decoded body on CRLF and drop the empty element
            # after the trailing CRLF. (The original round-tripped the
            # text through json.dumps to split on the escaped '\\r\\n' —
            # equivalent for ASCII payloads, but it would mangle any
            # non-ASCII characters into \uXXXX escapes.)
            proxies = resp.content.decode().split('\r\n')[:-1]

            if len(proxies) < 5:
                raise Warning('代理获取数量异常，请检查代理相关配置')
            # Plain loop: the original used a list comprehension purely
            # for its side effects.
            for proxy in proxies:
                self.proxyPool.put(proxy)
        except Exception as e:
            self.logger.error(e)
            raise  # bare raise preserves the original traceback

    def getProxy(self):
        """Periodically clear and refill the proxy pool.

        Runs forever; one refresh per ``getProxyInterval`` seconds.
        (The original slept both before AND after each refresh, which
        doubled the effective period.)
        """
        while True:
            time.sleep(int(self.getProxyInterval))
            self.proxyPool.queue.clear()  # drop stale proxies before refill
            self.getProxyWorker()
            self.logger.info('获取代理任务执行成功')

    def coreWorker(self):
        """Core crawl loop — placeholder, not implemented yet.

        Sleeps each iteration so the stub does not busy-spin a CPU core
        (the original's ``while True: pass`` pegged one core at 100%).
        """
        while True:
            time.sleep(1)

    def leader(self):
        """Print the banner and launch the background workers."""
        self.logger.info('PcoptimumSpider v0.1版本 From  Nus 爬虫小组')
        self.logger.info(' 仅供学习参考使用，禁止用于违法用途，民主和谐')
        self.logger.info('               ———— 醉卧千山下，风过谢桃花。')

        # start getProxy Worker
        self.threadPool.submit(self.getProxy)

        # start core Worker
        self.threadPool.submit(self.coreWorker)





if __name__ == '__main__':
    # Build the pool from cofing.ini and hand control to the leader,
    # which spawns the proxy-refresh and core workers.
    spider = SunnnThreadPool()
    spider.leader()
