from time import sleep
import datetime
import re

import requests
from lxml import etree
from pymongo import MongoClient

from freeProxyWebs.proxyweb import ProxyWebs

class SixSixProxy(ProxyWebs):
    """Scraper for the free-proxy site http://www.66ip.cn/.

    Crawls paginated proxy tables, accumulates entries into
    ``self._proxyList`` (inherited from ProxyWebs) and persists them into
    the MongoDB collection ``ipPool.sixsix``.
    """

    def _get_web(self, url):
        # Fetching is delegated entirely to the base-class implementation.
        return super()._get_web(url)

    def _parser(self, html):
        """Parse one result page and append its proxies to the pool.

        :param html: raw HTML text of a 66ip.cn listing page.
        :returns: True while every proxy on the page is fresh (updated no
            earlier than yesterday); False once a stale entry is seen, so
            callers can stop paging.
        """
        today = True
        tree = etree.HTML(html)
        # Regex for the proxy's verification date, e.g. "2020年01月31日".
        patternDate = '[0-9]{4}年[01][0-9]月[0-3][0-9]日'
        # Rows of the proxy table; [1:] skips the header row.
        xpathProxies = '//table[@width="100%"]/tr'
        treeProxiesList = tree.xpath(xpathProxies)[1:]
        xpathInfos = "td/text()"
        # Hoisted out of the loop: the reference times are loop-invariant.
        nowTime = datetime.datetime.now()
        yestardayTime = nowTime + datetime.timedelta(days=-1)
        for treeProxy in treeProxiesList:
            infos = treeProxy.xpath(xpathInfos)
            proxyIP = infos[0].strip()
            proxyPort = infos[1].strip()
            oneProxy = {'ip': proxyIP, 'port': proxyPort, 'protocol': 'http'}
            # The last cell carries the verification date; strptime yields
            # midnight of that date, so anything but today's date compares
            # earlier than (now - 1 day).
            dateProxy = re.findall(patternDate, infos[-1])[0]
            proxyTime = datetime.datetime.strptime(dateProxy,
                '%Y年%m月%d日')
            if proxyTime < yestardayTime:
                # Stale proxy found: signal the caller to stop paging,
                # but still keep the entry (matches original behaviour).
                today = False
            self._proxyList.append(oneProxy)
        return today

    def refresh_daily_proxy(self):
        """Crawl pages until a stale proxy shows up or maxPage is hit."""
        maxPage = 10    # upper bound on pages crawled per refresh
        i = 1
        url = self._url
        while i <= maxPage and self._parser(self._get_web(url)):
            i += 1
            # BUG FIX: build each page URL from the base URL. The original
            # appended to the previous page's URL, producing broken URLs
            # like ".../2.html3.html" from page 3 onward.
            url = self._url + f'{i}.html'
            sleep(1)

    def init_proxy_pool(self):
        """Initialise the pool: crawl the first 20 pages, then replace the
        ``ipPool.sixsix`` collection with the freshly scraped proxies."""
        url = self._url
        i = 1
        while i <= 20:
            self._parser(self._get_web(url))
            print(url + f' 第{i}页完成')
            i += 1
            url = self._url + f'{i}.html'
            sleep(0.5)
        # Reset the backing collection before inserting the new batch.
        client = MongoClient('mongodb://localhost:27017/')
        with client:
            db = client.ipPool
            try:
                # drop() on a missing collection is a no-op in pymongo,
                # so this rarely (if ever) raises.
                db.sixsix.drop()
            except Exception:
                # Narrowed from a bare except: that also swallowed
                # KeyboardInterrupt/SystemExit.
                print('没有集合，直接创建')
            # insert_many raises InvalidOperation on an empty list.
            if self._proxyList:
                db.sixsix.insert_many(self._proxyList)
            print("66网集合初始化完成")

    def save_proxies(self):
        """Append the accumulated proxies to the ``ipPool.sixsix``
        collection."""
        client = MongoClient('mongodb://localhost:27017/')
        with client:
            db = client.ipPool
            # Guard against an empty batch: insert_many([]) raises.
            if self._proxyList:
                db.sixsix.insert_many(self._proxyList)
            print("66代理保存完成")
        