from time import sleep
import datetime
import re

import requests
from lxml import etree
from pymongo import MongoClient

from freeProxyWebs.proxyweb import ProxyWebs

class CloudProxy(ProxyWebs):
    """Scraper for the free proxy list at http://www.ip3366.net/free/ ("云代理").

    Scraped proxies accumulate in ``self._proxyList`` (inherited from
    ProxyWebs, along with ``self._url``) and are persisted into the
    ``ipPool.cloud`` MongoDB collection on localhost.
    """

    def _get_web(self, url, params):
        """Fetch one listing page and return its raw response bytes.

        Returns None when the request fails, so callers can skip the page
        instead of crashing.  (The original caught only ``requests.Timeout``
        and then hit an unbound ``resp`` — a guaranteed NameError on any
        timeout.)
        """
        requests.adapters.DEFAULT_RETRIES = 3  # connection-pool retry count
        try:
            resp = requests.get(url, params=params, timeout=3)
        except requests.RequestException:
            # Covers Timeout as well as connection/protocol errors.
            print("请求超时")
            return None
        return resp.content

    def _parser(self, html):
        """Parse one page of the proxy table into ``self._proxyList``.

        Returns True while every parsed row was updated today or yesterday;
        returns False once a stale (older) entry is seen, signalling the
        caller that further pages are not worth fetching.
        """
        Today = True
        if not html:
            # Fetch failed — nothing to parse; let the caller continue paging.
            return Today
        tree = etree.HTML(html)
        rows = tree.xpath(
            '//table[@class="table table-bordered table-striped"]/tbody/tr'
        )
        # Hoisted out of the loop: "yesterday" is the staleness cutoff.
        yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
        for row in rows:
            infos = row.xpath('td/text()')
            if len(infos) < 7:
                continue  # malformed row: skip instead of IndexError
            proxy = {
                'ip': infos[0].strip(),
                'port': infos[1].strip(),
                'protocol': infos[3].strip(),
            }
            self._proxyList.append(proxy)
            # Column 6 holds the last-verified timestamp; keep the date part
            # only, e.g. "2020/1/1 12:00:00" -> "2020/1/1".
            dateProxy = infos[6].split(' ')[0]
            proxyTime = datetime.datetime.strptime(dateProxy, '%Y/%m/%d')
            if proxyTime < yesterday:
                # Proxy older than yesterday: mark this page as stale.
                Today = False
        return Today

    def _crawl_pages(self, max_page, stop_when_stale):
        """Walk the paginated listing, parsing each page into the proxy list.

        When *stop_when_stale* is true, stop paging as soon as a page
        contains entries older than yesterday.
        """
        # stype 1 is the only listing type crawled; kept as a loop to mirror
        # the site's URL scheme (range(1, 2) iterates once).
        for stype in range(1, 2):
            for page in range(1, max_page + 1):
                params = {'stype': stype, 'page': page}
                fresh = self._parser(self._get_web(self._url, params))
                if stop_when_stale and not fresh:
                    break  # stop paging within this stype, as before
                sleep(1)  # throttle: be polite to the server

    def refresh_daily_proxy(self):
        """Crawl only the pages whose proxies were updated since yesterday."""
        self._crawl_pages(7, stop_when_stale=True)

    def init_proxy_pool(self):
        """Crawl all pages, then (re)build the ``ipPool.cloud`` collection."""
        self._crawl_pages(7, stop_when_stale=False)

        client = MongoClient('mongodb://localhost:27017/')
        with client:
            db = client.ipPool
            try:
                db.cloud.drop()
            except Exception:  # narrowed from a bare except
                print('没有集合，直接创建')
            finally:
                if self._proxyList:  # insert_many raises on an empty list
                    db.cloud.insert_many(self._proxyList)
                print("云代理网集合初始化完成")

    def save_proxies(self):
        """Append the in-memory proxy list to the ``ipPool.cloud`` collection."""
        client = MongoClient('mongodb://localhost:27017/')
        with client:
            db = client.ipPool
            if self._proxyList:  # insert_many raises on an empty list
                db.cloud.insert_many(self._proxyList)
            print("云代理保存完成")