from crawler.crawler_mongo_client import *
import json
import random
import time
import requests

_time_format = "%Y-%m-%d %H:%M:%S"
# _proxy_url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pro=&city=0&yys=0&port=11&pack=25629&ts=1&ys=0&cs=0&lb=1&sb=0&pb=4&mr=0&regions="
_proxy_url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pro=&city=0&yys=0&port=1&pack=27024&ts=1&ys=0&cs=0&lb=1&sb=0&pb=4&mr=0&regions="


class ProxyService:
    """Maintains a small pool of HTTP proxies fetched from the qingjuhe API.

    Expired proxies are evicted lazily when picked and replaced one-for-one
    with freshly fetched entries, so the pool size stays roughly constant.
    Each proxy is the dict returned by the API (expected to carry at least
    an 'expire_time' field formatted per _TIME_FORMAT).
    """

    # Timestamp layout of the API's "expire_time" field.
    _TIME_FORMAT = "%Y-%m-%d %H:%M:%S"

    def __init__(self):
        # Per-instance pool.  (Previously this was a class attribute, so
        # every ProxyService instance shared -- and mutated -- one list.)
        self.__proxies = []
        self.__add_new_proxies(10)

    def random_proxy(self):
        """Return a random, non-expired proxy dict from the pool.

        Expired entries encountered along the way are removed (each removal
        fetches one replacement via remove()).
        """
        # Iterative form of the original recursion: no RecursionError risk
        # when many entries have expired at once.
        while True:
            proxy = random.choice(self.__proxies)
            # struct_time tuples compare chronologically, so this is a
            # plain "not yet expired" check.
            if time.strptime(proxy['expire_time'], self._TIME_FORMAT) > time.localtime():
                return proxy
            self.remove(proxy)

    def __add_new_proxies(self, count):
        """Fetch `count` proxies from the vendor API and append them to the pool.

        Returns the newly fetched list of proxy dicts.
        """
        # Timeout so a hung vendor endpoint cannot block the caller forever.
        proxy_resp = requests.get(_proxy_url.format(count), timeout=10)
        new_proxies = proxy_resp.json()
        self.__proxies.extend(new_proxies['data'])
        return new_proxies['data']

    def remove(self, proxy):
        """Drop `proxy` from the pool (if present) and fetch one replacement."""
        if proxy in self.__proxies:
            self.__proxies.remove(proxy)
            self.__add_new_proxies(1)