import threading
from spider_xue.settings import COOKIES, URL1
import re
import random
import requests
import time
from spider_xue.utils import send_msg
from db import mydb

from bs4 import BeautifulSoup


class IP():
    """Scrape free proxies from kuaidaili.com and manage them in MongoDB.

    Proxies are stored in the ``new_ip`` collection as documents with
    ``ip`` ("host:port"), ``count`` and ``status`` (0 = free, 1 = in use).
    """

    def __init__(self, headers="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:61.0) Gecko/20100101 Firefox/61.0"):
        # NOTE(review): `headers` is accepted but never used anywhere in the
        # class; kept only so existing callers passing it keep working.
        self.mainURL = "https://www.kuaidaili.com/free/"    # proxy-list site home page
        self.nnURL = self.mainURL + "inha/"                 # domestic high-anonymity proxy listing

        self.cache = {}
        self.session = requests.Session()
        self.db = mydb["new_ip"]                            # Mongo collection of validated proxies

    @staticmethod
    def _extract_proxies(html):
        """Parse one kuaidaili listing page; return a list of "ip:port" strings."""
        soup = BeautifulSoup(html, "lxml")
        table = soup.find_all("div", id="list")[0]
        rows = table.find_all("tr")[1:]                     # first <tr> is the header row
        return [
            f'{tds[0].get_text()}:{tds[1].get_text()}'
            for tds in (row.find_all("td") for row in rows)
        ]

    def get_nn_IP(self, page=""):
        """Fetch one high-anonymity listing page (e.g. page="2/") and return its proxies.

        Raises whatever ``requests`` raises on network failure/timeout.
        """
        headers = {"User-Agent": random.choice(headers_list)}
        # Session cookie the site expects; presumably stale by now — refresh
        # from a browser session if requests start getting rejected.
        headers["cookie"] = "channelid=0; sid=1631954816021254; _gcl_au=1.1.486517588.1631956553; _ga=GA1.2.469883286.1631956553; _gid=GA1.2.1582827583.1632370251; Hm_lvt_7ed65b1cc4b810e9fd37959c9bb51b31=1632370256,1632451843,1632453851,1632453911; Hm_lpvt_7ed65b1cc4b810e9fd37959c9bb51b31=1632457110"
        html = self.session.get(
            self.nnURL + page, headers=headers, timeout=4).text
        return self._extract_proxies(html)

    def refreash(self):
        """Re-fetch the first listing page (result is discarded; kept for compat)."""
        self.get_nn_IP()

    def more(self):
        """Crawl listing pages 1..199, validating each page's proxies in a thread."""
        first = self.get_nn_IP()
        workers = []
        t1 = threading.Thread(target=test_cookies,
                              args=(first, self.db, self.db, ))
        t1.start()
        workers.append(t1)
        for page_no in range(2, 200):
            time.sleep(1)                                   # be polite to the listing site
            batch = self.get_nn_IP(page=f"{page_no}/")
            tx = threading.Thread(target=test_cookies,
                                  args=(batch, self.db, self.db, ))
            tx.start()
            workers.append(tx)

        for worker in workers:
            worker.join()

    def get_one(self):
        """Check out one free proxy (status 0 -> 1); return its document or None."""
        tmp = self.db.find_one({"status": 0})
        if tmp:
            self.db.update_one({"ip": tmp["ip"], "port": tmp['port']}, {
                               "$set": {"status": 1}})
        return tmp

    def set_back_one(self, tmp):
        """Return a checked-out proxy to the free pool (status back to 0)."""
        self.db.update_one({"ip": tmp["ip"], "port": tmp['port']}, {
                           "$set": {"status": 0}})

    def delete_one(self, tmp):
        """Remove every document matching this proxy's ip/port."""
        self.db.delete_many({"ip": tmp["ip"], "port": tmp['port']})

    def parse(self):
        """Import proxies from a locally saved listing page and validate them."""
        # `with` guarantees the handle is closed (the original leaked it).
        with open("news/ip.html", "r") as f:
            page = f.read()
        test_cookies(self._extract_proxies(page), self.db, self.db)


def test_cookies(ips, newip, failip):
    """Validate candidate proxies with three consecutive request rounds.

    A proxy must answer a real request in all three rounds before it is
    upserted into ``newip`` with ``status`` 1.  ``failip`` is accepted for
    interface compatibility but is currently unused.

    The original triplicated the round loop verbatim; this keeps the exact
    per-round failure messages while running the rounds in one loop.
    """
    fail_labels = ("fail111111111", "fail22222222222", "fail33333333333333")
    last_round = len(fail_labels) - 1
    print("starting................")
    survivors = ips
    for round_no, label in enumerate(fail_labels):
        passed = []
        for proxy in survivors:
            try:
                send_req(proxy)
            except Exception:
                # Proxy failed this round — report and drop it.
                print(label, proxy)
                continue
            if round_no == last_round:
                # Final round survivor: persist as an available proxy.
                ip_str = {"ip": proxy, 'count': 1, 'status': 1}
                print("success................................", ip_str)
                newip.update_one({"ip": proxy}, {"$set": ip_str}, upsert=True)
            passed.append(proxy)
        survivors = passed
    print("ending................")


def send_req(i):
    """Probe proxy ``i`` ("host:port") with one real API request.

    Raises a requests exception on connect/timeout failure, or ValueError on
    a non-200 status or an empty JSON body — any exception means the proxy
    failed the health check.
    """
    tt = str(int(time.time())) + "000"          # millisecond-style timestamp for the API URL
    proxies = {"http": "http://%s" % i}
    # Context manager closes the session deterministically (original leaked it).
    with requests.Session() as ss:
        ss.headers = {"user-agent": random.choice(headers_list), "cookie": COOKIES}
        resp = ss.get(URL1.format("SH600096", tt, 142),
                      proxies=proxies, timeout=2)
    if resp.status_code != 200:
        raise ValueError("not 200")
    if not resp.json():
        print("jiexishibai ")
        raise ValueError("not jiexi")


# Pool of User-Agent strings; one is chosen at random per request so the
# scraper's traffic looks less uniform to the target site.
headers_list = [
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1623.0 Safari/537.36",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; Media Center PC 6.0; InfoPath.3; MS-RTC LM 8; Zune 4.7)",
    "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:29.0) Gecko/20120101 Firefox/29.0",
    "Opera/9.80 (X11; Linux x86_64; U; Ubuntu/10.10 (maverick); pl) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; it-it) AppleWebKit/533.20.25 (KHTML, like Gecko) Version/5.0.4 Safari/533.20.27",
    "Mozilla/5.0 (Windows x86; rv:19.0) Gecko/20100101 Firefox/19.0",
    "Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36",
    "Mozilla/5.0 (Microsoft Windows NT 6.2.9200.0); rv:22.0) Gecko/20130405 Firefox/22.0",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36",
    'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36',
    'Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36',
    'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:17.0) Gecko/20100101 Firefox/17.0.6',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36',
    'Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36'
]

# Module-level singleton, imported by the rest of the spider.
ip_obj = IP()


if __name__ == "__main__":
    # BUG FIX: the original called ip_obj.save(), but IP defines no `save`
    # method, so running this module always crashed with AttributeError.
    # Run the main workflow instead: crawl the listing pages and validate
    # the proxies.  (Use ip_obj.parse() to import from a saved HTML page.)
    ip_obj.more()