# coding=utf-8
import random

import pandas as pd
import time

import requests
from openpyxl import Workbook

# Workbook().save("aaa.xlsx")
#
#
# a = [{"name": "zhagnsan", "age": 12}, {"name": "lisi", "age": 13}]
#
# with pd.ExcelWriter("aaa.xlsx", mode='a') as f:
#     pd.DataFrame(a).to_excel(f, index=False, sheet_name="ccc")
#
#
# time.sleep(20)
#
# print("结束")

from DrissionPage import Chromium
from DrissionPage.items import MixTab
from concurrent.futures import ThreadPoolExecutor
import threading
from fake_useragent import UserAgent
from config import cookies as ck

# def openWeb(tab: MixTab):
#     tab.get("https://www.baidu.com")
#     time.sleep(random.randint(1, 3))
#     tab.close()
#
#
# def main():
#     browser = Chromium()
#     # with ThreadPoolExecutor(max_workers=3) as executor:
#     #     for _ in range(6):
#     #         tag = browser.new_tab()
#     #         print(isinstance(tag, MixTab))
#     #         executor.submit(openWeb, tag)
#     for _ in range(6):
#         t1 = threading.Thread(target=openWeb, args=(browser.new_tab(),))
#         t2 = threading.Thread(target=openWeb, args=(browser.new_tab(),))
#         t1.start()
#         t2.start()
#         t1.join()
#         t2.join()
#
#
# if __name__ == '__main__':
#     main()
# a = [1, 2, 3, 4, 5, 6, 7, 8, 9]
#
# for i in range(0, len(a), 3):
#     print(a[i])

# browser = Chromium()
# tab = browser.latest_tab
# t1 = browser.new_tab("https://www.baidu.com")
# t2 = browser.new_tab("https://www.baidu.com")
# t3 = browser.new_tab("https://www.baidu.com")
# t4 = browser.new_tab("https://www.baidu.com")
#
# time.sleep(3)
# browser.clear_cache()
# browser.close_tabs([tab, t1,t2,t3,t4])
#
# browser.quit()

# print(round(0.5567321))

# tab = Chromium().latest_tab
# tab.get("http://www.baidu.com")
# while True:
#     try:
#         su = tab.ele('@id=su')
#         print(su.attr('value'))
#
#         time.sleep(6)
#         print(tab.url)
#
#     except Exception as e:
#         print(e)
#         time.sleep(6)
#         continue

# u = "https://passport.baidu.com/cap/img?ak=uvErNgFVAw2s19CYSdl6FLj1U3ACoUjB&id=1250-gh9GK9WqMlAL%2Fbvh2J02lMjgSO6Sd%2F0ML0%2FGPTDNNwZD9IylwerKC3j4fjIAd5D1IeTi3eLb3u%2BmUfRObQS07pvBDTK0Qg6%2B6eC9UetifRCXe25Ak3wo5aXfbHFrSOdv5Er8nrvL62XSG%2Bv3bPQhMNcO2XgHZoLs87Y1SNwN1IggJXMjD6sCmwplOrBloiw5RtzDVG4Qww5fTz66NdHlJvLQ6hA0SifVVunDRl%2BUnBRHudHzWcc9CN6V257gI%2Bt1&tk=4742IC7Ic%2F%2F2a39cBuNWbHnzmm5dDTKzY7ZIcA%2BRchtf9gODijJhTnMU5rycRnNhxnOq7KwBFYhFq5ryyO5Cwj4nfGXmB5W215ecQR86Vh3aeMkaxJAW8Bx1d0GvoeznRy32wc9sPw8WmsniunTFbZaA9io5AuoEH0RWjpOgfOHehhM%3D"
# res = requests.get(url=u, cookies=ck, headers={"User-Agent": UserAgent().random, "Referer": u})
# print(res.status_code)
# print(res.content)
#
# with open("spin-img.jpeg", 'wb') as f:
#     f.write(res.content)

# data = {
#     "token": "nukHZ6acJSEEkEG6RLP3p1ySuchLQTcH-Y-0bDNlJA8",
#     "type": "900011",
#     "image": "123",
# }
# data.update({"image": "77778888"})
# print(data)

# def aa():
#     count = 1
#     while True:
#         time.sleep(1)
#         count += 1
#
#         if count == 6:
#             print(count)
#             return
#
#         print(count)
# aa()
# from urllib.parse import unquote
# aaaa = 'https://aiqicha.baidu.com/s?city=%E6%AD%A6%E6%B8%85%E5%8C%BA&f=%7B%22provinceCode%22:[%22120114%22]%7D&province=%E5%A4%A9%E6%B4%A5%E5%B8%82'
# bbb = 'https://aiqicha.baidu.com/s?city=武清区&f={"provinceCode":["120114"]}&province=天津市'
# print(unquote(aaaa) == bbb)

def main() -> None:
    """Attach to the browser's latest tab, inject saved login cookies, and
    open the Aiqicha (aiqicha.baidu.com) home page.

    Side effects:
        Launches/attaches to a Chromium browser via DrissionPage and performs
        a network navigation. Requires ``config.cookies`` to hold a valid
        Baidu session for the page to load authenticated.
    """
    tab = Chromium().latest_tab
    # Set cookies BEFORE navigating so the first page load is already
    # authenticated (avoids a redirect to the login page).
    tab.set.cookies(ck)
    tab.get("https://aiqicha.baidu.com/index/index")


if __name__ == "__main__":
    # Guard the entry point so importing this module no longer launches a
    # browser as an import-time side effect.
    main()
