import os.path
import pickle
import time

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import chromedriver_autoinstaller
import pickle


# Launch a local Chrome session.
# NOTE(review): chromedriver_autoinstaller is imported at the top of the file
# but install() is never called — presumably a matching chromedriver is
# already on PATH; confirm, or call chromedriver_autoinstaller.install()
# before constructing the driver.
driver = webdriver.Chrome()

# First page of the "merged pull requests created by me" dashboard.
# Only referenced by the commented-out stage-1 scraper below; the active
# code reads its URL list from pull_urls.pkl instead.
URL = "https://gitee.com/dashboard/pull-requests?status=merged&page=1&scope=created_by_me"

# --- Stage 1 (already run once): scrape the merged-PR URLs page by page and
# --- cache them in pull_urls.pkl. Kept commented out for reference / re-runs.
# driver.get(URL)
#
# # Wait for the user to log in manually (up to 600 s)
# element = WebDriverWait(driver, 600).until(
#     EC.presence_of_element_located((By.CLASS_NAME, 'ant-menu-title-content'))
# )
#
# pull_urls = set()
# # 不可能有100页 pull request吧
# for i in range(100):
#     page_url = f"https://gitee.com/dashboard/pull-requests?status=merged&page={i}&scope=created_by_me"
#     driver.get(page_url)
#     time.sleep(3)
#     data_div = driver.find_element(By.XPATH, """/html/body/div[1]/div[1]/main/div/div[2]/div[1]/div[2]""")
#     child_divs = data_div.find_elements(By.XPATH, "./*")[2:-1]
#     add_flag = False
#     for div in child_divs:
#         pull_url = div.find_elements(By.CSS_SELECTOR, "div")
#         if len(pull_url)<1:
#             break
#         pull_url = pull_url[1].find_element(By.CSS_SELECTOR, "a").get_attribute("href")
#         pull_urls.add(pull_url)
#         add_flag = True
#     if not add_flag:
#         break
#
# print(f"你所有的pull request共计{len(pull_urls)}条，分别是：")
# for url in pull_urls:
#     print(url)
#
# with open("pull_urls.pkl","wb") as fp:
#     pickle.dump(pull_urls, fp)

# Load the pull-request URLs cached by the (commented-out) stage-1 scraper.
# NOTE(review): pickle.load is safe here only because we wrote this file
# ourselves — never unpickle untrusted input.
with open("pull_urls.pkl", "rb") as fp:
    pull_urls = pickle.load(fp)

# Ensure the screenshot output directory exists. exist_ok=True replaces the
# original check-then-create (`if not exists: makedirs`), which is racy and
# non-idiomatic.
os.makedirs("result", exist_ok=True)

# Characters that are illegal (or risky) in file names on Windows/Unix.
_FILENAME_UNSAFE = set('\\/:*?"<>|\n\r\t')

repository_urls = set()
click_flag = False  # the dismiss-popup button only needs handling once
for idx, url in enumerate(pull_urls):
    driver.get(url)
    # Wait until the PR page body has rendered before screenshotting it.
    clip_area = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CLASS_NAME, "main-container"))
    )
    if not click_flag:
        # Gitee sometimes shows a one-time popup; dismiss it if present.
        maybe_exit_button = driver.find_elements(By.XPATH, "/html/body/div[3]/div[1]/div[4]/div/div/div/div/div[2]/button")
        if maybe_exit_button:
            try:
                maybe_exit_button[0].click()
            except Exception:
                # Best-effort: the popup may have vanished or be unclickable.
                pass
            time.sleep(1)
            click_flag = True

    # Extract the PR title and the target-repository link from the header.
    # NOTE(review): absolute XPaths are brittle — they break on any layout
    # change; prefer class/id-based locators if this page is revisited.
    title = driver.find_element(By.XPATH, "/html/body/div[3]/div[2]/div/div[2]/div[1]/div[1]/div[1]/div[1]/strong").text
    target_rep = driver.find_element(By.XPATH, "/html/body/div[3]/div[2]/div/div[2]/div[1]/div[1]/div[1]/div[3]/a[2]").get_attribute("href")
    repository_urls.add(target_rep)
    # Fix: PR titles may contain '/' (or other filename-illegal characters),
    # which previously made screenshot() fail — replace them with '_'.
    safe_title = "".join("_" if ch in _FILENAME_UNSAFE else ch for ch in title)
    img_name = f"result/{idx}_{safe_title}.png"
    clip_area.screenshot(img_name)
    print(f"图片{img_name}已保存")

# Shut the browser down, then persist the collected repository URLs so the
# next processing stage can pick them up.
driver.quit()

serialized = pickle.dumps(repository_urls)
with open("repository_urls.pkl", "wb") as fp:
    fp.write(serialized)