import csv
import time
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.common.by import By

url = "https://quote.eastmoney.com/center/gridlist.html#us_wellknown"

driver = webdriver.Edge()
driver.get(url)
html = driver.page_source

def save(data, filename='2.csv', exists=False):
    """Write scraped rows to *filename* as CSV.

    Args:
        data: list of rows, each row a list of cell strings (csv.writer input).
        filename: target CSV path.
        exists: when True, truncate the file and start fresh; when False
            (the default), append to the existing file.

    Bug fix: the append branch used to write ``data[1:]`` on the assumption
    that the first row is a header, but the caller already strips the header
    row (``trs[1:]``), so the first record of every page was silently
    dropped. Both branches now write every row; only the open mode differs.
    """
    mode = 'w' if exists else 'a'
    with open(filename, mode=mode, newline='', encoding='utf-8') as f:
        csv.writer(f).writerows(data)


num = 1  # 1-based page counter, used for progress messages only
try:
    while True:
        try:
            print(f"正在处理第 {num} 页...")
            time.sleep(3)  # give the JS-rendered table time to load
            soup = BeautifulSoup(driver.page_source, 'lxml')

            # First <tr> is the table header; keep only the data rows.
            data = []
            for tr in soup.find_all('tr')[1:]:
                cells = tr.find_all(['th', 'td'])
                data.append([cell.get_text(strip=True) for cell in cells])
            save(data)
            print(f"第 {num} 页保存成功，共 {len(data)} 条记录")
            try:
                time.sleep(1)
                # The last pager link reads '>' while more pages remain.
                menu_xpath = '//*[@id="mainc"]/div/div[2]/div/a[last()]'
                menu = driver.find_element(By.XPATH, menu_xpath)
                if menu.text == '>':
                    menu.click()
                    # plain string (the original f-string had no placeholders)
                    print("已点击下一页，等待页面加载...")
                    time.sleep(1)
                    num += 1
                else:
                    # Guard: an empty page would have raised IndexError here,
                    # which the broad outer except then masked as a page error.
                    if data:
                        print(data[0])
                    print("抓取数据完毕")
                    break
            except Exception as e:
                # Pager lookup/click failed — treat it as end of pagination.
                print(f"未找到下一页按钮或点击失败: {e}")
                print("爬取完成，共获取", num, "页数据")
                break
        except Exception as e:
            # Top-level boundary: report the page error and stop scraping.
            print(f"处理页面时出错: {e}")
            break
finally:
    # Always release the browser, even on KeyboardInterrupt/SystemExit.
    driver.quit()