import requests
from bs4 import BeautifulSoup
import pandas as pd


def get_stock_list(exchange):
    """Fetch the A-share stock list for one exchange from Eastmoney.

    Args:
        exchange: 'sh' (Shanghai) or 'sz' (Shenzhen).

    Returns:
        list[dict]: one dict per stock with keys "code" (exchange prefix
        stripped) and "name".

    Raises:
        ValueError: if exchange is not 'sh' or 'sz'.
        requests.HTTPError: if the page request returns an error status.
    """
    if exchange not in ('sh', 'sz'):
        raise ValueError("Exchange must be 'sh' or 'sz'.")

    # The same Eastmoney page lists both exchanges; the exchange argument
    # is applied as a filter on the parsed links below.
    url = 'http://quote.eastmoney.com/stocklist.html'

    response = requests.get(url, timeout=30)  # bounded wait instead of hanging forever
    response.raise_for_status()  # fail loudly rather than parsing an error page
    response.encoding = 'utf-8'  # force encoding so Chinese names decode correctly
    soup = BeautifulSoup(response.text, 'html.parser')

    stocks = []

    # Parse the stock list, keeping only links for the requested exchange.
    # BUGFIX: the original accepted both 'sh' and 'sz' prefixes regardless
    # of the argument, so calling once per exchange and concatenating the
    # results collected every stock twice.
    for stock_info in soup.find_all('a'):
        stock_code = stock_info.get('href')
        if stock_code and stock_code.startswith(exchange):
            stock_name = stock_info.text.strip()
            # First two characters are the exchange prefix ('sh'/'sz').
            stocks.append({"code": stock_code[2:], "name": stock_name})

    return stocks


# Fetch the Shanghai and Shenzhen A-share lists and merge them in order.
all_stocks = []
for market in ('sh', 'sz'):
    all_stocks.extend(get_stock_list(market))

# Build a DataFrame and export it as CSV (utf-8-sig so Excel opens it cleanly).
df = pd.DataFrame(all_stocks)
df.to_csv('A股名单.csv', index=False, encoding='utf-8-sig')

print("A股名单已保存为 'A股名单.csv'")