# 打开文件
import re

from bs4 import BeautifulSoup

# Load the raw HTML of the city list page from disk.
with open('city.html', 'r', encoding='utf-8') as fh:
    html_content = fh.read()

# Echo the page source so the run can be inspected.
print(html_content)

# Parse the page so it can be queried with CSS selectors.
soup = BeautifulSoup(html_content, 'html.parser')

# Every <a class="js-city-name"> anchor carries one city name.
anchors = soup.select('.js-city-name')
cities = [anchor.text for anchor in anchors]

print("-----------")
# Dump the plain city names, one per line.
for name in cities:
    print(name)

print("-----------")
# Pair each city name with the id embedded in the anchor's data-val
# attribute (expected to contain e.g. "...choosecityid:123}...").
# The original chained split() crashed with KeyError/IndexError on any
# anchor lacking the attribute or the marker; a regex search lets us
# skip unparsable anchors instead.
_CHOOSECITYID_RE = re.compile(r'choosecityid:([^}]*)')

cities_with_id = []
for link in soup.select('.js-city-name'):
    match = _CHOOSECITYID_RE.search(link.get('data-val', ''))
    if match:  # only keep anchors whose id could actually be parsed
        cities_with_id.append(
            {'name': link.text, 'choosecityid': match.group(1)}
        )

# Print each city name with its corresponding choosecityid.
for city_info in cities_with_id:
    print(f"城市名称: {city_info['name']}, choosecityid: {city_info['choosecityid']}")

print("-----------")
# Re-scan the city anchors, this time reading the id straight from
# the data-ci attribute.
cities_with_id = []
for anchor in soup.select('.js-city-name'):
    cities_with_id.append({'name': anchor.text, 'city_id': anchor['data-ci']})

# Report each city together with its data-ci id.
for entry in cities_with_id:
    print(f"城市名称: {entry['name']}, ID: {entry['city_id']}")
