import requests
from bs4 import BeautifulSoup as bs


def get_city_aqi(city_pinyin):
    """Fetch the AQI reading for one city from air-level.com.

    Args:
        city_pinyin: the city's pinyin slug used in the site URL
            (e.g. 'beijing').

    Returns:
        The AQI value as a string (the first <span> on the page,
        which the site uses for the AQI number), or None if the
        request fails or the element is missing.
    """
    url = 'http://www.air-level.com/air/' + city_pinyin
    r = requests.get(url, timeout=30)
    if r.status_code != 200:
        # Explicitly signal failure instead of falling through implicitly.
        return None
    soup = bs(r.text, 'html.parser')
    aqi = soup.find('span')
    return aqi.string if aqi is not None else None


def get_all_cities():
    """Scrape the site's front page for the full list of cities.

    Returns:
        A list of (city_name, city_pinyin) tuples. The pinyin slug is
        extracted from each link's href, stripping the '/air/' prefix
        and the trailing '/'. Returns an empty list if nothing matches.
    """
    url = 'http://www.air-level.com/'
    r = requests.get(url, timeout=30)
    # Use the stdlib parser for consistency with get_city_aqi and to
    # avoid the extra third-party 'lxml' dependency.
    soup = bs(r.text, 'html.parser')
    city_list = []
    for div in soup.find_all(attrs={'class': 'citynames'}):
        # href looks like '/air/<pinyin>/' — slice off prefix and slash.
        city_list.extend(
            (a.text, a['href'][5:-1]) for a in div.find_all('a')
        )
    return city_list


def main():
    """Print the AQI for every city listed on air-level.com.

    Cities whose AQI could not be fetched are skipped silently.
    """
    for city_name, city_pinyin in get_all_cities():
        city_aqi = get_city_aqi(city_pinyin)
        if city_aqi is not None:
            print(city_name, city_aqi)


if __name__ == '__main__':
    main()
