#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
'''
    网络爬虫
    作者：王楠
    日期：2020/03/03
    版本：5.0:网络爬虫
         6.0:bs4 beautifulsoup4
         8.0:写入到csv文件
'''

import requests  # 网络方法
from bs4 import BeautifulSoup
import threading
import time
import csv

# Total number of queries performed so far; incremented by inquiry_aqi().
# NOTE: the other shared state (space_time_chick, chick_times, timer,
# inquiry_times) is bound at runtime by aqi_main(); a module-level `global`
# statement is a no-op, so it was removed.
count = 0


def inquiry_aqi(city_list):
    '''
    Query pm25.in for the AQI data of every city in city_list, print the
    results, and append them to '_AQI_.csv'.

    If the user asked for repeated queries (chick_times == 'y'), re-schedule
    this function with a threading.Timer until `inquiry_times` total queries
    have run.

    :param city_list: list of (city_name, city_pinyin) tuples
    '''
    global count, timer     # count: number of queries performed so far
    count += 1
    city_dict_list = []
    lines = []     # rows to be written to the csv file
    # Header rows: query counter, timestamp, column names.
    lines.append(['查询次数', count])
    lines.append(['查询时间：', time.strftime('%Y-%m-%d %H:%M:%S')])
    lines.append([
        '城市名称', '数据更新时间', 'AQI', 'PM2.5/1h', 'PM10/1h', 'CO/1h', 'NO2/1h',
        'O3/1h', 'O3/8h', 'SO2/1h'
    ])
    print('××××××××××第{}次查询×××××××××××××'.format(count))
    print('查询时间：', time.strftime('%Y-%m-%d %H:%M:%S'))
    print('空气质量情况如下：')
    print('正在查询请稍等...')
    # Distinct loop names: the original reused `i` for three different loops.
    for idx, (city_name, city_pinyin) in enumerate(city_list, start=1):
        if idx % 10 == 0:
            print('已处理{}条记录，共{}条记录'.format(idx, len(city_list)))
        url = 'http://pm25.in/' + city_pinyin
        try:
            r = requests.get(url, timeout=30)
            soup = BeautifulSoup(r.text, features='html.parser')
            div_list = soup.find_all('div', {'class': 'span1'})
            live_time = soup.find('div', class_='live_data_time').text.strip()
        except (requests.RequestException, AttributeError) as e:
            # Robustness fix: one failed city must not abort the whole batch.
            # (AttributeError covers a page whose expected divs are missing.)
            print('查询{}失败: {}'.format(city_name, e))
            continue
        live_time_l = live_time.split('：')  # -> ['数据更新时间', 'yyyy-mm-dd ...']
        aqi_dict = {'城市名称': city_name, live_time_l[0]: live_time_l[1]}
        value_list = [city_name, live_time_l[1]]
        # The first 8 'span1' divs hold the pollutant caption/value pairs
        # in the column order of the csv header above.
        for div_content in div_list[:8]:
            key = div_content.find('div', {'class': 'caption'}).text.strip()
            value = div_content.find('div', {'class': 'value'}).text.strip()
            aqi_dict[key] = value
            value_list.append(value)
        lines.append(value_list)
        aqi_dict['查询次数'] = count
        city_dict_list.append(aqi_dict)
    for record in city_dict_list:
        print(record)
    # Append mode so repeated timer-driven queries accumulate in one file.
    with open('_AQI_.csv',
              'a',
              encoding='utf-8',
              newline='') as f:
        csv.writer(f).writerows(lines)
    # Re-schedule ourselves until the requested number of queries has run.
    # (The original also called timer.cancel() when count == inquiry_times,
    # but cancelling an already-fired Timer is a no-op — removed as dead code.)
    if chick_times == 'y' and count < inquiry_times:
        timer = threading.Timer(60 * space_time_chick, inquiry_aqi, [city_list])
        timer.start()


def get_all_city():
    '''
    Scrape the pm25.in front page and return every listed city.

    :return: list of (city_name, city_pinyin) tuples, where city_pinyin is
             the link path with its leading '/' stripped.
    '''
    response = requests.get('http://pm25.in/', timeout=30)
    page = BeautifulSoup(response.text, features='html.parser')
    # The second 'bottom' div on the page holds the full city index.
    all_cities_div = page.find_all('div', {'class': 'bottom'})[1]
    return [(link.text, link['href'][1:])
            for link in all_cities_div.find_all('a')]


def aqi_main():
    '''
    Interactive entry point: ask which cities to query and whether to repeat
    the query on a timer, then run inquiry_aqi accordingly.
    '''
    # Only the names actually assigned here need a global declaration;
    # the original also listed unused names (inq_city_list, time_now) and
    # declared `global timer` twice.
    global space_time_chick, timer, chick_times, inquiry_times
    all_or_some = input('s:查询某些城市  a:查询所有城市 (s/a):')
    if all_or_some == 's':
        city_str = input('请输入一个或多个城市的拼音(多个用空格隔开)：')
        print('您要查询的城市是：')
        city_list_pinyin = city_str.split(' ')
        all_city_list = get_all_city()
        city_list = []
        for city_pinyin in city_list_pinyin:
            for name, pinyin in all_city_list:
                if city_pinyin == pinyin:
                    print(name, end=' ')
                    city_list.append((name, city_pinyin))
        print('\n')
    elif all_or_some == 'a':
        city_list = get_all_city()
    else:
        # Bug fix: any other input left city_list unbound and crashed with
        # UnboundLocalError further down.
        print('无效输入，请输入 s 或 a')
        return
    chick_times = input('是否一直查询（y/n）:')
    if chick_times == 'n':
        inquiry_aqi(city_list)
    elif chick_times == 'y':
        space_time_chick = int(input('请输入查询间隔时间(min)：'))
        inquiry_times = int(input('请输入总的查询次数：'))
        # Fire the first query after 1 second; inquiry_aqi re-schedules
        # itself from then on.
        timer = threading.Timer(1, inquiry_aqi, [city_list])
        timer.start()


if __name__ == '__main__':
    # Script entry point: show the current time, then start the interactive
    # query tool.  (The original `global` statement here was a no-op —
    # module-level code is already the global scope.)
    time_now = time.strftime('%Y-%m-%d %H:%M:%S')
    print('当前时间是：', time_now)
    print('这是一个ｐｍ2.5查询工具')
    aqi_main()
