import requests
import logging
import pymysql
from bs4 import BeautifulSoup

def GetUpDataTime(WebsiteUpdataTime, Headers, logger, f1_handler, cityid, stationcount):
    """Scrape the site's page-update timestamp and append it to WebsiteUpdataTime.

    The timestamp is reformatted into a 'YYYY-MM-DD HH:MM' string that MySQL
    can parse. On any failure, f1_handler is attached to the logger and the
    traceback is written to the error log.

    Note: the parameter was originally misspelled `f1_hanlder`, so the body's
    reference to `f1_handler` silently resolved to the module global instead
    of the argument; the spelling is now fixed (callers pass it positionally).
    cityid and stationcount are accepted but unused, kept for call-site
    compatibility.
    """
    try:
        # Pass the header dict via the `headers` keyword — the original passed
        # it as the second positional argument (`params`), so the headers were
        # appended to the query string and never sent as real HTTP headers.
        res = requests.get("http://www.air-level.com/air/beijing/", headers=Headers)
        soup = BeautifulSoup(res.text, "html.parser")
        # NOTE(review): ".label,label-info" matches elements with class
        # "label" OR <label-info> tags; ".label.label-info" may have been
        # intended — kept as-is to preserve current matching behavior.
        news = soup.select(".label,label-info")[0].text
        # Reassemble fixed character positions into "YYYY-MM-DD HH:MM";
        # presumably the source text is like "2018年05月01日 12:00" — the
        # exact page format is not visible here, TODO confirm.
        b = news[:4] + "-" + news[5:7] + "-" + news[8:10] + " " + news[12:17]
        # Stash the converted timestamp for later INSERTs.
        WebsiteUpdataTime.append(b)
    except Exception:
        # Attach the failure handler and record the full traceback.
        logger.addHandler(f1_handler)
        logger.exception("网页更新时间爬去失败")


def GetAlluil(Headers, cityid, stationcount, t_dsave, cityname, stationname):
    """Crawl every city page linked from the site index and insert one row
    per monitoring station into the `airdata` MySQL table.

    Besides its parameters, this function reads the module-level globals
    WebsiteUpdataTime (filled by GetUpDataTime), logger, f1_handler and
    f2_handler. On failure it logs the last city/station reached so the
    crawl can be resumed by hand.
    """
    try:
        # Send the headers as real HTTP headers (originally passed as the
        # `params` positional argument by mistake).
        res = requests.get("http://www.air-level.com/", headers=Headers)
        soup = BeautifulSoup(res.text, "html.parser")
        # Keyword arguments are required: positional connect() signatures
        # were removed in PyMySQL 1.0.
        database = pymysql.connect(host="localhost", user="root",
                                   password="Love1303", database="date",
                                   charset="utf8")
        # Parameterized INSERT — the scraped text is untrusted, so let the
        # driver quote the values instead of splicing them into the SQL
        # string with % formatting (injection-prone).
        sql = ("INSERT INTO airdata(city_id,city_name,city_url,station_id,"
               "station_name,aqi,level,pm25,pm10,major_pullution,"
               "WebsiteUpdataTime) "
               "VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
        # Walk every city link on the index page.
        for news0 in soup.select("#citylist a "):
            m = news0["href"]
            cursor = database.cursor()
            res = requests.get("http://www.air-level.com" + m, headers=Headers)
            # Separate name: the original rebound `soup`, shadowing the
            # index-page soup inside the loop.
            city_soup = BeautifulSoup(res.text, "html.parser")
            # Skip the first <tr> — the table header row.
            for news in city_soup.select("body div table  tr ")[1:]:
                for tds in news.select("td"):
                    # t_dsave buffers one table row's cells; it is cleared
                    # after each INSERT.
                    t_dsave.append(tds.text.replace("\n", ""))
                args = (cityid, news0.text, "http://www.air-level.com" + m,
                        stationcount, t_dsave[0], t_dsave[1], t_dsave[2],
                        t_dsave[3], t_dsave[4], t_dsave[5],
                        WebsiteUpdataTime[0])
                # Remember what was reached, for the failure log below.
                stationname.append(t_dsave[0])
                cityname.append(news0.text)
                try:
                    cursor.execute(sql, args)
                    database.commit()
                except Exception:
                    # Best-effort insert: roll back this row and carry on.
                    database.rollback()
                stationcount = stationcount + 1
                t_dsave.clear()
            cityid = cityid + 1
            # Progress indicator; 378 is the expected city count.
            print(str("%.2f" % ((cityid - 1) / 378 * 100)) + "%")
        database.close()
        # Success summary goes through the success handler. Use logger.info,
        # not logger.exception — there is no active exception here (the
        # original emitted a spurious "NoneType: None" traceback).
        logger.addHandler(f2_handler)
        a = str(cityid)
        b = str(stationcount)
        logger.info("一共爬取" + a + "个城市" + b + "个监测站点的数据")
    except Exception:
        # Attach the failure handler and report how far the crawl got.
        logger.addHandler(f1_handler)
        try:
            logger.exception(
                "只爬取到城市--" + cityname[len(cityname) - 1] + "--的--" + stationname[len(stationname) - 1] + "监测站")
        except Exception:
            # cityname/stationname still empty: nothing was scraped at all.
            logger.exception("未爬取到任何信息")


if __name__ == '__main__':
    # Buffer for one table row's <td> texts; cleared after each INSERT.
    t_dsave = []
    # Holds the site's update timestamp (filled by GetUpDataTime).
    WebsiteUpdataTime = []
    # Track the last city/station reached, as data for the error log.
    cityname = []
    stationname = []
    # Self-assigned running ids for stations and cities.
    stationcount = 1
    cityid = 1
    # HTTP request headers.
    Headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; …) Gecko/20100101 Firefox/59.0",
        "Upgrade-Insecure-Requests": "1",
        "Host": "www.air-level.com",
        "Referer": "www.air-level.com"
    }
    # Root logger; lowest level DEBUG so the handlers see every record.
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    # File handler used on failure (detailed format with level and location).
    f1_handler = logging.FileHandler('test.log')
    f1_handler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(filename)s[:%(lineno)d] - %(message)s"))
    # File handler used on success (same file, shorter format).
    f2_handler = logging.FileHandler('test.log')
    f2_handler.setFormatter(logging.Formatter("%(asctime)s -%(message)s "))
    # Run the crawl: first grab the site's update time, then all stations.
    GetUpDataTime(WebsiteUpdataTime, Headers, logger, f1_handler, cityid, stationcount)
    GetAlluil(Headers, cityid, stationcount, t_dsave, cityname, stationname)