"""
本模块根据提供的城市URL，爬取当前城市未来7天的天气信息，并写入到数据库
"""
# 引入模块
import requests
from bs4 import BeautifulSoup
from utils import sqlhelper
from datetime import datetime, timedelta


# 实现爬虫的类
class CityWeather:
    """Scrape the next 7 days of weather for every city URL stored in the
    database and write the results into the CityWeather table."""

    def __init__(self):
        self.urls = []      # (Area, URL) rows loaded from the CityURL table
        self.error = ""     # last error message recorded during a run
        self.weathers = []  # scraped rows: one dict per (city, day)

        # Run summary returned to the caller: status flag, run timestamp,
        # and counts of scraped / updated / inserted rows.
        self.res = {
            'status': 0,
            'date': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
            'spider': 0,
            'update': 0,
            'insert': 0,
        }

    def get_urls_from_db(self):
        """Load every city's (Area, URL) pair from the CityURL table into self.urls."""
        sql = "Select Area, URL from CityURL where City=Area And URL is not Null;"
        response = sqlhelper.get_db_data(sql)
        if response['status']:
            self.urls = list(response['data'])
        else:
            # Keep the DB error so the caller can inspect why no URLs loaded.
            self.error = response['error']

    def get_content_form_url(self, url: str) -> str:
        """
        Fetch the page at *url* and return its HTML.

        :param url: page URL to request
        :return: decoded UTF-8 HTML text
        """
        header = {
            'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36"
        }
        # BUG FIX: the dict must be sent as HTTP headers; the original
        # passed `params=header`, which appended it to the query string
        # and sent no User-Agent at all.
        response = requests.get(url, headers=header)
        return response.content.decode("utf-8")

    def get_data_from_content(self, value):
        """
        Scrape one city's forecast page and append a dict per day to self.weathers.

        :param value: (area_name, url) tuple as loaded by get_urls_from_db
        """
        content = self.get_content_form_url(value[1])
        soup = BeautifulSoup(content, 'lxml')
        # The forecast entries live under the div with id="day7";
        # one 'weather pull-left' div per upcoming day.
        six_days = soup.find('div', id="day7").find_all('div', class_='weather pull-left')
        today = datetime.now()  # hoisted: same base date for every entry
        for index, day in enumerate(six_days):
            one_day_dict = {}
            # City name.
            one_day_dict['area'] = value[0]
            # Forecast date: entry 0 is tomorrow, entry 1 the day after, etc.
            weather_date = today + timedelta(days=(index + 1))
            one_day_dict['date'] = weather_date.strftime('%Y-%m-%d')
            # Weather description.
            one_day_dict['weather'] = str(day.find('div', class_='desc').text).strip()
            # Wind direction.
            one_day_dict['windd'] = str(day.find('div', class_='windd').text).strip()
            # Wind strength.
            one_day_dict['winds'] = str(day.find('div', class_='winds').text).strip()
            # Temperatures: first 'tmp' div is the high, second is the low.
            temp_content = day.find_all('div', class_='tmp')
            # High temperature, with the ℃ unit stripped.
            one_day_dict['hight'] = str(temp_content[0].text).strip().replace("℃", "")
            # Low temperature, with the ℃ unit stripped (the original
            # comment wrongly labeled this as the high again).
            one_day_dict['low'] = str(temp_content[1].text).strip().replace("℃", "")
            self.weathers.append(one_day_dict)

    def save_weather_db(self):
        """Upsert every scraped row into CityWeather: insert new (area, date)
        pairs, update existing ones, and count each in self.res."""
        # NOTE(security): these statements interpolate scraped page text
        # directly into SQL. If sqlhelper supports parameterized queries,
        # switch to placeholders to avoid SQL injection.
        for weather in self.weathers:
            # Does a row for this (area, date) already exist?
            sql_get = "Select Area from CityWeather Where Area='%s' and Date ='%s'" % (weather['area'], weather['date'])
            response_get = sqlhelper.get_db_data(sql_get)
            # BUG FIX: guard the lookup — the original indexed
            # response_get['data'] even when the SELECT itself failed.
            if not response_get['status']:
                self.error = response_get.get('error', '')
                continue
            if len(response_get['data']) == 0:
                # No row yet: insert a fresh one.
                sql_insert = "Insert Into CityWeather(Area,Date,Weather,Windd,Winds,Hight,Low) Value ('%s','%s','%s','%s','%s'," \
                  "'%s','%s')" % (weather['area'], weather['date'], weather['weather'], weather['windd'], weather['winds'],
                                  weather['hight'], weather['low'])
                response = sqlhelper.update_db(sql_insert)
                if response['status']:
                    self.res['insert'] += 1
                else:
                    print("%s的%s的天气写入失败！！！！" % (weather['area'], weather['date']))
            else:
                # Row exists: refresh it with the newly scraped values.
                sql_update = "Update CityWeather Set Weather='%s',Windd='%s',Winds='%s',Hight='%s',Low='%s' " \
                             "where Area='%s' and Date ='%s'" % (weather['weather'], weather['windd'], weather['winds'],
                                 weather['hight'], weather['low'], weather['area'], weather['date'])
                response = sqlhelper.update_db(sql_update)
                if response['status']:
                    self.res['update'] += 1
                else:
                    print("%s的%s天气更新失败！！！！" % (weather['area'], weather['date']))

        # Mark the run as finished.
        self.res['status'] = 1

    def get_weather_from_url(self):
        """Scrape every URL in self.urls and record the total row count."""
        for value in self.urls:
            self.get_data_from_content(value)
        # Total number of (city, day) rows scraped this run.
        self.res['spider'] = len(self.weathers)


if __name__ == '__main__':
    import time

    t_begin = time.time()
    # Run the full pipeline: build the crawler, load the city URLs,
    # scrape each city's forecast, then persist everything to the DB.
    crawler = CityWeather()
    crawler.get_urls_from_db()
    crawler.get_weather_from_url()
    crawler.save_weather_db()
    # Report the run summary and the elapsed wall-clock time.
    print(crawler.res)
    t_end = time.time()
    print("程序执行的时间：%.4f" % (t_end - t_begin))
