"""
爬取全国主要城市的URL信息并写入数据库
"""
import requests
from bs4 import BeautifulSoup
from utils import sqlhelper


class SpiderCityURL:
    """Scrape the nmc.cn forecast index page and persist each city's URL to the database."""

    def __init__(self, url: str):
        """
        :param url: forecast index page to scrape,
                    e.g. http://www.nmc.cn/publish/forecast.html
        """
        self.url = url
        # Collected entries, each shaped like {'city': <name>, 'url': <absolute url>}
        self.city_urls = []

    def get_content_form_url(self):
        """
        Fetch the page at ``self.url`` and return its decoded HTML.

        :return: the page body decoded as UTF-8 text
        """
        # Browser-like User-Agent so the site serves the normal page.
        header = {
            'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36"
        }
        # BUG FIX: the dict was previously passed as ``params=`` (query-string
        # parameters); it must be sent as HTTP request headers.
        response = requests.get(self.url, headers=header)
        # Decode the raw body explicitly instead of trusting response.encoding.
        content = response.content.decode("utf-8")
        return content

    def get_data_from_content(self):
        """Parse the fetched HTML and collect city name / URL pairs into ``self.city_urls``."""
        content = self.get_content_form_url()
        soup = BeautifulSoup(content, 'lxml')
        # Each "col-xs-4" div inside the "tab-content hb" container holds one city entry.
        cells = soup.find('div', class_='tab-content hb').find_all("div", class_="col-xs-4")
        for cell in cells:
            # City display name, whitespace-stripped.
            city_name = str(cell.find('div', class_='col-xs-3').text).strip()
            # The anchor's href is site-relative; prefix the host to make it absolute.
            city_url = "http://www.nmc.cn" + str(cell.find('a', class_='city').attrs['href'])
            self.city_urls.append({'city': city_name, 'url': city_url})

    def save_url_db(self):
        """Update the URL column of existing CityURL rows for every collected city."""
        for city_value in self.city_urls:
            # NOTE(review): SQL built via string formatting is injection-prone if a
            # scraped city name ever contains a quote — switch to parameterized
            # queries if sqlhelper supports them.
            sql_get = "Select Id, Area from CityURL where City = Area And Area Like '%s'" % (city_value['city'] + '%')
            response_get = sqlhelper.get_db_data(sql_get)
            # Only update rows that already exist for this city; no insert path here.
            if response_get['data']:
                sql_update = "Update CityURL Set URL ='%s' Where Id='%s'" % (city_value['url'], response_get['data'][0][0])
                response_update = sqlhelper.update_db(sql_update)
                if response_update['status']:
                    print("%s的URL写入成功！" % (city_value['city']))


if __name__ == '__main__':
    # Entry point: scrape the national forecast index page and persist city URLs.
    start_url = "http://www.nmc.cn/publish/forecast.html"
    crawler = SpiderCityURL(start_url)
    # Collect every city's name and detail-page URL from the index page.
    crawler.get_data_from_content()
    # Push the collected URLs into the database.
    crawler.save_url_db()