# Fetch weather data (scraped from weather.com.cn)

from bs4 import BeautifulSoup
from urllib import request
import chardet
import json
import sys
sys.path.append('E:\\TJ-study\\python3\\Frame\\flask\\weather')

import requests  # 需要自己安装
import random
import time
from assets.userAgentList import userAgentList


class ParseWeather(object):
    """Fetch and combine weather data for one city from weather.com.cn.

    Each result merges two sources:
      1. A simulated AJAX request to d1.weather.com.cn (live conditions).
      2. The scraped HTML weather page (today's high/low temperatures).
    """

    def __init__(self, *args, **kwargs):
        # Default city code: Beijing. Overwritten by mockRequest().
        self.__city_code = '101010100'

    @property
    def weatherUrl(self):
        """URL of the HTML weather page for the current city code."""
        return 'http://www.weather.com.cn/weather1d/%s.shtml' % self.__city_code

    @property
    def randomUserAgent(self):
        """A randomly chosen User-Agent string (reduces blocking risk)."""
        # random.choice is the idiomatic, off-by-one-proof way to pick
        # a random element (replaces manual randint indexing).
        return random.choice(userAgentList)

    def mockRequest(self, city_code):
        """Simulate the site's dynamic JS request and merge with HTML data.

        :param city_code: weather.com.cn city code, e.g. '101010100'.
        :return: dict of live conditions plus temp_highest/temp_lowest.
        :raises Exception: if either HTTP request returns a non-200 status.
        """
        self.__city_code = city_code
        currentTime = int(time.time())
        result = {'temp_unit': "℃"}

        # Headers mimicking the browser's AJAX call.
        headers = {
            'User-agent': self.randomUserAgent,
            # Any non-empty cookie works for this login-free endpoint.
            'Cookie': str(currentTime),
            'Connection': 'keep-alive',
            'Accept': '*/*',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'zh-CN,zh;q=0.9',
            'Host': 'd1.weather.com.cn',
            # The endpoint validates the Referer against the page URL.
            'Referer': self.weatherUrl,
        }
        url = 'http://d1.weather.com.cn/sk_2d/%s.html?_=%s' % (self.__city_code, currentTime)
        dataFromJs = self.getResponseText(url=url, headers=headers)
        # Body looks like 'var dataSK = {...}'; split only on the FIRST '='
        # so JSON values that contain '=' are not truncated.
        result.update(json.loads(dataFromJs.split("=", 1)[1]))

        # Merge in the high/low temperatures scraped from the HTML page.
        result.update(self.parseHtml())

        return result

    def batchResult(self, page=1, page_size=50):
        """Return (total_city_count, results) for one page of cities.

        Reads 'assets/cityCode.txt'; each non-empty line ends with a city
        code as its last whitespace-separated field.

        :param page: 1-based page number.
        :param page_size: number of cities per page.
        """
        # Explicit encoding: the file contains Chinese city names, and the
        # platform default encoding is not guaranteed to be UTF-8.
        with open('assets/cityCode.txt', 'r', encoding='utf-8') as f:
            all_lines = [line for line in f.read().splitlines() if line]

        start = (page - 1) * page_size
        result = [self.mockRequest(line.split(" ")[-1])
                  for line in all_lines[start:start + page_size]]

        return len(all_lines), result

    def getResponseText(self, **kwargs):
        """GET a URL and return its body decoded as UTF-8.

        :param kwargs: passed straight through to requests.get().
        :raises Exception: on any non-200 status code.
        """
        response = requests.get(**kwargs)
        if response.status_code != 200:
            raise Exception("请求失败！")
        # The site serves UTF-8 but often omits the charset header, making
        # requests guess ISO-8859-1 (or None, which would crash .encode()).
        # Decoding the raw bytes directly is correct and robust.
        return response.content.decode('utf-8')

    def parseHtml(self):
        """Scrape the HTML page for today's high/low temperatures.

        :return: dict with 'temp_highest'/'temp_lowest', or empty dict if
                 the expected script tag or data is missing.
        """
        result = {}
        soup = BeautifulSoup(self.getResponseText(url=self.weatherUrl), 'html.parser')
        scripts = soup.find_all('script')
        # The 4th <script> holds 'var ... = {"1d": [...], ...}'.  Guard
        # both the index and .string (None for empty/compound tags) so a
        # page-layout change degrades to an empty result, not a crash.
        if len(scripts) > 3 and scripts[3].string:
            day_weather = json.loads(scripts[3].string.split('=', 1)[-1]).get("1d", None)
            if day_weather:
                # Each entry looks like '..,..,..,23℃,..'; field 3 is temp.
                temperature = [int(temp.split(',')[3].replace("℃", ""))
                               for temp in day_weather]
                result["temp_highest"] = max(temperature)
                result["temp_lowest"] = min(temperature)

        return result
