import csv
from typing import Iterable

import scrapy
from scrapy import Selector, Request
from scrapy.http import HtmlResponse

from Visualization.items import WeatherInfo


class WeatherSpider(scrapy.Spider):
    """Scrape 7-day weather forecasts from tianqi.com, one page per city.

    City list comes from ``city.csv`` in the working directory:
    column 0 is the city's base URL on tianqi.com, column 1 is the city
    name; the first row is a header and is skipped.
    Yields ``WeatherInfo`` items with name/date/high_temp/low_temp/
    weather_desc fields, all whitespace-stripped.
    """

    name = "weather"
    allowed_domains = ["www.tianqi.com"]

    def start_requests(self) -> Iterable[Request]:
        """Yield one request per city listed in city.csv.

        The city name travels in ``meta['name']`` so ``parse`` can
        attach it to every item scraped from that page.
        """
        with open('city.csv', 'r', encoding='utf-8') as f:
            reader = csv.reader(f)
            for index, row in enumerate(reader):
                # Row 0 is the CSV header, not a city.
                if index == 0:
                    continue
                city_name = row[1]
                # Appending '7/' to the city's base URL selects the
                # 7-day forecast page.
                city_url = row[0] + '7/'
                yield Request(url=city_url, meta={'name': city_name})

    def parse(self, response: HtmlResponse):
        """Parse a 7-day forecast page into WeatherInfo items.

        :param response: the forecast page; ``response.meta['name']``
            carries the city name set in ``start_requests``.
        """
        city_name = response.meta.get('name')
        # One <li> per forecast day inside the forecast list.
        day_selector = "body > div.w1100.newday40_top > div.inleft > ul.weaul > li"
        for day in response.css(day_selector):
            # `.get()` returns None when a selector misses (e.g. the
            # site layout changes); `or ''` guards .strip() against
            # raising AttributeError on None.
            date = (day.css('div.weaul_q span.fl::text').get() or '').strip()
            high_temp = (day.css('div.weaul_z span:nth-child(2)::text').get() or '').strip()
            low_temp = (day.css('div.weaul_z span:nth-child(1)::text').get() or '').strip()
            weather_desc = (day.css('div.weaul_z::text').get() or '').strip()

            # Use the spider's logger instead of print so output obeys
            # the Scrapy LOG_LEVEL setting.
            self.logger.debug(
                '%s, date: %s, high: %s℃, low: %s℃, weather: %s',
                city_name, date, high_temp, low_temp, weather_desc,
            )

            w = WeatherInfo()
            w['name'] = city_name
            w['date'] = date
            w['high_temp'] = high_temp
            w['low_temp'] = low_temp
            w['weather_desc'] = weather_desc
            yield w