# -*- coding: utf-8 -*-
import scrapy
import bs4
import sys
import re
import datetime
from weatherdb.models import DayWeather,CityError
from weatherdb.city import CITY,AccuCity,YahooCity

# Convert Fahrenheit to Celsius (integer result).
def change(item):
    """Return *item* (degrees Fahrenheit; an int or a numeric string) as whole
    degrees Celsius, truncated toward zero by the int() cast.

    The original wrapped the subtrahend in a redundant ``int(32)`` and an
    extra ``float(...)`` cast; ``/ 1.8`` already yields a float in both
    Python 2 and Python 3, so both are dropped with identical results.
    """
    return int((int(item) - 32) / 1.8)

# Moji spider: scrape the current month's weather calendar (tianqi.moji.com).
class MojiWeatherSpider(scrapy.Spider):
    """Scrape daily weather for the current month from tianqi.moji.com.

    Rows are upserted into ``DayWeather`` keyed on (date, source='moji',
    province, city).  Cities come from ``CITY`` in weatherdb.city.
    Python 2 code (``except Exception, e`` syntax).
    """
    name = "mojiweatherspider"
    allowed_domains = ["tianqi.moji.com"]
    base_domain = 'http://tianqi.moji.com/weather/china/'
    start_urls = (
        'http://tianqi.moji.com/weather/china/jiangsu/nanjing',
    )

    def get_data(self,response):
        """Parse one city's calendar page and create/update DayWeather rows.

        ``response.meta`` may carry 'province' and 'city'; the defaults match
        the start URL (Jiangsu / Nanjing).
        """
        dayweather = DayWeather.objects.all()
        # No parser name passed: bs4 picks whichever is installed on the host.
        soup = bs4.BeautifulSoup(response.body)
        province = response.meta.get('province',u'江苏')
        city = response.meta.get('city',u'南京')
        # Year is taken from the "<year>年" label on the page.
        year = soup.find('em', id='select_year').text.split(u'年')[0]
        # Scraping the month from the page proved unreliable (original note:
        # "month scraping is buggy"), so use the current local month instead.
        # month = soup.find('em', id='select_month').text.split(u'月')[0]
        month =  str(datetime.datetime.now().date()).split('-')[1]
        li = soup.find('div', id='calendar_grid').find('ul').find_all('li')
        for i in li:
            if i.text:  # skip the empty padding cells of the calendar grid
                try:
                    day = i.find('em').text
                    name = i.find('b').find('img')['alt']  # condition name from icon alt-text
                    date = u'%s-%s-%s' % (year,month,day)
                    p = i.find_all('p')
                    # p[0] appears to read "<low>/<high>°"; p[1] is the wind text.
                    temperature = p[0].text.split('/')
                    low = temperature[0]
                    # [:-1] drops the trailing character — presumably a degree
                    # sign; note ``low`` is NOT trimmed the same way (TODO confirm
                    # the page format).
                    height = temperature[1][:-1]
                    wind = str(p[1].text)
                    # Upsert: update a matching existing row, else create one.
                    alreay = dayweather.filter(date=date,source='moji',province=province,city=city)
                    if not alreay.count():
                        DayWeather.objects.create(name=name,low=low,height=height,wind=wind,date=date,source='moji',province=province,city=city)
                    else:
                        obj = alreay[0]
                        obj.name = name
                        obj.low = low
                        obj.height = height
                        obj.wind = wind
                        obj.save()
                except Exception,e: 
                    # Best-effort per-cell scraping: report and keep going.
                    print('error____________',unicode(e))

    def parse(self, response):
        """Scrape the start city, then queue every city listed in CITY."""
        if response.status == 200:
            self.get_data(response)
            for c in CITY:
                # c = (province-slug, city-slug, province-name, city-name) — inferred from use here.
                yield scrapy.Request(self.base_domain +c[0]+'/'+c[1],meta={'province': c[2],'city':c[3]}, callback=self.parse_item)

    def parse_item(self,response):
        """Scrape a queued city; record failures in CityError instead of raising."""
        if response.status == 200:
            try:
                self.get_data(response)
            except Exception,e: 
                CityError.objects.create(province=response.meta['province'],city=response.meta['city'],info='moji:'+unicode(e))
                # print('error____________',unicode(e))

# AccuWeather spider: scrape the current month's calendar (accuweather.com).
class AccuWeatherSpider(scrapy.Spider):
    """Scrape daily weather for the current month from accuweather.com (zh/cn).

    Calendar cells are upserted into ``DayWeather`` (source='accu'); each
    cell's detail page is then fetched to fill in wind and rain.
    Python 2 code (``except Exception, e`` syntax).
    """
    name = "accuweatherspider"
    allowed_domains = ["www.accuweather.com"]
    base_domain = 'http://www.accuweather.com/zh/cn/'
    start_urls = (
        # ?monyr=MM/DD/YYYY selects which month the calendar displays.
        'http://www.accuweather.com/zh/cn/nanjing/105570/month/105570?monyr=%s' % (datetime.datetime.now().strftime("%m/%d/%Y")),
    )

    def get_data(self,response):
        """Parse a month-calendar page and upsert DayWeather rows.

        Returns a list of [detail-url, DayWeather-obj] pairs so the caller
        can fetch each day's detail page (wind / rain) in a follow-up request.
        """
        soup = bs4.BeautifulSoup(response.body)
        dayweather = DayWeather.objects.all()
        province = response.meta.get('province',u'江苏')
        city = response.meta.get('city',u'南京')
        # Text of the increment button — presumably the year label; TODO confirm
        # against the live page markup.
        year = soup.find('a',class_="btr-increment").text
        table= soup.find('table',class_='calendar')
        trs = table.find('tbody').find_all('tr')
        data = []
        for item in trs:
            for td in item.find_all('td'):
                td = td.find('div',class_="box")
                try:
                    # Cell header looks like "<weekday> MM/DD" -> "year-MM-DD".
                    date_str = td.find('h3',class_='date').text.split(' ')[1]
                    date = u'%s-%s-%s' % (year,date_str.split('/')[0],date_str.split('/')[1])
                except:
                    # Missing/garbled header: fall back to yesterday's date.
                    date = datetime.datetime.now().date()-datetime.timedelta(1)

                # Python 2: filter() over a str returns a str, so this keeps
                # only the alphanumeric characters (strips the degree sign).
                height = filter(str.isalnum,str(td.find('span',class_='large-temp').text))
                low = filter(str.isalnum,str(td.find('span',class_='small-temp').text))
                name = td.find('div',class_='cond').text
                # Upsert keyed on (date, source='accu', province, city);
                # cells with an empty condition name are skipped entirely.
                alreay = dayweather.filter(date=date,source='accu',province=province,city=city)
                if not alreay.count():
                    if name:
                        obj = DayWeather.objects.create(name=name,low=low,height=height,date=date,source='accu',province=province,city=city)
                        data.append([td['data-href'],obj])
                    else:
                        pass
                else:
                    if name:
                        obj = alreay[0]
                        obj.name = name
                        obj.low = low
                        obj.height = height
                        obj.save()
                        data.append([td['data-href'],obj])
        return data
                       

    def parse(self, response):
        """Scrape the start city, queue its detail pages, then the other cities."""
        if response.status == 200:
            data = self.get_data(response)
            for item in data:
                yield scrapy.Request(item[0], meta={'obj': item[1]}, callback=self.parse_item)
            for c in AccuCity:
                # c = (url-path, province-name, city-name) — inferred from use here.
                yield scrapy.Request(self.base_domain +c[0]+'?monyr='+datetime.datetime.now().strftime("%m/%d/%Y"),meta={'province': c[1],'city':c[2]}, callback=self.parse_next)

    def parse_item(self,response):
        """Fill wind and rain on the DayWeather row carried in response.meta."""
        if response.status == 200:
            soup = bs4.BeautifulSoup(response.body)
            obj = response.meta['obj']
            wind = soup.find('ul',class_="wind-stats").find('strong').text
            rain = soup.find('span',class_="precip").text
            obj.wind = wind
            obj.rain = rain
            obj.save()

    def parse_next(self,response):
        """Scrape a queued city's calendar; record failures in CityError."""
        if response.status == 200:
            try:
                data = self.get_data(response)
                for item in data:
                    yield scrapy.Request(item[0], meta={'obj': item[1]}, callback=self.parse_item)
            except Exception,e: 
                CityError.objects.create(province=response.meta['province'],city=response.meta['city'],info='accu:'+unicode(e))

# Yahoo spider: scrape the forecast for today plus the next 10 days.
class YahooWeatherSpider(scrapy.Spider):
    """Scrape the multi-day forecast from Yahoo Weather.

    Temperatures on the page are Fahrenheit and get converted to Celsius via
    the module-level ``change``; rows are upserted into ``DayWeather`` with
    source='yahoo'.  Python 2 code (``except Exception, e`` syntax).
    """
    name = "yahooweatherspider"
    allowed_domains = ["www.yahoo.com"]
    base_domain = 'https://www.yahoo.com/news/weather/china/'
    start_urls = (
        'https://www.yahoo.com/news/weather/china/nanjing/nanjing-2137081',
    )

    def get_data(self, response):
        """Parse one city's forecast accordion and upsert DayWeather rows.

        Accordion entry N is assumed to be today + N days — TODO confirm the
        page always starts at today.
        """
        today = datetime.datetime.now().date()
        dayweather = DayWeather.objects.all()
        province = response.meta.get('province',u'江苏')
        city = response.meta.get('city',u'南京')
        soup = bs4.BeautifulSoup(response.body)
        div_list = soup.find('div',class_="accordion").find_all('div',class_='BdB')
        for num,item in enumerate(div_list):
            # Entry index doubles as the day offset from today.
            date = today + datetime.timedelta(num)
            rain = item.find('span',class_="Fz(12px)").find('span',class_="M(5px)").text
            span = item.find('span',class_="Ta(end)")
            # Python 2: filter() over a str returns a str of the alnum chars
            # (strips the degree sign); change() then converts F -> int C.
            height = change(filter(str.isalnum,str(span.find('span',class_="high").text)))
            low = change(filter(str.isalnum,str(span.find('span',class_="low").text)))
            content = item.find('span',class_="day-part").text
            # Description reads like "<condition> with ..." — keep the head part.
            name = content.split('with')[0]
            # Upsert keyed on (date, source='yahoo', province, city).
            alreay = dayweather.filter(date=date,source='yahoo',province=province,city=city)
            if not alreay.count():
                obj = DayWeather.objects.create(name=name,low=low,height=height,date=date,rain=rain,source='yahoo',province=province,city=city)
            else:
                obj = alreay[0]
                obj.name = name
                obj.low = low
                obj.height = height
                obj.rain = rain
                obj.save()

    def parse(self, response):
        """Scrape the start city, then queue every city in YahooCity."""
        if response.status == 200:
            self.get_data(response)
            for c in YahooCity:
                # c = (url-path, province-name, city-name) — inferred from use here.
                yield scrapy.Request(self.base_domain +c[0],meta={'province': c[1],'city':c[2]}, callback=self.parse_item)

    def parse_item(self,response):
        """Scrape a queued city; record failures in CityError instead of raising."""
        if response.status == 200:
            try:
                self.get_data(response)
            except Exception,e: 
                CityError.objects.create(province=response.meta['province'],city=response.meta['city'],info='yahoo:'+unicode(e))