import scrapy
from scrapy_splash import SplashRequest
import re
import bs4
import json

import mafengwo.db

import datetime
import logging


class CitySpider(scrapy.Spider):
    """Spider that harvests per-city travel data from mafengwo.cn.

    Crawl pipeline (each step persists into mafengwo.db and schedules
    the next one): Splash-rendered city list -> catering -> hotels ->
    points of interest -> shopping -> entertainment -> travel-post
    list -> individual posts.
    """

    name = "city"
    allowed_domains = [
        "mafengwo.cn",
    ]
    start_urls = [
        "http://www.mafengwo.cn/mdd/citylist/21536.html",
    ]

    def start_requests(self):
        """Render the paginated city list via Splash's `execute` endpoint.

        The Lua script captures the list container once on load, then
        keeps clicking the "next page" button, concatenating every
        page's HTML into one blob handled by parse_citylist_list.
        """
        # Bug fix: the original script only captured #citylistlist AFTER
        # the first click, so page 1 of the city list was never harvested.
        # We now capture it once before entering the pagination loop.
        script = """
            function main(splash, args)
                assert(splash:go(args.url))
                assert(splash:wait(0.5))

                local index = 1
                local max_index = 9999
                local citylist_list = ""

                -- capture page 1 before any pagination click
                local first_html = splash:evaljs("$('#citylistlist').html()")
                if first_html ~= nil then
                    citylist_list = citylist_list..first_html
                end

                while splash:evaljs("$('a.pg-next').length>0") and index <= max_index do
                    splash:runjs("$('a.pg-next').click()")
                    splash:wait(1)
                    local citylist_html = splash:evaljs("$('#citylistlist').html()")
                    if citylist_html ~= nil then
                        citylist_list = citylist_list..citylist_html
                        index = index + 1
                    end
                end

                return citylist_list
            end
        """
        for url in self.start_urls:
            yield SplashRequest(
                url,
                self.parse_citylist_list,
                endpoint='execute',
                args={
                    'wait': 1,
                    'timeout': 7200,
                    'lua_source': script
                }
            )

    # ---- small shared helpers -------------------------------------------

    @staticmethod
    def _trend_to_int(trend):
        """Parse a trend counter text; a missing value counts as 0."""
        return int(trend) if trend is not None else 0

    @staticmethod
    def _first_or_none(matches):
        """Return the first element of a SelectorList.re() result, or None."""
        return matches[0] if matches else None

    def _extract_rank_list(self, response, name_xpath,
                           item_xpath="//ol[@class='list-rank']/li[contains(@class,'rank-item')]"):
        """Build ``{name: {'trend': int}}`` from a mafengwo ranking list.

        ``name_xpath`` is evaluated relative to each ranking item;
        ``item_xpath`` selects the items themselves (the default matches
        the shopping and entertainment pages).
        """
        rank_list = {}
        for item in response.xpath(item_xpath):
            name = item.xpath(name_xpath).get()
            trend = item.xpath(".//span[@class='trend']/text()").get()
            rank_list[name] = {
                'trend': self._trend_to_int(trend)
            }
        return rank_list

    def parse_citylist_list(self, response):
        """Insert every not-yet-known city and schedule its detail crawl."""
        city_item_list = response.xpath("//li[has-class('item')]")
        for city_item in city_item_list:
            mddid_text = city_item.xpath(".//a[@data-type='目的地']/@data-id").get()
            if mddid_text is None:
                # Robustness fix: the original did int(None) -> TypeError
                # on list items without a destination link; skip them.
                continue
            mddid = int(mddid_text)
            city = mafengwo.db.find_city(mddid)
            if city is None:
                name = city_item.xpath(".//div[@class='title']/text()").re(r"\s([^\s]*)\n")[0]
                en_name = city_item.xpath(".//p[@class='enname']/text()").get()
                nums = city_item.xpath(".//div[@class='nums']/b/text()").get()
                self.log("[{}] {} ({})".format(mddid, name, en_name), level=logging.INFO)
                mafengwo.db.insert_city(mddid, name, en_name, nums)
                yield scrapy.Request(
                    url='http://www.mafengwo.cn/cy/{}/'.format(mddid),
                    callback=self.parse_catering,
                    meta={"mddid": mddid}
                )

    def parse_catering(self, response):
        """Store the restaurant ranking for the city, then crawl hotels."""
        mddid = response.meta["mddid"]

        rank_list = self._extract_rank_list(
            response,
            name_xpath=".//a[contains(@href,'/cy/{}/')]/@title".format(mddid),
            item_xpath="//li[contains(@class,'rank-item')]",
        )

        city = mafengwo.db.find_city(mddid)
        city["foods"] = {
            "rank": rank_list,
        }
        mafengwo.db.save_city(city)

        yield scrapy.Request(
            url='http://www.mafengwo.cn/hotel/{}/'.format(mddid),
            callback=self.parse_hotel,
            meta={"mddid": mddid}
        )

    def _price_for_rate(self, response, rate):
        """Average hotel price (CNY) for one star rating, 0 when absent."""
        prices = response.xpath(
            "//dd[@data-id='-1']//span[@class='hotel-rate rate{}']/../text()".format(rate)
        ).re(r"￥([0-9]*)")
        return int(prices[0]) if prices else 0

    def parse_hotel(self, response):
        """Store hotel price levels, counts and keywords, then crawl POIs."""
        mddid = response.meta["mddid"]

        # Per-district hotel counts summed into a city-wide total.
        hotel_dist = response.xpath("//p").re(r"共<em>([0-9]*)</em>家酒店")
        hotel_count = sum(int(x) for x in hotel_dist)

        # [1:] drops the first tab entry — presumably an "all" tab;
        # TODO(review): confirm against the live page markup.
        key_words = response.xpath("//div[@id='_j_feature_tab']/ul/li/a/text()").re(r"\s([^\s]*)\n")[1:]

        city = mafengwo.db.find_city(mddid)
        city["hotels"] = {
            "price_rate_3": self._price_for_rate(response, 3),
            "price_rate_4": self._price_for_rate(response, 4),
            "price_rate_5": self._price_for_rate(response, 5),
            "hotel_count": hotel_count,
            "key_words": key_words,
        }
        mafengwo.db.save_city(city)

        yield scrapy.Request(
            url='http://www.mafengwo.cn/jd/{}/'.format(mddid),
            callback=self.parse_point_of_interest,
            meta={"mddid": mddid}
        )

    def parse_point_of_interest(self, response):
        """Store the POI summary and top-5 ranking, then crawl shopping."""
        mddid = response.meta["mddid"]

        summary = response.xpath("//h2[contains(text(),'景点概况')]/following-sibling::div//p//text()").get()

        rank_list = {}
        top5_html_list = response.xpath("//div[@class='row row-top5']//div[contains(@class,'item')]")
        for item in top5_html_list:
            name = item.xpath(".//div[@class='info']//a[contains(@href,'/poi/')]/@title").get()
            # Review count ("N 条点评") doubles as the trend metric here.
            trend_matches = item.re(r"<em>([0-9]*)</em> 条点评")
            trend = int(trend_matches[0]) if trend_matches else 0
            rank_list[name] = {
                "trend": trend,
                "detail": item.xpath(".//div[@class='info']//p/text()").get()
            }

        city = mafengwo.db.find_city(mddid)
        city["poi"] = {
            "summary": summary,
            "rank": rank_list
        }
        mafengwo.db.save_city(city)

        yield scrapy.Request(
            url='http://www.mafengwo.cn/gw/{}/'.format(mddid),
            callback=self.parse_shopping,
            meta={"mddid": mddid}
        )

    def parse_shopping(self, response):
        """Store the shopping ranking, then crawl entertainment."""
        mddid = response.meta["mddid"]

        rank_list = self._extract_rank_list(response, name_xpath="./a/@title")

        city = mafengwo.db.find_city(mddid)
        city["shopping"] = {
            "rank": rank_list,
        }
        mafengwo.db.save_city(city)

        yield scrapy.Request(
            url='http://www.mafengwo.cn/yl/{}/'.format(mddid),
            callback=self.parse_entertainment,
            meta={"mddid": mddid}
        )

    def parse_entertainment(self, response):
        """Store the entertainment ranking, then crawl the post list."""
        mddid = response.meta["mddid"]

        rank_list = self._extract_rank_list(response, name_xpath="./a/@title")

        city = mafengwo.db.find_city(mddid)
        city["entertainment"] = {
            "rank": rank_list,
        }
        mafengwo.db.save_city(city)

        yield scrapy.Request(
            url='http://www.mafengwo.cn/yj/{}/'.format(mddid),
            callback=self.parse_post_list,
            meta={"mddid": mddid}
        )

    def parse_post_list(self, response):
        """Record post titles for one listing page and schedule each post.

        Performance fix: the original fetched and saved the city record
        once PER post item; we now fetch once, register every post, save
        once, and only then yield the per-post requests — so all titles
        are persisted before parse_post reads them back.
        """
        mddid = response.meta["mddid"]

        city = mafengwo.db.find_city(mddid)
        if "posts" not in city:
            city["posts"] = {}

        scheduled_ids = []
        for post_item in response.xpath("//li[@class='post-item clearfix']"):
            ids = post_item.xpath(".//a[@class='title-link']/@href").re(r"/i/([0-9]*)\.html")
            if not ids:
                # Robustness fix: items whose link doesn't match the
                # /i/<id>.html pattern used to raise IndexError.
                continue
            post_id = ids[0]
            post_title = post_item.xpath(".//a[@class='title-link']/text()").get()
            city["posts"][post_id] = {
                "title": post_title
            }
            scheduled_ids.append(post_id)

        mafengwo.db.save_city(city)

        for post_id in scheduled_ids:
            yield scrapy.Request(
                url="http://www.mafengwo.cn/i/{}.html".format(post_id),
                callback=self.parse_post,
                meta={
                    "mddid": mddid,
                    "post_id": post_id
                }
            )

        next_page = response.xpath("//a[@class='ti next']/@href").get()
        if next_page is not None:
            yield response.follow(
                next_page,
                callback=self.parse_post_list,
                meta={"mddid": mddid}
            )

    def parse_post(self, response):
        """Extract date / duration / companions / cost for one travel post."""
        mddid = response.meta["mddid"]
        post_id = response.meta["post_id"]

        date_time = self._first_or_none(
            response.xpath("//li[contains(@class,'time')]").re(r"[0-9]*-[0-9]*-[0-9]*"))
        day = self._first_or_none(
            response.xpath("//li[contains(@class,'day')]").re(r"([0-9]*)\s天"))
        people = self._first_or_none(
            response.xpath("//li[contains(@class,'people')]").re(r"<span>/</span>(.*)</li>"))
        cost = self._first_or_none(
            response.xpath("//li[contains(@class,'cost')]").re(r"<span>/</span>(.*)</li>"))

        city = mafengwo.db.find_city(mddid)

        # The post entry was created by parse_post_list before this
        # request was yielded, so the key is guaranteed to exist.
        post = city["posts"][post_id]
        post["datetime"] = date_time
        post["day"] = day
        post["people"] = people
        post["cost"] = cost

        self.log(
            "post[{}]: {} 共{}天 {} 人均花费{} : {}".format(
                post_id,
                date_time, day, people, cost,
                post["title"],
            ),
            level=logging.INFO
        )

        mafengwo.db.save_city(city)
        
