# -*- coding: utf-8 -*-
import json
import scrapy
from meituanwang.items import MeituanwangItem


class MeituanSpider(scrapy.Spider):
    """Crawl Meituan shop (POI) listings via the mobile search API.

    Flow: one probe request per category learns that category's total
    result count (``parse``), then paginated requests (32 per page) are
    scheduled and scraped into ``MeituanwangItem`` objects (``mtdata``).
    """
    name = 'meituan'
    allowed_domains = ['meituan.com']

    # Meituan category ids to crawl.
    __cateIds = [1, 2, 3, 22, 20178, 20007, 20252, 20179, 20285, 20274]
    __api_url = 'http://apimobile.meituan.com/group/v4/poi/pcsearch/'

    # Example request:
    # "http://apimobile.meituan.com/group/v4/poi/pcsearch/387?userid=-1&limit=32&offset=0&cateId=2&areaId=-1&sort=rating"
    # 'http://bt.meituan.com/meishi/api/poi/getPoiList?platform=1&partner=126&originUrl=http%3A%2F%2Fbt.meituan.com%2Fmeishi%2Fsales%2F&riskLevel=1&optimusCode=1&cityName=%E5%8C%85%E5%A4%B4&cateId=0&areaId=0&sort=sales&dinnerCountAttrId=&page=1&userId=0'
    # City id used in the API path (387; presumably Baotou per the sample
    # URL above — TODO confirm).
    __city = 387

    def start_requests(self):
        """Issue one count-probe request per category.

        The category id travels in ``meta`` so ``parse`` knows which
        category the returned ``totalCount`` belongs to.
        """
        for cateId in self.__cateIds:
            num_url = self.__api_url + '{}?userid=-1&limit=32&offset=0&cateId={}&areaId=-1&sort=rating'.format(self.__city, cateId)
            yield scrapy.Request(url=num_url, meta={'cateId': cateId})

    def parse(self, response):
        """Schedule paginated data requests for ONE category.

        Reads ``data.totalCount`` from the probe response and pages
        through it 32 results at a time.  The original code looped over
        every category with this single category's count, and its format
        string had two placeholders for three arguments (so ``offset``
        silently received the city id) — both fixed here.
        """
        cateId = response.meta['cateId']
        num = json.loads(response.text)['data']['totalCount']
        self.logger.debug('cateId=%s totalCount=%s', cateId, num)
        for offset in range(0, num, 32):
            data_url = self.__api_url + '{}?userid=-1&limit=32&offset={}&cateId={}&areaId=-1&sort=rating'.format(self.__city, offset, cateId)
            yield scrapy.Request(url=data_url, callback=self.mtdata, meta={'cateId': cateId})

    def mtdata(self, response):
        """Extract one page of search results into items.

        A fresh ``MeituanwangItem`` is created per shop — the original
        mutated and re-yielded a single shared item, which hands every
        pipeline stage the same object.
        """
        cateId = response.meta['cateId']
        data = json.loads(response.text)['data']['searchResult']
        for info in data:
            items = MeituanwangItem()
            items['cateId'] = cateId
            items['shopid'] = info['id']    # shop id
            items['title'] = info['title']   # shop name
            items['address'] = info['address']  # address
            items['lowestprice'] = info['lowestprice']  # minimum spend
            items['avgprice'] = info['avgprice']    # average spend
            items['latitude'] = info['latitude']    # latitude
            items['longitude'] = info['longitude']  # longitude
            items['avgscore'] = info['avgscore']    # rating
            items['comments'] = info['comments']    # number of reviews
            items['historyCouponCount'] = info['historyCouponCount']    # purchases
            items['areaname'] = info['areaname']    # district
            items['deals'] = info['deals']  # deal summary
            yield items


