import scrapy
from faker import Faker
from meishi.items import MztrItem
from meishi.items import StrItem
import random
import time
import json
import sys
import requests as req
from lxml import etree
import math

class TestSpider(scrapy.Spider):
	"""Spider that reads the total restaurant count for one Meituan city.

	Flow: start from the city-change page, then request a hard-coded
	food-category listing URL (Pingdingshan, category c17) and parse the
	shop total out of JSON embedded in an inline <script> tag.
	"""

	name = 'test'
	allowed_domains = ['meituan.com']
	start_urls = ['https://www.meituan.com/changecity/']
	# Randomized Chinese-locale Chrome UA string so requests resemble a
	# real browser; generated once at class-definition time.
	header = {
		'User-Agent': Faker(locale='zh_CN').chrome()
	}

	def parse(self, response):
		"""Handle the city-list page and queue the food-listing request.

		NOTE(review): the original code also built a per-city URL from
		``city_list[655]`` but never used it; that dead computation
		(which would raise IndexError when the city list is shorter
		than 656 entries) has been removed. The request URL remains the
		same hard-coded value the original actually used.
		"""
		url = "https://pds.meituan.com/meishi/c17/"
		yield scrapy.Request(url, callback=self.shou, headers=self.header)

	def shou(self, response):
		"""Extract and print the total shop count from the page's JSON.

		The page embeds its app state in the sixth inline <script>
		(index 5) — presumably as a JS assignment whose 19-character
		prefix and trailing ';' must be stripped to leave parseable
		JSON; TODO confirm against a live page, as both offsets are
		fragile if Meituan changes its markup.
		"""
		scripts = response.xpath("//script/text()").extract()
		raw = scripts[5]
		# Drop the 19-char assignment prefix and the trailing character
		# (the original's ``jx + ""`` no-op concatenation was removed).
		payload = raw[19:-1]
		state = json.loads(payload)
		all_shop_num = state['poiLists']['totalCounts']
		print(all_shop_num)
