import json
from urllib.parse import urlencode
import scrapy
from guazi.items import GuaziItem
import sys
sys.path.append(r"D:\Python\Projects\tuling-code")
from example.kaoshi.demo6 import ZBMongoDB


class GzSpider(scrapy.Spider):
    """Crawl the Guazi used-car listing API and yield one :class:`GuaziItem` per car.

    The API obfuscates digits in its JSON payload using private-use HTML
    entities; :meth:`parse` maps them back to plain characters before decoding.
    """

    name = 'gz'
    start_urls = ["https://mapi.guazi.com/car-source/carList/pcList"]
    # Number of listing pages to request (original hard-coded range(1, 2) == 1 page).
    # Override from the CLI with: scrapy crawl gz -a max_pages=N
    max_pages = 1

    # Entity -> plain-character table used to de-obfuscate the response body.
    # NOTE(review): the 'uniXXXX' values look like unresolved font-glyph names
    # rather than real characters — confirm they never occur in extracted fields.
    GLYPH_MAP = {'&#120;': 'x', '&#57808;': 7, '&#58149;': 4, '&#58397;': 1,
                 '&#58585;': 'uniE4D9', '&#58670;': 9, '&#58928;': 2, '&#59246;': 8,
                 '&#59537;': 5, '&#59854;': 0,
                 '&#60146;': 3, '&#60492;': 6, '&#63426;': 'uniF7C2', '&#63626;': 'uniF88A'}

    def start_requests(self):
        """Yield one request per listing page (pages 1..max_pages)."""
        for page in range(1, int(self.max_pages) + 1):
            params = {
                "versionId": "0.0.0.0",
                "sourceFrom": "wap",
                "deviceId": "a815462c-3d31-4572-9c52-5facd0aa7ff8",
                "osv": "Windows 10",
                "tag": "-1",
                "priceRange": "0,-1",
                "page": page,
                "pageSize": "20",
                "city_filter": "16",
                "city": "16",
                "guazi_city": "16",
                "qpres": "568978945492897792",
                # Fix: original value was "wap'" with a stray trailing quote.
                # The misspelled key "platfromSource" is kept — it is what the
                # remote API expects, not our typo to correct.
                "platfromSource": "wap"
            }
            url = f"{self.start_urls[0]}?{urlencode(params)}"
            # Use Scrapy's logger instead of print so output respects LOG_LEVEL.
            self.logger.info(f"获取第 {page} 页数据~~~")
            yield scrapy.Request(url=url)

    def parse(self, response):
        """De-obfuscate the response text, parse its JSON, and yield car items.

        Each item carries title, mileage, down payment, price, and a stable
        MD5-based ``_id`` derived from title+price for deduplication.
        """
        text = response.text
        for entity, plain in self.GLYPH_MAP.items():
            text = text.replace(entity, str(plain))
        gz_data = json.loads(text)
        # Guard against a missing "data"/"postList" so one malformed response
        # doesn't crash the spider with AttributeError.
        car_list = (gz_data.get("data") or {}).get("postList") or []
        for car in car_list:
            item = GuaziItem()
            item["title"] = car["title"]
            item["road_haul"] = car["road_haul"]
            # Fix: original copy-paste bug assigned car["road_haul"] here,
            # so first_pay silently duplicated the mileage value.
            item["first_pay"] = car["first_pay"]
            item["price"] = car["price"]
            # Deterministic id so re-crawled listings upsert instead of duplicating.
            item["_id"] = ZBMongoDB.md5_str(f'{car["title"]}{car["price"]}')
            yield item
