# -*- coding: utf-8 -*-
# @Time : 2024/4/27 14:34
# @Author : cute
# @Email : Aggressive_cute@126.com
import copy

import requests
import json
from lxml import etree
from urllib.parse import urljoin

# Maps the Chinese spec labels scraped from a listing's basic-info list to
# the English field names used in the output dict. Keys must match the site
# text exactly (after clear_data() normalization) — do not translate them.
data_mapping = {
    '表显里程': 'mileage',
    '上牌时间': 'card_time',
    '变速箱': 'gearbox',
    '排放标准': 'emission_stand',
    '排量': 'displacement',
    '发布时间': 'release_date',
    '年检到期': 'annual_date',
    '保险到期': 'cancellat_date',
    '质保到期': 'warranty_expires',
    '过户次数': 'num_of_transfers',
    '所在地': 'city',
    '发动机': 'engine',
    '车辆级别': 'car_level',
    '车身颜色': 'body_color',
    '燃油标号': 'fuel_label',
    '驱动方式': 'driving_means',
}
# Browser-like headers copied from a real Chrome 124 session so the site
# serves normal HTML to the scraper.
headers = {
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "accept-language": "zh-CN,zh;q=0.9",
    "cache-control": "no-cache",
    "pragma": "no-cache",
    "priority": "u=0, i",
    "referer": "https://www.che168.com/shanghai/baoma/",
    "sec-ch-ua": "\"Chromium\";v=\"124\", \"Google Chrome\";v=\"124\", \"Not-A.Brand\";v=\"99\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Windows\"",
    "sec-fetch-dest": "document",
    "sec-fetch-mode": "navigate",
    "sec-fetch-site": "same-origin",
    "sec-fetch-user": "?1",
    "upgrade-insecure-requests": "1",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36"
}
# Session cookies captured from a live browser visit. These are tied to one
# session and will expire — refresh them from the browser if requests start
# returning blocked/empty pages.
cookies = {
    "fvlid": "1701874634480k33CPY1oHOz3",
    "sessionid": "d2c85c78-c18f-49b1-8b4f-9c45549df0e3",
    "che_sessionid": "CAC9968D-83FD-4DDE-8510-D07BAC046E25%7C%7C2023-12-06+22%3A57%3A16.322%7C%7Cwww.autohome.com.cn",
    "UsedCarBrowseHistory": "0%3A50563040",
    "sessionip": "183.227.30.23",
    "area": "500112",
    "sessionvisit": "62eb6fd6-2828-4bf4-9a7d-f7eb001691d0",
    "sessionvisitInfo": "d2c85c78-c18f-49b1-8b4f-9c45549df0e3||100519",
    "Hm_lvt_d381ec2f88158113b9b76f14c497ed48": "1714193200",
    "che_sessionvid": "DBB95FFE-0E81-4F4B-A950-D6F07A6A52B3",
    "carDownPrice": "1",
    "userarea": "310000",
    "listuserarea": "310100",
    "ahpvno": "5",
    "showNum": "7",
    "ahuuid": "E3E6B0DA-DAE2-4BF3-9C81-8886134B6545",
    "Hm_lpvt_d381ec2f88158113b9b76f14c497ed48": "1714193504",
    "v_no": "7",
    "visit_info_ad": "CAC9968D-83FD-4DDE-8510-D07BAC046E25||DBB95FFE-0E81-4F4B-A950-D6F07A6A52B3||-1||-1||7",
    "che_ref": "www.autohome.com.cn%7C0%7C100484%7C0%7C2024-04-27+12%3A51%3A45.444%7C2023-12-06+22%3A57%3A16.322",
    "sessionuid": "d2c85c78-c18f-49b1-8b4f-9c45549df0e3"
}
# Listing page: BMW used cars in Shanghai. The slug encodes the search
# filters; 'csp2' presumably selects result page 2 — TODO confirm.
url = "https://www.che168.com/shanghai/baoma/a0_0msdgscncgpi1ltocsp2exx0/"


def clear_data(data_str):
    """Normalize scraped HTML text: drop internal spaces, newlines and
    non-breaking spaces, then trim remaining surrounding whitespace."""
    # One C-level pass instead of three chained .replace() calls.
    scrubbed = data_str.translate({ord(' '): None, ord('\n'): None, ord('\xa0'): None})
    return scrubbed.strip()


def parse_detail(base_dict):
    """Fetch one listing's detail page and enrich base_dict with spec fields.

    Reads the module-level ``headers``/``cookies``/``params``. Adds the
    new-car price, popularity indices, and the mapped basic-info specs.

    :param base_dict: summary fields from the listing page; must contain
        'detail_href' (absolute URL of the detail page).
    :return: a shallow copy of base_dict with the extra fields merged in
        (also printed, preserving the original script's console output).
    """
    detail_href = base_dict['detail_href']
    # timeout added for consistency with every other request in this file;
    # without it a stalled connection would hang the whole scrape.
    response = requests.get(detail_href, headers=headers, cookies=cookies,
                            params=params, timeout=5)
    tree = etree.HTML(response.text)
    item_info = copy.copy(base_dict)

    # Hidden inputs on the page carry the ids required by the price API.
    car_specid = ''.join(tree.xpath("//input[@id='car_specid']/@value"))
    car_cid = ''.join(tree.xpath("//input[@id='car_cid']/@value"))
    item_info['new_price'] = get_new_price(car_specid, car_cid)

    # NOTE: mileage/registration-date fields also appear in the page's
    # "vehicle archive" section, but the basic-info list below covers them.

    # Popularity indices: heat, focus (follow), consult, search.
    car_seriesid = ''.join(tree.xpath("//input[@id='car_seriesid']/@value"))
    # The numeric listing id is the last path segment before ".html?...".
    infoid = detail_href.split('.html?')[0].rsplit('/', 1)[-1]
    exponent_info: dict = get_exponent_info(car_seriesid, infoid)
    item_info.update(exponent_info)

    # Basic-info list: translate the Chinese label of each <li> into an
    # English key via data_mapping; unmapped or empty entries are skipped.
    for li_label in tree.xpath("//div[@class='all-basic-content fn-clear']/ul/li"):
        item_val = clear_data(''.join(li_label.xpath("./text()")))
        item_key = clear_data(''.join(li_label.xpath("./span[@class='item-name']/text()")))
        if not item_val:
            continue
        key = data_mapping.get(item_key)
        if key:
            item_info[key] = item_val

    print(item_info)
    # New: also return the dict so callers can consume it programmatically
    # (backward-compatible — the existing caller ignores the return value).
    return item_info


def get_exponent_info(seriesid, infoid):
    """Fetch popularity metrics for a listing from the heat-rank JSONP API.

    :param seriesid: car-series id (hidden input 'car_seriesid' on the page).
    :param infoid: numeric listing id (last path segment of the detail URL).
    :return: dict with 'search_info', 'focus', 'consults_info', 'other_info'
        sub-dicts holding the scores/titles reported by the API.
    :raises KeyError: if the API payload lacks the expected 'result' fields.
    """
    url = "https://yccacheapigo.che168.com/api/carinfo/getheatrank"
    params = {
        "callback": "getHeatRankCallback",
        "_appid": "2sc",
        "seriesid": seriesid,
        "infoid": infoid
    }
    response = requests.get(url, headers=headers, cookies=cookies, params=params, timeout=5)
    # The API answers with JSONP: getHeatRankCallback({...}) possibly followed
    # by ';'. Slice the JSON between the outermost parentheses — unlike the
    # previous replace()+[1:-1] approach, this survives a trailing ';' and a
    # callback-name occurrence inside the payload.
    text = response.text
    data_dict = json.loads(text[text.index('(') + 1:text.rindex(')')])
    result = data_dict['result']

    return {
        'search_info': {
            'search_score': result['search_score'],
            'search_title': result['search_title'],
        },
        'focus': {
            'focus_score': result['focus_score'],
            'focus_title': result['focus_title'],
        },
        'consults_info': {
            'consults_score': result['consults_score'],
            'consults_title': result['consults_title'],
        },
        'other_info': {  # heat index, car condition, brand/model preference
            'heat_exponent': result['heat_exponent'],
            'car_condition': result['car_condition'],
            'preference': result['preference'],
        },
    }


def get_new_price(car_specid, car_cid):
    """Look up the tax-inclusive new-car price for a listing's spec.

    :param car_specid: spec id (hidden input 'car_specid' on the detail page).
    :param car_cid: city/config id from hidden input 'car_cid'; appears as
        'usercid' in the detail URL — exact semantics unverified (TODO).
    :return: the 'newcarprice' value from the API result.
    :raises KeyError: if the API payload lacks result.newcarprice.
    """
    url = "https://apiassess.che168.com/api/NewCarPriceInTax.ashx"
    params = {
        "_callback": "dtcommon.load4SPriceCallBack",
        "_appid": "2sc",
        "pid": "0",
        "specid": car_specid,
        "cid": car_cid
    }
    response = requests.get(url, headers=headers, cookies=cookies, params=params, timeout=5)
    # JSONP response: dtcommon.load4SPriceCallBack({...}) possibly ending in
    # ';'. Slicing between the outermost parentheses is robust where the old
    # replace()+[1:-1] would leave a stray ')' if the body ends with ');'.
    text = response.text
    data_dict = json.loads(text[text.index('(') + 1:text.rindex(')')])
    newcarprice = data_dict['result']['newcarprice']
    return newcarprice


# Listing-page query string; 'pvareaid' looks like a traffic/ad-area
# tracking parameter — TODO confirm.
params = {
    "pvareaid": "102179"
}
# Fetch the Shanghai BMW listing page and scrape every result card.
response = requests.get(url, headers=headers, cookies=cookies, params=params, timeout=5)

source_data = response.text
tree = etree.HTML(source_data)
# Each <li> in the result list is one car card: collect its summary fields,
# then follow the link to scrape the detail page.
for li in tree.xpath("//ul[@class='viewlist_ul']/li"):
    detail_href = urljoin(url, ''.join(li.xpath('./a/@href')))
    # NOTE(review): the class value really ends with a space ('img-box ') —
    # it must match the site's markup exactly, do not "fix" it.
    img_href = urljoin(url, ''.join(li.xpath(".//div[@class='img-box ']/img/@src")))
    img_title = ''.join(li.xpath(".//div[@class='img-box ']/img/@alt"))
    card_name = ''.join(li.xpath(".//h4[@class='card-name']/text()"))
    # 'pirce' is the site's own class-name typo, kept intentionally.
    price = ''.join(li.xpath(".//span[@class='pirce']/em/text()"))
    base_dict = {
        'card_name': card_name,
        'detail_href': detail_href,
        'img_href': img_href,
        'img_title': img_title,
        'price': price,
    }
    parse_detail(base_dict)
