﻿#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
@Project ：spider-qichezhijia 
@File    ：qczj.py
@IDE     ：PyCharm 
@Author  ：Postive
@Date    ：4/6/2025 10:16 AM 
"""
import asyncio
import csv

import aiohttp
import chardet
from lxml import etree
import io

cookie = "fvlid=1743903715325putwvqB3mVfs; sessionid=f26fc778-7343-4b60-9b38-cbfda5b5240f; sessionip=183.200.181.218; area=140224; sessionvisit=f9d4932f-e609-49bd-9fe8-9ca87b83b436; sessionvisitInfo=f26fc778-7343-4b60-9b38-cbfda5b5240f||0; qimo_seosource_7a783820-ec84-11ec-b95f-79694d4df285=%E7%AB%99%E5%86%85; qimo_seokeywords_7a783820-ec84-11ec-b95f-79694d4df285=; qimo_xstKeywords_7a783820-ec84-11ec-b95f-79694d4df285=; href=https%3A%2F%2Fwww.che168.com%2Fchina%2F; accessId=7a783820-ec84-11ec-b95f-79694d4df285; Hm_lvt_d381ec2f88158113b9b76f14c497ed48=1743903716; HMACCOUNT=1DCC9315CCE1C659; che_sessionid=32ACED4B-CC9C-4771-BCC5-C88D73322EB3%7C%7C2025-04-06+09%3A41%3A56.042%7C%7C0; che_sessionvid=7A275393-E575-4FA0-ACBD-20DF5EC0F392; listuserarea=0; carDownPrice=1; SessionSeries=0; pageViewNum=4; UsedCarBrowseHistory=0%3A54248676%2C0%3A54080968%2C0%3A53959522%2C0%3A54278867%2C0%3A53976128; userarea=0; ahpvno=27; Hm_lpvt_d381ec2f88158113b9b76f14c497ed48=1743906182; ahuuid=D2854071-09D6-44C9-97F5-68A4A1BF7C67; showNum=22; v_no=23; visit_info_ad=32ACED4B-CC9C-4771-BCC5-C88D73322EB3||7A275393-E575-4FA0-ACBD-20DF5EC0F392||-1||-1||23; che_ref=0%7C0%7C0%7C0%7C2025-04-06+10%3A23%3A02.248%7C2025-04-06+09%3A41%3A56.042; sessionuid=f26fc778-7343-4b60-9b38-cbfda5b5240f"


class QCZJSpider(object):
    def __init__(self):
        self.main_url = 'https://www.che168.com/china/a0_0ms3dgscncgpi1ltocsp{}exx0/'
        self.main_headers = {
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36",
            "cookie": cookie
        }
        self.info_url = "https://cacheapigo.che168.com/CarProduct/GetParam.ashx?specid={}"
        self.info_headers = {
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36",
        }

    # 获取主页
    async def get_main_html(self, page):
        async with aiohttp.ClientSession() as session:
            async with session.get(self.main_url.format(page), headers=self.main_headers) as response:
                response.encoding = 'utf-8'
                main_html = await response.content.read()
                main_encoding = self.get_data_encoding(main_html)
                main_html = self.get_convert_text(main_encoding, main_html)
                return main_html

    # 提取车辆Id
    def get_car_specids(self, html):
        html_tree = etree.HTML(html)
        return html_tree.xpath("//li[@name='lazyloadcpc']/@specid")

    # 提取车辆图片
    def get_car_images(self, html):
        html_tree = etree.HTML(html)
        images = html_tree.xpath("//li[@name='lazyloadcpc']//img/@src")
        image_urls = []
        for image in images:
            if not image.startswith('https:'):
                image_url = 'https:' + image

            image_urls.append(image_url)

        return image_urls

    # 获取车辆详情
    async def get_info_json(self, specid):
        async with aiohttp.ClientSession() as session:
            async with session.get(self.info_url.format(specid), headers=self.info_headers) as response:
                info_json = await response.json()
                return info_json

    def get_car_data(self, car_json):
        items = car_json["result"]["paramtypeitems"][0]["paramitems"]
        car_arr = []
        # specid
        car_arr.append(car_json["result"]["specid"])
        # 名称
        car_arr.append(items[0]["value"])
        # 指导价
        car_arr.append(items[1]["value"])
        # 厂商
        car_arr.append(items[2]["value"])
        # 能源类型
        car_arr.append(items[4]["value"])
        # 上市时间
        time = next((item['value'] for item in items if item["name"] == "上市时间"), "无上市时间")
        car_arr.append(time)
        # 整车质包
        zb = next((item['value'] for item in items if item["name"] == "整车质保"), "无质保")
        car_arr.append(zb)

        return car_arr

    # 保存到csv
    def save_car_to_csv(self, headers, rows):
        with open("cars.csv", "a+", encoding="utf-8", newline="") as f:
            writer = csv.writer(f)
            writer.writerow(headers)
            for row in rows:
                writer.writerow(row)

    @staticmethod
    def get_data_encoding(data):
        return chardet.detect(data)['encoding']

    @staticmethod
    def get_convert_text(encoding, html):
        match (encoding.upper()):
            case 'GB2312':
                return html.decode("gbk")
            case 'utf-8':
                return html.decode("utf-8")

        return html

async def main():
    """Scrape listing pages 1-25 concurrently and dump all car rows to CSV."""
    spider = QCZJSpider()
    # Bug fix: the original passed the literal 1 instead of the loop
    # variable, fetching page 1 twenty-five times.
    html_tasks = [
        asyncio.create_task(spider.get_main_html(page)) for page in range(1, 26)
    ]
    task_htmls = await asyncio.gather(*html_tasks)

    car_rows = []
    for task_html in task_htmls:
        image_urls = spider.get_car_images(task_html)
        specids = spider.get_car_specids(task_html)
        # Fetch all detail JSONs for this page concurrently.
        json_tasks = [
            asyncio.create_task(spider.get_info_json(specid)) for specid in specids
        ]
        json_results = await asyncio.gather(*json_tasks)
        # NOTE(review): pairing image_urls[index] with json_results[index]
        # assumes both XPath queries hit the same <li> set in the same
        # order — verify they stay aligned.
        for index, result in enumerate(json_results):
            car_row = spider.get_car_data(result)
            car_row.insert(0, image_urls[index])
            print(car_row)
            car_rows.append(car_row)

    headers = ["图片", "id", "名称", "厂商指导价(元)", "厂商", "能源类型", "上市时间", "整车质保"]
    spider.save_car_to_csv(headers, car_rows)


if __name__ == '__main__':
    # Entry point: run the full scrape (pages 1-25) under asyncio.
    asyncio.run(main())
