import redis
import aiohttp
import hashlib
import aiomysql
import asyncio
import time
from lxml import etree



class CarInfo:
    """Async scraper for used-car listings on che168.com.

    Pipeline: listing pages -> per-car ``specid`` values -> per-car parameter
    JSON -> Redis-set de-duplication -> insert into a MySQL ``car_info`` table
    through an aiomysql connection pool.
    """

    # Browser-captured request headers; the site blocks bare clients.
    headers = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "accept-language": "zh-CN,zh;q=0.9",
        "cache-control": "no-cache",
        "pragma": "no-cache",
        "priority": "u=0, i",
        "sec-ch-ua": "\"Google Chrome\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "document",
        "sec-fetch-mode": "navigate",
        "sec-fetch-site": "none",
        "sec-fetch-user": "?1",
        "upgrade-insecure-requests": "1",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36"
    }
    # Session cookies captured from a browser visit; these expire, so a
    # blocked run usually means they need refreshing.
    cookies = {
        "fvlid": "1728192154905Omp7hiAhLQoW",
        "sessionid": "fc960953-aae2-4da2-ac9b-8d975c1c1072",
        "Hm_lvt_d381ec2f88158113b9b76f14c497ed48": "1728192155",
        "HMACCOUNT": "DA4C4B31E32015C2",
        "che_sessionid": "595B5D47-24DC-4AEB-BC77-A50F7D3CF21F%7C%7C2024-10-06+13%3A22%3A35.547%7C%7Ccn.bing.com",
        "href": "https%3A%2F%2Fwww.che168.com%2F",
        "accessId": "7a783820-ec84-11ec-b95f-79694d4df285",
        "userarea": "440300",
        "pageViewNum": "3",
        "listuserarea": "440300",
        "UsedCarBrowseHistory": "0%3A52308186",
        "carDownPrice": "1",
        "ahpvno": "22",
        "showNum": "18",
        "Hm_lpvt_d381ec2f88158113b9b76f14c497ed48": "1728200387",
        "sessionip": "61.144.174.219",
        "area": "440305",
        "sessionvisit": "2582808a-e0c4-41a4-841c-74901a8dfafb",
        "sessionvisitInfo": "fc960953-aae2-4da2-ac9b-8d975c1c1072||0",
        "ahuuid": "4DB3948C-62FF-497E-A976-35314E2372D9",
        "v_no": "19",
        "visit_info_ad": "595B5D47-24DC-4AEB-BC77-A50F7D3CF21F||E3456BF3-73FB-4968-A74A-C19DB5A941E3||-1||-1||19",
        "che_ref": "cn.bing.com%7C0%7C0%7C0%7C2024-10-06+15%3A39%3A48.135%7C2024-10-06+13%3A22%3A35.547",
        "che_sessionvid": "E3456BF3-73FB-4968-A74A-C19DB5A941E3",
        "sessionuid": "fc960953-aae2-4da2-ac9b-8d975c1c1072"
    }

    # {} -> 1-based page number of the Shenzhen listing index.
    index_url = 'https://www.che168.com/shenzhen/a0_0msdgscncgpi1ltocsp{}exx0/'
    # {} -> specid; returns the car's parameter sheet as JSON.
    car_json_url = 'https://cacheapigo.che168.com/CarProduct/GetParam.ashx?specid={}'

    # Shared synchronous Redis client used only for the dedup set.
    # NOTE(review): sync calls inside coroutines block the loop briefly;
    # acceptable for single-key SADD/SREM, but worth confirming under load.
    redis_client = redis.Redis(host='localhost', port=6379, db=0)

    async def get_car_id(self, page, client, pool):
        """Fetch one listing page and fan out a detail fetch per specid.

        :param page: 1-based listing page number.
        :param client: shared ``aiohttp.ClientSession``.
        :param pool: shared ``aiomysql`` connection pool.
        """
        index_resp = await client.get(self.index_url.format(page))
        # The listing pages are served GBK-encoded, not UTF-8.
        index_html = await index_resp.text(encoding='gbk')
        tree = etree.HTML(index_html)
        # set() removes duplicate specids that appear twice on one page.
        car_id_lis = list(set(tree.xpath("//ul[@class='viewlist_ul']/li/@specid")))
        # BUGFIX: asyncio.wait() raises ValueError on an empty task list and
        # swallows task exceptions; gather() handles both correctly.
        if car_id_lis:
            await asyncio.gather(
                *(self.get_car_info(car_id, client, pool) for car_id in car_id_lis)
            )

    async def get_car_info(self, specid, client, pool):
        """Fetch one car's parameter JSON and persist the fields we keep."""
        car_info = await client.get(self.car_json_url.format(specid))
        res = await car_info.json()
        # Harden against responses with no 'result' key at all.
        result = res.get('result') or {}
        if result.get('paramtypeitems'):
            basic = result['paramtypeitems'][0]['paramitems']  # name/price/brand group
            body = result['paramtypeitems'][1]['paramitems']   # dimensions group
            # IMPORTANT: insertion order must stay name, price, brand,
            # height, width, length — get_md5(str(item)) depends on it,
            # and changing it would invalidate the existing dedup set.
            item = dict()
            item['name'] = basic[0]['value']
            item['price'] = basic[1]['value']
            item['brand'] = basic[2]['value']
            item['height'] = body[2]['value']
            item['width'] = body[1]['value']
            item['length'] = body[0]['value']
            await self.save_info(item, pool)

    @staticmethod
    def get_md5(val):
        """Return the hex MD5 of ``str(val)`` — used as the dedup key."""
        return hashlib.md5(str(val).encode('utf-8')).hexdigest()

    async def save_info(self, item, pool):
        """Insert one car row unless its MD5 is already in the Redis set.

        On insert failure the dedup marker is removed again so the row can
        be retried on a later run.
        """
        print(item)
        md5_val = self.get_md5(item)
        # sadd returns 1 only for a previously unseen member. Checking
        # BEFORE acquiring a connection keeps duplicates from holding a
        # pool slot.
        flag = self.redis_client.sadd('car:filter', md5_val)
        if not flag:
            print('重复数据')
            return
        sql = """
            insert into car_info(
            id,
            c_name,
            price,
            brand,
            height_mm,
            width_mm,
            length_mm
            )values (%s,%s,%s,%s,%s,%s,%s);
        """
        async with pool.acquire() as con:
            async with con.cursor() as cursor:
                try:
                    # id=0 lets MySQL auto_increment assign the real key
                    # (unless NO_AUTO_VALUE_ON_ZERO is set — TODO confirm).
                    await cursor.execute(sql, (
                        0,
                        item['name'],
                        item['price'], item['brand'],
                        item['height'],
                        item['width'],
                        item['length']
                    ))
                    await con.commit()
                    print('插入成功')
                except Exception as e:
                    print(e, "插入失败")
                    await con.rollback()
                    # BUGFIX: drop the dedup marker so a failed insert
                    # does not permanently block this row.
                    self.redis_client.srem('car:filter', md5_val)

    async def main(self):
        """Create the table if needed, then crawl listing pages 1-19."""
        pool = await aiomysql.create_pool(user='root', password='888888', db='test')
        try:
            sql = """
                create table if not exists car_info(
                id int primary key auto_increment not null,
                c_name varchar(255) not null,
                price varchar(255) not null,
                brand varchar(255) not null,
                height_mm varchar(255) not null,
                width_mm varchar(255) not null,
                length_mm varchar(255) not null
            );
            """
            # BUGFIX: acquire via context managers so the connection is
            # returned to the pool (the original leaked it).
            async with pool.acquire() as conn:
                async with conn.cursor() as cursor:
                    await cursor.execute(sql)

            async with aiohttp.ClientSession(headers=self.headers, cookies=self.cookies) as client:
                # gather() propagates the first failure instead of the
                # original asyncio.wait(), which discarded exceptions.
                await asyncio.gather(
                    *(self.get_car_id(page, client, pool) for page in range(1, 20))
                )
        finally:
            # BUGFIX: the pool was never shut down; close it and wait for
            # in-flight connections to terminate cleanly.
            pool.close()
            await pool.wait_closed()


if __name__ == '__main__':
    # Time the whole crawl so runs can be compared for throughput.
    started_at = time.time()
    scraper = CarInfo()
    asyncio.run(scraper.main())
    print(time.time() - started_at)
