#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2024/4/25 11:06
# @Author  : 王凯
# @File    : proxy.py
# @Project : scrapy_spider
import json
import os
from urllib.parse import parse_qs, urlparse

import parsel
from loguru import logger

from components.config import WFQ_SOURCE_MYSQL_CONFIG
from utils.db.mysqldb import MysqlDB

# Shared MySQL handle used by the mitmproxy `response` hook below to persist
# captured PDD API payloads (table: net_pdd_proxy_log). Created at import time
# because mitmdump loads this file as an addon module.
wfq_source_db = MysqlDB(
    ip=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_IP"],
    port=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_PORT"],
    db=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_DB"],
    user_name=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_NAME"],
    user_pass=WFQ_SOURCE_MYSQL_CONFIG["MYSQL_USER_PASS"],
)


def _resolve_raw_data(html, url):
    """Extract and parse the ``window.rawData = {...};`` JSON blob from HTML.

    Args:
        html: Raw HTML text of a page.
        url: Source URL — used only in log messages.

    Returns:
        The parsed rawData dict on success, or ``False`` when the blob is
        missing or cannot be parsed (``False`` kept for backward
        compatibility with existing truthiness checks at call sites).
    """
    try:
        selector = parsel.Selector(html)
        raw_data = selector.re_first(r"window\.rawData\s*=\s*(\{.*\});")
        if not raw_data:
            logger.error(f"未获取到数据 {url}")
            return False
        # BUG FIX: json.loads() previously ran *outside* the try block, so a
        # malformed payload raised an uncaught JSONDecodeError instead of
        # being reported and returning False.
        return json.loads(raw_data)
    except Exception as e:
        logger.error(f"解析数据失败: {e} {url}")
        return False


def response(flow):
    """mitmproxy hook: log non-image responses and persist PDD mall API calls.

    Args:
        flow: mitmproxy HTTPFlow with ``request`` and ``response`` populated.

    Side effects:
        Inserts one row into ``net_pdd_proxy_log`` for each matching mall API
        response, and logs every non-image response body via loguru.
    """
    # Skip image responses. NOTE(review): exact match misses values such as
    # "image/png; charset=..." — assumed upstream never sends those; confirm.
    if flow.response.headers.get("content-type") in ("image/webp", "image/jpeg", "image/png"):
        return
    url = flow.request.url
    target_paths = (
        "/api/turing/mall/query_mall_category_list",
        "/api/turing/mall/query_cat_goods",
    )
    if any(path in url for path in target_paths):
        # BUG FIX: the old str.split("mall_id=") chain returned the *entire*
        # URL when the parameter was absent; parse the query string properly.
        params = parse_qs(urlparse(url).query)
        item = {
            "request_url": url,
            "base_url": url.split("?")[0],
            "mall_id": params.get("mall_id", [None])[0],
            "page_no": params.get("page_no", [None])[0],
            "response": flow.response.json(),
        }
        # (Removed leftover debug print() / print(item) calls.)
        wfq_source_db.add_smart("net_pdd_proxy_log", item)
    logger.info(f"\n{url}\n{flow.response.text}\n")


if __name__ == "__main__":
    # Client-side setup (one-time), originally noted as bare string literals
    # (no-op expression statements) — converted to real comments:
    #   1. Point the client's proxy at 192.168.16.154:2000
    #   2. Visit mitm.it in a browser and download the CA certificate
    #   3. Install the certificate
    #   4. Reboot the machine
    # Launch mitmdump with this file as an addon, listening on port 2000.
    # NOTE(review): os.system runs through the shell; the fixed command has no
    # shell metacharacters, so this is safe as-is.
    os.system("mitmdump -s proxy.py -p 2000")
