import time
import requests
from bs4 import BeautifulSoup
import urllib3
from utils.req import get_request, post_request

# import warnings
from urllib3.exceptions import InsecureRequestWarning

urllib3.disable_warnings(InsecureRequestWarning)


# Browser-like HTTP headers sent with every request so the target site
# (www.4adi.com) treats the scraper as a regular Chrome/macOS visitor.
# NOTE(review): the Referer/sec-ch-ua values mimic Chrome 138 — update
# together if the site starts rejecting stale client hints.
headers = {
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "Accept-Language": "zh-CN,zh;q=0.9",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "Pragma": "no-cache",
    "Referer": "https://www.4adi.com/",
    "Sec-Fetch-Dest": "document",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-Site": "same-origin",
    "Sec-Fetch-User": "?1",
    "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36",
    "sec-ch-ua": '"Not)A;Brand";v="8", "Chromium";v="138", "Google Chrome";v="138"',
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": '"macOS"',
}
# cookies = {
#     "JSESSIONID": "7A6475661EE87C07A8C0C15AD3423E4C",
#     "ASP_COREZON_SESSION_AUTHENTICATION_TOKEN": "7A6475661EE87C07A8C0C15AD3423E4C"
# }


def get_prodes(url: str):
    """Fetch a product-detail page and return its raw HTML text.

    Uses the shared browser-like ``headers`` and skips TLS verification
    (the site's certificate chain is not trusted by default).
    """
    resp = get_request(url=url, headers=headers, verify=False)
    return resp.text


def extract_prodes(desc: str) -> str | None:
    """Extract the product-detail section from a product-page HTML string.

    Parses *desc*, pulls out the ``div.detailtitle`` element, and returns
    it prefixed with an inline ``<style>`` block so the fragment renders
    nicely on its own.

    Returns ``None`` when the page contains no ``detailtitle`` div.
    (Bug fix: previously ``str(None)`` was concatenated, yielding a style
    block followed by the literal text "None".)
    """
    _style = """
    <style>    .detailtitle {        max-width: 900px;        margin: 0 auto;        background: white;        padding: 20px 30px;        border-radius: 8px;        box-shadow: 0 4px 20px rgba(0, 0, 0, 0.08);    }    .detailtitle h1 {        color: #2c3e50;        font-size: 30px;        margin-bottom: 25px;        border-bottom: 2px solid #eaeaea;        padding-bottom: 15px;        font-weight: 600;    }    .detailtitle table {        width: 100%;        border-collapse: collapse;    }    .detailtitle table tr {        border-bottom: 1px solid #f0f0f0;    }    .detailtitle table tr:last-child {        border-bottom: none;    }    .detailtitle table tr:nth-child(even) {        background-color: #fafbfc;    }    .detailtitle table td {        padding: 15px 10px;        vertical-align: top;    }    .detailtitle table td:first-child {        width: 150px;        min-width: 150px;        font-weight: 600;        color: #555;        background-color: #fcfcfc;        border-right: 1px solid #eaeaea;    }    .cxsw-description .detailtitle table td:last-child {        padding-left: 20px;    }        .cxsw-description .detailtitle table tr:first-child td {        padding-top: 20px;    }    .cxsw-description .detailtitle table tr:last-child td {        padding-bottom: 20px;    }        .status-badge {        display: inline-block;        padding: 3px 10px;        border-radius: 4px;        font-size: 16px;        font-weight: 500;        background-color: #e8f5e9;        color: #2e7d32;    }</style>
    """
    soup = BeautifulSoup(desc, "html.parser")
    detail = soup.find("div", class_="detailtitle")
    if detail is None:
        # No detail section on this page — signal the miss explicitly
        # instead of embedding the string "None" in the output.
        return None
    return _style + str(detail)


def search_product(keyword) -> str:
    """Search products by keyword and return the result page HTML.

    Issues a GET against the site's ``render.cz`` endpoint with the
    ``searchProducts`` method; ``categoryId`` is deliberately left blank
    to search across all categories.
    """
    query = {"method": "searchProducts", "categoryId": "", "keywords": keyword}
    resp = get_request(
        url="https://www.4adi.com/render.cz",
        headers=headers,
        params=query,
        verify=False,
    )
    return resp.text


def extract_product_url(search_res: str) -> str | None:
    """Return the absolute URL of the first product in a search-result page.

    Scans each ``div.table-body`` row for an anchor and returns the first
    row's link made absolute against the site root.

    Returns ``None`` when no row contains a usable link.
    (Bug fixes: a row lacking an ``<a>`` previously raised ``TypeError``
    on ``None["href"]``, and the annotation claimed ``str`` even though
    ``None`` was a possible return.)
    """
    soup = BeautifulSoup(search_res, "html.parser")
    for row in soup.find_all("div", class_="table-body"):
        link = row.find("a")
        # Skip malformed rows instead of crashing on a missing anchor/href.
        if link is None or not link.has_attr("href"):
            continue
        return "https://www.4adi.com" + link["href"]
    return None


def process_prodes(procode: str):
    """Look up a product code and return its styled detail HTML.

    Pipeline: search by code -> pick the first result URL -> fetch the
    detail page -> extract the styled description fragment.

    Returns ``None`` when the search yields no product link.
    """
    search_html = search_product(procode)
    product_url = extract_product_url(search_html)
    if product_url is None:
        return None
    print(product_url)
    # Small delay between the search request and the detail fetch
    # to avoid hammering the server.
    time.sleep(1)
    detail_html = get_prodes(product_url)
    return extract_prodes(detail_html)


if __name__ == "__main__":
    res = process_prodes("5120")
    print(res)
