import requests
from bs4 import BeautifulSoup
from utils.req import get_request

# Browser-like request headers (Edge on macOS) shared by every request in this
# module; the Referer/sec-* fields help avoid the site's bot filtering.
headers = {
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
    "Connection": "keep-alive",
    "DNT": "1",
    "Referer": "https://www.affbiotech.cn",
    "Sec-Fetch-Dest": "document",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-Site": "same-origin",
    "Sec-Fetch-User": "?1",
    "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36 Edg/138.0.0.0",
    "sec-ch-ua": '"Not)A;Brand";v="8", "Chromium";v="138", "Microsoft Edge";v="138"',
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": '"macOS"',
}


def search_product(procode):
    """Search affbiotech.cn for a product code.

    Args:
        procode: Product code string, e.g. "AF0931".

    Returns:
        The product detail page URL if the search redirected there,
        or ``None`` when the site stayed on the search results page
        (meaning no direct product match was found).
    """
    url = "https://www.affbiotech.cn/search"
    params = {"q": procode, "cid": "0"}
    response = get_request(url=url, headers=headers, params=params)

    # A successful lookup redirects to the product detail page; remaining on
    # the /search?q=... URL indicates no exact match.
    res_url = response.url
    if res_url.startswith("https://www.affbiotech.cn/search?q="):
        return None
    return res_url


def get_product_info(url: str) -> str:
    """Fetch a product page and return its raw HTML text."""
    return get_request(url=url, headers=headers).text


def extract_product_info(html: str) -> tuple[str, list[dict]]:
    """Parse a product page for its details panel and gallery images.

    Args:
        html: Full HTML of a product detail page.

    Returns:
        A 2-tuple of:
        - the ``#collapseGoodsinfo`` div as an HTML string, with empty
          ``<!-- -->`` comment nodes stripped (the string "None" if the
          div is absent from the page);
        - up to five ``{"photo": url}`` dicts for the carousel images.
    """
    soup = BeautifulSoup(html, "html.parser")
    product_info = soup.find("div", attrs={"id": "collapseGoodsinfo"})

    url_list = []
    img_dom = soup.find("div", attrs={"class": "carousel-inner"})
    # Guard: pages without an image carousel simply yield an empty list
    # instead of raising AttributeError on None.
    if img_dom is not None:
        for img in img_dom.find_all("img"):
            src = img.get("data-src")
            # Fallback is the site's protocol-relative placeholder thumbnail.
            url_list.append(
                {
                    "photo": (
                        "https:" + src
                        if src
                        else "//img.affbiotech.cn/images/af-thumb400x400.jpg"
                    )
                }
            )

    return str(product_info).replace("<!-- -->", ""), url_list[:5]


def process_desc(procode: str):
    """Look up a product code and return its extracted info, or None if not found."""
    product_url = search_product(procode)
    if product_url is None:
        return None
    return extract_product_info(get_product_info(product_url))


# if __name__ == "__main__":
#     res = process_desc("AF0931")
#     print(res)
