from bs4 import BeautifulSoup
from utils.req import get_request


# Default request headers impersonating Chrome 138 on macOS; used by
# search_product() for the AJAX suggest endpoint (hence x-requested-with).
headers = {
    "accept": "*/*",
    "accept-language": "zh-CN,zh;q=0.9",
    "content-type": "application/json",
    "priority": "u=1, i",
    "referer": "https://www.iba-lifesciences.com/",
    "sec-ch-ua": '"Not)A;Brand";v="8", "Chromium";v="138", "Google Chrome";v="138"',
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": '"macOS"',
    "sec-fetch-dest": "empty",
    "sec-fetch-mode": "cors",
    "sec-fetch-site": "same-origin",
    "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36",
    "x-requested-with": "XMLHttpRequest",
}
# Cookies captured from a browser session.
# NOTE(review): not passed to any request in this file — confirm whether
# utils.req.get_request picks them up implicitly, otherwise this is dead data.
# The hard-coded session id will expire and should not be committed long-term.
cookies = {
    "timezone": "Asia/Shanghai",
    "session-": "4b01ad518f1bad8cfb419dd9ef461260",
    "dvsn-context-prompt": "%7B%22languageId%22%3A%222fbb5fe2e29a4d70aa5854ce7ce3e20b%22%7D",
}
# Example of calling the suggest endpoint manually (see search_product below):
# url = "https://www.iba-lifesciences.com/suggest"
# params = {"search": "2-1002-100"}
# response = get_request(url=url, headers=headers, params=params)


def search_product(search_text):
    """Query the site's suggest endpoint for a product code or free text.

    Args:
        search_text: The search term, e.g. a catalog number like "2-1002-100".

    Returns:
        The raw response body (an HTML fragment of suggestions) on success,
        or ``None`` when the server answers with a non-200 status.
    """
    url = "https://www.iba-lifesciences.com/suggest"
    params = {"search": search_text}
    response = get_request(url=url, headers=headers, params=params)
    print(response.status_code)
    # Parsing an error page downstream would silently yield "no product";
    # make the failure explicit (get_product_info treats falsy as not-found).
    if response.status_code != 200:
        return None
    return response.text


def get_product_detail(product_url) -> BeautifulSoup:
    """Fetch a product-detail page and parse it.

    Args:
        product_url: Absolute URL of the product page.

    Returns:
        A BeautifulSoup tree of the fetched page.
    """
    # Navigation-style headers (distinct from the module-level AJAX headers):
    # this request emulates a top-level browser page load.
    page_headers = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "accept-language": "zh-CN,zh;q=0.9",
        "priority": "u=0, i",
        "sec-ch-ua": "\"Not)A;Brand\";v=\"8\", \"Chromium\";v=\"138\", \"Google Chrome\";v=\"138\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"macOS\"",
        "sec-fetch-dest": "document",
        "sec-fetch-mode": "navigate",
        "sec-fetch-site": "none",
        "sec-fetch-user": "?1",
        "upgrade-insecure-requests": "1",
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36"
    }
    page_response = get_request(url=product_url, headers=page_headers)
    print(page_response.status_code)
    return BeautifulSoup(page_response.text, "html.parser")


def extract_product_url(html):
    """Extract the first product URL from the suggest-endpoint HTML.

    Args:
        html: HTML fragment returned by search_product().

    Returns:
        The href of the first suggested product, or ``None`` when the markup
        contains no product suggestion (or the suggestion has no link).
    """
    soup = BeautifulSoup(html, "html.parser")
    product_dom = soup.find("div", class_="tab-content")
    if not product_dom:
        return None
    ul_dom = product_dom.find("ul", class_="search-suggest-container")
    if not ul_dom:
        return None
    # Only the first suggestion is used, so find() suffices — the original
    # find_all() loop returned on its first iteration anyway.
    product = ul_dom.find("li", class_="search-suggest-product js-result")
    if product and (link := product.find("a")):
        # link.get avoids a KeyError if the anchor lacks an href attribute.
        return link.get("href")
    return None


def extract_product_image(soup):
    """Return the main gallery image URL from a product-detail page.

    Args:
        soup: BeautifulSoup tree of the product page (from get_product_detail).

    Returns:
        The ``src`` of the first gallery image, or ``None`` when the gallery
        div or its ``<img>`` tag is missing.
    """
    # The original ``-> str`` annotation was wrong (None is a valid result),
    # and img_dom.find("img") could be None, crashing on .get("src").
    img_dom = soup.find("div", class_="gallery-slider-single-image")
    if img_dom and (img := img_dom.find("img")):
        return img.get("src")
    return None


def extract_product_desc(soup):
    """Return the product description tab content as serialized HTML.

    Args:
        soup: BeautifulSoup tree of the product page (from get_product_detail).

    Returns:
        The ``product-detail-tabs-content`` div as an HTML string, or ``None``
        when the page has no such element.
    """
    # The original ``-> str`` annotation was misleading: None is a valid result.
    if desc_dom := soup.find("div", class_="product-detail-tabs-content"):
        return str(desc_dom)
    return None


def get_product_info(procode):
    """Look up a product by catalog code and return its description and image.

    Args:
        procode: The product code to search for, e.g. "2-1324-000".

    Returns:
        A ``(description_html, image_url)`` tuple; both elements are ``None``
        when the search yields nothing or no product URL can be extracted,
        and either element may be ``None`` if its extraction fails.
    """
    suggest_html = search_product(procode)
    if not suggest_html:
        return None, None
    product_url = extract_product_url(suggest_html)
    if not product_url:
        return None, None
    print(product_url)
    detail_soup = get_product_detail(product_url)
    return extract_product_desc(detail_soup), extract_product_image(detail_soup)


if __name__ == "__main__":
    # Smoke test: fetch one known catalog number and dump the extracted data.
    product_desc, product_image = get_product_info("2-1324-000")
    print(product_desc, product_image)
    
