import logging
import threading
import time
from queue import Queue
from threading import Lock

from bs4 import BeautifulSoup

import config as c
import request as r


class PROCESS(threading.Thread):
    """Worker thread: pulls country jobs from a queue, scrapes each country's
    product list, parses every product page, and publishes the parsed records
    into the shared ``c.SRC_CTX_MAIN_DICT`` keyed by country name.
    """

    # Class-level lock shared by ALL worker threads.  A per-instance lock
    # (the original `self.locker = Lock()` in __init__) would give each
    # thread its own lock and provide no mutual exclusion at all on the
    # shared c.SRC_CTX_MAIN_DICT.
    locker = Lock()

    def __init__(self, thread_num: int, queue: Queue):
        """
        :param thread_num: human-readable worker index, used in log lines only
        :param queue: job queue; each item is a dict with keys
                      'countries_name' and 'countries_url'
        """
        threading.Thread.__init__(self)
        self.thread_num = thread_num
        self.q = queue
        self.req = r.REQUEST()

        self.product_lst = list()   # product-page URLs for the current job
        self.data = dict()          # current job payload from the queue
        self.countries_name = str()
        self.countries_url = str()

    def run(self):
        """Consume jobs forever; each job fills one country's entry in the
        shared result dict and is acknowledged with ``task_done()``."""
        logging.info(f"------ [{self.thread_num}]线程启动 ------")
        while True:
            self.data = self.q.get()
            self.countries_name = self.data['countries_name']
            self.countries_url = self.data['countries_url']
            self.get_products(self.countries_url)
            logging.info(f"[{self.thread_num}]线程 共获取到{len(self.product_lst)}个商品")
            # Parse OUTSIDE the lock: analyse() performs one HTTP request per
            # product, so holding the lock here (as the original did) would
            # serialize every worker thread on network I/O.
            results = [self.analyse(url) for url in self.product_lst]
            with self.locker:
                c.SRC_CTX_MAIN_DICT[self.countries_name] = results
            self.product_lst.clear()
            self.q.task_done()

    def get_products(self, base_url):
        """Fetch the 'just bought' carousel for *base_url* and append the
        absolute product-page URLs to ``self.product_lst``.

        A malformed JSON response is logged and skipped instead of killing
        the worker thread (restores the intent of the commented-out handler).
        """
        u = f"{base_url}{c.PRODUCT_LIST_SUFFIX}?page=home&name=justBought&country=CH&language=en&carouselSize=50&ccm=-"
        try:
            # requests' .json() raises ValueError on undecodable bodies.
            ctx = self.req.get(u).json()
        except ValueError:
            logging.error(f"解析Json错误：{u}")
            return
        logging.info(f"[{self.thread_num}]线程 获取到：{u}")
        for tile in ctx["productTiles"]:
            self.product_lst.append(f"{base_url}{tile['productPageLink']['href']}")

    @staticmethod
    def _join_options(form, select_id) -> str:
        """Concatenate the option texts of the <select id=*select_id*> inside
        *form* as 'a,b,c,' (trailing comma kept for output compatibility).
        Returns '' when the select is absent.  Options whose .string is None
        are skipped — the original crashed with TypeError on those.
        """
        joined = ""
        sel = form.find("select", attrs={'id': select_id})
        if sel is not None:
            for opt in sel.children:
                if opt.string is not None and opt.string != "\n":
                    joined += opt.string + ","
        return joined

    def analyse(self, product_url) -> c.SrcContext:
        """Download one product page and parse it into a ``c.SrcContext``.

        Missing title/price/description become the literal string "NULL";
        the five ``pic_*`` slots are padded with "" when fewer than five
        gallery images exist.
        """
        product = c.SrcContext()
        resp = self.req.get(product_url)
        soup = BeautifulSoup(resp.text, "html.parser")

        title_tag = soup.find("h1", "js-product-title")
        price_tag = soup.find("span", "js-meta-price")
        desc_tag = soup.find("div", "js-product-description")

        # Gallery images, padded to exactly the five fixed pic_* slots.
        img_lst = [tag['data-image'] for tag in soup.find_all("div", "ProductGallery__Thumb")]
        img_lst += [""] * (5 - len(img_lst))

        # Breadcrumb trail rendered as " > A > B > C".  Guard against crumbs
        # without an <a> child (original raised AttributeError there).
        category_str = ""
        breadcrumb = soup.find("ol", "breadcrumb")
        if breadcrumb is not None:
            for crumb in breadcrumb.children:
                if crumb != '\n' and crumb.a is not None and crumb.a.string is not None:
                    category_str += " > " + crumb.a.string

        size_str = ""
        color_str = ""
        config_form = soup.find("form", "Product__Configurable")
        if config_form is not None:
            size_str = self._join_options(config_form, "Size")
            color_str = self._join_options(config_form, "Colour")

        product.page_url = product_url
        product.product_title = title_tag.string if title_tag is not None else "NULL"
        product.product_price = price_tag.string if price_tag is not None else "NULL"
        # .string is None when the tag has child elements; treat that the
        # same as a missing tag (original raised AttributeError on .strip()).
        desc = desc_tag.string if desc_tag is not None else None
        product.description = desc.strip() if desc is not None else "NULL"
        product.create_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
        product.pic_1 = img_lst[0]
        product.pic_2 = img_lst[1]
        product.pic_3 = img_lst[2]
        product.pic_4 = img_lst[3]
        product.pic_5 = img_lst[4]
        product.category = category_str
        product.size = size_str
        product.color = color_str
        return product
