import json

from bs4 import BeautifulSoup

from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.school.processor.base_processor import BaseProcessor


class SchoolListProcessor(BaseProcessor):
    """Parses school-listing HTML pages and enqueues the extracted school
    records onto Redis task queues (one queue per task key)."""

    def __init__(self):
        super().__init__()

    def parse_total_pages(self, html, page_size=0):
        """Return the total number of listing pages.

        Args:
            html: Raw HTML of a listing page.
            page_size: Records per page; must be greater than 0.

        Returns:
            Ceiling of total records / page_size, or 0 when the
            record-count element (``<span class="count">``) is absent.

        Raises:
            ValueError: If ``page_size`` is not positive.
        """
        # Raise instead of assert: asserts are stripped under `python -O`,
        # which would silently let page_size=0 reach the division below.
        if page_size <= 0:
            raise ValueError("page_size must be greater than 0")
        soup = BeautifulSoup(html, "lxml")
        span_tag = soup.find("span", {"class": "count"})
        if not span_tag:
            return 0
        total_records = int(span_tag.text)
        # Integer ceiling division: exact for any size, unlike the float
        # division + int() round-trip it replaces.
        return (total_records + page_size - 1) // page_size

    async def parse_and_store_school_list(self, html, task_keys):
        """Extract school entries from a listing page and enqueue them.

        Args:
            html: Raw HTML expected to contain ``<ul id="lists">`` with one
                ``<li>`` per school.
            task_keys: Iterable of Redis queue keys; the full school list is
                added under each key.
        """
        soup = BeautifulSoup(html, "lxml")
        list_tag = soup.find("ul", {"id": "lists"})
        # Guard against a page without the list container (e.g. an error
        # page) — previously this raised AttributeError on None.
        if list_tag is None:
            return
        school_list = []
        for li_tag in list_tag.find_all("li"):
            # find_all returns [] when nothing matches, so no guard needed.
            labels = [span.text for span in li_tag.find_all("span", {"class": "label"})]
            # href looks like "/school/<id>.html": strip the prefix and the
            # 5-char ".html" suffix to recover the id.
            school_id = li_tag.find("a").attrs['href'].replace("/school/", "")[:-5]
            school_name = li_tag.find("div", {"class": "sch-name"}).find("a").text
            # NOTE(review): assumes the lazy-load attribute `lay-src` holds
            # the real image URL — confirm against the page markup.
            logo_url = li_tag.find("img").attrs['lay-src']
            school_list.append({
                "id": school_id,
                "school_name": school_name,
                "logo_url": logo_url,
                "labels": ",".join(labels),
            })
        for task_key in task_keys:
            await RedisTaskManager(task_key).add_tasks(school_list)
