import json

from bs4 import BeautifulSoup
from loguru import logger

from com.arcfox.middleware import async_mysql_middleware as db
from com.arcfox.util.muilty_coroutine_util import concurrency


class AreaProcessor:
    """Parse province/city/area codes out of an HTML page and persist each
    area row into the ``tb_area_code`` table, skipping duplicates."""

    def __init__(self):
        # Async mapping over the target table; used via query()/insert().
        self.mapping = db.MysqlMapping("tb_area_code")

    async def parse_and_save_data(self, html):
        """Extract area records from *html* and save them concurrently.

        Each ``div.ip`` block (the first is skipped as a non-data header)
        is expected to contain an ``<h4>`` whose text is
        "<province_name> <province_code>", an ``<h5>`` with
        "<city_name> <city_code>", and leaf ``<li>`` items of the form
        "<area_name> <area_code>".

        :param html: raw HTML string of the source page.
        :raises IndexError: if an h4/h5/li text does not contain the
            expected space-separated name/code pair.
        """
        soup = BeautifulSoup(html, "lxml")
        data_list = []
        for div_tag in soup.find_all("div", {"class": "ip"})[1:]:
            # Hoisted: the original ran find()+split() twice per tag for
            # the same text; do each traversal and split exactly once.
            h4_parts = div_tag.find("h4").text.split(" ")
            province_name = h4_parts[0]
            province_code = h4_parts[1]
            # NOTE(review): only the FIRST <h5> in the block is used — if a
            # block ever holds several cities, every area here would be
            # attributed to the first one. Confirm against the page layout.
            h5_parts = div_tag.find("h5").text.split(" ")
            city_name = h5_parts[0]
            city_code = h5_parts[1]
            for li_tag in div_tag.find_all("li"):
                # Skip container <li> elements that nest further <li> items;
                # only leaf items carry an "<area_name> <area_code>" pair.
                if li_tag.find("li"):
                    continue
                li_parts = li_tag.text.split(" ")
                data_list.append({
                    "area_name": li_parts[0].replace("\n", ""),
                    "area_code": li_parts[1],
                    "city_code": city_code,
                    "city_name": city_name,
                    "province_code": province_code,
                    "province_name": province_name
                })
        await self.__save(tasks=data_list)

    @concurrency(10)
    async def __save(self, **params):
        """Insert one area record unless its area_code already exists.

        Invoked by the caller as ``__save(tasks=<list[dict]>)``; the
        ``concurrency`` decorator presumably fans the list out and calls
        the wrapped body once per item as ``task=<dict>`` — TODO confirm
        against muilty_coroutine_util.concurrency.
        """
        task = params['task']
        # Dedupe on area_code before inserting.
        old_data = await self.mapping.query({"area_code": task['area_code']})
        if old_data:
            logger.info("数据已存在!{}-{}", task['area_name'], task['area_code'])
        else:
            await self.mapping.insert(task)
