from .base_spider import xml_parser, json_parser, BaseSpider
from bs4 import BeautifulSoup
from datetime import datetime

# from http.client import HTTPConnection
# import logging
# HTTPConnection.debuglevel = 1
# logging.basicConfig()  # initialise logging, otherwise no output from requests will be visible.
# logging.getLogger().setLevel(logging.DEBUG)
# requests_log = logging.getLogger("requests.packages.urllib3")
# requests_log.setLevel(logging.DEBUG)
# requests_log.propagate = True


class LianJiaSpider(BaseSpider):
    """Spider for lianjia.com: look up a community (xiaoqu) by name, fetch its
    basic statistics, and walk the paginated second-hand-house listings.

    Each city lives on its own subdomain (e.g. ``https://sh.lianjia.com``);
    ``self.city(city)`` — provided by :class:`BaseSpider` — maps the city name
    to that subdomain prefix.
    """

    def __init__(self, city):
        super().__init__()
        self.host = f"https://{self.city(city)}.lianjia.com"

    @xml_parser(lambda soup: soup.find("li", class_="xiaoquListItem", attrs={"data-index": "0"})["data-id"])
    def get_community_id(self, name: str):
        """Look up the community ID by the community's name.

        The decorator's parser picks the first search result
        (``data-index="0"``) and returns its ``data-id``.
        """
        return self.get(f"xiaoqu/rs{name}/")

    @json_parser(lambda x: {
            "district": x['data']['info']['districtName'],  # district
            "area": x['data']['info']['bizcircleName'],  # business circle / sub-area
            "three_month_sell_count": x['data']['info']["90saleCount"],  # deals closed in 90 days
            "month_see": x['data']['info']["day30See"],  # viewings in 30 days
            "sell_count": x['data']['info']['sellNum']  # listings currently for sale
        })
    def community_basic_info1(self, community_id: str):
        """Fetch community basics: district/area, 90-day sales, 30-day viewings."""
        params = {
            "semParams[semResblockId]": community_id,
            "semParams[semType]": "resblock",
            "semParams[semSource]": "ershou_xiaoqu"
        }
        return self.get(uri="api/listtop", params=params)

    @xml_parser(lambda soup: {
        # "房屋总数" label is followed by e.g. "1234栋" — strip the trailing unit char.
        "house_count": int(soup.find("span", text='房屋总数').find_next_sibling('span').text[:-1]),
        "fees": soup.find("span", text='物业费').find_next_sibling('span').text,
        "followed_num": soup.find("span", attrs={"data-role": "followNumber"}).text
    })
    def community_basic_info2(self, community_id: str):
        """Fetch community basics: property-management fee, total house count, follower count."""
        return self.get(f"xiaoqu/{community_id}/")

    def community_house_list(self, community_id: str, conditions=None):
        """Return the hrefs of all second-hand listings in the given community.

        :param community_id: Lianjia community ID (see :meth:`get_community_id`).
        :param conditions: optional filter dict understood by
            :meth:`create_house_condition`.
        :raises Exception: on any non-200 HTTP response.
        """
        # Always build the condition string so the community filter is applied
        # even without extra conditions (previously it was silently dropped
        # when ``conditions`` was None, returning city-wide results).
        condition = self.create_house_condition(conditions or {}, community_id)

        soup = self._fetch_soup(f"ershoufang/{condition}")
        total = int(soup.find("h2", class_="total fl").find("span").text)
        links = self._page_links(soup)

        # Lianjia paginates at 30 listings per page; walk the remaining pages.
        page_no = 1
        while page_no * 30 < total:
            page_no += 1
            soup = self._fetch_soup(f"ershoufang/pg{page_no}{condition}")
            links += self._page_links(soup)
        return links

    def _fetch_soup(self, uri):
        """GET *uri* relative to the host and parse the body with lxml."""
        result = self.get(uri)
        if result.status_code != 200:
            raise Exception("http返回码错误!")
        return BeautifulSoup(result.text, "lxml")

    @staticmethod
    def _page_links(soup):
        """Extract the listing hrefs from one parsed result page."""
        return [
            elem["href"] for elem in
            soup.find("ul", class_="sellListContent").find_all("a", class_="noresultRecommend")
        ]

    @xml_parser(lambda soup: {
        "type_graph": (e := soup.find("li", attrs={"data-desc": "户型图"})) and e["data-src"],  # may not be photographed yet
        "living_graph": (e := soup.find("li", attrs={"data-desc": "客厅"})) and e["data-src"],  # first living-room photo is enough
        "price": int(soup.find("div", class_="price-container").find("span", class_="total").text),  # asking price
        # "所在楼层" text looks like "中楼层 (共18层)" — split off the floor band and total.
        "floor": soup.find("span", text='所在楼层').find_next_sibling(text=True).strip().split('(')[0].strip(),
        "total_floors": int(soup.find("span", text='所在楼层').find_next_sibling(text=True).strip().split('(')[1][1:-2]),
        "decoration": soup.find("span", text='装修情况').find_next_sibling(text=True).strip(),
        "type": soup.find("span", text='房屋户型').find_next_sibling(text=True).strip(),
        "size": soup.find("span", text='建筑面积').find_next_sibling(text=True).strip()[:-1],
        "attribute": soup.find("span", text='房屋用途').find_next_sibling('span').text,
        "pre_trading_date": None if (e := soup.find("span", text='上次交易').find_next_sibling('span').text) == "暂无数据" else datetime.strptime(e, "%Y-%m-%d"),
        "age_limit": None,  # not needed
        "code": (e := soup.find("span", text='房源核验统一编码')) and e.find_next_sibling('span').text,
        "focus_count": int(soup.find("span", id="favCount").text)  # number of followers
    })
    def house_info(self, href: str):
        """Fetch one listing: price, floor plan / living-room photos, floor,
        decoration, size, usage, last trade date, verification code, followers.

        ``href`` is an absolute URL taken from :meth:`community_house_list`,
        so it bypasses ``self.get`` and hits the session directly.
        """
        return self.session.get(href, headers=self.headers)

    @staticmethod
    def create_house_condition(conditions: dict[str, list[str] | str], community_id: str) -> str:
        """Build the URL condition string for a second-hand-house query.

        Only floor band ("楼层"), usage ("用途"), layout ("房型") and size
        range ("面积") are implemented.
        todo: implement the remaining query conditions

        :param conditions: mapping of Chinese condition names to selected
            option(s); "面积" maps to a "min-max" string such as "80-100".
        :param community_id: appended as the ``c<id>`` community filter.
        :raises Exception: if the "面积" value is not of the form "min-max".
        """
        # Lianjia requires the condition codes in a fixed order.
        order_options = {
            "楼层": {
                "order": {"低楼层": 1, "中楼层": 2, "高楼层": 3},
                "key": "lc"
            },
            "用途": {
                "order": {"普通住宅": 1, "商业类": 2, "别墅": 3, "四合院": 4, "其他": 5, "车位": 6},
                "key": "sf"
            },
            "房型": {
                "order": {"一室": 1, "二室": 2, "三室": 3, "四室": 4, "四室以上": 5},
                "key": "l"
            }
        }
        res = ""
        for option, spec in order_options.items():
            if option in conditions:
                order = spec["order"]
                key = spec["key"]
                sorted_condition = sorted(conditions[option], key=lambda x: order[x])
                res += "".join(f"{key}{order[i]}" for i in sorted_condition)

        res += f"c{community_id}"

        if conditions.get("面积"):
            try:
                small, large = conditions["面积"].split('-')
            except ValueError as err:  # not exactly one '-' separator
                raise Exception("面积配置错误，请设置如：80-100") from err
            res += f"ba{small}ea{large}"

        return res
