# -*- coding:utf-8 -*-
import random

from client import Client, DataBaseError
from fetcher import Fetcher
from main.util import debug, warning
import re
from bs4 import BeautifulSoup as BS
from parseError import ParseError
from pojo.community import Community
import time

url_prefix = 'https://ts.lianjia.com/xiaoqu'
client = None
table_regions = 'regions'
table_communities = 'communities'


def get_regions():
    """Load the region names stored in the regions collection.

    Returns:
        list[str]: the value of the "region" field of every document.

    Raises:
        DataBaseError: when the regions collection does not exist.
    """
    if not client.has_collection(table_regions):
        raise DataBaseError(f"{table_regions} 不存在！")
    docs = client.get_collection(table_regions).find()
    region_names = [doc["region"] for doc in docs]
    debug(region_names, "数据库中获取到的区")
    return region_names


def write_data(c_list):
    """Persist a batch of community dicts into the communities collection.

    Args:
        c_list: list of dicts (``Community.__dict__`` snapshots) to insert.
    """
    # insert_many raises InvalidOperation on an empty document list,
    # so skip the write entirely when there is nothing to store.
    if not c_list:
        return
    comms = client.get_collection(table_communities)
    res = comms.insert_many(c_list)
    if res.inserted_ids:
        debug("写入数据库成功！")


def check_database(collection, clean=False):
    """Inspect the target collection before writing; optionally wipe it.

    Warns when the collection does not exist yet, asks the operator for
    confirmation when it already holds documents, and drops it when
    *clean* is True.

    Raises:
        DataBaseError: when the operator declines to continue.
    """
    if not client.has_collection(collection):
        warning("数据库不存在，新建")
    table = client.get_collection(collection)
    # Require an explicit 'Y' before touching a non-empty collection.
    non_empty = table.count_documents({}) > 0
    if non_empty and input("数据库非空！继续？(Y/N)") != 'Y':
        raise DataBaseError("终止操作")
    if clean:
        table.drop()
        debug("清空数据库")


def get_total_page(html):
    """Extract the paginator's total page count from a listing page.

    The count lives in an embedded JS/JSON blob shaped like
    ``"totalPage":N,"curPage":...``.

    Args:
        html: raw HTML text of a listing page.

    Returns:
        int: the total number of result pages.

    Raises:
        ParseError: when the marker is missing or appears more than once.
    """
    # \d+ (instead of the previous .*?) guarantees the capture is numeric,
    # so int() below cannot raise ValueError on garbage between the keys.
    pattern = r'"totalPage":(\d+),"curPage"'
    matches = re.findall(pattern, html)
    if len(matches) != 1:
        raise ParseError("获取 总页数 失败！")
    return int(matches[0])


def parse_communities(html, reg):
    """Parse one listing page into a list of community dicts.

    Args:
        html: raw HTML text of a listing page.
        reg: region name to stamp onto every parsed community.

    Returns:
        list[dict]: one ``Community.__dict__`` per listed community.

    Raises:
        ParseError: when the page contains no community items.
    """
    items = BS(html, "lxml").find_all("li", class_="clear xiaoquListItem")
    debug(len(items), "获取到小区列表数目")
    if not items:
        raise ParseError("获取 小区列表 失败！")
    # Build one Community per <li> entry, tagging it with its region.
    communities = []
    for item in items:
        community = Community()
        community.region = reg
        get_community_base_info(item, community)
        communities.append(community.__dict__)
    debug(communities, "获取到的小区基本信息")
    return communities


def get_community_base_info(item, community):
    """Populate *community* in place with fields parsed from one list item.

    Args:
        item: bs4 Tag for one "clear xiaoquListItem" <li> entry.
        community: Community instance to fill (mutated in place).
    """
    info = item.select('.title')[0]
    # The community id is the second-to-last path segment of the detail URL.
    community.cid = info.a['href'].split('/')[-2]
    community.name = info.a.string

    house_info = item.select('.houseInfo')[0]
    # Pull the count out of text like "...成交12套"; kept as a string,
    # mirroring the raw page text.
    community.recent_deal_count = re.findall(r'成交(.*?)套', house_info.find_all('a')[0].string)[0]
    community.avg_price = item.select('.totalPrice')[0].span.string
    community.in_stock_count = item.select('.totalSellCount')[0].span.string
    # Removed a leftover print(community) debug statement; the caller
    # already logs the full parsed list via debug().


if __name__ == '__main__':
    # For every known region, crawl all of its communities and store them.

    # Wire up the shared DB client, the region list and the page fetcher.
    client = Client()
    region_list = get_regions()
    fetcher = Fetcher(url_prefix)

    # Careful with the clean flag: True wipes the communities collection
    # (after the operator confirms) before the crawl starts.
    check_database(table_communities, True)

    # NOTE(review): region_list[1:] skips the first region — presumably
    # intentional (already crawled?); confirm before changing.
    for reg in region_list[1:]:
        community_list = []
        page, last_page = 1, 1
        # Paginate; the real page count only becomes known after page 1.
        while page <= last_page:
            debug(f"获取{reg}区 {page}/{last_page}", "获取中")
            html = fetcher.get_content(reg, page)
            if page == 1:
                last_page = get_total_page(html)
            community_list.extend(parse_communities(html, reg))
            page += 1
            # Randomized 1-3s pause between requests to be polite to the site.
            time.sleep(random.randint(1, 3))
        write_data(community_list)
