# -*- coding:utf-8 -*-
import random

from client import Client, DataBaseError
from fetcher import Fetcher
from main.util import debug, warning
import re
from bs4 import BeautifulSoup as BS
from parseError import ParseError
import time

from pojo.house import House

# Base URL of the Lianjia sold-listings ("chengjiao") pages for this city.
url_prefix = 'https://ts.lianjia.com/chengjiao'
# Shared DB client; assigned in the __main__ block before any helper is called.
client = None
table_communities = 'communities'
table_houses = 'houses'
# Resume the crawl from this community ID (empty string = start from the top).
resume_cid = ''


def get_resume_cid_index(cid):
    """Return the position of *cid* in the stored community order.

    Used to resume an interrupted crawl: the caller slices the community
    list starting at the returned index.

    :param cid: community id to resume from; falsy means "start from scratch".
    :return: index of ``cid`` in the communities table, or 0 when the id is
        empty or not found.
    """
    # Empty resume id: skip the database round-trip entirely — the old code
    # fetched every cid only for ''.index(...) to fail and fall back to 0.
    if not cid:
        return 0
    table = client.get_collection(table_communities)
    cids = [doc["cid"] for doc in table.find({}, {'cid': 1})]
    try:
        return cids.index(cid)
    except ValueError:
        # Unknown id — restart from the beginning rather than crashing.
        return 0


def get_community_list():
    """Fetch all community ids and names from the database.

    The previous implementation issued two independent ``find`` queries (one
    per field); MongoDB does not guarantee the same document order across
    separate cursors, so ids and names could be silently misaligned.  A
    single query keeps each id paired with its own name.

    :return: tuple ``(comm_id_list, comm_name_list)`` in matching order.
    """
    table = client.get_collection(table_communities)
    docs = list(table.find({}, {'cid': 1, 'name': 1}))
    comm_id_list = [doc["cid"] for doc in docs]
    comm_name_list = [doc["name"] for doc in docs]
    return (comm_id_list, comm_name_list)


def write_data(list1, collection):
    """Insert a batch of documents into *collection*.

    :param list1: list of dicts to insert; an empty list is a no-op
        (``insert_many`` raises ``InvalidOperation`` when given no documents).
    :param collection: name of the target collection.
    """
    if not list1:
        return
    table = client.get_collection(collection)
    res = table.insert_many(list1)
    if len(res.inserted_ids) > 0:
        debug("写入数据库成功！")


def check_database(collection, clean=False):
    """Ensure *collection* is ready to receive a fresh crawl.

    Warns when the collection does not exist yet, asks for interactive
    confirmation when it already holds documents, and optionally drops it.

    :param collection: collection name to check.
    :param clean: when True, drop the collection after confirmation.
    :raises DataBaseError: when the user declines to continue.
    """
    if not client.has_collection(collection):
        warning(f"数据表 {collection} 不存在，新建")
    # Non-empty table: require explicit confirmation before proceeding.
    table = client.get_collection(collection)
    if table.count_documents({}) > 0:
        # Accept 'y'/' Y ' as well — the old exact-match on 'Y' was easy
        # to fail by accident and aborted the whole run.
        if input(f"数据表 {collection} 非空！继续？(Y/N)").strip().upper() != 'Y':
            raise DataBaseError("终止操作")
    if clean:
        table.drop()
        debug("清空数据表 " + collection)


def get_total_page(html):
    """Extract the total page count embedded in a listing page's HTML.

    :param html: raw page source containing a ``"totalPage":N,"curPage"``
        JSON fragment.
    :return: total number of result pages as an int.
    :raises ParseError: when exactly one such fragment cannot be found.
    """
    matches = re.findall(r'\"totalPage\"\:(.*?),\"curPage\"', html)
    if len(matches) != 1:
        raise ParseError("获取 总页数 失败！")
    return int(matches[0])


def parse_houses(html, comm_id, comm_name):
    """Parse one result page into a list of house payload dicts.

    :param html: page source of a sold-listings result page.
    :param comm_id: community id attached to every parsed house.
    :param comm_name: community name attached to every parsed house.
    :return: list of ``House.__dict__`` dicts ready for DB insertion.
    """
    soup = BS(html, "lxml")
    items = soup.select('ul.listContent')[0].children
    h_list = []
    for item in items:
        # .children also yields NavigableString whitespace nodes, which have
        # no .select and would crash get_house_base_info — skip anything
        # that is not a real tag.
        if not hasattr(item, 'select'):
            continue
        h = House()
        h.name = comm_name
        h.cid = comm_id
        get_house_base_info(item, h)
        h_list.append(h.__dict__)
    debug(h_list, "获取到的小区基本信息")
    return h_list


def get_house_base_info(item, house):
    """Fill *house*'s fields in place from one listing entry tag.

    :param item: BeautifulSoup tag for a single sold-house list item.
    :param house: House instance whose attributes are populated in place.
    """
    title = item.select('.info>.title')[0]
    # Link ends in ".../<hid>.html" — the file stem is the house id.
    house.hid = title.a['href'].split('/')[-1].split('.')[0]
    # Title text is space-separated: name, layout, and (optionally) area.
    titles = title.a.text.split(' ')
    house.structure = titles[1]
    if len(titles) == 3:
        # Drops the last two characters — presumably a unit suffix such as
        # "平米"; TODO confirm against live markup.
        house.design_area = titles[2][:-2]
    # houseInfo text is " | "-separated: facing direction, then decoration.
    h_i = item.select('.houseInfo')[0].text
    house.face_direction = h_i.split(" | ")[0].strip()
    house.decoration = h_i.split(" | ")[1].strip()
    house.deal_time = item.select('.dealDate')[0].text
    house.deal_value = item.select('.totalPrice>.number')[0].text
    # positionInfo text is space-separated: floor description, building type.
    position = item.select('.positionInfo')[0].text
    house.floor = position.split(' ')[0]
    house.building_type = position.split(' ')[1]
    house.price = item.select('.unitPrice>span')[0].text
    # dealCycleTxt has two spans: asking price ("挂牌X万") and days on
    # market ("成交周期X天"); the regexes capture just the number.
    house.origin_value = re.findall('挂牌(.*?)万', item.select('.dealCycleTxt>span')[0].text)[0]
    house.deal_duration = re.findall('成交周期(.*?)天', item.select('.dealCycleTxt>span')[1].text)[0]


if __name__ == '__main__':

    # For every community, collect the basic info of all of its sold houses.

    # Initialization: the module-global client is shared by all helpers above.
    client = Client()
    fetcher = Fetcher(url_prefix)

    # NOTE: the second argument drops the target table — double-check it
    # before every run!
    check_database(table_houses, True)

    i_cid = get_resume_cid_index(resume_cid)
    comm_id_list, comm_name_list = get_community_list()

    # Walk the communities, optionally resuming from a previous run.
    for comm_id, comm_name in zip(comm_id_list[i_cid:], comm_name_list[i_cid:]):
        house_list = []
        current_page = 1
        total_page = 1
        debug(comm_id, f"进入{comm_id}-{comm_name}小区")
        # Page through this community's deal listings.
        while current_page <= total_page:
            html = fetcher.get_content2(comm_id, current_page)
            if current_page == 1:
                try:
                    total_page = get_total_page(html)
                except ParseError as e:
                    warning(e.reason, "没有房子，跳过")
                    break
            house_list.extend(parse_houses(html, comm_id, comm_name))
            current_page += 1
            # Randomized delay to stay under the site's rate limiting.
            time.sleep(random.randint(1, 3))

        # Persist per community so an interrupted run loses little work.
        # (Removed a leftover print() that dumped the full payload to stdout.)
        if house_list:
            write_data(house_list, table_houses)