import os
from datetime import datetime

from DrissionPage._base.chromium import Chromium
from DrissionPage._configs.chromium_options import ChromiumOptions
from DrissionPage._elements.chromium_element import ChromiumElement
from DrissionPage._functions.settings import Settings
from DrissionPage._pages.chromium_tab import ChromiumTab
from DrissionPage.errors import NoRectError, ElementLostError, PageDisconnectedError, ElementNotFoundError
from fake_useragent import UserAgent
from sqlmodel import select

from config.db import get_session
from dict.crwaler_dict import transaction_status_dict, sale_method_dict
from entity.model.community_model import TblHouseManageLand
from entity.model.crawler_record_model import CrawlerDataRecord
from entity.schema.crawler_schema import CrawlerLandArgs
from enums.crawler_enum import TransactionStatus, SaleMethod, CrawlerType
from util.crawler_util import decode_base64, get_hash_value
from util.data_change_util import get_year_month, process_date_str
from util.mylog import my_logger


class JinanLandHandler:
    """Crawler for land-transaction listings on wanshudata.com (Jinan).

    Drives a DrissionPage browser tab: selects the city and date range,
    triggers a search, captures the backend API responses through the
    network listener, decodes and maps the payload into
    ``TblHouseManageLand`` rows, and records each crawl so that months
    with unchanged totals are skipped on later runs.
    """

    # Backend endpoints the network listener filters on.
    data_api_url = "https://www.wanshudata.com/api/dai/api/data/list"
    city_list_url = "https://www.wanshudata.com/api/saasapi/city/list"
    login_url_api = "https://www.wanshudata.com/api/saasapi/user/login"

    def __init__(self, tab: ChromiumTab, crawler_args: CrawlerLandArgs):
        """Bind the browser tab and unpack the crawl arguments.

        :param tab: DrissionPage tab used for all page interaction.
        :param crawler_args: target url, city, date and filter conditions.
        """
        self.tab = tab
        self.crawler_args = crawler_args
        self.all_counts = None
        self.all_page = None
        self.crawler_url = crawler_args.crawler_url
        self.crawler_year = crawler_args.year
        self.crawler_month = crawler_args.month
        self.crawler_day = crawler_args.day
        self.crawler_city = crawler_args.city
        self.crawler_type = crawler_args.crawler_type.value
        # Normalized year/month key used to match previous crawl records.
        self.crawler_last_time = get_year_month(self.crawler_year, self.crawler_month, self.crawler_day)
        self.transaction_status_condition = crawler_args.transaction_status_condition
        self.sale_method_condition = crawler_args.sale_method_condition
        self.page = 0   # number of result pages to walk after the first one
        self.total = 0  # total record count reported by the last search
        # First pass crawls by transaction time, second pass by "other" time.
        self.crawler_time_type = "transaction"
        self.origin_data = []  # mapped rows accumulated during one pass
        self.is_check = True   # True on the first pass; second pass clears filters

    def filter_data(self):
        """On the second pass, clear the filters left over from the first."""
        if not self.is_check:
            # Clear the previous search conditions.
            self.tab.ele(".clear-btn el-icon-circle-close").click()

    def get_last_crawler_record(self):
        """Return the record count saved by the previous crawl of this
        city / crawler type / month, or 0 when no record exists.

        The time column queried depends on the current pass
        (``transaction_time`` vs ``other_time``).
        """
        with get_session() as session:
            if self.crawler_time_type == "transaction":
                stat = select(CrawlerDataRecord.crawler_total).where(
                    CrawlerDataRecord.city == self.crawler_city,
                    CrawlerDataRecord.crawler_type == self.crawler_type,
                    CrawlerDataRecord.transaction_time == self.crawler_last_time
                )
            else:
                stat = select(CrawlerDataRecord.crawler_total).where(
                    CrawlerDataRecord.city == self.crawler_city,
                    CrawlerDataRecord.crawler_type == self.crawler_type,
                    CrawlerDataRecord.other_time == self.crawler_last_time
                )
            crawler_total = session.exec(stat).first()
        return 0 if crawler_total is None else crawler_total

    def judge_record(self, count_number):
        """Decide whether this crawl can stop.

        Compares ``count_number`` (the site's current total) with the
        previously recorded total; when new records exist, sets
        ``self.page`` to the number of 200-row pages needed to cover them.

        :param count_number: total hit count reported by the search.
        :return: True when nothing needs crawling (or on error),
                 False when new data must be fetched.
        """
        try:
            if count_number <= 0:
                my_logger.info("已结束")
                return True
            # Look up the total recorded by the previous crawl.
            last_cnt = self.get_last_crawler_record()
            if last_cnt >= count_number:
                my_logger.info("数据无变化已结束")
                return True  # data unchanged, nothing to do
            records_to_fetch = count_number - last_cnt
            # Ceiling division: the site serves 200 records per page.
            pages_to_fetch = (records_to_fetch + 199) // 200
            self.page = pages_to_fetch
            my_logger.info(f"需要爬取 {pages_to_fetch} 页数据")
            return False  # more data must be fetched
        except Exception as e:
            my_logger.error(f"获取数据记录时发生错误: {e}")
            return True

    def judge_record_is_change(self):
        """Return True when the data changed and a crawl is required."""
        return not self.judge_record(self.total)

    def process_data(self, data_dict):
        """Record the total from a search response and report whether the
        remaining pages should be exported."""
        self.total = self.get_total_record(data_dict)
        return self.judge_record_is_change()

    def process_origin_data(self, data_list: list[dict]):
        """Map raw API rows into ``TblHouseManageLand`` entities and append
        them to ``self.origin_data``.

        Rows with an unrecognized trade state are logged and skipped;
        an unrecognized sale method falls back to code 4.
        """
        for item in data_list:
            land_data = TblHouseManageLand()
            land_data.land_name = item["LandCaption"]
            land_data.hash_value = get_hash_value(item["CityCaption"] + land_data.land_name)
            land_data.tenant_id = 8888888888888888888
            land_data.province_id = 37    # Shandong administrative code
            land_data.area_id = 3701      # Jinan administrative code
            land_data.notice_number_or_plot_code = item["AfficheNo"]
            land_data.land_address = item["Address"]
            land_data.plan_total_construction_area = item["TotalBuildArea"]
            land_data.total_area = item["OccupyArea"]
            land_data.land_property = item["UseType"]
            use_type = item["UseType"]
            # Tenure (years) inferred from the use type; checked in order,
            # so mixed types such as "商住" take the first match.
            if "商" in use_type:
                land_data.duration = 40   # commercial
            elif "住" in use_type:
                land_data.duration = 70   # residential
            elif "工" in use_type:
                land_data.duration = 50   # industrial
            elif "公" in use_type:
                land_data.duration = 50   # public
            land_data.plot_ratio = item["CapabilityRates"]
            land_data.base_price = item["BasePrice"]
            land_data.announcement_date = process_date_str(item["DocumentIssueDate"])
            land_data.transaction_date = process_date_str(item["ChangeDateTime"])
            land_data.winning_bidder = item["Invisor"]
            land_data.total_sale_price = item["Price"]
            land_data.floor_price = item["BlockPrice"]
            if item["LandTradeState"] in TransactionStatus._value2member_map_:
                land_data.transaction_status = transaction_status_dict[TransactionStatus._value2member_map_[item["LandTradeState"]]]
            else:
                # Unknown trade state: log and skip this record only.
                # (A bare `return` here used to silently drop the rest of
                # the batch as well.)
                my_logger.error(f"交易状态错误: {item['LandTradeState']}")
                continue
            if item["ChangeMethodItem"] in SaleMethod._value2member_map_:
                land_data.sale_method = sale_method_dict[SaleMethod._value2member_map_[item["ChangeMethodItem"]]]
            else:
                land_data.sale_method = 4  # fallback code for unrecognized sale method
            self.origin_data.append(land_data)

    @staticmethod
    def choose_date(tab: ChromiumTab, year: int, month: int):
        """Select ``year``/``month`` as both ends of the site's date-range
        picker, retrying (with a growing lookup timeout) while the widget
        is still loading.

        :param tab: tab showing the search page.
        :param year: calendar year to select.
        :param month: 1-based month to select.
        """
        # The picker lists years newest-first, so convert the calendar
        # year into a 1-based index counted back from the current year.
        year = datetime.now().year - year + 1
        try_cnt = 1
        while True:
            try:
                date = tab.ele(".result-date placeholder", timeout=try_cnt)
                date.wait.clickable()
                date.click(by_js=True)
                # Start date panel.
                start_content = tab.ele("#start-con", -1).child(".content_scroll")
                start_year = start_content.ele(".year_con", year).child(index=1)
                start_month = start_year.next().child(index=month)
                my_logger.info(f"开始日期: {start_year.text} {start_month.text}")
                start_month.wait.clickable()
                start_month.click(by_js=True)
                # End date panel (last .content_scroll on the page).
                end_content = tab.ele(".content_scroll", -1)
                end_year = end_content.ele(".year_con", year).child(index=1)
                end_month = end_year.next().child(index=month)
                my_logger.info(f"结束日期: {end_year.text} {end_month.text}")
                end_month.wait.clickable()
                end_month.click(by_js=True)
                # Confirm the selection.
                btn = tab.ele(".date_btn is_sure", -1)
                btn.wait.clickable()
                btn.click(by_js=True)
                break
            except NoRectError:
                # Widget not rendered yet: retry with a longer timeout.
                my_logger.warning("日期选择框未加载完成，重试...")
                try_cnt += 1
            except ElementLostError:
                my_logger.warning("元素失效了，重试...")

    def search_data(self):
        """Click the search button and return the decoded first-page
        response dict, or False when decoding fails."""
        search_btn = self.tab.ele(".btns").child(index=-2)
        search_btn.click()
        res = self.tab.listen.wait()
        data = self.process_res(res.response.body)
        if not data:
            return False
        return data

    def export_data(self, tab: ChromiumTab):
        """Page through the remaining result pages (page 1 was already
        consumed by ``search_data``) and accumulate their rows.

        :param tab: tab showing the paginated results.
        :return: True on completion, False when a response fails to decode.
        """
        i = 1
        while i <= self.page:
            my_logger.debug(f"总页数: {self.page} - 当前页数: {i}")
            tab.wait(0.3)
            next_btn = tab.ele(".btn-next")
            next_btn.wait.clickable(timeout=1)
            next_btn.click()
            i += 1
            # NOTE(review): the response of the final click is deliberately
            # not consumed here — confirm the last page is covered upstream.
            if i <= self.page:
                res = tab.listen.wait()
                data = self.process_res(res.response.body)
                if not data:
                    return False
                self.process_origin_data(data["list"])
                # Randomized pause between pages to look less bot-like.
                tab.wait(1, 4)
        my_logger.debug("爬取完成")
        return True

    def add_crawler_record(self):
        """Persist a ``CrawlerDataRecord`` with this month's total so the
        next run can detect unchanged data.

        The time field depends on the current pass; its value must be the
        same ``crawler_last_time`` key that ``get_last_crawler_record``
        queries against (the old code read the nonexistent attribute
        ``self.crawler_time`` and raised ``AttributeError``).
        """
        if self.crawler_time_type == "transaction":
            t = {
                "transactionTime": self.crawler_last_time
            }
        else:
            t = {
                "otherTime": self.crawler_last_time
            }
        body = {
                   "city": "济南",
                   "crawlerType": self.crawler_type,
                   "crawlerTotal": self.total,
               } | t
        with get_session() as session:
            session.add(CrawlerDataRecord(**body))
            session.commit()

    def crawl_data(self):
        """Run one crawl pass for the current time type, then reset the
        per-pass state for the next pass."""
        # Select the target date range.
        self.choose_date(self.tab, self.crawler_year, self.crawler_month)
        # Clear search filters (second pass only).
        self.filter_data()
        # Capture the data API responses.
        self.tab.listen.start(self.data_api_url, method="POST")
        data_dict = self.search_data()
        if not data_dict:
            return
        is_export = self.process_data(data_dict)
        if is_export:
            # First page of results.
            self.process_origin_data(data_dict["list"])
            is_success = self.export_data(self.tab)
            if is_success:
                # Persist the rows.
                res = self.save_origin_data()
                if res["success"]:
                    # Remember this crawl so unchanged data is skipped next time.
                    self.add_crawler_record()
                    # Sync data (currently disabled):
                    # self.sync_data()
                else:
                    my_logger.error("存入数据库失败")
        # Reset the page counter.
        self.page = 0
        # Switch the next pass to the "other" time column.
        self.crawler_time_type = "other"
        # Drop the rows accumulated in this pass.
        self.origin_data.clear()

    def crawl(self):
        """Run both passes: transaction-time first, then other-time with
        the search filters cleared."""
        self.crawl_data()
        self.is_check = False
        self.crawl_data()

    def save_origin_data(self):
        """Bulk-insert the accumulated rows into the database.

        :return: ``{"success": True}`` on commit, ``{"success": False}``
                 after logging any failure.
        """
        try:
            with get_session() as db:
                try:
                    # NOTE(review): bulk_insert_mappings expects mapping
                    # dicts; confirm that the model instances collected in
                    # self.origin_data are accepted by this session.
                    db.bulk_insert_mappings(TblHouseManageLand, self.origin_data)
                    db.commit()
                except Exception:
                    # Roll back while the session is still open. (The old
                    # code rolled back after the with-block had already
                    # closed the session, and hit NameError on `db` when
                    # get_session() itself failed.)
                    db.rollback()
                    raise
            return {"success": True}
        except Exception as e:
            my_logger.error(f"保存数据失败: {e}")
            return {"success": False}

    def choose_city(self, city: str):
        """Ensure the page shows ``city``; when it does not, open the city
        dialog, search and switch, retrying on missing/stale elements.

        :param city: display name of the city to crawl.
        """
        while True:
            try:
                my_logger.debug(f"要爬取的城市: {city}")
                self.tab.wait.ele_displayed('.city-name')
                city_name = self.tab.ele('.city-name').text
                my_logger.debug(f"当前页面城市: {city_name}")
                if city_name != city:
                    change_city = self.tab.ele('.change')
                    change_city.wait.clickable()
                    change_city.click()
                    self.tab.wait.ele_displayed('.el-dialog el-dialog-qy')
                    city_list_dialog = self.tab.ele('.el-dialog el-dialog-qy')
                    search_input = city_list_dialog.wait.has_rect().ele('.search-input')
                    # Type the city name, then click the search control.
                    input_city = search_input.children()[1]
                    input_city.input(city)
                    search_area = search_input.children()[-2]
                    search_area.wait.clickable()
                    search_area.click()
                    self.tab.wait.load_start()
                    self.tab.wait.ele_displayed('.city-name')
                    city_name = self.tab.ele('.city-name').text
                    my_logger.debug(f"切换成: {city_name}")
                else:
                    my_logger.debug(f"开始爬取城市: {city_name}")
                    break
            except ElementNotFoundError:
                my_logger.warning('城市未找到, 重试...')
            except ElementLostError:
                my_logger.warning('元素对象已失效, 重试...')

    def login(self):
        """Log in via SMS code, retrying until the login API reports success.

        Reads the phone number from the PHONE environment variable and
        prompts on stdin for the verification code; the SMS is only
        requested on the first attempt.
        """
        self.tab.listen.start(targets=JinanLandHandler.login_url_api, method="POST")
        flag = True  # request the SMS code only on the first attempt
        while True:
            try:
                login_btn = self.tab.ele('.login-btn-text', timeout=3)
                login_btn.click()
                self.tab.wait.ele_displayed('.el-form form-main form-main-login')
                form = self.tab.ele('.el-form form-main form-main-login')
                i1, i2, commit_btn = form.children()
                input_phone = i1.children()[0].children()[-1].child(index=-1).child()
                if os.getenv("PHONE") is None:
                    # TODO(review): hard-coded fallback phone number — move to config.
                    os.environ["PHONE"] = "17368852851"
                input_phone.input(os.getenv("PHONE"), clear=True)
                self.tab.wait(0.8)
                if flag:
                    send_msg = self.tab.ele('.opt-btns')
                    self.tab.wait(0.3)
                    send_msg.click()
                input_code: ChromiumElement = i2.children()[0].children()[-1].child(index=-1).child().child()
                code = input('请输入短信验证码: ')
                input_code.input(code)
                my_logger.debug("正在登录...")
                self.tab.wait(0.3)
                commit_btn.click()
                res = self.tab.listen.wait()
                if res.response.body["code"] != 0:
                    # Login rejected: close the dialog and retry without
                    # re-sending the SMS.
                    my_logger.warning(f"登录失败: {res.response.body['msg']}")
                    self.tab.ele('.el-icon-close').click()
                    flag = False
                    self.tab.wait(0.3)
                    continue
                self.tab.wait.load_start()
                my_logger.debug("登录成功")
                break
            except ElementNotFoundError:
                my_logger.warning('未找到元素, 重试...')

    def perform_task(self):
        """Entry point: open the page, ensure login and city, crawl, close."""
        # Listen for the city-list call; its request carries the auth header.
        self.tab.listen.start(JinanLandHandler.city_list_url, method="POST")
        try:
            self.tab.get(self.crawler_url)
        except PageDisconnectedError:
            my_logger.warning("页面断开连接, 正在重启页面...")
            self.tab.get(self.crawler_url)

        res = self.tab.listen.wait()
        self.choose_city(self.crawler_city)
        # A missing Authorization header means we are not logged in yet.
        token = res.request.headers.get("Authorization", None)
        if token is None:
            my_logger.warning("未登录..")
            self.login()
        self.crawl()
        self.tab.close()

    @staticmethod
    def get_total_record(data_dict: dict):
        """Return the total hit count from a decoded search response."""
        total: int = data_dict["total"]
        my_logger.debug(f"搜索结果共计: {total} 条")
        return total

    @staticmethod
    def process_res(res: dict):
        """Validate and decode an API response body.

        :param res: raw JSON body with ``code``/``msg``/``data``/``tk``.
        :return: decoded payload dict, or False when decoding fails.
        :raises Exception: when the API reports a non-zero code.
        """
        if res["code"] != 0:
            my_logger.error(res["msg"])
            raise Exception(res["msg"])
        data_dict = decode_base64(res["data"], res["tk"])
        if data_dict is None:
            my_logger.error("转换数据出现错误")
            return False
        return data_dict

if __name__ == '__main__':
    # Configure a throwaway browser: incognito, random user agent,
    # lenient certificate handling and a short retry policy.
    options = ChromiumOptions()
    options.set_retry(2, 30)
    options.set_user_agent(UserAgent().random)
    options.incognito()
    options.ignore_certificate_errors()
    # Allow more than one tab object per tab (multi-instance mode).
    Settings.set_singleton_tab_obj(False)

    browser = Chromium(options)
    tab = browser.new_tab()
    handler = JinanLandHandler(tab, CrawlerLandArgs(crawler_type=CrawlerType.LAND))
    handler.perform_task()