import contextlib
import json
from typing import Generator, Any

from DrissionPage._base.chromium import Chromium
from DrissionPage._configs.chromium_options import ChromiumOptions
from DrissionPage._functions.keys import Keys
from DrissionPage._functions.settings import Settings
from DrissionPage._pages.session_page import SessionPage
from fake_useragent import UserAgent

from config.db import get_session
from entity.model.crawler_record_model import CrawlerCityRecord
from util.mylog import my_logger
import pandas as pd

# Local Excel file whose second column holds city names to enter in the admin UI.
# NOTE(review): machine-specific absolute path — consider moving to config.
file_url = '/home/jdz/Documents/xwechat_files/wxid_umhbc48gcpdh22_7bd8/msg/file/2025-03/Result_170.xlsx'
# Map-layer endpoint queried with a fixed bounding box; returns JSON.
data_url = 'https://house.jinfuyun.cn/Map/GetGLLayer?north_lat=63.92223218522551&north_lng=156.51164136155964&south_lat=-10.67337866508622&south_lng=49.09697579750832'

def fetch_page(url: str,
               cookie: str = "auth=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJTZXNzaW9uIjoiMzJhNjFlNmU4NzNmNGQyNGJmNGRlNWM0NTYyZWM5ZWUifQ.HiGCZa99YZYHqkT8_paZhfZQru06y-sBEjcGqzmf7UQ") -> SessionPage:
    """Fetch *url* with a pre-configured session and return the page object.

    Args:
        url: Target URL to GET.
        cookie: Value sent as the ``Cookie`` header. Defaults to the
            hard-coded auth token the original script used.
            NOTE(review): session tokens expire — load from config/env.

    Returns:
        The SessionPage after the request completed (up to 3 retries,
        30 s apart, 60 s timeout).
    """
    page = SessionPage()
    # Fresh random User-Agent per call to reduce fingerprinting.
    page.set.user_agent(UserAgent().random)
    page.set.retry_times(3)
    page.set.retry_interval(30)
    page.set.timeout(60)
    page.get(url, headers={"Cookie": cookie})
    return page
@contextlib.contextmanager
def fetch_data(url: str,
               cookie: str = "auth=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJTZXNzaW9uIjoiMzJhNjFlNmU4NzNmNGQyNGJmNGRlNWM0NTYyZWM5ZWUifQ.HiGCZa99YZYHqkT8_paZhfZQru06y-sBEjcGqzmf7UQ") -> Generator[Any, Any, None]:
    """Context manager: GET *url* and yield the JSON-decoded response body.

    Yields the parsed JSON (typically a dict) on success, or ``''`` when
    the response body is not valid JSON (logged, best-effort). The session
    is always closed, even if the caller's ``with``-body raises.

    Args:
        url: Target URL to GET.
        cookie: Value sent as the ``Cookie`` header; defaults to the
            hard-coded auth token. NOTE(review): tokens expire.
    """
    page = SessionPage()
    page.set.user_agent(UserAgent().random)
    page.set.retry_times(3)
    page.set.retry_interval(30)
    page.set.timeout(60)
    page.get(url, headers={"Cookie": cookie})
    try:
        # Keep the try narrow: only json.loads can legitimately fail here.
        # The original also wrapped the yield, so an exception raised in the
        # caller's with-body was swallowed and a SECOND yield executed,
        # producing "generator didn't stop after throw()".
        try:
            data = json.loads(page.html)
        except Exception as e:
            my_logger.error(f"Error fetch_data task: {e}")
            yield ''
        else:
            yield data
    finally:
        # Original leaked the session when the with-body raised; always close.
        page.close()
# Browser setup: launch Chromium with retries and a random User-Agent,
# ignore TLS certificate errors, then navigate into the city-settings UI.
# co = ChromiumOptions().headless()
co = ChromiumOptions()
co.set_retry(2, 30)  # 2 retries, 30 s apart
co.set_user_agent(UserAgent().random)
co.ignore_certificate_errors()
b = Chromium(co)
tab = b.latest_tab
tab.get('https://house.jinfuyun.cn/home/index')
# Open the "城市区域" (city region) menu, then the "城市设置" (city settings) dialog.
tab.ele('@text()=城市区域').click()
tab.ele('@text()=城市设置').click()




# Read the city list; header=None keeps the first (header) row in the data,
# so we skip it with the slice below. Column index 1 holds the city name.
df = pd.read_excel(file_url, header=None)
city_name_list = []
for row in df.values.tolist()[1:]:
    city_name = row[1]
    # Focus and clear the city-name input, then type slowly (0.8 s/char)
    # so the autocomplete dropdown has time to populate.
    tab.actions.move_to('#Name')
    tab.actions.click('#Name')
    input_name = tab.ele('#Name')
    input_name.set.value('')
    tab.actions.type(city_name, 0.8)
    # Wait for the suggestion list (next sibling of the input) to render,
    # then pick its first entry.
    input_name.next().wait.has_rect()
    suggestions = input_name.next()
    suggestions.child().click()
    tab.wait(1)
    # Capture the save request the footer button fires.
    # NOTE(review): listen.start is re-issued every iteration without a
    # matching stop — confirm DrissionPage tolerates repeated starts.
    tab.listen.start(targets='https://house.jinfuyun.cn/Item/ItemSetUp', method='POST')
    tab.ele('.layui-footer').child().click()
    tab.wait(0.6)
    res = tab.listen.wait()
    result = res.response.body
    if result['status'] == 'ok':
        # Setup accepted: pull the map-layer payload for this city.
        with fetch_data(data_url) as payload:
            my_logger.debug(payload)
        # TODO(review): persist payload via get_session()/CrawlerCityRecord
        # (previously commented-out DB code set c.city = city_name,
        # c.data = response html, then session.add/commit).
    else:
        my_logger.error(f"响应失败 {result}")
    tab.wait(1)
    # Dismiss the confirmation dialog before the next city.
    tab.ele('.layui-layer-btn0').click()