from fastapi import APIRouter, HTTPException, status, Header, Body
from fastapi.responses import JSONResponse
from squirrel_bakend.task.task_logic import start_scrapy_spider
from pydantic import BaseModel, Field
from typing import Optional, Dict, Any, List
from concurrent.futures import ThreadPoolExecutor
import time
import os
import json
import asyncio
import logging
import pyssdb
import psutil
from logging.handlers import RotatingFileHandler

# Shared worker pool for dispatching blocking work off the event loop.
executor = ThreadPoolExecutor()

scrapy_app = APIRouter()
# In-memory record of submitted tasks (not referenced by the endpoints below).
task_list = []
# SSDB client used as the task-queue broker. Host/port are overridable via
# environment variables so the production address is no longer hard-coded;
# defaults preserve the original behavior.
client = pyssdb.Client(
    host=os.environ.get("SSDB_HOST", "1.14.96.163"),
    port=int(os.environ.get("SSDB_PORT", "8888")),
)


def configure_file_logging():
    """Configure the root logger with a console handler and a rotating file handler.

    The log directory is ``$LOG_PATH/squirrel_bakend`` (``LOG_PATH`` defaults
    to ``/``). Any handlers already attached to the root logger are removed
    first so that repeated calls do not produce duplicate log lines.

    Returns:
        logging.Logger: the configured root logger.
    """
    log_dir = os.environ.get("LOG_PATH", "/")
    full_path = os.path.join(log_dir, "squirrel_bakend")
    # exist_ok avoids the check-then-create race when several workers start.
    os.makedirs(full_path, exist_ok=True)
    log_file = os.path.join(full_path, "app.log")

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    # Drop previously attached handlers (e.g. on reload) to avoid duplicates.
    for handler in logger.handlers[:]:
        logger.removeHandler(handler)

    formatter = logging.Formatter(
        "%(asctime)s | %(levelname)-8s | %(name)s:%(lineno)d | %(message)s"
    )

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    # Bug fix: the console handler previously had no formatter assigned,
    # so stderr output lacked timestamps/levels while the file had them.
    console_handler.setFormatter(formatter)

    file_handler = RotatingFileHandler(
        filename=log_file,
        maxBytes=10 * 1024 * 1024,  # rotate at 10 MiB
        backupCount=5,
        encoding="utf-8",
    )
    file_handler.setFormatter(formatter)

    logger.addHandler(console_handler)
    logger.addHandler(file_handler)
    return logger


@scrapy_app.on_event("startup")
async def startup_event():
    # Wire up file + console logging as soon as the application starts.
    configure_file_logging().info("应用启动")


class Data(BaseModel):
    """Request payload for the /start_spider endpoint."""

    serialNumber: Optional[str] = None  # business serial number identifying the task
    webType: Optional[str] = None       # target site, e.g. "malasong" / "itra"
    crawlerType: Optional[str] = None   # crawl mode, e.g. "race" / "user"
    # Bug fix: Field(...) without a default made this required in pydantic v2
    # even though it is annotated Optional and the handler defends against None.
    spiderConfig: Optional[Dict[str, Any]] = Field(
        default=None, description="爬虫所需的参数，如：身份证号码，姓名"
    )

    class Config:
        json_schema_extra = {
            "example": {
                "serialNumber": "222222",
                "webType": "malasong",
                "crawlerType": "race",
                "spiderConfig": {
                    "name": "张三",
                    "id": "510111111111111",
                    "filterFiled": ["startTime", "address"]
                }
            }
        }


class NoData(BaseModel):
    """Request payload for the /start/spider/task/raceno (race-number) endpoint."""

    # Optional[...] = None replaces the bare `str = None` defaults, which
    # pydantic v2 rejects as a type/default mismatch.
    url: Optional[str] = None
    webType: Optional[str] = None       # must be "signup" for this endpoint
    crawlerType: Optional[str] = None
    taskId: Optional[str] = None
    race_id: Optional[str] = None
    race_name: Optional[str] = None
    phone: Optional[str] = None         # crawl account credentials, if needed
    password: Optional[str] = None
    userList: List[Dict[str, Any]]      # required: the runners to look up


@scrapy_app.post("/start_spider", summary="爬虫启动接口", response_description="任务状态信息")
async def get_request(data: Data = Body(...)):
    """Enqueue a crawl task onto the appropriate SSDB queue.

    Routing: malasong user crawls go to the VIP queue, itra crawls to their
    own queue, everything else to the default queue. "signup" requests belong
    to the raceno endpoint and are rejected with code 202.

    Returns:
        dict: echo of the request identifiers plus a status code
              (200 enqueued, 201 enqueue failed, 202 wrong endpoint).

    Raises:
        HTTPException: 400 on any unexpected error while building the task.
    """
    try:
        spider_config = data.spiderConfig or {}
        param = {
            "serialNumber": data.serialNumber,
            "crawler_type": data.crawlerType,
            "web_type": data.webType,
            'spider_config': spider_config
        }
        logging.info(f"请求参数：{param}")

        # Guard clause: "signup" tasks are handled by the raceno endpoint.
        if data.webType == "signup":
            return {'serialNumber': data.serialNumber, 'webType': data.webType, 'crawlerType': data.crawlerType,
                    'message': '接口调用错误', 'code': 202, 'success': True}

        # Pick the target queue (flattened from the original nested if/else).
        if data.webType == "malasong" and data.crawlerType == "user":
            queue_name = "task_queue_vip"
        elif data.webType == "itra":
            queue_name = "task_itra_queue"
        else:
            queue_name = "task_queue"

        try:
            data_str = json.dumps(param)
            result = client.qpush_back(queue_name, data_str)
            logging.info(f"任务【{param}】插入ssdb成功：{result}")
            return {'serialNumber': data.serialNumber, 'webType': data.webType, 'crawlerType': data.crawlerType,
                    'message': '任务启动成功', 'code': 200, 'success': True}
        except Exception as e:
            # Bug fix: failures were logged at INFO with no traceback.
            logging.exception(f"任务【{param}】插入ssdb失败：{e}")
            return {'serialNumber': data.serialNumber, 'webType': data.webType, 'crawlerType': data.crawlerType,
                    'message': '任务启动失败', 'code': 201, 'success': True}
    except Exception as e:
        logging.exception(e)
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="send request failure")


@scrapy_app.post("/start/spider/task/raceno", summary="参赛号爬虫启动接口", response_description="任务状态信息")
async def get_request(data: NoData = Body(...)):
    """Enqueue a race-number ("signup") crawl task onto the default SSDB queue.

    NOTE(review): this function shares the name `get_request` with the
    /start_spider handler above and shadows it at module level; FastAPI
    registers routes at decoration time, so routing is unaffected.

    Returns:
        dict: request identifiers plus a status code
              (200 enqueued, 202 wrong endpoint for non-"signup" webType).

    Raises:
        HTTPException: 400 on any error, including an enqueue failure.
    """
    try:
        param = {
            "crawler_type": data.crawlerType,
            "web_type": data.webType,
            'task_id': data.taskId,
            "race_id": data.race_id,
            "race_name": data.race_name,
            "url": data.url,
            "phone": data.phone,
            "password": data.password,
            'user_list': data.userList
        }
        logging.info(f"请求参数：{param}")

        # Guard clause: only "signup" crawls belong to this endpoint.
        if data.webType != "signup":
            return {'race_id': data.race_id, 'race_name': data.race_name, 'webType': data.webType, 'crawlerType': data.crawlerType,
                    'message': '接口调用错误', 'code': 202, 'success': True}

        data_str = json.dumps(param)
        result = client.qpush_back('task_queue', data_str)
        logging.info(f"任务【{param}】插入ssdb成功：{result}")
        return {'taskId': data.taskId, 'webType': data.webType, 'crawlerType': data.crawlerType,
                'message': '任务启动成功', 'code': 200, 'success': True}
    except Exception as e:
        # Bug fix: was logging.info(e), which hid the traceback entirely.
        logging.exception(e)
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="send request failure")


@scrapy_app.get(
    "/heart_beat",
    summary="服务心跳检测",
    response_description="服务状态信息",
    status_code=status.HTTP_200_OK,
)
async def heartbeat_check():
    """Liveness probe: briefly yields to the event loop, then reports alive."""
    await asyncio.sleep(0.01)
    payload = {
        "status": "alive",
        "service": "Spider Service",
    }
    keep_alive_headers = {"Connection": "keep-alive", "Keep-Alive": "timeout=60"}
    return JSONResponse(
        content=payload,
        headers=keep_alive_headers,
        status_code=status.HTTP_200_OK,
    )