import json
from typing import Annotated
from uuid import uuid4

from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException

from app.mcps.auth import get_login_cookies
from app.mcps.constant import SCRAPE_BASE
from app.mcps.scraper import crawl_poetries_by_basic_auth, crawl_poetry_by_basic_auth, crawl_poetries_with_cookie
from app.mcps.tigres import (
    get_client,
    head_object,
    upload_file,
    download_file,
    create_presigned_url,
)
from app.models.web_scraper_models import (
    BasicAuthBody,
    StartListBody,
    StartDetailBody,
    StartCookieBody,
    GetPresignedUrlBody,
)
from config import get_settings, Settings

# Router for the web-scraping endpoints: every route below is mounted under
# the /web prefix and grouped under the "Web_Scrape" tag in the OpenAPI docs.
router = APIRouter(tags=["Web_Scrape"], prefix="/web")


@router.post("/basic_auth")
async def basic_auth(
    body: BasicAuthBody,
    settings: Annotated[Settings, Depends(get_settings)],
):
    """Log in with basic-auth credentials and persist the session cookies.

    Performs a login against the scrape target, writes the resulting
    cookies to a local ``cookies-<user>.json`` file, and uploads that file
    to object storage so later crawl jobs can reuse the session.

    Raises:
        HTTPException: 502 when the login or the cookie upload fails.
    """
    print("====income request body", body)
    print("-------no cookies, login to fetch--------")
    cookie_file_url = f"cookies-{body.user}.json"
    try:
        cookies = await get_login_cookies(
            f"{SCRAPE_BASE}/user/login.aspx",
            f"{SCRAPE_BASE}/user/collect.aspx",
            user=body.user,
            pwd=body.pwd,
            cookie_file_url=cookie_file_url,
        )
        print(f"Successfully authenticated. Retrieved {len(cookies)} cookies.")
        s3_client = get_client(settings)
        upload_file(s3_client, cookie_file_url, settings.bucket_name)
    except Exception as e:
        # Previously the error was swallowed and the endpoint answered
        # 200 with a null body; surface the failure to the caller instead.
        print(f"Authentication failed: {str(e)}")
        raise HTTPException(
            status_code=502, detail=f"Authentication failed: {e}"
        ) from e
    return {"status": "ok", "cookies": len(cookies)}


@router.post("/start_list")
async def start_list(
    body: StartListBody,
    settings: Annotated[Settings, Depends(get_settings)],
    background_tasks: BackgroundTasks,
):
    """Start a background crawl of a list segment using basic auth.

    Best-effort: tries to download previously cached login cookies from
    object storage first; if that fails the crawl is still scheduled,
    since the crawler can re-authenticate with the supplied user/pwd
    (TODO confirm the crawler re-logins when the cookie file is absent).

    Returns:
        The hex object name under which the crawl result will be stored.
    """
    object_name = uuid4().hex
    print("====income request body", body)
    cookie_file_url = f"cookies-{body.user}.json"
    try:
        # Reuse cached cookies when available to skip a fresh login.
        s3_client = get_client(settings)
        if head_object(s3_client, settings.bucket_name, cookie_file_url):
            download_file(s3_client, cookie_file_url, settings.bucket_name)
    except Exception as e:
        # Cookie retrieval is an optimization only — log and continue.
        print(type(e))
        print(f"Get Cookies from tigres failed: {str(e)}")

    # BUG FIX: the task used to be scheduled inside the try-block, so any
    # storage error returned an object_name whose crawl never actually ran.
    background_tasks.add_task(
        crawl_poetries_by_basic_auth,
        cookie_file_url=cookie_file_url,
        user=body.user,
        pwd=body.pwd,
        list_segment=body.list_segment,
        settings=settings,
        object_name=object_name,
    )
    return object_name

@router.post("/start_detail")
async def start_detail(
    body: StartDetailBody,
    settings: Annotated[Settings, Depends(get_settings)],
    background_tasks: BackgroundTasks,
):
    """Start a background crawl of a single detail page using basic auth.

    Best-effort: tries to download previously cached login cookies from
    object storage first; if that fails the crawl is still scheduled,
    since the crawler can re-authenticate with the supplied user/pwd
    (TODO confirm the crawler re-logins when the cookie file is absent).

    Returns:
        The literal string "ok" once the task has been queued.
    """
    print("====income request body", body)
    cookie_file_url = f"cookies-{body.user}.json"
    try:
        # Reuse cached cookies when available to skip a fresh login.
        s3_client = get_client(settings)
        if head_object(s3_client, settings.bucket_name, cookie_file_url):
            download_file(s3_client, cookie_file_url, settings.bucket_name)
    except Exception as e:
        # Cookie retrieval is an optimization only — log and continue.
        print(type(e))
        print(f"Get Cookies from tigres failed: {str(e)}")

    # BUG FIX: the task used to be scheduled inside the try-block, so any
    # storage error silently skipped the crawl while still answering "ok".
    background_tasks.add_task(
        crawl_poetry_by_basic_auth,
        cookie_file_url=cookie_file_url,
        user=body.user,
        pwd=body.pwd,
        detail_segment=body.detail_segment,
        settings=settings,
    )
    return "ok"


@router.post("/start_cookie")
async def start_cookie(
    body: StartCookieBody,
    settings: Annotated[Settings, Depends(get_settings)],
    background_tasks: BackgroundTasks,
):
    """Start a background crawl using caller-supplied session cookies.

    The cookies arrive as a JSON string in ``body.cookie`` and are parsed
    before the crawl task is queued.

    Returns:
        The hex object name under which the crawl result will be stored.

    Raises:
        HTTPException: 422 when ``body.cookie`` is not valid JSON.
    """
    print("====income request body", body)
    try:
        cookies = json.loads(body.cookie)
    except json.JSONDecodeError as e:
        # BUG FIX: malformed client input used to bubble up as a 500;
        # reject it explicitly as a validation error instead.
        raise HTTPException(
            status_code=422, detail=f"Invalid cookie JSON: {e}"
        ) from e
    object_name = uuid4().hex
    background_tasks.add_task(
        crawl_poetries_with_cookie,
        cookies=cookies,
        list_segment=body.list_segment,
        settings=settings,
        object_name=object_name,
    )
    return object_name


@router.post("/upt")
async def upt(settings: Annotated[Settings, Depends(get_settings)]):
    """Smoke-test endpoint: push a fixed CSV file into object storage."""
    client = get_client(settings)
    upload_file(client, "complete_poetries.csv", settings.bucket_name)


@router.post("/get_presigned_url")
async def get_presigned_url(
    body: GetPresignedUrlBody, settings: Annotated[Settings, Depends(get_settings)]
):
    """Return a presigned download URL for the requested stored object."""
    client = get_client(settings)
    return create_presigned_url(client, settings.bucket_name, body.object_name)
