from typing import Annotated

from fastapi import Path, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession

from scaner.core.models import db_helper, SearchOrder, User
from .utils.ads_scrap import ADSScrapper
from . import crud


async def order_by_id(
    order_id: Annotated[int, Path],
    session: AsyncSession = Depends(db_helper.get_async_session),
) -> SearchOrder:
    """FastAPI dependency: fetch the SearchOrder with the given path id.

    Raises:
        HTTPException: 404 when no order with ``order_id`` exists.
    """
    found_order = await crud.get_order_by_id(session, order_id)
    if found_order is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Запрос на поиск {order_id} не найден",
        )
    return found_order


async def get_scrapper_by_last_order(
    user: User,
    session: AsyncSession = Depends(db_helper.get_async_session),
) -> ADSScrapper:
    """Build an ADSScrapper configured from the user's most recent search order.

    Args:
        user: The authenticated user whose last order is looked up.
        session: Async DB session injected by FastAPI.

    Raises:
        HTTPException: 404 when the user has no saved search orders.
    """
    order: SearchOrder | None = await crud.get_last_order(
        user_id=user.id,
        session=session,
    )

    # Guard clause first: fail fast when there is nothing to scrape from.
    if order is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            # fixed: was an f-string with no placeholders (ruff F541)
            detail="Нет сохраненных запросов на поиск",
        )

    return ADSScrapper(
        rubric_url=order.rubric_url,
        query_string=order.query_string,
        deep_pages=order.deep_pages,
        city=order.city,
        main_url=order.main_url,
    )


async def get_scrapper_by_order_id(
    order_id: Annotated[int, Path],
    session: AsyncSession = Depends(db_helper.get_async_session),
) -> ADSScrapper:
    """Build an ADSScrapper configured from the search order with the given id.

    Args:
        order_id: Path parameter identifying the saved search order.
        session: Async DB session injected by FastAPI.

    Raises:
        HTTPException: 404 when no order with ``order_id`` exists.
    """
    # fixed: previously passed `id=order_id` by keyword while the sibling
    # `order_by_id` dependency calls the same crud function positionally as
    # (session, order_id) — both callers cannot be right if the parameter is
    # not literally named `id`. The positional form matches either signature.
    order: SearchOrder | None = await crud.get_order_by_id(session, order_id)

    # Guard clause first: fail fast when the order does not exist.
    if order is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            # fixed: was an f-string with no placeholders (ruff F541)
            detail="Нет сохраненных запросов на поиск",
        )

    return ADSScrapper(
        rubric_url=order.rubric_url,
        query_string=order.query_string,
        deep_pages=order.deep_pages,
        city=order.city,
        main_url=order.main_url,
    )
