import json
import requests
import os
from celery import Celery
from celery.schedules import crontab
from fastapi import HTTPException, status
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from scaner.core.config.project_config import settings
from scaner.core.models import ADS, Rubricator
from scaner.api_v1.scrap.utils.ads_scrap import ADSScrapper
from scaner.api_v1.scrap.utils.rubric_crawl import RubricatorCrawler
from scaner.api_v1.scrap import schemas
from scaner.api_v1.scrap.dependencies import get_scrapper_by_order_id
from scaner.core.models.search_order import SearchOrder


# Directory the worker process was started from; anchors the beat-schedule file.
TASK_DIR = os.getcwd()

# Celery application used by every task and schedule helper in this module.
celery_app = Celery(
    "tasks",
    broker=settings.celery_broker_url,
    backend=settings.celery_result_backend,
)

# Persist celery-beat's schedule database under <cwd>/tasks/.
celery_app.conf.beat_schedule_filename = f"{TASK_DIR}/tasks/celerybeat-schedule"

# Synchronous SQLAlchemy engine/session factory used by the task bodies below.
sync_engine = create_engine(settings.db.sync_url, echo=False)
SyncSessionLocal = sessionmaker(
    bind=sync_engine,
    autocommit=False,
    autoflush=False,
    # Keep loaded attributes readable after a session closes —
    # run_scraping uses the fetched order outside its session block.
    expire_on_commit=False,
)


@celery_app.task
def run_scraping(order_id: int, chat_id: int):
    """Scrape ads for a saved search order and persist only the new ones.

    Loads the order and the ad URLs already stored for it, runs the scraper,
    inserts previously-unseen ads, and notifies the Telegram chat when
    anything new was saved.

    Args:
        order_id: Primary key of the ``SearchOrder`` to scrape for.
        chat_id: Telegram chat to notify about new ads.

    Raises:
        HTTPException: 404 when no ``SearchOrder`` exists for ``order_id``.
            NOTE(review): raising an HTTP exception inside a Celery worker is
            unusual (there is no HTTP response to map it to) — kept so
            existing callers/monitoring that match this type keep working.
    """
    with SyncSessionLocal() as session:
        ads_result = session.query(ADS.url).filter(ADS.order_id == order_id).all()
        # A set makes the dedup check below O(1) per ad; the original list
        # made the loop O(n*m).
        old_urls: set[str] = {row[0] for row in ads_result}

        # Session.get() replaces the legacy Query.get() (deprecated since
        # SQLAlchemy 1.4).
        order: SearchOrder | None = session.get(SearchOrder, order_id)

        if order is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Нет сохраненных запросов на поиск",
            )

    # expire_on_commit=False on the session factory keeps the loaded order
    # attributes usable after the session above has closed.
    scrapper = ADSScrapper(
        rubric_url=order.rubric_url,
        query_string=order.query_string,
        deep_pages=order.deep_pages,
        city=order.city,
        main_url=order.main_url,
    )

    scrapper.start_scrapping()

    result: list[schemas.ADSDataBase] = scrapper.get_ads_data()

    ads: list[ADS] = []
    for ad in result:
        item_ads = ADS(**ad.model_dump())
        item_ads.order_id = order_id
        if item_ads.url not in old_urls:
            ads.append(item_ads)

    with SyncSessionLocal() as session:
        session.add_all(ads)
        session.commit()

    # Notify only when something new was actually stored.
    if ads:
        send_notification(chat_id)


@celery_app.task
def run_crawling():
    """Crawl the site rubricator and store a JSON snapshot of it.

    BUG FIX: the original declared this ``async def``. Celery does not await
    coroutine task bodies, so calling the task only created a coroutine
    object and the body never ran. A plain ``def`` executes normally — the
    body contains no ``await`` anyway.
    """
    rubrics = RubricatorCrawler()
    data = rubrics.get_rubricator()

    rubricator = Rubricator(rubrics=json.dumps(data))

    with SyncSessionLocal() as session:
        session.add(rubricator)
        session.commit()


def add_periodic_task(
    task_name: str,
    task_func,
    schedule: crontab,
    args: tuple,
):
    """Register (or overwrite) an entry in the Celery beat schedule.

    Args:
        task_name: Unique key for the schedule entry.
        task_func: Dotted task name stored under the ``"task"`` key.
        schedule: Crontab describing when the task fires.
        args: Positional arguments passed to the task on each run.
    """
    entry = {
        "task": task_func,
        "schedule": schedule,
        "args": args,
    }
    celery_app.conf.beat_schedule[task_name] = entry


def delete_periodic_task(task_name: str):
    """Remove a periodic task entry from the beat schedule, if present.

    BUG FIX: ``celery_app.conf.beat_scheduler`` is the scheduler *class
    path* setting (a string), not a scheduler instance, so the original
    ``scheduler.schedule`` lookup raised ``AttributeError``. Mutate
    ``conf.beat_schedule`` instead — the same mapping that
    ``add_periodic_task`` writes to.
    """
    celery_app.conf.beat_schedule.pop(task_name, None)


def send_notification(chat_id: int) -> None:
    """Send a "new ads found" message to a Telegram chat via the Bot API.

    BUG FIX: the original interpolated the message straight into the query
    string — the Cyrillic text, the newline and the ``/`` in ``/list`` were
    never URL-encoded. Passing ``params=`` lets requests encode them
    correctly. A timeout is added so a stalled Telegram API call cannot hang
    the worker indefinitely. Delivery stays best-effort: failures are not
    raised, matching the original's behavior of ignoring the response.

    Args:
        chat_id: Telegram chat identifier to deliver the message to.
    """
    token = settings.tg_bot_token
    message: str = (
        "Обнаружены новые объявления, \nдля просмотра введите команду '/list'"
    )
    url = f"https://api.telegram.org/bot{token}/sendMessage"
    requests.get(
        url,
        params={"chat_id": chat_id, "text": message},
        timeout=10,
    )


def get_tasks_by_user_id(user_id: int):
    """Collect beat-schedule entries that belong to *user_id*.

    Ownership is encoded in the entry key: user-owned entries are named
    with a ``"<user_id>-"`` prefix. Returns a list of dicts with the entry
    name (``"task"``) and the first scheduled argument (``"order_id"``).
    """
    prefix = f"{user_id}-"
    owned = []
    for entry_name, entry_config in celery_app.conf.beat_schedule.items():
        if entry_name.startswith(prefix):
            owned.append(
                {"task": entry_name, "order_id": entry_config["args"][0]}
            )
    return owned


def count_tasks_by_user_id(user_id: int):
    """Return how many scheduled periodic tasks belong to *user_id*."""
    user_tasks = get_tasks_by_user_id(user_id)
    return len(user_tasks)


# Naming convention for user-owned periodic tasks — the "{user.id}-" prefix
# is what get_tasks_by_user_id matches on:
#   task_name = f"{user.id}-add-evert-{day_of_week}-at-{hour}-{minute}"
