use dioxus::{logger::tracing, prelude::*};

use crate::entities::spider_task::{SpiderTaskDTO, TaskStatus};

/// Deletes the spider task with the given ID.
///
/// # Arguments
/// * `id` - ID of the spider task to delete
///
/// # Returns
/// * `Ok(())` - the task was deleted (or did not exist)
/// * `Err(ServerFnError)` - an error occurred while deleting
#[server]
pub async fn delete_spider_task(id: i32) -> Result<(), ServerFnError> {
    // Delegate to the db layer; `?` converts its error into ServerFnError.
    db::delete_spider_task(id).await?;
    Ok(())
}

/// Deletes a batch of spider tasks.
///
/// # Arguments
/// * `ids` - IDs of the spider tasks to delete
///
/// # Returns
/// * `Ok(())` - the batch was deleted
/// * `Err(ServerFnError)` - an error occurred while deleting
#[server]
pub async fn delete_spider_tasks(ids: Vec<i32>) -> Result<(), ServerFnError> {
    // Delegate to the db layer; `?` converts its error into ServerFnError.
    db::delete_spider_tasks(ids).await?;
    Ok(())
}

/// Fetches one page of spider tasks.
///
/// # Arguments
/// * `page` - current page number
/// * `page_size` - number of tasks per page
///
/// # Returns
/// * `Ok((Vec<SpiderTaskDTO>, u64))` - the page of tasks and the total page count
/// * `Err(ServerFnError)` - an error occurred while fetching
#[server]
pub async fn get_spider_tasks(page: u64, page_size: u64) -> Result<(Vec<SpiderTaskDTO>, u64), ServerFnError> {
    let page_result = db::get_spider_tasks(page, page_size).await?;
    Ok(page_result)
}

/// Updates the configuration of an existing spider task.
///
/// # Arguments
/// * `id` - ID of the task to update
/// * `name` - task name
/// * `target_url` - target site URL
/// * `url_pattern` - URL matching pattern
/// * `article_list_selector` - CSS selector for the article list
/// * `article_title_selector` - CSS selector for the article title
/// * `article_selector` - CSS selector for the article body
/// * `breadcrumb_selector` - CSS selector for the breadcrumb navigation
///
/// # Returns
/// * `Ok(())` - the task was updated
/// * `Err(ServerFnError)` - an error occurred while updating
#[server]
pub async fn update_spider_task(
    id: i32,
    name: String,
    target_url: String,
    url_pattern: String,
    article_list_selector: String,
    article_title_selector: String,
    article_selector: String,
    breadcrumb_selector: String,
) -> Result<(), ServerFnError> {
    // Delegate to the db layer; `?` converts its error into ServerFnError.
    db::update_spider_task(
        id,
        name,
        target_url,
        url_pattern,
        article_list_selector,
        article_title_selector,
        article_selector,
        breadcrumb_selector,
    )
    .await?;
    Ok(())
}

/// Starts the spider task with the given ID.
///
/// # Arguments
/// * `id` - ID of the spider task to start
///
/// # Returns
/// * `Ok(())` - the task was started
/// * `Err(ServerFnError)` - an error occurred while starting
#[server]
pub async fn run_spider_task(id: i32) -> Result<(), ServerFnError> {
    // Delegate to the db layer; `?` converts its error into ServerFnError.
    db::run_spider_task(id).await?;
    Ok(())
}

/// Creates a new spider task.
///
/// # Arguments
/// * `name` - task name
/// * `target_url` - target site URL
/// * `url_pattern` - URL matching pattern
/// * `article_list_selector` - CSS selector for the article list
/// * `article_title_selector` - CSS selector for the article title
/// * `article_selector` - CSS selector for the article body
/// * `breadcrumb_selector` - CSS selector for the breadcrumb navigation
///
/// # Returns
/// * `Ok(())` - the task was created
/// * `Err(ServerFnError)` - an error occurred while creating
#[server]
pub async fn save_spider_task(
    name: String,
    target_url: String,
    url_pattern: String,
    article_list_selector: String,
    article_title_selector: String,
    article_selector: String,
    breadcrumb_selector: String,
) -> Result<(), ServerFnError> {
    // Log the incoming task configuration before persisting it.
    tracing::info!(
        "Spider task saved: name={}, target_url={}, url_pattern={}, article_list_selector={}, article_title_selector={}, article_selector={}, breadcrumb_selector={}",
        name, target_url, url_pattern,
        article_list_selector, article_title_selector, article_selector, breadcrumb_selector
    );

    // Persist the new task through the db layer.
    db::save_spider_task(
        name,
        target_url,
        url_pattern,
        article_list_selector,
        article_title_selector,
        article_selector,
        breadcrumb_selector,
    )
    .await?;

    Ok(())
}


mod db {
    use db::dblogic::db::{establish_connection, DbLogicError};
    use dioxus::fullstack::once_cell;
    use dioxus::logger::tracing;
    use sea_orm::{QueryOrder, EntityTrait, PaginatorTrait};
    use sea_orm::{ActiveModelTrait, Set};
    use crate::backend::spider_runner;
    use crate::backend::markdown_logic::html_to_md;
    use crate::entities::spider_task;
    use crate::entities::spider_task::{SpiderTaskDTO, TaskStatus};
    use crate::entities::article;
    use std::future::Future;
    use std::pin::Pin;
    use std::time::{SystemTime, UNIX_EPOCH};
    use std::sync::Mutex;
    use tokio::task::JoinHandle;
    use once_cell::sync::Lazy;

    
    
    // Global registry of JoinHandles for spawned crawl tasks; finished handles
    // are pruned by `cleanup_finished_tasks` each time a new task is launched.
    static RUNNING_TASKS: Lazy<Mutex<Vec<JoinHandle<()>>>> = Lazy::new(|| Mutex::new(Vec::new()));
    
    /// Updates the configuration of an existing spider task.
    ///
    /// # Errors
    /// Returns `DbLogicError::Custom` if the task does not exist or is
    /// currently running; propagates any database error.
    pub async fn update_spider_task(
        id: i32,
        name: String,
        target_url: String,
        url_pattern: String,
        article_list_selector: String,
        article_title_selector: String,
        article_selector: String,
        breadcrumb_selector: String,
    ) -> Result<(), DbLogicError> {
        let conn = establish_connection().await;
        // Unix timestamp (seconds) recorded as `updated_at`.
        // `unwrap` only panics if the system clock is before the epoch.
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs() as i64;

        let task = spider_task::Entity::find_by_id(id)
            .one(&conn)
            .await?
            // `ok_or_else` defers the error-string allocation to the error path.
            .ok_or_else(|| DbLogicError::Custom("任务不存在".to_string()))?;

        // Refuse to edit a task while the crawler may still be reading it.
        if task.status == TaskStatus::Running as i32 {
            return Err(DbLogicError::Custom("无法编辑运行中的任务".to_string()));
        }

        let mut task: spider_task::ActiveModel = task.into();
        task.name = Set(name);
        task.target_url = Set(target_url);
        task.url_pattern = Set(url_pattern);
        task.article_list_selector = Set(article_list_selector);
        task.article_title_selector = Set(article_title_selector);
        task.article_selector = Set(article_selector);
        task.breadcrumb_selector = Set(breadcrumb_selector);
        task.updated_at = Set(now);

        task.update(&conn).await?;
        Ok(())
    }

    /// Inserts a new spider task in `Pending` state with zeroed run statistics.
    pub async fn save_spider_task(
        name: String,
        target_url: String,
        url_pattern: String,
        article_list_selector: String,
        article_title_selector: String,
        article_selector: String,
        breadcrumb_selector: String,
    ) -> Result<(), DbLogicError> {
        let conn = establish_connection().await;
        // Creation timestamp (Unix seconds), shared by created_at/updated_at.
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs() as i64;

        spider_task::ActiveModel {
            name: Set(name),
            target_url: Set(target_url),
            url_pattern: Set(url_pattern),
            article_list_selector: Set(article_list_selector),
            article_title_selector: Set(article_title_selector),
            article_selector: Set(article_selector),
            breadcrumb_selector: Set(breadcrumb_selector),
            status: Set(TaskStatus::Pending as i32),
            last_run_at: Set(0),
            crawled_count: Set(0),
            created_at: Set(now),
            updated_at: Set(now),
            ..Default::default()
        }
        .insert(&conn)
        .await?;

        Ok(())
    }

    /// Removes the task with the given ID unless it is currently running.
    /// Deleting a nonexistent ID is a silent no-op.
    pub async fn delete_spider_task(id: i32) -> Result<(), DbLogicError> {
        let conn = establish_connection().await;

        // Nothing to do when the ID does not exist.
        let Some(task) = spider_task::Entity::find_by_id(id).one(&conn).await? else {
            return Ok(());
        };

        if task.status == TaskStatus::Running as i32 {
            return Err(DbLogicError::Custom("无法删除运行中的任务".to_string()));
        }

        spider_task::Entity::delete_by_id(id).exec(&conn).await?;
        Ok(())
    }

    /// Deletes every task in `ids`, refusing the entire batch when any of
    /// them is currently running.
    pub async fn delete_spider_tasks(ids: Vec<i32>) -> Result<(), DbLogicError> {
        let conn = establish_connection().await;

        // First pass: abort before deleting anything if a listed task runs.
        for &id in &ids {
            if let Some(task) = spider_task::Entity::find_by_id(id).one(&conn).await? {
                if task.status == TaskStatus::Running as i32 {
                    return Err(DbLogicError::Custom(format!("任务 {} 正在运行中，无法删除", task.name)));
                }
            }
        }

        // Second pass: delete each task individually.
        for id in ids {
            spider_task::Entity::delete_by_id(id).exec(&conn).await?;
        }

        Ok(())
    }

    /// Marks the task as running and launches the crawl in a background tokio
    /// task, returning immediately after spawning. The spawned task sets the
    /// status to `Completed` once the crawl returns.
    ///
    /// # Errors
    /// Returns `DbLogicError::Custom` if the task does not exist or is already
    /// running; propagates any database error.
    pub async fn run_spider_task(id: i32) -> Result<(), DbLogicError> {
        let conn = establish_connection().await;
        // Unix timestamp (seconds) recorded as `last_run_at`.
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs() as i64;
    
        // Load the task row.
        let task = spider_task::Entity::find_by_id(id)
            .one(&conn)
            .await?
            .ok_or(DbLogicError::Custom("任务不存在".to_string()))?;
    
        if task.status == TaskStatus::Running as i32 {
            return Err(DbLogicError::Custom("任务已在运行中".to_string()));
        }
    
        // Flip the status to Running before spawning the crawl.
        // NOTE(review): this check-then-update is not atomic; two concurrent
        // calls could both pass the status check — confirm whether that matters.
        let mut task_model: spider_task::ActiveModel = task.clone().into();
        task_model.status = Set(TaskStatus::Running as i32);
        task_model.last_run_at = Set(now);
        task_model.update(&conn).await?;
    
        // Clone everything the background task needs so the async block is 'static.
        let task_id = task.id;
        let target_url = task.target_url.clone();
        let article_list_selector = task.article_list_selector.clone();
        let article_title_selector = task.article_title_selector.clone();
        let article_selector = task.article_selector.clone();
        let breadcrumb_selector = task.breadcrumb_selector.clone();
    
        // Spawn the crawl and keep the JoinHandle for the global registry.
        let handle = tokio::spawn(async move {
            // Run the actual crawl; failures are only logged here.
            if let Err(e) = run_spider_task_internal(
                task_id,
                target_url,
                article_list_selector,
                article_title_selector,
                article_selector,
                breadcrumb_selector,
            ).await {
                tracing::error!("Spider task failed: {}", e);
            }

            let conn = establish_connection().await;
            
            // Mark the task finished once the crawl returns.
            // NOTE(review): the status becomes Completed even when the crawl
            // errored above — confirm a failed run should not be flagged
            // differently (no Failed variant is visible in this file).
            if let Ok(Some(task)) = spider_task::Entity::find_by_id(task_id).one(&conn).await {
                let mut task_model: spider_task::ActiveModel = task.into();
                task_model.status = Set(TaskStatus::Completed as i32);
                let _ = task_model.update(&conn).await;
            }
        });
    
        // Track the handle so it can be pruned once the task finishes.
        RUNNING_TASKS.lock().unwrap().push(handle);
    
        // Drop handles of tasks that have already finished.
        cleanup_finished_tasks();
        Ok(())
    }

    /// Executes one crawl for a task: builds the article parser and runner
    /// from the configured selectors and persists every parsed article
    /// (converted to Markdown) to the database.
    ///
    /// # Arguments
    /// * `task_id` - ID stored on each saved article
    /// * `target_url` - page the crawl starts from (also recorded as `source_url`)
    /// * remaining args - CSS selectors forwarded to the parser/runner
    async fn run_spider_task_internal(
        task_id: i32,
        target_url: String,
        article_list_selector: String,
        article_title_selector: String,
        article_selector: String,
        breadcrumb_selector: String,
    ) -> Result<(), DbLogicError> {
        let conn = establish_connection().await;
        
        // Build the article parser from the configured selectors.
        let article_parser = spider_runner::ArticleParser::new(
            &article_title_selector,
            &article_selector,
            &breadcrumb_selector
        ).map_err(|e| DbLogicError::Custom(format!("创建文章解析器失败: {}", e)))?;
    
        // Pagination is disabled: the next-page closure always returns None.
        // Being non-capturing keeps it 'static, as the runner requires.
        let next_page_creator = |_: &str| -> Option<String> { None };

        let target_url_clone = target_url.clone();
    
        // Callback that persists each batch of parse results. It returns a
        // boxed future so the runner can hold it behind a plain trait object.
        let result_callback = move |results: Vec<spider_runner::ParseResult>| -> Pin<Box<dyn Future<Output = Result<(), spider_runner::ResultHandlerError>> + Send>> {
            // Clone captured state into the future so it is self-contained.
            let conn = conn.clone();
            let target_url = target_url.clone();
            let task_id = task_id;
            // Timestamp (Unix seconds) applied to every article in this batch.
            let now = SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .unwrap()
                .as_secs() as i64;
    
            Box::pin(async move {
                for result in results {
                    let article = article::ActiveModel {
                        title: Set(result.title),
                        // Crawled HTML is stored converted to Markdown.
                        content: Set(html_to_md(&result.content)),
                        // NOTE(review): this records the task's start URL, not
                        // the individual article's URL — confirm that is intended.
                        source_url: Set(target_url.clone()),
                        spider_task_id: Set(task_id),
                        breadcrumb: Set(result.breadcrumb),
                        created_at: Set(now),
                        updated_at: Set(now),
                        ..Default::default()
                    };
                    article.insert(&conn).await.map_err(|e| 
                        spider_runner::ResultHandlerError::Custom(format!("保存文章失败: {}", e))
                    )?;
                }
                Ok(())
            })
        };
    
        // Assemble the runner. `Some(5)` is presumably a page/concurrency
        // limit — TODO confirm against SpiderRunner::new's signature.
        let mut spider = spider_runner::SpiderRunner::new(
            task_id,
            &target_url_clone,
            &article_list_selector,
            Box::new(next_page_creator) as Box<dyn Fn(&str) -> Option<String> + Send + Sync>,
            article_parser,
            result_callback,
            Some(5),
        );
        
        spider.run().await.map_err(|e| DbLogicError::Custom(format!("爬虫运行失败: {}", e)))
    }

    /// Returns one page of spider tasks (newest first) plus the total page count.
    ///
    /// # Arguments
    /// * `page` - 1-based page number (0 is treated as the first page)
    /// * `page_size` - rows per page
    pub async fn get_spider_tasks(page: u64, page_size: u64) -> Result<(Vec<SpiderTaskDTO>, u64), DbLogicError> {
        let conn = establish_connection().await;
        let paginator = spider_task::Entity::find()
            .order_by_desc(spider_task::Column::CreatedAt)
            .paginate(&conn, page_size);

        let total_pages = paginator.num_pages().await?;
        // sea-orm pages are 0-based while callers pass a 1-based page number.
        // `saturating_sub` guards against u64 underflow (panic in debug builds,
        // a huge page index in release) when a caller passes page == 0.
        let tasks = paginator
            .fetch_page(page.saturating_sub(1))
            .await?
            .into_iter()
            .map(|model| SpiderTaskDTO {
                id: model.id,
                name: model.name,
                target_url: model.target_url,
                url_pattern: model.url_pattern,
                article_selector: model.article_selector,
                breadcrumb_selector: model.breadcrumb_selector,
                article_list_selector: model.article_list_selector,
                article_title_selector: model.article_title_selector,
                status: model.status,
                // last_run_at == 0 means "never run"; surface that as None.
                last_run_at: if model.last_run_at == 0 { None } else { Some(model.last_run_at) },
                crawled_count: model.crawled_count,
            })
            .collect();

        Ok((tasks, total_pages))
    }

    /// Prunes JoinHandles of background crawls that have already finished,
    /// keeping the global registry from growing without bound.
    fn cleanup_finished_tasks() {
        RUNNING_TASKS
            .lock()
            .unwrap()
            .retain(|handle| !handle.is_finished());
    }
}