use anyhow::Result;
use clap::Parser;
use std::time::Duration;

use reqwest::Client;
use tokio::task;

/// Concurrent HTTP request handler.
///
/// Wraps a shared [`reqwest::Client`] plus a concurrency limit that
/// `fetch_all` uses to issue requests in batches.
#[derive(Debug)]
pub struct HttpRequestHandler {
    // Shared HTTP client, reused (and cheaply cloned) for every request.
    client: Client,
    // Maximum number of requests issued at the same time by `fetch_all`.
    concurrency: usize,
}

impl HttpRequestHandler {
    /// 创建新的HttpRequestHandler实例
    pub fn new(concurrency: usize) -> Self {
        Self {
            client: Client::new(),
            concurrency,
        }
    }

    /// 向单个URL发送GET请求并返回响应大小
    async fn fetch_url(&self, url: String) -> anyhow::Result<usize> {
        let response = self.client.get(&url).send().await?;
        
        match response.error_for_status() {
            Ok(res) => {
                let content_length = res.content_length().unwrap_or(0);
                Ok(content_length as usize)
            },
            Err(e) => {
                anyhow::bail!("Error fetching {}: {}", url, e);
            }
        }
    }
    
    /// 向指定URL发送POST请求并返回JSON响应
    pub async fn post_json<T: serde::Serialize, R: serde::de::DeserializeOwned>(&self, url: String, body: T) -> anyhow::Result<R> {
        let response = self.client.post(&url).json(&body).send().await?;
        
        match response.error_for_status() {
            Ok(res) => {
                let json_response = res.json::<R>().await?;
                Ok(json_response)
            },
            Err(e) => {
                anyhow::bail!("Error posting to {}: {}", url, e);
            }
        }
    }

    /// 并发执行多个URL的GET请求
    pub async fn fetch_all(&self, urls: Vec<String>) -> anyhow::Result<Vec<(String, usize)>> {
        // 将URL列表分割成并发块
        let mut results = Vec::with_capacity(urls.len());
        
        // 使用迭代器将URL列表分割为concurrency大小的块
        for chunk in urls.chunks(self.concurrency) {
            // 为当前块中的每个URL创建任务
            let mut tasks = Vec::new();
            
            for url in chunk {
                let url_clone = url.clone();
                let handler = self.clone();
                
                // 创建异步任务
                let task = task::spawn(async move{
                    match handler.fetch_url(url_clone.clone()).await {
                        Ok(size) => Some((url_clone, size)),
                        Err(e) => {
                            eprintln!("Failed to fetch {}: {}", url_clone, e);
                            None
                        }
                    }
                });
                
                tasks.push(task);
            }
            
            // 等待当前块中的所有任务完成
            for task in tasks {
                if let Ok(Some(result)) = task.await {
                    results.push(result);
                }
            }
            
            // 在块之间添加小延迟以避免服务器过载
            tokio::time::sleep(Duration::from_millis(100)).await;
        }
        
        Ok(results)
    }
}

// 为HttpRequestHandler实现Clone trait
impl Clone for HttpRequestHandler {
    fn clone(&self) -> Self {
        Self {
            client: self.client.clone(),
            concurrency: self.concurrency,
        }
    }
}

/// Command-line arguments.
#[derive(Parser, Debug)]
#[clap(author, version, about, long_about = None)]
pub struct Args {
    /// Comma-separated list of URLs to request
    #[clap(short, long, value_parser, default_value = "https://httpbin.org/get")]
    pub urls: String,
    
    /// Concurrency level (number of requests run at the same time)
    #[clap(short, long, value_parser, default_value_t = 5)]
    pub concurrency: usize,
    
    /// Number of iterations to run
    #[clap(short, long, value_parser, default_value_t = 1)]
    pub iterations: u32,
}
