use std::{
    collections::HashMap,
    sync::{Arc, Mutex},
};

use model_graph_common::transport::http::http_get;
use model_graph_types::{
    container::workflow::{
        HtmlExtractorStatement, HttpStatement, WorkflowBlockExecuteResult,
        WorkflowBlockExecuteResultBuilder, WorkflowBlockExecuteResultStatus, WorkflowHttpMethod,
    },
    modeling::Value,
};
use scraper::{Html, Selector};

use crate::workflow::{
    blocks::statements::_utils::query_value_with_variables, context::WorkflowContext,
};

pub async fn execute(
    statement: &HtmlExtractorStatement,
    context: Arc<Mutex<WorkflowContext>>,
) -> anyhow::Result<WorkflowBlockExecuteResult> {
    let result = _execute(statement, context).await;

    match result {
        Ok(v) => Ok(WorkflowBlockExecuteResultBuilder::default()
            .status(WorkflowBlockExecuteResultStatus::Succeeded)
            .source_handle("source")
            .result(v)
            .build()?),
        Err(err) => Ok(WorkflowBlockExecuteResultBuilder::default()
            .status(WorkflowBlockExecuteResultStatus::Succeeded)
            .source_handle("fail-branch")
            .build()?),
    }
}

/// Fetches a page via HTTP GET, runs a CSS selector against the HTML, and
/// returns the matched data under the `"result"` key as an array.
///
/// Per matched element:
/// * when `statement.attribute` is set, only that attribute's value is
///   extracted as a string (empty string when the attribute is absent);
/// * otherwise an object is produced containing every attribute of the
///   element plus its concatenated text under the `"_text"` key.
///
/// # Errors
/// Fails when variable substitution, the HTTP request, UTF-8 decoding of the
/// response body, or CSS-selector parsing fails.
pub async fn _execute(
    statement: &HtmlExtractorStatement,
    context: Arc<Mutex<WorkflowContext>>,
) -> anyhow::Result<HashMap<String, Value>> {
    // Resolve workflow variables embedded in the URL and selector expressions.
    let url = query_value_with_variables(&statement.url, context.clone()).await?;
    let selector_expr = query_value_with_variables(&statement.selector, context.clone()).await?;

    // Routine trace — demoted from error! so successful scrapes don't log at ERROR level.
    tracing::debug!("html爬取:[{}][{}]", url, selector_expr);

    // Fetch the HTML content; response headers are not needed here.
    let (_headers, response) = http_get(&url, &HashMap::new()).await?;
    let content = String::from_utf8(response).map_err(|err| anyhow::anyhow!("{}", err))?;

    // Parse the document and the selector expression.
    let document = Html::parse_document(&content);
    let selector =
        Selector::parse(&selector_expr).map_err(|err| anyhow::anyhow!("选择器解析失败:{}", err))?;

    let mut list = vec![];

    for element in document.select(&selector) {
        if let Some(attribute) = &statement.attribute {
            // Single-attribute mode: one string per element, "" when missing.
            let value = element
                .value()
                .attr(attribute.as_str())
                .map_or_else(String::new, String::from);
            list.push(Value::String(value));
        } else {
            // Object mode: capture all attributes of the element…
            let mut obj: HashMap<String, Value> = HashMap::new();
            for (key, value) in element.value().attrs() {
                obj.insert(String::from(key), Value::String(String::from(value)));
            }
            // …plus its concatenated text content.
            obj.insert(
                String::from("_text"),
                Value::String(element.text().collect::<Vec<_>>().join("")),
            );
            list.push(Value::Object(obj));
        }
    }
    // Routine trace — demoted from error! for the same reason as above.
    tracing::debug!("html爬取:[{}][{:?}]", url, list);

    let mut result: HashMap<String, Value> = HashMap::new();
    result.insert(String::from("result"), Value::Array(list));
    Ok(result)
}
