package xjay.ai.emotionalsupport.tools;

import cn.hutool.http.HttpRequest;
import cn.hutool.http.HttpUtil;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import lombok.extern.slf4j.Slf4j;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.springframework.ai.tool.annotation.Tool;
import org.springframework.ai.tool.annotation.ToolParam;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;


/**
 * Workflow: first obtain relevant URLs via {@code searchWeb},
 * then fetch each page's content via {@code scrapeWebUrl}.
 */

@Slf4j
@Component
public class WebSearchTool {

    /** Bocha web-search endpoint; expects a POST with a JSON body. */
    private static final String BOCHA_API_URL = "https://api.bochaai.com/v1/web-search";

    /** API key injected from configuration property {@code search-api.api-key}. */
    @Value("${search-api.api-key}")
    private String apiKey;

    /**
     * Searches the web through the Bocha search API and returns the result
     * URLs joined with commas ("url1,url2,url3..."), so the model can pass
     * each one to {@link #scrapeWebUrl(String)}.
     *
     * @param query the user's search query
     * @return comma-separated result URLs; empty string when the response
     *         carries no results or has an unexpected shape
     * @throws UnsupportedEncodingException never thrown here; declaration kept
     *         for backward compatibility with existing callers
     */
    @Tool(description = "search information from baidu search engine")
    public String searchWeb(@ToolParam(description = "Search query keyword") String query)
            throws UnsupportedEncodingException {
        log.info("search web query:{}", query);

        // JSON request body expected by the Bocha API.
        JSONObject payload = JSONUtil.createObj()
                .set("query", query)
                .set("summary", true)
                .set("count", 10);

        // Bocha requires POST with a Bearer token; the response body is JSON.
        String response = HttpRequest.post(BOCHA_API_URL)
                .header("Authorization", "Bearer " + apiKey)
                .header("Content-Type", "application/json")
                .body(payload.toString())
                .execute()
                .body();

        // Extract data.webPages.value[*].url, guarding against error/empty
        // responses that would otherwise cause an NPE.
        JSONObject json = JSONUtil.parseObj(response);
        JSONObject data = json.getJSONObject("data");
        JSONObject webPages = (data == null) ? null : data.getJSONObject("webPages");
        JSONArray value = (webPages == null) ? null : webPages.getJSONArray("value");
        if (value == null) {
            log.warn("unexpected search response: {}", response);
            return "";
        }

        // BUG FIX: the original computed this joined URL list but returned
        // the raw JSON response instead; return the URLs as intended.
        return value.stream()
                .map(obj -> ((JSONObject) obj).get("url").toString())
                .collect(Collectors.joining(","));
    }

    /**
     * Fetches a web page over HTTP and returns its full HTML markup.
     *
     * @param url the URL of the page to scrape
     * @return the page's HTML
     * @throws IOException if the connection or fetch fails
     */
    @Tool(description = "scrape the content from a web page")
    public String scrapeWebUrl(@ToolParam(description = "url of the web page to scrape") String url) throws IOException {
        // Jsoup.connect(url) returns a Connection; get() performs the HTTP GET.
        log.info("scrape web url:{}", url);
        Document document = Jsoup.connect(url).get();
        return document.html();
    }
}
