package com.yupi.yuso.manager;

import cn.hutool.http.HttpRequest;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.yupi.yuso.common.ErrorCode;
import com.yupi.yuso.exception.BusinessException;
import com.yupi.yuso.model.entity.Post;
import com.yupi.yuso.model.vo.PictureVO;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Crawler-only manager: fetches posts and pictures from external sites on demand.
 * Results are returned to the caller and are NOT persisted to the database.
 *
 * @author vvv
 * @date 2023-12-20 10 53
 */
@Service
public class CrawlerManager {

    public static final int CRAWL_TOTAL = 999;

    /**
     * Crawls one page of posts from the code-nav search API.
     *
     * @param current  1-based page number to request
     * @param pageSize number of posts per page
     * @return posts parsed from the response; every post's userId is hard-coded to 1L
     * @throws BusinessException if the response body has no usable "data" object
     */
    public List<Post> crawlPost(int current, int pageSize) {
        String url = "https://www.code-nav.cn/api/post/search/page/vo";

        // Build the request body directly instead of round-tripping a JSON
        // template string through a raw Map (avoids the unchecked conversion).
        Map<String, Object> jsonMap = new HashMap<>();
        jsonMap.put("current", current);
        jsonMap.put("pageSize", pageSize);
        jsonMap.put("sortField", "_score");
        jsonMap.put("sortOrder", "descend");
        jsonMap.put("searchText", "");
        jsonMap.put("category", "文章");
        jsonMap.put("reviewStatus", 1);
        String result = HttpRequest.post(url)
                .body(JSONUtil.toJsonStr(jsonMap))
                .execute().body();

        // Fail loudly on a malformed/error response instead of surfacing an
        // opaque NullPointerException or ClassCastException from a blind cast.
        JSONObject resp = JSONUtil.parseObj(result);
        JSONObject data = resp.getJSONObject("data");
        if (data == null) {
            throw new BusinessException(ErrorCode.SYSTEM_ERROR, "爬取文章失败");
        }
        JSONArray records = data.getJSONArray("records");
        if (records == null) {
            // No records in this page — return empty rather than NPE.
            return new ArrayList<>();
        }

        List<Post> postList = new ArrayList<>(records.size());
        for (Object record : records) {
            JSONObject temp = (JSONObject) record;
            Post post = new Post();
            post.setContent(temp.getStr("content"));
            post.setTitle(temp.getStr("title"));
            JSONArray tags = temp.getJSONArray("tags");
            // Tags may be absent on some records; keep null rather than crash.
            post.setTags(tags == null ? null : tags.toString());
            // NOTE(review): crawled posts are attributed to a fixed user id.
            post.setUserId(1L);
            postList.add(post);
        }
        return postList;
    }


    /**
     * Crawls picture search results from Bing image search.
     *
     * @param current    result offset, passed as Bing's {@code first} parameter
     * @param searchText search keyword; a default keyword is used when blank
     * @return pictures (title + image URL) parsed from the result page;
     *         malformed result tiles are skipped
     * @throws BusinessException if the page cannot be fetched or the keyword
     *                           cannot be encoded
     */
    public List<PictureVO> crawlPicture(long current, String searchText) {
        searchText = StringUtils.isEmpty(searchText) ? "蔡徐坤打篮球" : searchText;
        try {
            // URL-encode the keyword so non-ASCII (e.g. Chinese) queries build
            // a valid URL; UnsupportedEncodingException is an IOException and
            // is handled by the catch below.
            String encoded = URLEncoder.encode(searchText, "UTF-8");
            String url = String.format("https://www.bing.com/images/search?q=%s&first=%s", encoded, current);
            Document doc = Jsoup.connect(url).get();
            Elements elements = doc.select(".iuscp.isv");
            List<PictureVO> pictureVOList = new ArrayList<>();
            for (Element element : elements) {
                Elements iusc = element.select(".iusc");
                Elements inflnk = element.select(".inflnk");
                if (iusc.isEmpty() || inflnk.isEmpty()) {
                    // Skip malformed tiles instead of throwing IndexOutOfBoundsException.
                    continue;
                }
                // The image URL lives in the "m" attribute as embedded JSON (key "murl").
                String m = iusc.get(0).attr("m");
                String murl = JSONUtil.parseObj(m).getStr("murl");
                // The title is carried in the link's aria-label attribute.
                String title = inflnk.get(0).attr("aria-label");
                PictureVO pictureVO = new PictureVO();
                pictureVO.setTitle(title);
                pictureVO.setUrl(murl);
                pictureVOList.add(pictureVO);
            }
            return pictureVOList;
        } catch (IOException e) {
            // NOTE(review): cause is dropped — attach `e` if BusinessException
            // has a (code, message, cause) constructor.
            throw new BusinessException(ErrorCode.SYSTEM_ERROR, "爬取图片失败");
        }
    }
}
