package com.spider.sql.controller;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.ResponseBody;

import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;

@Controller
public class CrawlerController {

    /**
     * Timestamp pattern for crawl records. DateTimeFormatter is immutable and
     * thread-safe, unlike SimpleDateFormat, so it is safe to cache here.
     */
    private static final DateTimeFormatter TIMESTAMP_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /** Session attribute key under which crawl records accumulate. */
    private static final String SESSION_KEY = "spiderData";

    /**
     * Fetches the page at the URL supplied in the request body, extracts common
     * elements (title, h1–h3, p text, first img src), appends a timestamped
     * record to the HTTP session, and returns the raw HTML plus parsed fields.
     *
     * NOTE(review): the URL comes directly from the client and is fetched
     * server-side — this is an SSRF vector. Restrict allowed schemes/hosts
     * before exposing this endpoint publicly.
     *
     * @param request request body map; must contain a non-blank "url" entry
     * @param session HTTP session holding the list of crawl records
     * @return map with "htmlContent" (raw HTML string) and "parsedData"
     *         (tag-name → extracted text map); on failure both keys hold an
     *         error message string instead
     */
    @PostMapping("/crawl")
    @ResponseBody
    public Map<String, Object> crawl(@RequestBody Map<String, String> request, HttpSession session) {
        Map<String, Object> response = new HashMap<>();
        String url = request.get("url");

        // Jsoup.connect throws an uncaught IllegalArgumentException on a null
        // or blank URL (only IOException was caught), surfacing as a 500.
        // Reject empty input up front with the same error response shape.
        if (url == null || url.trim().isEmpty()) {
            response.put("htmlContent", "爬取失败：URL为空");
            response.put("parsedData", "爬取失败：URL为空");
            return response;
        }

        try {
            // Fetch and parse the target page with Jsoup.
            Document doc = Jsoup.connect(url).get();

            // Extract the common tag elements. select(...).text() concatenates
            // the text of ALL matches; attr("src") returns only the FIRST
            // matched <img>'s src — preserved as-is from the original contract.
            Map<String, String> parsedData = new HashMap<>();
            parsedData.put("title", doc.title());
            parsedData.put("h1", doc.select("h1").text());
            parsedData.put("h2", doc.select("h2").text());
            parsedData.put("h3", doc.select("h3").text());
            parsedData.put("p", doc.select("p").text());
            parsedData.put("img", doc.select("img").attr("src"));

            // Build a timestamped record for this crawl.
            Map<String, Object> record = new HashMap<>();
            record.put("timestamp", LocalDateTime.now().format(TIMESTAMP_FORMAT));
            record.put("parsedData", parsedData);

            // Append the record to the per-session crawl history, creating the
            // list on first use. The attribute is only ever written here with
            // this type, so the unchecked cast is safe.
            @SuppressWarnings("unchecked")
            List<Map<String, Object>> spiderData =
                    (List<Map<String, Object>>) session.getAttribute(SESSION_KEY);
            if (spiderData == null) {
                spiderData = new ArrayList<>();
            }
            spiderData.add(record);
            session.setAttribute(SESSION_KEY, spiderData);

            response.put("htmlContent", doc.html());
            response.put("parsedData", parsedData);
        } catch (IOException | IllegalArgumentException e) {
            // IllegalArgumentException covers malformed URLs rejected by Jsoup,
            // which previously escaped the handler and caused a 500 response.
            response.put("htmlContent", "爬取失败：" + e.getMessage());
            response.put("parsedData", "爬取失败：" + e.getMessage());
        }
        return response;
    }
}
