package com.cov.web.handle;

import com.alibaba.fastjson.JSON;
import com.cov.web.bean.HotNewsBean;
import com.cov.web.constant.Constants;
import com.cov.web.utils.redis.RedisCache;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.List;

/**
 * @author Miki
 * @date 2022/4/11 23:31
 * @Information 使用jsoup对html页面进行爬取数据
 */
@Component
public class JsoupHandler {

    @Autowired
    private RedisCache redisCache;

    /** DXY (丁香园) epidemic H5 page that embeds the timeline data this class scrapes. */
    public static final String URL = "https://ncov.dxy.cn/ncovh5/view/pneumonia?scene=2&from=singlemessage&isappinstalled=0";

    private static final Logger logger = LoggerFactory.getLogger(JsoupHandler.class);

    /**
     * Scheduled entry point: refreshes the cached hot-news data.
     */
    @Scheduled(cron = "*/5 * * * * ?") // every 5 seconds; use "0 */15 * * * ?" for every 15 minutes
    public void updateData() {
        logger.info("Refreshing epidemic data");
        setHotNews();
    }

    /**
     * Scrapes the DXY page, extracts the JSON array embedded in the
     * "getTimelineService1" script tag, parses it into {@link HotNewsBean}
     * items and caches the list in Redis.
     * <p>
     * Best-effort: any failure is logged and swallowed so the scheduler
     * keeps running on subsequent ticks.
     */
    public void setHotNews() {
        try {
            Document doc = Jsoup.connect(URL).get();
            // The page embeds the timeline payload inside this script element.
            Element oneScript = doc.getElementById("getTimelineService1");
            if (oneScript == null) {
                // assert is disabled unless the JVM runs with -ea, so check explicitly
                // instead of risking an NPE when the page layout changes.
                logger.warn("Script element 'getTimelineService1' not found; page layout may have changed");
                return;
            }

            String data = oneScript.data();
            // Slice out the JSON array portion of the script body; guard against
            // missing brackets, which would make substring() throw.
            int start = data.indexOf('[');
            int end = data.lastIndexOf(']');
            if (start < 0 || end < start) {
                logger.warn("No JSON array found in script data; skipping cache update");
                return;
            }
            String subData = data.substring(start, end + 1);

            List<HotNewsBean> hotNews = JSON.parseArray(subData, HotNewsBean.class);
            redisCache.setCacheObject(Constants.DATA_KEY + "cov.hot.news", hotNews);
        } catch (Exception e) {
            // Log through SLF4J with the cause attached instead of printStackTrace().
            logger.error("Failed to refresh hot news from {}", URL, e);
        }
    }

}
