package cn.josewu.crawler;

import cn.josewu.DatasourceApplication;
import cn.josewu.bean.CovidBean;
import cn.josewu.util.HttpUtils;
import cn.josewu.util.TimeUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * @program: covid19
 * @description: 定时爬取疫情数据
 * @author: Mr. Tigger
 * @create 2020-11-12 08:17
 **/
/*
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest(classes = DatasourceApplication.class)
*/
@Component
public class Covid19DataCrawler {

    /**
     * Extracts the JSON array (brackets included, via group 0) embedded in the
     * page's getAreaStat script tag. Compiled once — Pattern is thread-safe.
     */
    private static final Pattern AREA_STAT_PATTERN = Pattern.compile("\\[(.*)\\]");

    // Parameterized to match the messages sent below:
    // key = Integer locationId/pid, value = JSON string of the bean.
    @Autowired
    private KafkaTemplate<Integer, String> kafkaTemplate;

    /*
    @Test
    public void testKafkaTemplate() throws Exception {
        kafkaTemplate.send("covid19",1,"FFFFFFFFFAs11-12Tigger");
        Thread.sleep(10000000);
    } */

    /**
     * Crawls COVID-19 epidemic data from the DXY page and publishes it to the
     * Kafka topic {@code "covid19"}:
     * <ol>
     *   <li>city-level beans, keyed by their parent province's locationId (pid)</li>
     *   <li>province-level beans, keyed by locationId, with {@code statisticsData}
     *       replaced by the fetched per-day statistics and {@code cities} cleared</li>
     * </ol>
     * Intended to run on a schedule (cron commented out below).
     *
     * @throws Exception propagated from HTTP fetching / JSON parsing
     */
    //@Scheduled(cron="0/10 * * * * ?") // every 10 seconds
    public void testCrawling() throws Exception {

        System.out.println("每隔10秒执行一次");
        String datetime = TimeUtils.format(System.currentTimeMillis(), "yyyy-MM-dd");

        // Fetch the page and pull out the <script id="getAreaStat"> element,
        // whose text embeds a JSON array of province-level data.
        String html = HttpUtils.getHtml("https://ncov.dxy.cn/ncovh5/view/pneumonia");
        Document doc = Jsoup.parse(html);
        String text = doc.select("script[id=getAreaStat]").toString();

        Matcher matcher = AREA_STAT_PATTERN.matcher(text);
        if (!matcher.find()) {
            // Page layout changed or the fetch returned nothing. Bail out instead
            // of passing an empty string to parseArray, which would return null
            // and NPE in the loop below.
            System.out.println("no match");
            return;
        }
        String jsonStr = matcher.group(0);

        List<CovidBean> pCovidBeans = JSON.parseArray(jsonStr, CovidBean.class);
        for (CovidBean pBean : pCovidBeans) {  // province-level bean
            pBean.setDatetime(datetime);

            // Parse the second layer: the "cities" field holds a nested JSON array.
            // parseArray returns null for a null/empty input, so guard before iterating.
            List<CovidBean> covidBeans = JSON.parseArray(pBean.getCities(), CovidBean.class);
            if (covidBeans != null) {
                for (CovidBean bean : covidBeans) {  // city-level bean
                    bean.setDatetime(datetime);
                    bean.setPid(pBean.getLocationId());  // link city back to its province
                    bean.setProvinceShortName(pBean.getProvinceShortName());
                    String beanStr = JSON.toJSONString(bean);
                    kafkaTemplate.send("covid19", bean.getPid(), beanStr);
                }
            }

            // At this point statisticsData is a URL; fetch it to get the
            // per-day statistics for this province.
            String statisticsDataUrl = pBean.getStatisticsData();
            String statisticsDataStr = HttpUtils.getHtml(statisticsDataUrl);
            JSONObject jsonObject = JSON.parseObject(statisticsDataStr);
            String dataStr = jsonObject.getString("data");

            // Replace the URL with the fetched daily data, and drop the raw
            // cities payload (already published individually above).
            pBean.setStatisticsData(dataStr);
            pBean.setCities(null);
            String pBeanStr = JSON.toJSONString(pBean);
            kafkaTemplate.send("covid19", pBean.getLocationId(), pBeanStr);
        }
    }
}
