package com.xuan.collegeblog.spider.restapi;


import com.xuan.collegeblog.spider.service.BaiduService;
import com.xuan.collegeblog.spider.service.ProcessDataService;
import io.swagger.annotations.Api;
import lombok.extern.slf4j.Slf4j;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

@RestController
@RequestMapping("/spider")
@Api(value = "spiderRestApi", tags = "爬虫相关接口")
@Slf4j
public class SpiderRestApi {

    /** Read-status flags (kept package-visible for existing callers; not referenced inside this class). */
    static final int READ = 1;
    static final int UNREAD = 0;

    @Autowired
    private RabbitTemplate rabbitTemplate;

    @Autowired
    private ProcessDataService processDataService;

    /**
     * Publishes the given college id to the {@code college.dir} routing key so the
     * Python spider consumer can start fetching data for that college.
     * <p>Precondition (per original note): the {@code college_id} must already exist
     * in the database before this endpoint is called.
     *
     * @param collegeId id of the college to crawl (HTTP query parameter {@code college_id})
     * @return the literal string {@code "ok"} once the message has been handed to RabbitMQ
     * @author huangzx
     * @since 2024/7/19
     */
    @GetMapping("/getMessagePy")
    public String sendDirectMessage(@RequestParam("college_id") Long collegeId) {
        // convertAndSend(routingKey, payload) publishes to the default exchange;
        // the id is serialized as a plain string so the (Python) consumer stays
        // language-agnostic.
        rabbitTemplate.convertAndSend(
                "college.dir",
                String.valueOf(collegeId));
        return "ok";
    }

    /**
     * Generates topics for a college by id. Per the original description the
     * service: 1) fetches all topics, 2) processes them, 3) pushes the processed
     * data to Baidu, and 4) persists the processed data to the database.
     *
     * @param collegeId id of the college (HTTP query parameter {@code college_id})
     * @return result message produced by {@link ProcessDataService#integratedData}
     * @author huangzx
     * @since 2024/7/17
     */
    @GetMapping("/handleMessage")
    // FIX: was declared private. Spring MVC handler methods must be public —
    // a private handler is not reachable through CGLIB class proxies (e.g. when
    // AOP or @Transactional wraps the controller) and violates MVC convention.
    public String handleMessage(@RequestParam("college_id") Long collegeId) {
        return processDataService.integratedData(collegeId);
    }


}
