package org.example.finalsecurities.service.impl;

import lombok.extern.slf4j.Slf4j;
import org.example.finalsecurities.entity.ChangeInfo;
import org.example.finalsecurities.entity.CrawlerLog;
import org.example.finalsecurities.mapper.ChangeInfoMapper;
import org.example.finalsecurities.service.ChangeInfoService;
import org.example.finalsecurities.service.CrawlerLogService;
import org.example.finalsecurities.utils.CrawlerUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * @author 戴诚棋
 * @version 1.0
 */
@Service
@Slf4j
@Service
@Slf4j
public class ChangeInfoServiceImpl implements ChangeInfoService {

    @Autowired
    private ChangeInfoMapper changeInfoMapper;

    @Autowired
    private CrawlerLogService crawlerLogService;

    /**
     * Queries every change-info row currently stored in the table.
     *
     * @return all {@link ChangeInfo} records from the table (empty when the table has no rows,
     *         assuming the mapper follows the usual convention of returning an empty list — TODO confirm)
     * @author 戴诚棋
     */
    @Override
    public List<ChangeInfo> getChangeInfoList() {
        List<ChangeInfo> changeInfos = changeInfoMapper.select();
        // Parameterized SLF4J logging instead of System.out.println so the output
        // respects the application's log level and format configuration.
        log.debug("current change-info rows: {}", changeInfos);
        return changeInfos;
    }

    /**
     * Crawls today's top-gainer / top-loser data, records a crawler log entry, and then
     * persists the crawled rows: if the table already contains data the rows are updated,
     * otherwise they are inserted.
     *
     * @author 戴诚棋
     */
    @Override
    public void insertChangeInfo() {
        Date startTime = new Date();
        log.info("开始爬取股票数据: {}", startTime);

        // Build the crawler-log record describing this crawl run.
        CrawlerLog crawlerLog = new CrawlerLog();
        crawlerLog.setActionDesc("爬取今日涨跌幅");
        crawlerLog.setUrl("https://data.eastmoney.com/bkzj/");
        crawlerLog.setStartTime(startTime);

        // Crawl top-5 gainers and top-5 losers, keyed by category in the map.
        Map<String, List<ChangeInfo>> crawlChangeInfo = CrawlerUtil.crawlChangeInfo();
        log.debug("crawl result: {}", crawlChangeInfo);

        // Persist the crawler log as soon as the crawl itself has finished.
        Date endTime = new Date();
        crawlerLog.setEndTime(endTime);
        crawlerLog.setCreateTime(endTime);
        crawlerLog.setUpdateTime(endTime);
        crawlerLogService.saveLog(crawlerLog);
        log.info("爬取股票数据完成，本次爬虫日志: {}", crawlerLog);

        // Flatten both categories into one list, stamping ids and timestamps.
        List<ChangeInfo> changeInfoList = new ArrayList<>();
        if (crawlChangeInfo != null && !crawlChangeInfo.isEmpty()) {
            // Map.get may return null even for a non-empty map; the helper skips null lists.
            int nextId = stampAndCollect(changeInfoList, crawlChangeInfo.get("涨幅"), 1);
            stampAndCollect(changeInfoList, crawlChangeInfo.get("跌幅"), nextId);
        }

        // Nothing crawled: skip the database write entirely rather than issuing a
        // batch insert/update with an empty list, which would fail or be a no-op.
        if (changeInfoList.isEmpty()) {
            log.warn("本次未爬取到任何涨跌幅数据，跳过数据库写入");
            return;
        }

        // If the table already holds rows, update them; otherwise insert fresh rows.
        if (getChangeInfoList().isEmpty()) {
            changeInfoMapper.insert(changeInfoList);
        } else {
            changeInfoMapper.update(changeInfoList);
        }
    }

    /**
     * Assigns sequential ids and create/update timestamps to every entry of {@code source}
     * and appends them to {@code target}.
     *
     * @param target accumulator list for all rows that will be persisted
     * @param source crawled rows for one category; {@code null} is tolerated and skipped
     * @param nextId first id to assign to the rows of {@code source}
     * @return the next unused id after processing {@code source}
     */
    private int stampAndCollect(List<ChangeInfo> target, List<ChangeInfo> source, int nextId) {
        if (source == null) {
            return nextId;
        }
        // One shared timestamp per category keeps createTime/updateTime consistent
        // and avoids allocating two Date objects per row.
        Date now = new Date();
        for (ChangeInfo changeInfo : source) {
            changeInfo.setId(nextId++);
            changeInfo.setCreateTime(now);
            changeInfo.setUpdateTime(now);
            target.add(changeInfo);
        }
        return nextId;
    }
}

