package com.example.chinesenation.Schedule;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.example.chinesenation.ElasticSearch.ElasticSearchService;
import com.example.chinesenation.Entity.*;
import com.example.chinesenation.Redis.RedisService;
import com.example.chinesenation.Service.*;
import com.example.chinesenation.Utils.*;
import com.example.chinesenation.VO.NationPage.VONationArticle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import java.sql.Timestamp;
import java.util.*;
import java.util.function.Consumer;
import java.util.stream.Collectors;

/**
 * Scheduled batch jobs: flush Redis-accumulated page-view counters for articles
 * and policies into the database and ElasticSearch, and pre-warm the per-nation
 * hot-article cache in Redis.
 *
 * @author sakuraFallingDown
 * @version 1.0
 * @date 2021/1/19 19:55
 */
@Configuration
public class ScheduleTools {

    private static final Logger logger = LoggerFactory.getLogger(ScheduleTools.class);

    @Autowired
    private IArticleService articleService;
    @Autowired
    private IPolicyListService policyListService;

    // NOTE(review): browseRecordService / browseRecordPolicyService / recommendedTools
    // are injected but unused in this class — confirm whether they are still needed.
    @Autowired
    private IBrowseRecordService browseRecordService;

    @Autowired
    private IBrowseRecordPolicyService browseRecordPolicyService;

    @Autowired
    private ElasticSearchService elasticSearchService;

    @Autowired
    private INationService nationService;

    @Autowired
    private IArticleClassicalService articleClassicalService;

    @Autowired
    private RedisService redisService;

    @Autowired
    private RecommendedTools recommendedTools;

    /**
     * Extracts the trailing segment (after the last ':') of each Redis key — by
     * convention the entity id — logging each extracted id.
     *
     * @param keys non-null set of Redis keys of the form "prefix:...:id"
     * @return the extracted ids, in stream order
     */
    private List<String> extractTrailingIds(Set<String> keys) {
        return keys.stream().map(key -> {
            String[] parts = key.split(":");
            String id = parts[parts.length - 1];
            logger.info("{}", id);
            return id;
        }).collect(Collectors.toList());
    }

    /**
     * Flushes the per-article page-view counters accumulated in Redis into the
     * article table and ElasticSearch, clearing each Redis counter once read.
     *
     * <p>NOTE(review): increments arriving between {@code getPageView} and
     * {@code deletePageView} are lost; acceptable only if approximate counts
     * are tolerated — confirm.
     */
    @Scheduled(cron = "0 0 4 1/1 * ? ")// runs daily at 04:00 (same instant as updatePolicyPageView — confirm intended)
    @Transactional(isolation = Isolation.READ_COMMITTED, propagation = Propagation.REQUIRES_NEW)
    public void updateArticlePageView() {
        HashMap<String, Integer> articlePageViews = new HashMap<>();
        Set<String> articleKeys = redisService.getAllArticlePageViewKeys();
        if (articleKeys == null || articleKeys.isEmpty()) {
            return;
        }
        List<String> articleIds = extractTrailingIds(articleKeys);
        List<Article> articles = articleService.listByIds(articleIds);
        List<Article> updateArticle = new ArrayList<>();
        articles.forEach(article -> {
            String redisKey = CommonURL.RedisArticlePageView + article.getId();
            if (!redisService.hasComment(redisKey)) {
                return;
            }
            int count = redisService.getPageView(redisKey);
            logger.info("{}:{}", article.getId(), count);
            redisService.deletePageView(redisKey);// clear the counter after reading it
            if (count == 0) {
                return;
            }
            articlePageViews.put(article.getId(), article.getPageview() + count);
            // Sparse update entity: only id + new pageview, so the batch update
            // touches nothing else.
            Article update = new Article();
            update.setPageview(article.getPageview() + count);
            update.setId(article.getId());
            updateArticle.add(update);
        });
        if (!updateArticle.isEmpty()) {
            articleService.updateBatchById(updateArticle);// batch-update the database
        }
        if (!articlePageViews.isEmpty()) {
            elasticSearchService.updateArticlePageView(articlePageViews);// batch-update ES
        }
    }

    /**
     * Flushes the per-policy page-view counters accumulated in Redis into the
     * policy table and ElasticSearch, clearing each Redis counter once read.
     * Mirrors {@link #updateArticlePageView()} for the Policy entity.
     */
    @Scheduled(cron = "0 0 4 1/1 * ? ")// runs daily at 04:00 (cron is authoritative; old comment said 02:00)
    @Transactional(isolation = Isolation.READ_COMMITTED, propagation = Propagation.REQUIRES_NEW)
    public void updatePolicyPageView() {
        HashMap<String, Integer> policyPageViews = new HashMap<>();
        Set<String> policyKeys = redisService.getAllPolicyPageViewKeys();
        if (policyKeys == null || policyKeys.isEmpty()) {
            return;
        }
        // BUGFIX: previously logged the array index (split.length - 1) instead of
        // the extracted id; the shared helper now logs the id itself.
        List<String> policyIds = extractTrailingIds(policyKeys);
        List<Policy> policies = policyListService.listByIds(policyIds);
        List<Policy> updatePolicy = new ArrayList<>();
        policies.forEach(policy -> {
            String redisKey = CommonURL.RedisPolicyPageView + policy.getId();
            if (!redisService.hasComment(redisKey)) {
                return;
            }
            int count = redisService.getPageView(redisKey);
            logger.info("{}:{}", policy.getId(), count);
            redisService.deletePageView(redisKey);// clear the counter after reading it
            if (count == 0) {
                return;
            }
            policyPageViews.put(policy.getId(), policy.getPageview() + count);
            Policy update = new Policy();
            update.setPageview(policy.getPageview() + count);
            update.setId(policy.getId());
            updatePolicy.add(update);
        });
        if (!updatePolicy.isEmpty()) {
            policyListService.updateBatchById(updatePolicy);
        }
        if (!policyPageViews.isEmpty()) {
            elasticSearchService.updatePolicyPageView(policyPageViews);
        }
    }

    /**
     * Pre-warms the per-nation hot-article list in Redis. For every nation whose
     * cache key is absent, loads its classical-article ids, fetches the full
     * articles, converts them to {@link VONationArticle} view objects and caches
     * the list under {@code CommonURL.NationPageRecommend + nationName}.
     */
    @Scheduled(cron = "0 0 3 1/1 * ? ")// runs daily at 03:00 (cron is authoritative; old comment said 02:00)
    @Transactional(isolation = Isolation.READ_COMMITTED, propagation = Propagation.REQUIRES_NEW)
    public void uploadRedisArticleList() {
        List<Nation> list = nationService.list();
        List<String> nationNames = list.stream().map(Nation::getNationName).collect(Collectors.toList());
        for (String nationName : nationNames) {
            if (redisService.hasKey(CommonURL.NationPageRecommend + nationName)) {
                continue;// key already cached — nothing to do
            }
            QueryWrapper<ArticleClassical> qw = new QueryWrapper<>();
            qw.eq("nation_name", nationName);
            List<ArticleClassical> classics = articleClassicalService.list(qw);
            // NOTE(review): listByIds on the full article entity is slow here —
            // consider a projection instead of loading whole Article rows.
            List<String> articleIds = classics.stream().map(ArticleClassical::getArticleId).collect(Collectors.toList());
            List<Article> articles = articleService.listByIds(articleIds);
            List<VONationArticle> voNationArticleList = new ArrayList<>();
            for (Article article : articles) {
                VONationArticle voNationArticle = new VONationArticle();
                DataChanges.fromArticleToVONationArticle(article, voNationArticle);
                voNationArticleList.add(voNationArticle);
            }
            redisService.put(CommonURL.NationPageRecommend + nationName, voNationArticleList);
        }
    }
    /*
    @Scheduled(cron = "0 0 1 1/1 * ? ")//每一天执行一次 更新热门排行
    public void updateReadCount() throws Exception {
        String date4 = CommonFunction.getRedisDayKey(4);
        String date1 = CommonFunction.getRedisDayKey(1);
        QueryWrapper<Article> qw=new QueryWrapper<>();
        qw.select("id");
        List<Article> list = articleService.list(qw);
        if(list!=null&&list.size()!=0) {
            List<String> keys = list.stream().map(o -> {
                return o.getId();
            }).collect(Collectors.toList());
            List<String> collect1 = list.stream().map(o -> {
                return CommonURL.RedisArticlePageView+date1+o.getId();
            }).collect(Collectors.toList());

            List<String> collect4 = list.stream().map(o -> {
                return CommonURL.RedisArticlePageView+date4+o.getId();
            }).collect(Collectors.toList());

            List<Long> readView1 = redisService.pipeline(collect1,new Long(1));
            List<Long> readView4 = redisService.pipeline(collect4,new Long(1));
            if(readView1.size()!=readView4.size()){
                throw  new Exception("Redis记录丢失");
            }
            List<Long> values=new ArrayList<>(readView1.size());
            for(int i=0;i<readView1.size();i++){
                Long aLong = readView1.get(i)==null?0:readView1.get(i);
                Long aLong1 = readView4.get(i)==null?0:readView4.get(i);
                values.add(aLong - aLong1);
            }
            redisService.pipelineZSet("rankArticle",keys,values);
        }
    }

     */
}
