package com.heima.article.stream;

import com.alibaba.fastjson.JSON;
import com.heima.article.dto.ArticleStreamMessageDto;
import com.heima.article.dto.UpdateArticleMessageDto;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.*;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;

import java.time.Duration;


/**
 * Hot-article stream listener.
 *
 * <p>Consumes raw user-behaviour events from {@code hot_article_score_topic}, e.g.
 * <pre>
 *   {"add":1,"articleId":1582655625763414018,"type":1}
 * </pre>
 * re-keys each event by article id, aggregates like/comment/collect/view counts per
 * article inside a 60-second tumbling window, and emits the aggregated JSON to
 * {@code hot_article_score_result_topic}.
 *
 * @author Wq
 * @date 2022/10/21
 */
@EnableBinding(ArticleStreamsProcessor.class)
public class HotArticleListener {

    /** Size of the tumbling aggregation window. */
    private static final Duration WINDOW_SIZE = Duration.ofSeconds(60);

    /**
     * Builds the stream topology: re-key by article id, group, window, aggregate,
     * then strip the window wrapper from the key before forwarding downstream.
     *
     * @param input raw behaviour events; value is a JSON-encoded {@link UpdateArticleMessageDto}
     * @return stream keyed by article id whose value is a JSON-encoded {@link ArticleStreamMessageDto}
     */
    @StreamListener("hot_article_score_topic")
    @SendTo("hot_article_score_result_topic")
    public KStream<String, String> process(KStream<String, String> input) {
        return input
                // Re-key every record by its article id so records for the same
                // article land in the same group, e.g.
                // 1582655625763414018 -> {"add":1,"articleId":1582655625763414018,"type":1}
                .map((key, value) -> {
                    UpdateArticleMessageDto message = JSON.parseObject(value, UpdateArticleMessageDto.class);
                    String articleIdKey = message.getArticleId().toString();
                    return new KeyValue<>(articleIdKey, value);
                })
                .groupByKey()
                .windowedBy(TimeWindows.of(WINDOW_SIZE))
                // Initializer yields null: the aggregator treats a blank aggregate
                // as "first event in this window" and creates a zeroed counter object.
                .aggregate(() -> null, this::aggregate)
                .toStream()
                // Unwrap Windowed<String> back to the plain article-id key.
                .map((windowedKey, value) -> new KeyValue<>(windowedKey.key(), value));
    }

    /**
     * Folds one behaviour event into the running per-article aggregate.
     *
     * @param key           article id (the grouping key)
     * @param value         JSON-encoded {@link UpdateArticleMessageDto} for this event
     * @param aggregateJson JSON-encoded running {@link ArticleStreamMessageDto},
     *                      or blank/null on the first event of the window
     * @return updated aggregate as JSON
     */
    private String aggregate(String key, String value, String aggregateJson) {
        UpdateArticleMessageDto message = JSON.parseObject(value, UpdateArticleMessageDto.class);

        ArticleStreamMessageDto stats;
        if (StringUtils.isBlank(aggregateJson)) {
            // First event for this article in the current window: start from zero.
            stats = new ArticleStreamMessageDto();
            stats.setArticleId(Long.valueOf(key));
            stats.setLike(0);
            stats.setComment(0);
            stats.setCollect(0);
            stats.setView(0);
        } else {
            stats = JSON.parseObject(aggregateJson, ArticleStreamMessageDto.class);
        }

        // Behaviour type: 0=like, 1=comment, 2=collect, 3=view.
        // Guard against a missing type to avoid an unboxing NPE in the switch;
        // unknown/absent types leave the aggregate unchanged.
        Integer type = message.getType();
        if (type != null) {
            switch (type) {
                case 0:
                    stats.setLike(stats.getLike() + message.getAdd());
                    break;
                case 1:
                    stats.setComment(stats.getComment() + message.getAdd());
                    break;
                case 2:
                    stats.setCollect(stats.getCollect() + message.getAdd());
                    break;
                case 3:
                    stats.setView(stats.getView() + message.getAdd());
                    break;
                default:
                    // Unknown behaviour type: ignore rather than fail the stream task.
                    break;
            }
        }
        return JSON.toJSONString(stats);
    }
}
