package com.heima.comment.listener;

import com.alibaba.fastjson.JSON;
import com.heima.comment.dto.CommentStreamMessage;
import com.heima.comment.entity.UpdateCommentMessage;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.*;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.util.StringUtils;

import java.time.Duration;

@EnableBinding(ICommentRepayProcessor.class)
public class CommentRepayListener {

    /**
     * Kafka Streams topology that aggregates per-comment counters (likes and replies)
     * over 15-second time windows.
     *
     * <p>Consumes JSON messages from {@code comment_repay_score_topic}, re-keys them by
     * comment id, folds the {@code add} increments into a {@link CommentStreamMessage}
     * per window, and emits the aggregated JSON to {@code comment_repay_result_topic}.
     *
     * @param input stream of records whose value is an {@code UpdateCommentMessage} JSON,
     *              e.g. {"articleId":1471738975990321153,"type":0,"add":1}
     * @return stream keyed by comment id whose value is the aggregated
     *         {@code CommentStreamMessage} JSON for the window
     */
    @StreamListener("comment_repay_score_topic")
    @SendTo("comment_repay_result_topic")
    public KStream<String, String> process(KStream<String, String> input) {
        // Re-key every record by its comment id so that counts accumulate per comment.
        KStream<String, String> keyedByCommentId = input.map((key, value) -> {
            UpdateCommentMessage message = JSON.parseObject(value, UpdateCommentMessage.class);
            return new KeyValue<>(message.getCommentId().toString(), value);
        });

        // Group by comment id and bucket the records into 15-second windows.
        TimeWindowedKStream<String, String> windowed =
                keyedByCommentId.groupByKey().windowedBy(TimeWindows.of(Duration.ofSeconds(15)));

        // First aggregate for a window is null; the aggregator below creates the
        // counters object lazily on the first message it sees.
        Initializer<String> emptyAggregate = () -> null;

        // Fold one incoming message into the running JSON aggregate of its window.
        Aggregator<String, String, String> accumulate = (commentId, value, aggregateJson) -> {
            CommentStreamMessage counters;
            if (StringUtils.isEmpty(aggregateJson)) {
                // No previous aggregate in this window yet: start from zeroed counters.
                counters = new CommentStreamMessage();
                counters.setCommentId(commentId);
                counters.setLike(0L);
                counters.setComment(0L);
            } else {
                counters = JSON.parseObject(aggregateJson, CommentStreamMessage.class);
            }
            UpdateCommentMessage message = JSON.parseObject(value, UpdateCommentMessage.class);
            // NOTE(review): the sample payload above shows type 0, but only types 1 and 2
            // are handled here — other types are silently ignored. Confirm against the
            // producer's type constants.
            switch (message.getType()) {
                case 1:
                    // Like event: apply the increment to the like counter.
                    counters.setLike(counters.getLike() + message.getAdd());
                    break;
                case 2:
                    // Reply event: apply the increment to the reply counter.
                    counters.setComment(counters.getComment() + message.getAdd());
                    break;

            }
            return JSON.toJSONString(counters);
        };

        KTable<Windowed<String>, String> aggregated = windowed.aggregate(emptyAggregate, accumulate);

        // Unwrap the windowed key so downstream consumers see plain commentId -> JSON pairs.
        return aggregated.toStream().map((windowedKey, value) -> new KeyValue<>(windowedKey.key(), value));
    }
}
