package com.heima.article.listener;

import com.alibaba.fastjson.JSON;
import com.heima.article.dto.ArticleStreamMessage;
import com.heima.article.dto.UpdateArticleMessage;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.*;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.util.StringUtils;

import java.time.Duration;

@EnableBinding(IHotArticleProcessor.class)
public class HotArticleListener {

    /** Aggregation window length in milliseconds, injected from configuration (kafka.commitTime). */
    @Value("${kafka.commitTime}")
    private String commitTime;

    /**
     * Builds the Kafka Streams topology for hot-article scoring.
     * <p>
     * Consumes score-update messages from {@code hot_article_score_topic}, re-keys each
     * record by article id, aggregates the per-article counters inside a tumbling time
     * window, and emits the accumulated totals to {@code hot_article_result_topic}.
     * <p>
     * Incoming value JSON shape: {@code {"articleId":1503626290400337922,"type":1,"add":1}}.
     * Outgoing: key = article id as String, value = {@link ArticleStreamMessage} JSON.
     *
     * @param input stream of raw score-update messages keyed arbitrarily
     * @return stream of aggregated per-article results keyed by article id
     */
    @StreamListener("hot_article_score_topic")
    @SendTo("hot_article_result_topic")
    public KStream<String, String> process(KStream<String, String> input) {
        // Re-key each record by article id so that all updates for the same article
        // fall into the same group; the original JSON payload is kept as the value.
        KStream<String, String> keyedByArticle = input.map((key, value) -> {
            UpdateArticleMessage msg = JSON.parseObject(value, UpdateArticleMessage.class);
            return new KeyValue<>(msg.getArticleId().toString(), value);
        });

        // Parse the window size once at topology-build time so a bad configuration
        // value fails fast here instead of surfacing later inside the stream.
        long windowMillis = Long.parseLong(commitTime);

        // Group by article id and define the tumbling aggregation window.
        TimeWindowedKStream<String, String> windowed = keyedByArticle
                .groupByKey()
                .windowedBy(TimeWindows.of(Duration.ofMillis(windowMillis)));

        // First message of a window has no previous aggregate — start from null;
        // the aggregator below treats null/empty as "create a zeroed message".
        Initializer<String> init = () -> null;

        // Merge one incoming update into the running per-article totals.
        // key = article id, value = incoming update JSON, aggregate = previous
        // ArticleStreamMessage JSON (null on the first call of a window).
        Aggregator<String, String, String> agg = (key, value, aggregate) -> {
            long articleId = Long.parseLong(key);
            UpdateArticleMessage updateArticleMessage = JSON.parseObject(value, UpdateArticleMessage.class);
            ArticleStreamMessage message;
            if (StringUtils.isEmpty(aggregate)) {
                // No prior state in this window: start all counters at zero.
                message = new ArticleStreamMessage();
                message.setArticleId(articleId);
                message.setView(0);
                message.setLike(0);
                message.setComment(0);
                message.setCollect(0);
            } else {
                message = JSON.parseObject(aggregate, ArticleStreamMessage.class);
            }
            // Apply this update's delta; type: 0 = view, 1 = like, 2 = comment, 3 = collect.
            switch (updateArticleMessage.getType()) {
                case 0:
                    message.setView(message.getView() + updateArticleMessage.getAdd());
                    break;
                case 1:
                    message.setLike(message.getLike() + updateArticleMessage.getAdd());
                    break;
                case 2:
                    message.setComment(message.getComment() + updateArticleMessage.getAdd());
                    break;
                case 3:
                    message.setCollect(message.getCollect() + updateArticleMessage.getAdd());
                    break;
                default:
                    // Unknown message type: keep the aggregate unchanged rather than
                    // failing the stream thread on an unexpected value.
                    break;
            }
            return JSON.toJSONString(message);
        };

        KTable<Windowed<String>, String> aggregate = windowed.aggregate(init, agg);
        // Unwrap the windowed key back to the plain article id before forwarding.
        return aggregate.toStream().map((key, value) -> new KeyValue<>(key.key(), value));
    }
}
