package com.heima.article.listener;

import com.alibaba.fastjson.JSON;
import com.heima.article.dto.ArticleStreamMessage;

import com.heima.article.entity.UpdateArticleMessage;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.*;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.util.StringUtils;

import java.time.Duration;

/**
 * Kafka Streams listener that aggregates article behavior events
 * (view / like / comment / collect) per article inside a fixed time window
 * and forwards the aggregated counts to the hot-article output topic.
 */
@EnableBinding(HotArticleProcess.class)
public class HotArticleListener {

    /** Window size in milliseconds, injected from configuration (commit.time). */
    @Value("${commit.time}")
    private String commitTime;

    /**
     * Consumes behavior messages from the "article_behavior" topic, re-keys them
     * by article id, aggregates counts inside a tumbling time window and emits
     * the aggregated JSON to the "hot_article_output" topic.
     *
     * Incoming value format (UpdateArticleMessage):
     *   {"articleId":1526388337244405762,"type":0,"add":1}
     *
     * @param input stream of raw behavior JSON messages
     * @return stream keyed by article id whose values are ArticleStreamMessage JSON
     */
    @StreamListener(value = "article_behavior")  // listen on the behavior topic
    @SendTo("hot_article_output") // route the result to the output topic
    public KStream<String, String> process(KStream<String, String> input) {
        // Re-key each message by its article id so grouping works per article.
        // NOTE(review): assumes getArticleId() is never null in valid messages — confirm upstream contract.
        KStream<String, String> keyedByArticle = input.map((key, value) -> {
            UpdateArticleMessage message = JSON.parseObject(value, UpdateArticleMessage.class);
            Long articleId = message.getArticleId();
            System.out.println("接收到消息:" + value);
            return new KeyValue<>(articleId.toString(), value);
        });

        // Group by article id, then bucket into fixed-size (tumbling) time windows.
        long windowMillis = Long.parseLong(commitTime);
        TimeWindowedKStream<String, String> windowed = keyedByArticle
                .groupByKey()
                .windowedBy(TimeWindows.of(Duration.ofMillis(windowMillis)));

        // Aggregate per window. The intermediate state is the JSON form of
        // ArticleStreamMessage; null marks "no message seen yet in this window".
        KTable<Windowed<String>, String> aggregated = windowed.aggregate(
                () -> {
                    // Invoked once per window before the first message arrives.
                    System.out.println("中间结果初始化...");
                    return null;
                },
                this::aggregateBehavior);

        // Strip the window wrapper so the downstream key is just the article id.
        return aggregated.toStream()
                .map((windowedKey, value) -> new KeyValue<>(windowedKey.key(), value));
    }

    /**
     * Folds one behavior message into the running per-window aggregate.
     *
     * @param key       article id (stream key)
     * @param value     raw UpdateArticleMessage JSON, e.g.
     *                  {"articleId":1526388337244405762,"type":0,"add":1}
     * @param aggregate previous aggregate as ArticleStreamMessage JSON, or
     *                  null/empty for the first message in the window
     * @return updated aggregate serialized as JSON
     */
    private String aggregateBehavior(String key, String value, String aggregate) {
        System.out.println("聚合处理, key = " + key + " , value = " + value + " , aggregate = " + aggregate);
        ArticleStreamMessage result;
        if (aggregate == null || aggregate.isEmpty()) {
            // First message of the window: start a fresh aggregate for this article.
            result = new ArticleStreamMessage();
            result.setArticleId(Long.parseLong(key));
        } else {
            // Continue from the previous aggregate.
            result = JSON.parseObject(aggregate, ArticleStreamMessage.class);
        }
        UpdateArticleMessage message = JSON.parseObject(value, UpdateArticleMessage.class);
        // Behavior type: 0 = view, 1 = like, 2 = comment, 3 = collect.
        switch (message.getType()) {
            case 0:
                result.setView(result.getView() + message.getAdd());
                break;
            case 1:
                result.setLike(result.getLike() + message.getAdd());
                break;
            case 2:
                result.setComment(result.getComment() + message.getAdd());
                break;
            case 3:
                result.setCollect(result.getCollect() + message.getAdd());
                break;
            default:
                // Unknown behavior type: leave the aggregate unchanged.
                break;
        }
        String json = JSON.toJSONString(result);
        System.out.println("本次聚合结果: " + json);
        return json;
    }
}