package com.heima.article.listener;

import com.alibaba.fastjson.JSON;
import com.heima.article.processor.IHotArticleProcessor;
import com.heima.behavior.dto.ArticleStreamMessage;
import com.heima.behavior.dto.UpdateArticleMessage;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.*;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;

import java.time.Duration;

/**
 * @author: itheima
 * @create: 2022-07-13 15:14
 */
@Slf4j
@EnableBinding(IHotArticleProcessor.class)
public class HotArticleListener {


    /**
     * Real-time computation: counts all behavior events for each article within a
     * short tumbling time window (30 seconds).
     *
     * @param input behavior event stream, shaped like:
     *              key: null
     *              val: {"add":1,"articleId":1543814439277469698,"type":1}
     * @return per-window aggregated behavior counts per article, e.g.
     *         {"articleId":1543814439277469698,"view":100,"like":50,"collect":25,"comment":12}
     */
    @StreamListener("hot_article_score_topic")
    @SendTo("hot_article_result_topic")
    public KStream<String, String> process(KStream<String, String> input) {
        // 1. Re-key the stream: the incoming key is null, so replace it with the
        //    article id to make the subsequent groupByKey group per article.
        KStream<String, String> keyedByArticle = input.map((key, value) -> {
            // Parse the raw behavior JSON only to extract the article id; the
            // original payload is kept as the value unchanged.
            UpdateArticleMessage message = JSON.parseObject(value, UpdateArticleMessage.class);
            return new KeyValue<>(message.getArticleId().toString(), value);
        });

        // 2. Group by article id.
        //    key: 1543814439277469698   val: {"add":1,"articleId":1543814439277469698,"type":1}
        KGroupedStream<String, String> groupedStream = keyedByArticle.groupByKey();

        // 3. Tumbling 30-second window. (A previous comment said "one minute",
        //    but the configured window size below is 30 seconds.)
        TimeWindowedKStream<String, String> windowedStream =
                groupedStream.windowedBy(TimeWindows.of(Duration.ofSeconds(30)));

        // 4.1 Initializer: no prior aggregate exists when the window opens; the
        //     null sentinel is detected via StringUtils.isBlank in the aggregator.
        Initializer<String> initializer = () -> null;

        // 4.2 Aggregator: fold the current behavior event into the running
        //     per-window, per-article result.
        Aggregator<String, String, String> aggregator = (key, value, aggregate) -> {
            log.info("聚合回调:本次数据：{}，上次聚合数据{}", value, aggregate);
            ArticleStreamMessage result;
            if (StringUtils.isBlank(aggregate)) {
                // First event in this window: start from zeroed counters.
                result = new ArticleStreamMessage();
                result.setArticleId(Long.parseLong(key));
                result.setView(0);
                result.setComment(0);
                result.setCollect(0);
                result.setLike(0);
            } else {
                // Resume from the previous aggregate within the same window,
                // e.g. {"articleId":123,"view":1,"like":1,"comment":1,"collect":1}
                result = JSON.parseObject(aggregate, ArticleStreamMessage.class);
            }

            // Add the current event's delta to the matching counter.
            UpdateArticleMessage currentBehavior = JSON.parseObject(value, UpdateArticleMessage.class);
            switch (currentBehavior.getType()) {
                case 0:
                    // view
                    result.setView(result.getView() + currentBehavior.getAdd());
                    break;
                case 1:
                    // like
                    result.setLike(result.getLike() + currentBehavior.getAdd());
                    break;
                case 2:
                    // comment
                    result.setComment(result.getComment() + currentBehavior.getAdd());
                    break;
                case 3:
                    // collect
                    result.setCollect(result.getCollect() + currentBehavior.getAdd());
                    break;
                default:
                    // Unknown behavior type: leave the aggregate unchanged but
                    // record it so bad producers are visible instead of silent.
                    log.warn("Unknown article behavior type: {}, articleId: {}",
                            currentBehavior.getType(), key);
                    break;
            }
            return JSON.toJSONString(result);
        };

        // 4.3 Run the windowed aggregation.
        KTable<Windowed<String>, String> aggregate = windowedStream.aggregate(initializer, aggregator);

        // 5. Unwrap the windowed key back into a plain article-id string so the
        //    result can be published to the output topic.
        return aggregate.toStream().map((windowedKey, value) -> {
            log.info("处理类型，发送结果到目标");
            return new KeyValue<>(windowedKey.key(), value);
        });
    }

}
