package com.heima.article.listener;

import com.alibaba.fastjson.JSON;
import com.heima.article.process.IHotArticleProcessor;
import com.heima.behavior.dto.ArticleAggregatorResult;
import com.heima.behavior.dto.ArticleOperationMessage;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.*;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.stereotype.Component;

import java.time.Duration;

/**
 * Real-time aggregator of article operation behaviors (view / like / comment / collect).
 * Consumes raw operation messages, re-keys them by article id, and folds them into
 * per-article counters inside a 30-second tumbling window.
 *
 * @author: itheima
 * @create: 2022-08-10 15:02
 */
@Slf4j
@Component
@EnableBinding(IHotArticleProcessor.class)
public class HotArticleListener {

    /** Length of the tumbling aggregation window. */
    private static final Duration WINDOW_SIZE = Duration.ofSeconds(30);

    /**
     * Aggregates all article operation behaviors in real time into per-article counts.
     *
     * @param input stream of records shaped Key:null, Val:{"add":1,"articleId":1553958754792316930,"type":1}
     * @return aggregated result stream shaped {"articleId":1553958754792316930,"view":100,"like":50,"comment":10,"collect":10}
     */
    @StreamListener("hot_article_score_topic")
    @SendTo("hot_article_result_topic")
    public KStream<String, String> process(KStream<String, String> input) {
        // 1. Re-key each record by its article id so records for the same article group together.
        //    Before: Key:null  Val:{"add":1,"articleId":...,"type":1}  After: Key:articleId  Val:unchanged
        KStream<String, String> rekeyed = input.map((key, value) -> {
            log.info("1.实时监听到操作行为数据：{}", value);
            // Parse only to extract the article id for the new key; the value stays as-is.
            ArticleOperationMessage message = JSON.parseObject(value, ArticleOperationMessage.class);
            log.info("1.得到新键值对象中Key：{}， Val：{}", message.getArticleId(), value);
            return new KeyValue<>(message.getArticleId() + "", value);
        });

        // 2. Group records that share the same article id.
        KGroupedStream<String, String> grouped = rekeyed.groupByKey();

        // 3. Restrict each aggregation to the most recent 30-second window.
        TimeWindowedKStream<String, String> windowed = grouped.windowedBy(TimeWindows.of(WINDOW_SIZE));

        // 4.1 First aggregation of a window starts from a null (empty) aggregate.
        //     (Typed Initializer<String> — the original used a raw Initializer.)
        Initializer<String> initializer = () -> {
            log.info("4.初次聚合设置为null");
            return null;
        };

        // 4.2 Fold one operation message into the running JSON aggregate for its article.
        Aggregator<String, String, String> aggregator = (key, value, aggregate) -> {
            log.info("4.2.0.进行聚合，上次聚合结果：{}", aggregate);
            // 4.2.1 Recover (or zero-initialize) the running result for this article.
            ArticleAggregatorResult result = previousOrEmpty(key, aggregate);
            // 4.2.2 Parse the current operation message.
            ArticleOperationMessage message = JSON.parseObject(value, ArticleOperationMessage.class);
            // 4.2.3 Add this operation's delta onto the running counters.
            accumulate(result, message);
            log.info("4.2.3.进行聚合后结果：{}", JSON.toJSONString(result));
            return JSON.toJSONString(result);
        };

        KTable<Windowed<String>, String> aggregate = windowed.aggregate(initializer, aggregator);

        // 5. Unwrap the windowed key back to the plain article id before emitting downstream.
        return aggregate.toStream().map((windowedKey, value) -> new KeyValue<>(windowedKey.key(), value));
    }

    /**
     * Returns the previous aggregate parsed from JSON, or a zeroed result when this is
     * the first record seen for the article in the current window.
     *
     * @param articleId grouping key (article id as a string)
     * @param aggregate previous aggregate JSON, blank/null on first invocation
     * @return a mutable running result for this article
     */
    private ArticleAggregatorResult previousOrEmpty(String articleId, String aggregate) {
        if (StringUtils.isBlank(aggregate)) {
            ArticleAggregatorResult result = new ArticleAggregatorResult();
            result.setArticleId(Long.valueOf(articleId));
            result.setView(0);
            result.setLike(0);
            result.setCollect(0);
            result.setComment(0);
            return result;
        }
        return JSON.parseObject(aggregate, ArticleAggregatorResult.class);
    }

    /**
     * Adds one operation message's delta onto the counter matching its behavior type.
     * Type codes: 0=view, 1=like, 2=comment, 3=collect.
     *
     * @param result  running per-article counters, mutated in place
     * @param message the operation message to apply
     */
    private void accumulate(ArticleAggregatorResult result, ArticleOperationMessage message) {
        Integer add = message.getAdd();
        switch (message.getType()) {
            case 0:
                result.setView(result.getView() + add);
                break;
            case 1:
                result.setLike(result.getLike() + add);
                break;
            case 2:
                result.setComment(result.getComment() + add);
                break;
            case 3:
                result.setCollect(result.getCollect() + add);
                break;
            default:
                // Unknown behavior type: ignore rather than crash the stream thread,
                // but surface it in the logs (the original switch dropped these silently).
                log.warn("Unknown article operation type: {}, message: {}", message.getType(), JSON.toJSONString(message));
                break;
        }
    }
}
