package com.heima.article.listener;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.heima.article.processor.IHotArticleProcessor;
import com.heima.behavior.dto.ArticleOperationMessage;
import com.heima.behavior.entity.ArticleAggregatorResult;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.*;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.stereotype.Component;

import java.time.Duration;

@Slf4j
@Component
@EnableBinding(IHotArticleProcessor.class)
public class HotArticleListener {

    /** Length of the tumbling aggregation window. */
    private static final Duration WINDOW_SIZE = Duration.ofSeconds(30);

    /**
     * Real-time aggregation of article behavior events.
     * <p>
     * Input record:  key = null, value = {@code {"add":-1,"articleId":1379649440003920000,"type":1}}<br>
     * Output record: key = articleId, value = {@code {"articleId":1379649440003920000,"view":100,"like":40,"collection":20}}
     *
     * @param input raw behavior event stream bound to {@code hot_article_score_topic}
     * @return per-article aggregation results, forwarded to {@code hot_article_result_topic}
     */
    @StreamListener("hot_article_score_topic")
    @SendTo("hot_article_result_topic")
    public KStream<String, String> process(KStream<String, String> input) {
        return input
                // Re-key each record by its articleId so all events for one article group together.
                .map(this::rekeyByArticleId)
                .groupByKey()
                // Tumbling window: aggregate events in 30-second buckets.
                .windowedBy(TimeWindows.of(WINDOW_SIZE))
                // Initializer yields null; aggregate() treats a blank previous result
                // as "first event in this window" and starts from zeroed counters.
                .aggregate(() -> {
                    log.debug("初次聚合");
                    return null;
                }, this::aggregate)
                .toStream()
                // Unwrap the windowed key so downstream only sees articleId -> aggregated JSON.
                .map((windowedKey, value) -> new KeyValue<>(windowedKey.key(), value));
    }

    /**
     * Re-keys a raw behavior event by its article id.
     * <p>
     * key: null, value: {@code {"add":-1,"articleId":1379649440003920000,"type":1}}
     * becomes key: {@code "1379649440003920000"}, value unchanged.
     *
     * @param key   original record key (null on this topic)
     * @param value behavior event JSON
     * @return the same value, keyed by {@code articleId}
     */
    private KeyValue<String, String> rekeyByArticleId(String key, String value) {
        // Routine tracing — was incorrectly logged at ERROR level before.
        log.debug("1.实时监听到的数据:{}", value);
        ArticleOperationMessage message = JSONObject.parseObject(value, ArticleOperationMessage.class);
        log.debug("1.得到键值中的数据key:{},value:{}", key, value);
        return new KeyValue<>(message.getArticleId().toString(), value);
    }

    /**
     * Folds one behavior event into the running per-window aggregate.
     *
     * @param key       stream key — article id
     * @param value     behavior event JSON, e.g. {@code {"add":1,"articleId":1553958754792316930,"type":1}}
     * @param aggregate previous aggregate JSON, e.g.
     *                  {@code {"articleId":1553958754792316930,"view":100,"like":50,"comment":10,"collect":10}};
     *                  blank/null on the first event of a window
     * @return updated aggregate JSON with this event's delta applied
     */
    private String aggregate(String key, String value, String aggregate) {
        log.debug("数据中的key：{}，value：{}，上次聚合结果{}", key, value, aggregate);
        ArticleAggregatorResult result;
        if (StringUtils.isBlank(aggregate)) {
            // First event of the window for this article: start from zeroed counters.
            result = new ArticleAggregatorResult();
            result.setArticleId(Long.valueOf(key));
            result.setView(0);
            result.setLike(0);
            result.setComment(0);
            result.setCollect(0);
        } else {
            result = JSON.parseObject(aggregate, ArticleAggregatorResult.class);
        }
        // Apply this event's delta to the counter selected by its type.
        // type: 0 = view, 1 = like, 2 = comment, 3 = collect (inferred from the setters used below).
        ArticleOperationMessage message = JSON.parseObject(value, ArticleOperationMessage.class);
        Integer type = message.getType();
        Integer add = message.getAdd();
        switch (type) {
            case 0:
                result.setView(result.getView() + add);
                break;
            case 1:
                result.setLike(result.getLike() + add);
                break;
            case 2:
                result.setComment(result.getComment() + add);
                break;
            case 3:
                result.setCollect(result.getCollect() + add);
                break;
            default:
                // Previously unknown types were silently dropped; surface them so
                // misbehaving producers are visible in the logs.
                log.warn("未知的操作类型 type={}, value={}", type, value);
                break;
        }
        String json = JSON.toJSONString(result);
        log.debug("聚合之后的结果：{}", json);
        return json;
    }
}
