package com.itheima.article.stream;

import com.itheima.article.config.KafkaStreamListener;
import com.itheima.common.constants.BusinessConstants;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.*;
import org.springframework.stereotype.Component;

import java.awt.image.BufferStrategy;
import java.time.Duration;
import java.util.Arrays;

@Component
public class HotArticleStreamHandler implements KafkaStreamListener<KStream<String, String>> {

    /**
     * Input topic. Each incoming record has a key like "stream0001" and a value
     * of the form "articleId_type" (article id and behavior type joined by '_').
     */
    @Override
    public String listenerTopic() {
        return BusinessConstants.MqConstants.HOT_ARTICLE_SCORE_TOPIC;
    }

    /**
     * Output topic that receives the aggregated per-window counts.
     */
    @Override
    public String sendTopic() {
        return BusinessConstants.MqConstants.HOT_ARTICLE_INCR_HANDLE_TOPIC;
    }

    /**
     * Builds the aggregation topology: counts how many times each
     * "articleId_type" value occurs within a 5-second tumbling window.
     *
     * @param textLines stream of records whose value is "articleId_type"
     * @return a stream keyed by "articleId_type" whose value is the
     *         occurrence count (as a string) for the window
     */
    @Override
    public KStream<String, String> getService(KStream<String, String> textLines) {
        // Re-key each record by its value so the count groups by "articleId_type"
        // instead of the original message key. (The original pipeline also ran
        // flatMapValues(Arrays::asList) first, which wrapped each value in a
        // single-element list and immediately flattened it back — a no-op,
        // so it has been removed.)
        KTable<Windowed<String>, Long> wordCounts = textLines
                .groupBy((key, word) -> word)
                // Tumbling 5-second windows: one count per window per word.
                .windowedBy(TimeWindows.of(Duration.ofSeconds(5)))
                // Materialize counts into a local state store named "counts-store".
                .count(Materialized.as("counts-store"));

        // Flatten the windowed KTable back to a stream of
        // (articleId_type, count-as-string) records for the output topic.
        return wordCounts
                .toStream()
                .map((windowedKey, count) ->
                        // windowedKey.key() is already a String — no toString() needed.
                        new KeyValue<>(windowedKey.key(), count.toString()));
    }
}
