package tjs.study.notes.flink.simple.agg;

import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Demo of keyed tumbling-window aggregation with a count/timeout trigger.
 * Created by Nicky.Tang on 2024/4/28 10:51
 *
 * @since 02.12.10
 */
public class AggLogTest {
    public static final Logger LOG = LoggerFactory.getLogger(AggLogTest.class);

    public static void main(String[] args) throws Exception {

        Configuration configuration = new Configuration();
        /*
         * Local (single-machine) mode:
         * with the settings below the maximum parallelism equals
         *   taskManager count * slots per taskManager
         *   = LOCAL_NUMBER_TASK_MANAGER * TASK_MANAGER_NUM_TASK_SLOTS = 6 * 2 = 12
         *
         * YARN mode: not configured here.
         */
        configuration.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, 2);
        configuration.setInteger(ConfigConstants.LOCAL_NUMBER_TASK_MANAGER, 6);
        configuration.setInteger(ConfigConstants.DEFAULT_PARALLELISM_KEY, 1); // global default parallelism
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        DataStream<Map<String, Object>> dataStream = env.addSource(new RichParallelSourceFunction<Map<String, Object>>() {
            @Override
            public void run(SourceContext<Map<String, Object>> sourceContext) throws Exception {
                /*
                 * Parallel-read pattern (for reference):
                 * either split the target set by id % partitionSum == partitionCur,
                 * or split the time range into partitionSum segments and take segment[partitionCur].
                 */
                int partitionCur = getRuntimeContext().getIndexOfThisSubtask();      // index of this subtask
                int partitionSum = getRuntimeContext().getNumberOfParallelSubtasks(); // total parallelism of this source

                LOG.info(">>>>>>>>addSource：{}__{}", Thread.currentThread().getId() + "@" + Thread.currentThread().getName(),
                        partitionCur + "@" + partitionSum);
                // Emit 10000 synthetic rows; with setParallelism(1) below only one subtask runs.
                for (int i = 0; i < 10000; i++) {
                    Map<String, Object> row = new HashMap<>();
                    row.put("content", "A" + i);
                    sourceContext.collect(row);
                }
            }

            @Override
            public void cancel() {
                // Nothing to interrupt: run() is a bounded loop that finishes on its own.
            }
        }).setParallelism(1);

        dataStream.assignTimestampsAndWatermarks(WatermarkStrategy.forMonotonousTimestamps())
                .keyBy(new KeySelector<Map<String, Object>, Integer>() {
                    @Override
                    public Integer getKey(Map<String, Object> item) throws Exception {
                        // Keying directly on content would create too many groups (one per record),
                        // so bucket into 10 groups. Math.floorMod keeps the key in 0..9 even when
                        // hashCode() is negative (plain % would produce keys in -9..9, i.e. up to
                        // 19 distinct groups).
                        return Math.floorMod(item.get("content").hashCode(), 10);
                    }
                })
                .window(TumblingProcessingTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(1)))
                .trigger(new CountWithTimeoutTrigger<>(500)) // process() fires only when the trigger condition is met
                .process(new ProcessWindowFunction<Map<String, Object>, Map<String, Object>, Integer, TimeWindow>() {

                    @Override
                    public void process(Integer integer, ProcessWindowFunction<Map<String, Object>, Map<String, Object>, Integer, TimeWindow>.Context context, Iterable<Map<String, Object>> iterable, Collector<Map<String, Object>> collector) throws Exception {
                        if (iterable != null && iterable.iterator().hasNext()) {
                            AtomicInteger size = new AtomicInteger();
                            iterable.forEach(item -> {
                                // item.put("contentTemplate", "template" + item.get("content"));
                                size.getAndIncrement();
                            });
                            Map<String, Object> item = new HashMap<>();
                            // Store the int value, not the AtomicInteger holder object.
                            item.put("count", size.get());
                            collector.collect(item);
                            LOG.info(">>>>>>>>process：{}。{}", Thread.currentThread().getId() + "@" + Thread.currentThread().getName(), size.get());
                        }
                        // Simulate slow downstream processing. Thread.sleep is static — do not
                        // invoke it through Thread.currentThread().
                        Thread.sleep(200);
                    }
                })/*.setParallelism(12)*/
                .addSink(new RichSinkFunction<Map<String, Object>>() {
                    @Override
                    public void invoke(Map<String, Object> value, Context context) throws Exception {
                        Thread.sleep(200); // simulate slow sink
                        // Upstream emits maps keyed by "count" (not "content"); reading "content"
                        // here would always log null.
                        LOG.info(">>>>>>>>RichSinkFunction：{}。{}", Thread.currentThread().getId() + "@" + Thread.currentThread().getName(), value.get("count"));
                    }
                });


        JobExecutionResult result = env.execute("聚类分析");
        LOG.info("【聚类分析】流批执行结果：{}", result.toString());
    }
}
