package org.example.flink.connections.pulsar;

import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.FileUtil;
import lombok.Data;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.WindowAssigner;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.windows.Window;
import org.apache.flink.streaming.connectors.pulsar.FlinkPulsarSink;
import org.apache.flink.streaming.util.serialization.PulsarSerializationSchema;
import org.apache.flink.streaming.util.serialization.PulsarSerializationSchemaWrapper;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.AtomicDataType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import java.io.Serializable;
import java.nio.charset.Charset;
import java.time.Duration;
import java.util.Collection;
import java.util.Date;
import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class CountSource {

    /**
     * Flink job entry point: reads {@link PD} records from a custom parallel source,
     * assigns bounded-out-of-orderness event-time watermarks, renders each record as
     * its {@code toString()} form and writes it to a Pulsar topic.
     *
     * @param args unused
     * @throws Exception if the job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        // Deprecated since Flink 1.12 (event time is the default there); kept so the
        // job still behaves the same on older runtimes.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        env.enableCheckpointing(60000);

        DataStreamSource<PD> pdDataStreamSource = env.addSource(new CustSource());

        // Event-time timestamps come from the record itself; tolerate up to 10s of
        // out-of-order arrival before the watermark advances past a record.
        SingleOutputStreamOperator<String> pdSource = pdDataStreamSource
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy.<PD>forBoundedOutOfOrderness(Duration.ofSeconds(10))
                                .withTimestampAssigner(
                                        new SerializableTimestampAssigner<PD>() {
                                            @Override
                                            public long extractTimestamp(PD element, long recordTimestamp) {
                                                return element.getTimeStamp().getTime();
                                            }
                                        }
                                ))
                // Previously mapped PD -> Tuple2<PD, 1> -> tuple._1.toString(); the tuple
                // carried no information and was discarded immediately, so render directly.
                .map(new MapFunction<PD, String>() {
                    @Override
                    public String map(PD value) {
                        return value.toString();
                    }
                });

        String serviceUrl = "http://localhost:8080";
        String adminUrl = serviceUrl;
        String topic = "persistent://public/default/s1";
        Properties props = new Properties();

        // Serialize the String payload with Pulsar's atomic (single-column) schema mode.
        PulsarSerializationSchema<String> pulsarSerialization =
                new PulsarSerializationSchemaWrapper.Builder<>(new SimpleStringSchema())
                        .useAtomicMode(DataTypes.STRING())
                        .build();

        // Parameterized sink type (was a raw FlinkPulsarSink, which defeats generic type checking).
        FlinkPulsarSink<String> sink = new FlinkPulsarSink<>(
                serviceUrl,
                adminUrl,
                Optional.of(topic),
                props,
                pulsarSerialization
        );

        pdSource.addSink(sink);

        env.execute("CountSource");
    }

}

/**
 * Serializable record payload emitted by the source: a counter value ({@code age}),
 * a name and an event-time timestamp. Getters/setters/equals/hashCode/toString are
 * generated by Lombok's {@code @Data}.
 */
@Data
class PD implements Serializable {
    // Explicit serialVersionUID: this class is shipped between Flink operators via
    // Java serialization, so pin the UID instead of relying on the JVM-computed
    // default (which changes whenever the class shape changes).
    private static final long serialVersionUID = 1L;

    private int age;
    private String name;
    // java.util.Date kept (not java.time) because callers rely on getTimeStamp().getTime().
    private Date timeStamp;
}

/**
 * Parallel test source that emits up to 20000 {@link PD} records per subtask,
 * each stamped with the current time.
 */
class CustSource implements ParallelSourceFunction<PD> {
    private static final Logger logger = LoggerFactory.getLogger(CustSource.class);

    // Per-subtask emission counter (each parallel instance keeps its own copy).
    private final AtomicInteger count = new AtomicInteger(0);

    // Cooperative cancellation flag; volatile because cancel() is invoked from a
    // different thread than run().
    private volatile boolean running = true;

    @Override
    public void run(SourceContext<PD> ctx) throws Exception {
        final int size = 20000;
        // Honor cancellation between records; previously cancel() was a no-op and the
        // loop could not be stopped by the framework.
        while (running && count.get() < size) {
            count.incrementAndGet();
            PD pd = new PD();
            pd.setAge(count.get());
            pd.setName("");
            pd.setTimeStamp(DateUtil.date());
            logger.info("pd={}", pd);
            // Emit under the checkpoint lock so records and checkpoints don't interleave,
            // per the SourceFunction contract for checkpointed jobs.
            synchronized (ctx.getCheckpointLock()) {
                ctx.collect(pd);
            }
        }
    }

    @Override
    public void cancel() {
        // Signal run() to exit its emission loop.
        running = false;
    }
}