import com.atguigu.pojo.Event;
import com.atguigu.pojo.UrlViewCount;
import com.atguigu.pojo.WordCount;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.time.Duration;

/**
 * Flink streaming job: reads comma-separated event records from Kafka,
 * counts page views per URL in 10-second event-time tumbling windows,
 * and writes each (windowStart, windowEnd, url, count) row to MySQL via JDBC.
 *
 * <p>Expected input line format: {@code name,url,timestampMillis}.
 * NOTE(review): the original comment claimed JSON input, but the parser
 * splits on commas — confirm the actual topic payload format upstream.
 */
public class urlCountTest {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        env.enableCheckpointing(2000L);
        env.getConfig().setAutoWatermarkInterval(500L);

        // 1. Connect to the data source: read raw lines from Kafka.
        //    1.1 Build the Kafka source connector (value-only string records).
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers("hadoop102:9092,hadoop103:9092")
                .setGroupId("urlCountTest")
                .setTopics("topicA")
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .setStartingOffsets(
                        OffsetsInitializer.latest() // consume from the latest offset
                )
                // Only read records from committed Kafka transactions (exactly-once upstream).
                .setProperty(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed")
                .build();

        // 1.2 Attach the connector to the environment to obtain the raw stream.
        // Watermarks are assigned later, after parsing, so use noWatermarks() here.
        DataStreamSource<String> ds1
                = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafkaSource");

        // 1.3 Parse each CSV line into an Event(name, url, timestamp) and assign
        // event-time timestamps/watermarks (zero allowed out-of-orderness).
        SingleOutputStreamOperator<Event> ds = ds1.map(
                line -> {
                    // Fields: [0] user name, [1] url, [2] event timestamp (millis).
                    // NOTE(review): malformed lines (fewer than 3 fields or a
                    // non-numeric timestamp) will fail the task — add validation
                    // or a side output if dirty data is expected.
                    String[] fields = line.split(",");
                    return new Event(fields[0].trim(), fields[1].trim(), Long.parseLong(fields[2].trim()));
                }
        ).assignTimestampsAndWatermarks(
                WatermarkStrategy.<Event>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(
                                (event, ts) -> event.getTs()
                        )
        );

        // Debug print to verify parsing succeeded.
        ds.print("INPUT");

        // 2. Incremental aggregation (AggregateFunction) combined with a
        //    ProcessWindowFunction so the output carries full window metadata.
        SingleOutputStreamOperator<UrlViewCount> aggregatedDs = ds.map(
                        event -> new WordCount(event.getUrl(), 1L)

                ).keyBy(WordCount::getWord)
                .window(TumblingEventTimeWindows.of(Time.seconds(10)))
                .aggregate(
                        // Incrementally fold per-key counts into a UrlViewCount accumulator.
                        new AggregateFunction<WordCount, UrlViewCount, UrlViewCount>() {
                            @Override
                            public UrlViewCount createAccumulator() {
                                // Window bounds are filled in later by the ProcessWindowFunction.
                                return new UrlViewCount(null, null, null, 0L);
                            }

                            @Override
                            public UrlViewCount add(WordCount value, UrlViewCount accumulator) {
                                accumulator.setUrl(value.getWord());
                                accumulator.setCount(value.getCount() + accumulator.getCount());
                                return accumulator;
                            }

                            @Override
                            public UrlViewCount getResult(UrlViewCount accumulator) {
                                return accumulator;
                            }

                            @Override
                            public UrlViewCount merge(UrlViewCount acc0, UrlViewCount acc1) {
                                // Tumbling windows never merge, but returning null (as the
                                // original did) would NPE if the assigner were ever switched
                                // to session windows — perform a real merge instead.
                                if (acc0.getUrl() == null) {
                                    acc0.setUrl(acc1.getUrl());
                                }
                                acc0.setCount(acc0.getCount() + acc1.getCount());
                                return acc0;
                            }
                        },
                        new ProcessWindowFunction<UrlViewCount, UrlViewCount, String, TimeWindow>() {
                            @Override
                            public void process(String key, ProcessWindowFunction<UrlViewCount, UrlViewCount, String, TimeWindow>.Context context, Iterable<UrlViewCount> values, Collector<UrlViewCount> out) throws Exception {
                                // The iterable holds exactly one element: the pre-aggregated result.
                                UrlViewCount urlViewCount = values.iterator().next();

                                // Enrich with window metadata.
                                long windowStart = context.window().getStart();
                                long windowEnd = context.window().getEnd();

                                urlViewCount.setWindowStart(windowStart);
                                urlViewCount.setWindowEnd(windowEnd);

                                // Emit the fully-populated UrlViewCount.
                                out.collect(urlViewCount);
                            }
                        }
                );

        // 3. Write the windowed counts to MySQL.
        // NOTE(review): positional INSERT depends on the table's column order
        // being (window_start, window_end, url, count) — an explicit column
        // list would be safer, but is left unchanged to match the schema.
        SinkFunction<UrlViewCount> urlViewCountSinkFunction = JdbcSink.<UrlViewCount>sink(
                "insert into url_view_count values(? ,? ,? ,?)",

                new JdbcStatementBuilder<UrlViewCount>() {
                    @Override
                    public void accept(PreparedStatement preparedStatement, UrlViewCount urlViewCount) throws SQLException {
                        preparedStatement.setLong(1, urlViewCount.getWindowStart());
                        preparedStatement.setLong(2, urlViewCount.getWindowEnd());
                        preparedStatement.setString(3, urlViewCount.getUrl());
                        preparedStatement.setLong(4, urlViewCount.getCount());
                    }
                },
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withDriverName("com.mysql.cj.jdbc.Driver")
                        .withUrl("jdbc:mysql://hadoop102:3306/test")
                        .withUsername("root")
                        .withPassword("000000")
                        .build()
        );

        aggregatedDs.addSink(urlViewCountSinkFunction);

        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
