package com.gsm.projects.jobDw.dw.dws;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;

/**
 * DWS-layer Flink job: consumes DWD employment records from Kafka, keeps the
 * city with the highest count per 10-minute processing-time window, and writes
 * the winners to the ClickHouse table {@code top_cities}.
 *
 * <p>Record shape (matching the Tuple4 fields the original job deserialized):
 * f0 = city, f1 = count, f2 = window start (epoch millis), f3 = window end.
 */
public class DWSLayer {

    // ObjectMapper is thread-safe and expensive to construct; build it once
    // instead of once per record inside map().
    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka source configuration
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers("hadoop102:9092,hadoop103:9092,hadoop104:9092")
                .setTopics("dwd_employment_records")
                .setGroupId("gsm")
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .setStartingOffsets(OffsetsInitializer.latest())
                .build();

        // Read raw JSON strings from Kafka
        DataStream<String> stream = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "Kafka Source");

        // Transform and aggregate
        DataStream<Tuple4<String, Integer, Long, Long>> processedStream = stream
                .map(new MapFunction<String, Tuple4<String, Integer, Long, Long>>() {
                    @Override
                    public Tuple4<String, Integer, Long, Long> map(String value) throws Exception {
                        // Parse explicitly instead of readValue(value, Tuple4.class):
                        // with the raw class the generic Long fields erase to Object,
                        // so Jackson binds small JSON numbers as Integer and the JDBC
                        // sink's setLong(...) later fails with ClassCastException.
                        // Assumes the upstream DWD topic emits keys f0..f3, which is
                        // the only layout the original readValue call could bind —
                        // TODO confirm against the DWD producer.
                        JsonNode node = MAPPER.readTree(value);
                        return Tuple4.of(
                                node.get("f0").asText(),
                                node.get("f1").asInt(),
                                node.get("f2").asLong(),
                                node.get("f3").asLong());
                    }
                })
                .keyBy(t -> t.f2) // group by window start time
                // timeWindow(...) was removed in Flink 1.13, but KafkaSource only
                // exists from 1.12+ — use the explicit window assigner, which is
                // valid across all versions that provide KafkaSource.
                .window(TumblingProcessingTimeWindows.of(Time.minutes(10)))
                // Keep the record with the larger count; ties keep the later record,
                // matching the original reduce's "else return t2" behavior.
                .reduce((t1, t2) -> t1.f1 > t2.f1 ? t1 : t2);

        // Write winners to ClickHouse
        processedStream.addSink(JdbcSink.sink(
                "INSERT INTO top_cities (city, count, window_start, window_end) VALUES (?, ?, ?, ?)",
                (ps, t) -> {
                    ps.setString(1, t.f0);
                    ps.setInt(2, t.f1);
                    ps.setLong(3, t.f2);
                    ps.setLong(4, t.f3);
                },
                // ClickHouse performs poorly with single-row inserts; batch writes
                // and flush at least every 2s so results stay reasonably fresh.
                JdbcExecutionOptions.builder()
                        .withBatchSize(500)
                        .withBatchIntervalMs(2000)
                        .withMaxRetries(3)
                        .build(),
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withUrl("jdbc:clickhouse://localhost:8123/default")
                        .withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
                        .withUsername("your_username")
                        .withPassword("your_password")
                        .build()
        ));

        // Submit the job
        env.execute("DWS Layer Processing");
    }
}