package com.gsm.projects.jobDw.dw.ods;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.gsm.projects.jobDw.dw.beans.EmploymentRecord;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;

import java.util.Properties;

/**
 * ODS (Operational Data Store) layer job: reads raw employment records as JSON
 * strings from Kafka, drops malformed or incomplete records, and writes the
 * cleaned records back to a Kafka ODS topic.
 */
public class ODSLayer {

    /** Kafka broker list shared by the source and the sink. */
    private static final String BOOTSTRAP_SERVERS =
            "hadoop102:9092,hadoop103:9092,hadoop104:9092";

    private static final String SOURCE_TOPIC = "raw_employment_records";
    private static final String SINK_TOPIC = "ods_employment_records";

    public static void main(String[] args) throws Exception {

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Read raw records from Kafka (no event-time semantics needed here,
        // so no watermarks are generated).
        DataStream<String> stream =
                env.fromSource(buildSource(), WatermarkStrategy.noWatermarks(), "Kafka Source");

        // Clean the data: parse each record and drop those that are malformed
        // or are missing required fields. The mapper returns null for rejects,
        // which the filter removes.
        DataStream<String> cleanedStream = stream
                .map(new CleaningMapper())
                .filter(record -> record != null);

        // Write the cleaned stream back to the Kafka ODS topic.
        // NOTE(review): the sink uses the default delivery guarantee (NONE);
        // consider AT_LEAST_ONCE if duplicates/loss matter downstream.
        cleanedStream.sinkTo(buildSink());

        // Launch the streaming job.
        env.execute("ODS Layer Processing");
    }

    /** Builds the Kafka source reading raw employment records from the latest offsets. */
    private static KafkaSource<String> buildSource() {
        return KafkaSource.<String>builder()
                .setBootstrapServers(BOOTSTRAP_SERVERS)
                .setTopics(SOURCE_TOPIC)
                .setGroupId("gsm")
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .setStartingOffsets(OffsetsInitializer.latest())
                .build();
    }

    /** Builds the Kafka sink writing cleaned records to the ODS topic. */
    private static KafkaSink<String> buildSink() {
        return KafkaSink.<String>builder()
                .setBootstrapServers(BOOTSTRAP_SERVERS)
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        .setTopic(SINK_TOPIC)
                        .setValueSerializationSchema(new SimpleStringSchema())
                        .build())
                .build();
    }

    /**
     * Parses a raw JSON record and re-serializes it if it passes the cleaning
     * checks; returns {@code null} for records that fail to parse or lack the
     * required fields, so a downstream filter can discard them.
     */
    private static final class CleaningMapper implements MapFunction<String, String> {

        // ObjectMapper is expensive to construct; reuse one instance per task.
        // Transient + lazy init because the function instance is serialized
        // when the job graph is shipped to the task managers.
        private transient ObjectMapper mapper;

        @Override
        public String map(String value) throws Exception {
            if (mapper == null) {
                mapper = new ObjectMapper();
            }
            try {
                EmploymentRecord record = mapper.readValue(value, EmploymentRecord.class);
                // Required-field check: drop records missing offer time or position.
                if (record.getSendOfferTime() != null && record.getPosition() != null) {
                    return mapper.writeValueAsString(record);
                }
                return null;
            } catch (JsonProcessingException e) {
                // Malformed JSON must not fail the whole job; treat it as a
                // dirty record and filter it out.
                return null;
            }
        }
    }
}