package com.atguigu.gmall.realtime.demo;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.util.KafkaUtil;
import org.apache.doris.flink.cfg.DorisExecutionOptions;
import org.apache.doris.flink.cfg.DorisOptions;
import org.apache.doris.flink.cfg.DorisSink;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

/**
 * Demo Flink job: reads CSV records ({@code "id,code"}) from a Kafka topic and
 * streams them into Doris via stream load in per-line JSON format.
 *
 * <p>Connection settings default to the original hard-coded demo values but may be
 * overridden on the command line so credentials need not live in source control:
 * {@code args[0]} = Doris FE nodes, {@code args[1]} = username,
 * {@code args[2]} = password, {@code args[3]} = table identifier ({@code db.table}).
 *
 * @author caodan
 * @version 1.0
 * @date 2025-08-01 16:49
 */
public class FlinkDorisWriteForKafkaDemo {

    public static void main(String[] args) throws Exception {

        // Optional CLI overrides; defaults preserve the original demo configuration.
        // NOTE(review): the default password is a secret checked into source —
        // prefer passing it via args or an external config in real deployments.
        String fenodes  = args.length > 0 ? args[0] : "192.168.10.102:8030";
        String username = args.length > 1 ? args[1] : "root";
        String password = args.length > 2 ? args[2] : "Caodan520@";
        String table    = args.length > 3 ? args[3] : "doris_test.hello";

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Single parallel task keeps this demo simple and its output ordered.
        env.setParallelism(1);

        // Stream-load properties: emit one JSON object per line, no outer array.
        Properties streamLoadProps = new Properties();
        streamLoadProps.setProperty("format", "json");
        streamLoadProps.setProperty("read_json_by_line", "true");
        streamLoadProps.setProperty("strip_outer_array", "true");

        // Source: Kafka topic "test", consumer group "test_demo".
        FlinkKafkaConsumer<String> kafkaConsumer = KafkaUtil.getFlinkKafkaConsumer("test", "test_demo");

        env.addSource(kafkaConsumer)
                .map((MapFunction<String, String>) FlinkDorisWriteForKafkaDemo::toJsonRecord)
                .addSink(DorisSink.sink(
                        new DorisExecutionOptions.Builder()
                                .setEnableDelete(false)
                                .setMaxRetries(3)
                                .setStreamLoadProp(streamLoadProps)
                                .build(),
                        new DorisOptions.Builder()
                                .setFenodes(fenodes)
                                .setUsername(username)
                                .setPassword(password)
                                .setTableIdentifier(table)
                                .build())
                );
        env.execute("FlinkDorisWriteForKafkaDemo");
    }

    /**
     * Converts a raw CSV record {@code "id,code"} into a one-line JSON string
     * suitable for Doris stream load.
     *
     * @param value raw Kafka record, expected to contain at least two comma-separated fields
     * @return JSON string of the form {@code {"id":...,"code":...}}
     * @throws IllegalArgumentException if the record has fewer than two fields
     *         (previously this surfaced as a bare {@code ArrayIndexOutOfBoundsException})
     */
    private static String toJsonRecord(String value) {
        String[] fields = value.split(",");
        if (fields.length < 2) {
            throw new IllegalArgumentException(
                    "Malformed record, expected \"id,code\" but got: " + value);
        }
        JSONObject json = new JSONObject();
        json.put("id", fields[0]);
        json.put("code", fields[1]);
        return json.toString();
    }
}
