package com.atguigu.gmall.realtime.test;

import org.apache.doris.flink.cfg.DorisExecutionOptions;
import org.apache.doris.flink.cfg.DorisOptions;
import org.apache.doris.flink.sink.DorisSink;
import org.apache.doris.flink.sink.writer.SimpleStringSerializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Properties;

/**
 * Demo: writing data to Doris through the Flink DataStream API.
 *
 * <p>Builds a one-element JSON stream and delivers it to the Doris table
 * {@code test.table1} with the connector's {@code DorisSink}, loading rows
 * as line-delimited JSON via stream load.
 *
 * @author Felix
 * @date 2023/12/26
 */
public class Flink06_Doris_API {
    public static void main(String[] args) throws Exception {
        // TODO 1. Basic environment setup
        // 1.1 Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Single parallelism keeps the demo output deterministic
        env.setParallelism(1);

        // TODO 2. Checkpointing (omitted in this demo).
        // NOTE: for an UNBOUNDED input stream, checkpointing MUST be enabled
        // before writing to Doris; this demo uses a bounded source.

        // TODO 3. Reading from Doris is not exercised here. A DorisSource can be
        // built with DorisSourceBuilder + a deserialization schema if needed.

        // TODO 4. Write data to Doris
        // A single JSON record serves as the bounded demo input.
        DataStreamSource<String> jsonStream = env
            .fromElements(
                "{\"siteid\": \"550\", \"citycode\": \"1001\", \"username\": \"ww\",\"pv\": \"100\"}");

        // Stream-load properties: ship rows as one-JSON-per-line instead of the
        // connector's default CSV format.
        Properties loadProps = new Properties();
        loadProps.setProperty("format", "json");
        loadProps.setProperty("read_json_by_line", "true");

        // Connection settings: Doris FE endpoint, target table, credentials.
        DorisOptions connectionOptions = DorisOptions.builder()
            .setFenodes("hadoop102:7030")
            .setTableIdentifier("test.table1")
            .setUsername("root")
            .setPassword("aaaaaa")
            .build();

        // Execution settings for the stream-load writer. The three buffer knobs
        // (count / size / interval) are OR-ed: whichever limit is hit first
        // triggers a flush.
        DorisExecutionOptions executionOptions = DorisExecutionOptions.builder()
            //.setLabelPrefix("doris-label")  // stream-load label prefix
            // With two-phase commit enabled the labelPrefix must be globally
            // unique; 2PC is disabled here to simplify repeated test runs.
            .disable2PC()
            .setDeletable(false)
            .setBufferCount(3)        // flush after 3 buffered rows (default 3)
            .setBufferSize(8 * 1024)  // flush after 8 KB buffered (default 1 MB)
            .setCheckInterval(3000)   // flush at least every 3 s
            .setMaxRetries(3)
            .setStreamLoadProp(loadProps)
            .build();

        DorisSink<String> dorisSink = DorisSink.<String>builder()
            .setDorisOptions(connectionOptions)
            .setDorisExecutionOptions(executionOptions)
            .setSerializer(new SimpleStringSerializer())
            .build();

        jsonStream.sinkTo(dorisSink);

        env.execute();
    }
}
