package doris;


import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.ResolvedSchema;

import java.util.Properties;


/**
 * Flink streaming job: reads comma-separated user-activity records from a
 * Kafka topic and writes them into a Doris table through Doris's
 * MySQL-protocol JDBC endpoint (port 9030).
 *
 * <p>Expected record format: {@code user_<id>,<activity>,<epochMillis>},
 * e.g. {@code user_42,login,1710000000000}.
 */
class KafkaToDoris {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        String topic = "flink-doris-topic";

        Properties kafkaProps = new Properties();
        kafkaProps.setProperty("bootstrap.servers", "sjz01:9092,sjz02:9092,sjz03:9092");
        kafkaProps.setProperty("group.id", "flink-consumer-group");

        // Consume raw CSV strings from the Kafka topic; start from the earliest
        // offset so historical records are replayed on a fresh consumer group.
        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), kafkaProps);
        kafkaConsumer.setStartFromEarliest();

        DataStream<String> dataStream = env.addSource(kafkaConsumer);
        dataStream.print(); // debug output of raw records; replace with a logger in production

        // Anonymous MapFunction (rather than a lambda) so Flink can extract the
        // Tuple3 type information without an explicit .returns(...) hint.
        DataStream<Tuple3<Integer, String, Long>> userLogs =
                dataStream.map(new MapFunction<String, Tuple3<Integer, String, Long>>() {
                    @Override
                    public Tuple3<Integer, String, Long> map(String value) {
                        return parseRecord(value);
                    }
                });

        String jdbcUrl = "jdbc:mysql://sjz02:9030/demo";
        String jdbcDriver = "com.mysql.cj.jdbc.Driver";
        String jdbcUsername = "root";
        // NOTE(review): hardcoded credential in source — move to a config file,
        // environment variable, or secrets manager before deploying.
        String jdbcPassword = "Sjz@0626";

        // Micro-batch writes: flush every 100 rows or every 200 ms, whichever
        // comes first.
        JdbcExecutionOptions executionOptions = JdbcExecutionOptions.builder()
                .withBatchSize(100)
                .withBatchIntervalMs(200)
                .build();

        // NOTE(review): "timestamp" is a reserved word in MySQL-dialect parsers;
        // if Doris rejects this statement, quote the column as `timestamp`.
        userLogs.addSink(JdbcSink.sink(
                "INSERT INTO user_logs (user_id, activity, timestamp) VALUES (?, ?, ?)",
                (JdbcStatementBuilder<Tuple3<Integer, String, Long>>) (ps, t) -> {
                    ps.setInt(1, t.f0);
                    ps.setString(2, t.f1);
                    ps.setLong(3, t.f2);
                },
                executionOptions,
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withUrl(jdbcUrl)
                        .withDriverName(jdbcDriver)
                        .withUsername(jdbcUsername)
                        .withPassword(jdbcPassword)
                        .build()
        ));

        env.execute("Flink Kafka to Doris");
    }

    /**
     * Parses one Kafka record of the form {@code user_<id>,<activity>,<epochMillis>}.
     *
     * <p>Validates the shape up front so a malformed record produces a
     * diagnostic message carrying the offending payload instead of a bare
     * {@code ArrayIndexOutOfBoundsException} deep inside the pipeline.
     *
     * @param value raw record text from Kafka
     * @return (userId, activity, timestamp) tuple
     * @throws IllegalArgumentException if the record does not have three
     *         comma-separated fields, the first field is not {@code user_<id>},
     *         or the id/timestamp parts are not numeric
     */
    private static Tuple3<Integer, String, Long> parseRecord(String value) {
        String[] fields = value.split(",");
        if (fields.length < 3) {
            throw new IllegalArgumentException(
                    "Malformed record, expected 3 comma-separated fields: " + value);
        }
        String[] userParts = fields[0].split("_");
        if (userParts.length < 2) {
            throw new IllegalArgumentException(
                    "Malformed user field, expected user_<id>: " + value);
        }
        try {
            return new Tuple3<>(
                    Integer.parseInt(userParts[1]), fields[1], Long.parseLong(fields[2]));
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException(
                    "Non-numeric id or timestamp in record: " + value, e);
        }
    }
}

