import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.oracle.OracleSource;
import com.ververica.cdc.debezium.DebeziumSourceFunction;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.StringJoiner;

/**
 * Flink job that captures change events from an Oracle database via CDC
 * (Debezium LogMiner) and can replay them into TiDB as
 * insert/update/delete statements.
 *
 * @author jason
 * @create 2022-04-09 21:35
 */
public class Demo {
    public static void main(String[] args) throws Exception {
        // Debezium LogMiner tuning: read the online catalog and mine
        // continuously instead of re-building the dictionary per batch.
        Properties pros = new Properties();
        pros.setProperty("debezium.log.mining.strategy", "online_catalog");
        pros.setProperty("debezium.log.mining.continuous.mine", "true");

        // NOTE(review): connection details and credentials are hard-coded;
        // move them to external configuration before production use.
        DebeziumSourceFunction<String> sourceFunction = OracleSource.<String>builder()
                .hostname("172.16.18.113")
                .port(1521)
                .database("ECOLOGY")
                .schemaList("schema")
                .tableList("schema.table1, schema.table2")
                .username("ecologyqr")
                .password("ecologyQr5000")
                .debeziumProperties(pros)
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
//        // User to access hadoop as
//        System.setProperty("HADOOP_USER_NAME", "hadoop");
//        // Enable checkpointing
//        env.enableCheckpointing(1000);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        // Checkpoint storage location
//        env.setStateBackend(new FsStateBackend("hdfs://ip:9000/user/bd/flink/checkpoint/", true));
//        // Retain checkpoints when the job is cancelled
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // CDC source stream of raw Debezium change-event JSON.
        DataStreamSource<String> source = env.addSource(sourceFunction);

        // Normalize each message through the ArchiveLog POJO before sinking.
        SingleOutputStreamOperator<String> archiveLog = source.map((MapFunction<String, String>) json -> {
            ArchiveLog parsed = JSON.parseObject(json, ArchiveLog.class);
            return JSON.toJSONString(parsed);
        });

        // Persist change events into TiDB.
//        archiveLog.addSink(new SinkToTiDB());
        archiveLog.print();
        env.execute("flink cdc");
    }

    /**
     * Sink that applies each CDC event to TiDB (MySQL protocol).
     *
     * <p>Uses a Druid connection pool opened per task instance and
     * parameterized statements so row values cannot inject SQL. Table and
     * column identifiers still come from the CDC event metadata, since JDBC
     * placeholders cannot stand in for identifiers.
     */
    private static class SinkToTiDB extends RichSinkFunction<String> {
        private transient DruidDataSource dataSource = null;

        @Override
        public void open(Configuration parameters) throws Exception {
            // Connection pool for the target database.
            dataSource = new DruidDataSource();
            dataSource.setDriverClassName("com.mysql.jdbc.Driver");
            dataSource.setUsername("username");
            dataSource.setPassword("password");
            dataSource.setUrl("jdbc:mysql://ip:port/database?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&useSSL=false");
            dataSource.setMaxActive(5);
        }

        @Override
        public void close() throws Exception {
            // Release pooled connections when the task shuts down;
            // previously the pool was leaked.
            if (dataSource != null) {
                dataSource.close();
            }
        }

        /**
         * Translates one CDC event into a parameterized DML statement and
         * executes it. Ops: "c" = insert, "u" = update, "d" = delete,
         * "r" = snapshot read (no-op).
         */
        @Override
        public void invoke(String json, Context context) throws Exception {
            ArchiveLog archiveLog = JSON.parseObject(json, ArchiveLog.class);

            String op = archiveLog.getOp();
            ArchiveLogSource source = archiveLog.getSource();
            String table = source.getSchema() + "." + source.getTable();

            String sql = null;
            List<Object> params = new ArrayList<>();
            switch (op) {
                // insert
                case "c": {
                    JSONObject after = JSON.parseObject(archiveLog.getAfter());
                    StringJoiner cols = new StringJoiner(",");
                    StringJoiner marks = new StringJoiner(",");
                    for (String col : after.keySet()) {
                        cols.add(col);
                        marks.add("?");
                        params.add(after.get(col));
                    }
                    sql = "insert into " + table + "(" + cols + ") values(" + marks + ")";
                    break;
                }
                // update — the "id" column is the key, all others are SET
                case "u": {
                    JSONObject after = JSON.parseObject(archiveLog.getAfter());
                    StringJoiner sets = new StringJoiner(",");
                    Object id = null;
                    for (String col : after.keySet()) {
                        if (col.equalsIgnoreCase("id")) {
                            id = after.get(col);
                        } else {
                            sets.add(col + "=?");
                            params.add(after.get(col));
                        }
                    }
                    sql = "update " + table + " set " + sets + " where id = ?";
                    params.add(id);
                    break;
                }
                // delete — the key comes from the "before" row image
                case "d": {
                    JSONObject before = JSON.parseObject(archiveLog.getBefore());
                    sql = "delete from " + table + " where id = ?";
                    params.add(before.get("ID").toString());
                    break;
                }
                // snapshot read — nothing to replay
                case "r":
                default:
                    break;
            }
            if (sql == null) {
                // Previously an empty statement was prepared and executed for
                // every snapshot record, throwing and being swallowed.
                return;
            }
            // try-with-resources returns the connection to the pool and
            // closes the statement even when execution fails.
            try (Connection conn = dataSource.getConnection();
                 PreparedStatement ps = conn.prepareStatement(sql)) {
                for (int i = 0; i < params.size(); i++) {
                    ps.setObject(i + 1, params.get(i));
                }
                ps.execute();
            } catch (SQLException e) {
                // Keep the job alive on a single bad row, but surface the failure.
                e.printStackTrace();
            }
        }
    }
}
