package service;

import com.github.openjson.JSONObject;
import com.ververica.cdc.connectors.postgres.PostgreSQLSource;
import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
import com.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.FlatMapIterator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.IOUtils;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import util.EsUtil;
import util.IOUtilES;
import util.JDBCUtil;


import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;

/**
 * Flink CDC job: streams row changes from the PostgreSQL asset table
 * {@code public.edac_assetbase_ss} and keeps the Elasticsearch index
 * {@code officefile} in sync — indexing the file contents of published
 * assets and deleting documents for removed assets.
 * created by haoziqi on 2022/4/7
 */
public class PostGreSource {
    private static final Logger LOG = LoggerFactory.getLogger(PostGreSource.class);
    //记录监控类型的集合
    private static final HashSet formatset=new HashSet();
    //记录挂载路径
    private static final String  path1="/assetdata";
    public static void main(String[] args) throws Exception {
        //记录文件格式
        formatset.add("docx");
        formatset.add("doc");
        formatset.add("txt");
        formatset.add("md");
        formatset.add("pdf");
        formatset.add("xlsx");
        formatset.add("csv");
        formatset.add("xls");
        formatset.add("mp4");
        //1.创建source
        //补充:关于startupOptions方法，pgsqlsource并没有提供，默认会先读取原始数据快照，再读取更新数据
        SourceFunction<String> sourceFunction = PostgreSQLSource.<String>builder()
                .hostname("10.7.221.38")
                .port(5432)
                .database("edg231") // 指定库
                .schemaList("public")  // 指定模式
                .tableList("public.edac_assetbase_ss") // 指定表
                .username("postgres")
                .password("postgres")
                .deserializer(new StringDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
                .slotName("t_table_slot2") // 复制槽名称 不能重复
                .deserializer(new DebeziumDeserializationSchema<String>() {  //自定义数据解析器
                                  @Override
                                  public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {

                                      //获取主题信息,包含着数据库和表名  mysql_binlog_source.gmall-flink.z_user_info
                                      String topic = sourceRecord.topic();
                                      String[] arr = topic.split("\\.");
                                      String tableName = arr[2];

                                      //获取操作类型 READ DELETE UPDATE CREATE
                                      Envelope.Operation operation = Envelope.operationFor(sourceRecord);

                                      //获取值信息并转换为Struct类型
                                      Struct value = (Struct) sourceRecord.value();

                                      //获取变化后的数据，主要分为：读取之前数据（read）、
                                      Struct after = value.getStruct("after");
                                      Struct before = value.getStruct("before");
                                      if(after!=null) {
                                          //创建JSON对象用于存储数据信息
                                          JSONObject data = new JSONObject();
                                          for (Field field : after.schema().fields()) {
                                              Object o = after.get(field);
                                              data.put(field.name(), o);
                                          }

                                          //创建JSON对象用于封装最终返回值数据信息
                                          JSONObject result = new JSONObject();
                                          result.put("operation", operation.toString().toLowerCase());
                                          result.put("data", data);
                                          //库名直接用常量写死，因为本脚本库名不会发生变化，如果该代码需要复用，使用  String db=value.getStruct("source").getStruct("db") 拿到库名。
                                          result.put("database", "edg231");
                                          result.put("table", tableName);

                                          //发送数据至下游
                                          collector.collect(result.toString());
                                      }else if(before!=null){
                                          //创建JSON对象用于存储数据信息
                                          JSONObject data = new JSONObject();
                                          for (Field field : before.schema().fields()) {
                                              Object o = before.get(field);
                                              data.put(field.name(), o);
                                          }

                                          //创建JSON对象用于封装最终返回值数据信息
                                          JSONObject result = new JSONObject();
                                          result.put("operation", operation.toString().toLowerCase());
                                          result.put("data", data);
                                          //库名直接用常量写死，因为本脚本库名不会发生变化，如果该代码需要复用，使用  String db=value.getStruct("source").getStruct("db") 拿到库名。
                                          result.put("database", "edg231");
                                          result.put("table", tableName);

                                          //发送数据至下游
                                          collector.collect(result.toString());
                                      }
                                  }

                                  @Override
                                  public TypeInformation<String> getProducedType() {
                                      return TypeInformation.of(String.class);
                                  }
                              }
                )
                .build();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        //2.Flink-CDC将读取binlog的位置信息以状态的方式保存在CK,如果想要做到断点续传,需要从Checkpoint或者Savepoint启动程序
        //2.1 开启Checkpoint,每隔5秒钟做一次CK pgsqlcdc必须开启ck，否则会出现连接无法关闭的情况导致内存泄露
        env.enableCheckpointing(5000L);
        //2.2 指定CK的一致性语义
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        //2.3 设置任务关闭的时候保留最后一次CK数据
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        //2.4 指定从CK自动重启策略
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 2000L));
        //2.5 设置状态后端
        env.setStateBackend(new FsStateBackend("hdfs://10.7.221.29:8020/flinkCDC"));
        //2.6 设置访问HDFS的用户名 HDFS登陆时会优先从JVM里找HADOOP_USER_NAME属性
        System.setProperty("HADOOP_USER_NAME", "hdfs");


        DataStreamSource<String> ds = env
                .addSource(sourceFunction);
        SingleOutputStreamOperator<String> result = ds.process(new ProcessFunction<String, String>() {
            private EsUtil esUtil = null;
            private Connection conn = null;
            private JDBCUtil jdbcUtil = null;
            private IOUtilES ioUtils = null;
            private ResultSet query=null;

            @Override
            public void open(Configuration parameters) throws Exception {
                esUtil = new EsUtil("10.7.221.29", 9200);
                conn = new JDBCUtil().getConnection();
                jdbcUtil = new JDBCUtil();
                ioUtils = new IOUtilES();

            }

            @Override
            public void processElement(String value, Context ctx, Collector<String> out) throws SQLException {
                JSONObject jsonObject = new JSONObject(value);
                String operation = jsonObject.getString("operation");
                //资产id
                String assetid = jsonObject.getJSONObject("data").getString("id_");
                //status结果为“3”时是发布状态，其他都属于未发布状态
                String status = jsonObject.getJSONObject("data").getString("status_");
                //发布路径
                String publishpath = jsonObject.getJSONObject("data").getString("publishpath_");
                //资产类型
                String format = jsonObject.getJSONObject("data").getString("format_").toLowerCase();
                if ("3".equals(status) && formatset.contains(format)&&("read".equals(operation)||"create".equals(operation)||"update".equals(operation))) {
                    query = jdbcUtil.query(conn, "select path_ from esen_xmgyy_edac_assetpublish where assetid_='" + assetid+"'");
                    String path_="";
                    while (query.next()){
                         path_ = query.getString("path_");
                    }
                    String pf=path1 +publishpath +"/"+ path_;
                    File f = new File(pf);
                    if (f.exists()) {
                        try {
                            String contents = ioUtils.getContents(f);
                            String addstatus = esUtil.createDoc("officefile", assetid, assetid, contents);
                            if("created".equals(addstatus)){
                                LOG.info("资产-------" + publishpath + "-------写入成功!");
                            }else{
                                LOG.debug("资产-------"+publishpath+"-------写入ES失败");
                            }

                        } catch (IOException e) {
                            LOG.debug("资产-------"+publishpath+"-------写入ES失败");
                        }
                    } else {
                        LOG.debug("资产-------" + publishpath + "-------写入ES失败，文件不存在");
                    }
                }else  if (!"3".equals(status) && formatset.contains(format)&&"delete".equals(operation)) {
                    try {
                        String docstatus = esUtil.deleteDoc("officefile", assetid);
                        if("deleted".equals(docstatus)){
                            LOG.debug("资产-------"+assetid+"-------已删除");
                        }
                    } catch (IOException e) {
                        LOG.debug("资产-------"+assetid+"-------在ElasticSearch中删除失败"+e.getMessage());
                    }
                }
            }

            @Override
            public void close() throws Exception {
                query.close();
                conn.close();
                esUtil.client.close();
                esUtil=null;
                jdbcUtil=null;
                ioUtils=null;
            }
        });
        ds.print();
        env.execute();
    }
}
