package com.yifeng.repo.flink.data.transport.streaming.connectors.oracle;

import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.apache.commons.collections.CollectionUtils;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.mysql.cj.util.StringUtils;
import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
import com.yifeng.repo.flink.data.transport.config.DdlConstants;
import com.yifeng.repo.flink.data.transport.dto.AlterContent;
import com.yifeng.repo.flink.data.transport.dto.DbTypeEnums;
import com.yifeng.repo.flink.data.transport.dto.DdlContent;
import com.yifeng.repo.flink.data.transport.dto.DdlDataPacket;
import com.yifeng.repo.flink.data.transport.dto.DdlTableChanges;
import com.yifeng.repo.flink.data.transport.dto.DdlTableChangesColumns;
import com.yifeng.repo.flink.data.transport.dto.DdlTableChangesDetail;
import com.yifeng.repo.flink.data.transport.dto.DdlTypeEnums;
import com.yifeng.repo.flink.data.transport.dto.HistoryRecord;
import com.yifeng.repo.flink.data.transport.streaming.connectors.mysql.antlr.listener.MysqlDdlParserListener;
import com.yifeng.repo.flink.data.transport.streaming.connectors.oracle.antlr.listener.OracleDdlParserListener;
import com.yifeng.repo.flink.data.transport.utils.ddl.DdlUtils;

import io.debezium.data.Envelope;
import io.debezium.ddl.parser.mysql.generated.MySqlLexer;
import io.debezium.ddl.parser.mysql.generated.MySqlParser;
import io.debezium.ddl.parser.oracle.generated.PlSqlLexer;
import io.debezium.ddl.parser.oracle.generated.PlSqlParser;

/**
 * Custom Debezium deserialization schema for Oracle CDC events.
 *
 * <p>Converts each {@link SourceRecord} into a JSON string for downstream consumers:
 * DML events are emitted as {@code {database, schema, tb, before, after, op, ts_ms}};
 * DDL (schema-change) events are parsed with the ANTLR PL/SQL grammar and emitted as a
 * {@link DdlDataPacket}, but only when {@code schemaChangeToDownstream} is enabled.
 */
public class OracleDebeziumDeserializationSchema implements DebeziumDeserializationSchema<String> {

    private static final Logger logger = LoggerFactory.getLogger(OracleDebeziumDeserializationSchema.class);

    private static final long serialVersionUID = 1L;

    /** Second-precision pattern used to render Debezium epoch timestamps downstream. */
    public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";

    /** Microsecond-precision pattern (kept for compatibility; not used by the active code path). */
    public static final String YYYY_MM_DD_HH_MM_SS_SSSSSS = "yyyy-MM-dd HH:mm:ss:SSSSSS";

    /** DateTimeFormatter is immutable and thread-safe — cache it instead of rebuilding per record. */
    private static final DateTimeFormatter DATETIME_FORMATTER = DateTimeFormatter.ofPattern(YYYY_MM_DD_HH_MM_SS);

    /** Logical database name injected at construction; echoed into every emitted packet. */
    private final String database;

    /** Whether DDL (schema-change) events are forwarded downstream; when false they are dropped. */
    private final boolean schemaChangeToDownstream;

    /**
     * @param schemaChangeToDownstream forward DDL events downstream when {@code true}
     * @param database logical database name written into each output record
     */
    public OracleDebeziumDeserializationSchema(boolean schemaChangeToDownstream, String database) {
        this.database = database;
        this.schemaChangeToDownstream = schemaChangeToDownstream;
    }

    @Override
    public void deserialize(SourceRecord sourceRecord, Collector<String> collector)
        throws Exception {
        // A record whose key schema matches the Oracle schema-change event name is DDL;
        // everything else is treated as DML.
        Schema keySchema = sourceRecord.keySchema();
        if (keySchema != null && DdlConstants.ORACLE_SCHEMA_CHANGE_EVENT_KEY_NAME.equalsIgnoreCase(keySchema.name())) {
            if (!schemaChangeToDownstream) {
                // DDL forwarding disabled — drop the event.
                return;
            }
            dealDDLData(sourceRecord, collector);
            return;
        }
        dealDMlData(sourceRecord, collector);
    }

    /**
     * Handles a DML change event: converts the before/after row images to JSON and
     * emits one combined JSON payload.
     *
     * @param sourceRecord the Debezium change record
     * @param collector sink for the serialized JSON string
     */
    private void dealDMlData(SourceRecord sourceRecord, Collector<String> collector) {
        Struct value = (Struct) sourceRecord.value();
        // Row images; either may be absent (e.g. no "before" on insert, no "after" on delete).
        JSONObject beforeJson = structToJson(value.getStruct("before"));
        JSONObject afterJson = structToJson(value.getStruct("after"));

        JSONObject res = new JSONObject();
        res.put("database", database);
        // Debezium Oracle topic format is <server>.<schema>.<table>;
        // an unexpected topic shape would throw here, surfacing the misconfiguration.
        String[] topicParts = sourceRecord.topic().split("\\.");
        res.put("schema", topicParts[1]);
        res.put("tb", topicParts[2]);
        res.put("before", beforeJson);
        res.put("after", afterJson);
        // Operation code (c/u/d/r) derived from the envelope.
        Envelope.Operation operation = Envelope.operationFor(sourceRecord);
        res.put("op", operation.code());
        res.put("ts_ms", System.currentTimeMillis());

        // Emit with null-friendly serializer features so downstream always sees every key.
        collector.collect(JSONObject.toJSONString(res,
                SerializerFeature.WriteNullStringAsEmpty,
                SerializerFeature.WriteNullListAsEmpty,
                SerializerFeature.WriteMapNullValue));
    }

    /**
     * Converts a Kafka Connect {@link Struct} row image into a {@link JSONObject},
     * rendering Debezium epoch timestamps as formatted date strings.
     *
     * @param struct the row image; may be {@code null}
     * @return a JSON object with one entry per field (empty when {@code struct} is null)
     */
    private static JSONObject structToJson(Struct struct) {
        JSONObject json = new JSONObject();
        if (struct == null) {
            return json;
        }
        for (Field field : struct.schema().fields()) {
            Object fieldValue = struct.get(field);
            if (Objects.nonNull(fieldValue)) {
                String schemaName = field.schema().name();
                if ("io.debezium.time.Timestamp".equals(schemaName)) {
                    // Millisecond epoch -> "yyyy-MM-dd HH:mm:ss"
                    fieldValue = DATETIME_FORMATTER.format(timeStampToLocalDateTime((long) fieldValue, 1000L));
                } else if ("io.debezium.time.MicroTimestamp".equals(schemaName)) {
                    // Microsecond epoch -> "yyyy-MM-dd HH:mm:ss" (sub-second precision dropped)
                    fieldValue = DATETIME_FORMATTER.format(timeStampToLocalDateTime((long) fieldValue, 1000000L));
                }
            }
            json.put(field.name(), fieldValue);
        }
        return json;
    }

    /**
     * Handles a DDL change event: parses the DDL statement and emits a {@link DdlDataPacket}
     * when the statement produces column/rename changes relevant to downstream systems.
     *
     * @param sourceRecord the Debezium schema-change record
     * @param collector sink for the serialized JSON string
     */
    private void dealDDLData(SourceRecord sourceRecord, Collector<String> collector) {
        DdlDataPacket ddlDataPacket = new DdlDataPacket();
        ddlDataPacket.setOp(DdlConstants.SCHEMA_CHANGE_OP_TYPE);
        ddlDataPacket.setDbType(DbTypeEnums.ORACLE.getType());
        ddlDataPacket.setDatabase(database);
        Struct value = (Struct) sourceRecord.value();
        Struct source = value.getStruct("source");
        ddlDataPacket.setSchema(String.valueOf(source.get("schema")));
        ddlDataPacket.setTb(String.valueOf(source.get("table")));
        // Round-trip the raw historyRecord payload through fastjson to obtain a HistoryRecord
        // bean regardless of whether Debezium delivered it as a JSON string or a Struct.
        HistoryRecord historyRecord = JSONObject.parseObject(
                JSON.parse(JSON.toJSONString(value.get("historyRecord"))).toString(), HistoryRecord.class);
        // Translate the DDL statement into the downstream change description.
        List<DdlContent> ddlContentList = parseDdlContent(historyRecord, ddlDataPacket.getTb());
        if (CollectionUtils.isEmpty(ddlContentList)) {
            // Nothing actionable for downstream — drop the event.
            return;
        }
        ddlDataPacket.setDdlContentList(ddlContentList);
        ddlDataPacket.setTsMs(System.currentTimeMillis());
        logger.info("DDL变更转换后的数据:{}", JSONObject.toJSONString(ddlDataPacket));
        collector.collect(JSONObject.toJSONString(ddlDataPacket));
    }

    /**
     * Parses a DDL statement with the ANTLR PL/SQL grammar and converts it into the
     * change description downstream systems expect.
     *
     * @param historyRecord the Debezium history record carrying the DDL text
     * @param tableName the table the event applies to
     * @return the list of DDL changes; empty when the statement yields no column/rename changes
     */
    private static List<DdlContent> parseDdlContent(HistoryRecord historyRecord, String tableName) {
        String ddl = historyRecord.getDdl();
        // Lexing/parsing; the grammar expects upper-cased input.
        PlSqlLexer lexer = new PlSqlLexer(CharStreams.fromString(ddl.toUpperCase()));
        PlSqlParser parser = new PlSqlParser(new CommonTokenStream(lexer));
        OracleDdlParserListener oracleDdlParserListener = new OracleDdlParserListener(tableName);
        ParseTreeWalker.DEFAULT.walk(oracleDdlParserListener, parser.sql_script());
        // No column or rename changes detected — return an empty list so the caller drops the event.
        if (oracleDdlParserListener.getColumnMap().isEmpty() && oracleDdlParserListener.getRenameMap().isEmpty()) {
            return new ArrayList<>();
        }
        return DdlUtils.parseDdlContent(oracleDdlParserListener.getRenameMap(),
                oracleDdlParserListener.getColumnMap(), historyRecord);
    }

    /**
     * Converts an epoch value at the given scale to a UTC {@link LocalDateTime}.
     *
     * @param value epoch value (e.g. milliseconds or microseconds since 1970-01-01T00:00:00Z)
     * @param magnification divisor converting {@code value} to whole seconds
     *                      (1000 for milliseconds, 1000000 for microseconds)
     * @return the UTC date-time at second precision
     */
    private static LocalDateTime timeStampToLocalDateTime(long value, long magnification) {
        return LocalDateTime.ofEpochSecond(value / magnification, 0, ZoneOffset.ofHours(0));
    }

    /**
     * Nanosecond-aware variant kept for reference.
     *
     * @deprecated superseded by {@link #timeStampToLocalDateTime(long, long)}; only handles
     *             16-digit microsecond epochs and returns {@code null} otherwise.
     */
    @Deprecated
    private static LocalDateTime timeStampToLocalDateTimeNano(long value, long magnification) {
        String timestamp = String.valueOf(value);
        if (timestamp.length() != 16) {
            return null;
        }
        // Last 6 digits are microseconds; pad with "000" to obtain nanoseconds.
        int nano = Integer.parseInt(timestamp.substring(timestamp.length() - 6) + "000");
        return LocalDateTime.ofEpochSecond(value / magnification, nano, ZoneOffset.ofHours(0));
    }

    @Override
    public TypeInformation<String> getProducedType() {
        return BasicTypeInfo.STRING_TYPE_INFO;
    }

}