package com.dcits.nifi;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.*;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.dbcp.DBCPService;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.*;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.StreamCallback;
import org.apache.nifi.processor.util.StandardValidators;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.*;

@Tags({"cdc","Sql Server"})
@CapabilityDescription("to capture change of sql server, the cdc datas are output as flow file which store the scheme of cdc table in attribute and cdc data in content, all above with json format. What you need to do is to specify the table name storing the cdc data of source table, and offering the DBCPService with SELECT privilege on the source table.")
@SeeAlso({})
@ReadsAttributes({@ReadsAttribute(attribute = "", description = "")})
@WritesAttributes({@WritesAttribute(attribute = "metadata", description = "the metadata of the CDC Table"),
        @WritesAttribute(attribute = "cdcTable", description = "the CDC Table name")})
@Stateful(description = "stores the last processed __$seqval under the key '<cdcTable>_seqval' in cluster state", scopes = {Scope.CLUSTER})
public class CaptureChangeSqlServer extends AbstractProcessor {

    /** Suffix appended to the CDC table name to build the cluster-state key. */
    private static final String STATE_KEY_SUFFIX = "_seqval";

    /** Maximum number of CDC rows serialized into a single flow file. */
    private static final int ROWS_PER_FLOWFILE = 1000;

    /** Controller service providing JDBC connections to the CDC-enabled SQL Server database. */
    public static final PropertyDescriptor PROP_CDC_SOURCE_DB = new PropertyDescriptor
            .Builder().name("Source Database")
            .description("the source database to be captured")
            .required(true)
            .identifiesControllerService(DBCPService.class)
            .build();

    /** Fully-qualified CDC change table (e.g. {@code cdc.dbo_MyTable_CT}), format {@code schema.tableName}. */
    public static final PropertyDescriptor PROP_SOURCE_TABLE = new PropertyDescriptor
            .Builder().name("CDC Table")
            .description("table name which stores the cdc information of the source table, format:schema.tableName")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    // NOTE(review): despite the field name, this is exposed as "Step Size" — it bounds the
    // __$seqval window queried per onTrigger, not the rows per flow file (that is ROWS_PER_FLOWFILE).
    public static final PropertyDescriptor MAXRECORDS_PER_FLOWFILE = new PropertyDescriptor
            .Builder().name("Step Size")
            .description("the step size on column __$seqval")
            .required(true)
            .defaultValue("10000")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .addValidator(StandardValidators.createLongValidator(10000L, Long.MAX_VALUE / 2, true))
            .build();

    /** The only relationship: every successfully captured batch goes here. */
    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Success, all done")
            .build();

    private List<PropertyDescriptor> descriptors;

    private Set<Relationship> relationships;

    @Override
    protected void init(final ProcessorInitializationContext context) {
        final List<PropertyDescriptor> props = new ArrayList<>();
        props.add(PROP_CDC_SOURCE_DB);
        props.add(PROP_SOURCE_TABLE);
        props.add(MAXRECORDS_PER_FLOWFILE);
        this.descriptors = Collections.unmodifiableList(props);

        final Set<Relationship> rels = new HashSet<>();
        rels.add(REL_SUCCESS);
        this.relationships = Collections.unmodifiableSet(rels);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return this.relationships;
    }

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return descriptors;
    }

    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        // no scheduling-time setup required
    }

    /**
     * Polls the CDC change table for rows whose {@code __$seqval} falls in the window
     * {@code (lastSeqval, lastSeqval + stepSize]}, emits them as JSON flow files (schema in the
     * "metadata" attribute, rows in the content) and, only after a successful transfer, advances
     * the persisted seqval in cluster state. If the window is empty, the state is skipped forward
     * past any gap so the processor cannot loop forever on an empty range.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        final List<FlowFile> flowFiles = new ArrayList<>();

        final String cdcTable = context.getProperty(PROP_SOURCE_TABLE).getValue();           // change table to poll
        final int maxNum = context.getProperty(MAXRECORDS_PER_FLOWFILE).asInteger();         // __$seqval window width
        final DBCPService cdcSourceDbcpService = context.getProperty(PROP_CDC_SOURCE_DB).asControllerService(DBCPService.class);

        try (Connection con = cdcSourceDbcpService.getConnection();
             Statement statement = con.createStatement()) {
            final String metadata = getMetadataOfTable(cdcTable, cdcSourceDbcpService);

            final Map<String, String> stateMap = context.getStateManager().getState(Scope.CLUSTER).toMap();
            final Map<String, String> newStateMap = new HashMap<>();
            final long seqval = getSeqval(cdcSourceDbcpService, stateMap, cdcTable);
            final long seqvalTop = seqval + maxNum;
            // NOTE(review): cdcTable is concatenated into the SQL. It is a trusted, operator-set
            // property (identifiers cannot be bound as '?'); do not feed it from flow-file content.
            final String sql = "SELECT CAST( __$seqval AS BIGINT) seq, * from " + cdcTable + "  where CAST( __$seqval AS BIGINT) > " + seqval + " and CAST( __$seqval AS BIGINT) <= " + seqvalTop + " ORDER BY CAST( __$seqval AS BIGINT)";

            final JSONArray contentArray = new JSONArray();
            try (ResultSet res = statement.executeQuery(sql)) {
                final ResultSetMetaData resMeta = res.getMetaData();
                while (res.next()) {
                    final JSONObject row = new JSONObject();
                    for (int i = 1; i <= resMeta.getColumnCount(); i++) {
                        final String colName = resMeta.getColumnName(i);
                        Object value = res.getObject(i);
                        value = value == null ? "null" : value;      // preserve legacy behavior: SQL NULL -> the string "null"
                        row.put(colName, value);
                        // "seq" is ordered ascending, so the last row seen carries the max seqval of this batch.
                        if (StringUtils.equals(colName, "seq")) {
                            newStateMap.put(cdcTable + STATE_KEY_SUFFIX, value.toString());
                        }
                    }
                    contentArray.add(row);
                    if (contentArray.size() == ROWS_PER_FLOWFILE) {
                        final FlowFile flowFile = writeBatch(session, contentArray, metadata, cdcTable);
                        if (flowFile == null) {
                            session.remove(flowFiles);
                            return;
                        }
                        flowFiles.add(flowFile);
                        contentArray.clear();
                    }
                }
            }

            // Flush the final partial batch, if any.
            if (!contentArray.isEmpty()) {
                final FlowFile flowFile = writeBatch(session, contentArray, metadata, cdcTable);
                if (flowFile == null) {
                    session.remove(flowFiles);
                    return;
                }
                flowFiles.add(flowFile);
                contentArray.clear();
            }

            if (!flowFiles.isEmpty()) {
                session.transfer(flowFiles, REL_SUCCESS);
                // Advance the persisted seqval only after the data has been handed off.
                context.getStateManager().setState(newStateMap, Scope.CLUSTER);
            } else {
                // Empty window: look past seqvalTop. If data exists further ahead there is a gap
                // in __$seqval; jump the state forward so we do not spin on the empty range forever.
                try (ResultSet res = statement.executeQuery("SELECT min(CAST( __$seqval AS BIGINT)) from  " + cdcTable + "  where CAST( __$seqval AS BIGINT) > " + seqvalTop)) {
                    if (res.next()) {
                        final long minSeqval = res.getLong(1);
                        if (minSeqval > seqvalTop) {
                            // Land slightly below the next live seqval so the next window picks it up.
                            newStateMap.put(cdcTable + STATE_KEY_SUFFIX, (minSeqval - maxNum / 10) + "");
                            context.getStateManager().setState(newStateMap, Scope.CLUSTER);
                        }
                    }
                }
            }
        } catch (Exception e) {
            getLogger().error("抽取[" + cdcTable + "]异常!", e);
        }
    }

    /**
     * Creates one flow file carrying {@code batch} as a UTF-8 JSON array, with the table schema
     * and table name as attributes.
     *
     * <p>NiFi flow files are immutable: {@code putAttribute}/{@code write} return the updated
     * reference, which MUST be kept — the original code discarded it, transferring a stale
     * reference (attributes lost / FlowFileHandlingException).</p>
     *
     * @return the fully-built flow file, or {@code null} if the session could not create one
     */
    private FlowFile writeBatch(final ProcessSession session, final JSONArray batch,
                                final String metadata, final String cdcTable) {
        FlowFile flowFile = session.create();
        if (flowFile == null) {
            return null;
        }
        flowFile = session.putAttribute(flowFile, "metadata", metadata);
        flowFile = session.putAttribute(flowFile, "cdcTable", cdcTable);
        // Serialize before the callback; 'batch' is cleared by the caller right after this returns.
        final byte[] payload = batch.toJSONString().getBytes(StandardCharsets.UTF_8);
        flowFile = session.write(flowFile, new StreamCallback() {
            @Override
            public void process(InputStream in, OutputStream out) throws IOException {
                out.write(payload);
            }
        });
        return flowFile;
    }

    /**
     * Resolves the seqval lower bound for the next poll: the value persisted in cluster state if
     * present, otherwise {@code min(__$seqval) - 10} read from the change table (the margin keeps
     * the first, exclusive-lower-bound window from skipping the earliest rows).
     *
     * @return the lower bound, or {@code -1L} when the table is empty or the lookup fails
     */
    private Long getSeqval(final DBCPService cdcSourceDbcpService, final Map<String, String> stateMap, final String cdcTable) {
        final String stateKey = cdcTable + STATE_KEY_SUFFIX;
        if (stateMap.containsKey(stateKey)) {
            return Long.parseLong(stateMap.get(stateKey));
        }
        try (Connection con = cdcSourceDbcpService.getConnection();
             Statement statement = con.createStatement();
             ResultSet res = statement.executeQuery("SELECT min(CAST( __$seqval AS BIGINT)) from  " + cdcTable)) {
            if (res.next()) {
                return res.getLong(1) - 10;
            }
        } catch (Exception e) {
            getLogger().error("获取__$seqval下限失败", e);
        }
        // Fallback; note the caller will then query the window (-1, -1 + stepSize].
        return -1L;
    }

    /**
     * Builds a JSON description of the table's columns (name, type, size, scale, nullability,
     * primary-key flag) via JDBC metadata. On SQL error the error is logged and a schema with an
     * empty/partial column list is returned — callers always get valid JSON.
     *
     * @param tableName table in {@code schema.tableName} form
     * @return JSON string: {@code {"tableName":..., "cols":[{"columnName":...,...},...]}}
     */
    private String getMetadataOfTable(final String tableName, final DBCPService cdcSourceDbcpService) throws Exception {
        final JSONObject schema = new JSONObject();
        schema.put("tableName", tableName);
        final JSONArray cols = new JSONArray();
        final String[] parts = tableName.split("\\.");   // [0]=schema, [1]=table
        try (Connection con = cdcSourceDbcpService.getConnection()) {
            final DatabaseMetaData meta = con.getMetaData();
            final Set<String> pkCols = new HashSet<>();
            try (ResultSet resP = meta.getPrimaryKeys(null, parts[0], parts[1])) {
                while (resP.next()) {
                    pkCols.add(resP.getString("COLUMN_NAME"));
                }
            }
            try (ResultSet colRet = meta.getColumns(null, parts[0], parts[1], "%")) {
                while (colRet.next()) {
                    final JSONObject col = new JSONObject();
                    final String columnName = colRet.getString("COLUMN_NAME");
                    col.put("columnName", columnName);
                    col.put("columnType", colRet.getString("TYPE_NAME"));
                    col.put("columnSize", colRet.getInt("COLUMN_SIZE"));
                    col.put("decimalDigits", colRet.getInt("DECIMAL_DIGITS"));
                    col.put("nullable", colRet.getInt("NULLABLE"));
                    col.put("isPK", pkCols.contains(columnName) ? 1 : 0);
                    cols.add(col);
                }
            }
            schema.put("cols", cols);
        } catch (SQLException e) {
            getLogger().error("获取表[" + tableName + "]结构信息错误!", e);
        }
        return schema.toJSONString();
    }
}
