package com.dcits.nifi;


import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.*;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.dbcp.DBCPService;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.flowfile.attributes.FragmentAttributes;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.JsonNodeFactory;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.sql.*;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;

@SideEffectFree
@SupportsBatching
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@Tags({"json", "sql", "database", "rdbms", "insert", "update", "relational", "flat"})
@CapabilityDescription("Converts a JSON-formatted FlowFile into an UPDATE or INSERT SQL statement for the merge purpose. The incoming FlowFile is expected to be \"flat\" JSON message, meaning that it consists of a single JSON element and each field maps to a simple type. If a field maps to a JSON object, that JSON object will be interpreted as Text. If the input is an array of JSON elements, each element in the array is output as a separate FlowFile to the 'sql' relationship. Upon successful conversion, the original FlowFile is routed to the 'original' relationship and the SQL is routed to the 'sql' relationship.")
@WritesAttributes({@WritesAttribute(
        attribute = "mime.type",
        description = "Sets mime.type of FlowFile that is routed to 'sql' to 'text/plain'."
), @WritesAttribute(
        attribute = "<sql>.table",
        description = "Sets the <sql>.table attribute of FlowFile that is routed to 'sql' to the name of the table that is updated by the SQL statement. The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."
), @WritesAttribute(
        attribute = "<sql>.catalog",
        description = "If the Catalog name is set for this database, specifies the name of the catalog that the SQL statement will update. If no catalog is used, this attribute will not be added. The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."
), @WritesAttribute(
        attribute = "fragment.identifier",
        description = "All FlowFiles routed to the 'sql' relationship for the same incoming FlowFile (multiple will be output for the same incoming FlowFile if the incoming FlowFile is a JSON Array) will have the same value for the fragment.identifier attribute. This can then be used to correlate the results."
), @WritesAttribute(
        attribute = "fragment.count",
        description = "The number of SQL FlowFiles that were produced for same incoming FlowFile. This can be used in conjunction with the fragment.identifier attribute in order to know how many FlowFiles belonged to the same incoming FlowFile."
), @WritesAttribute(
        attribute = "fragment.index",
        description = "The position of this FlowFile in the list of outgoing FlowFiles that were all derived from the same incoming FlowFile. This can be used in conjunction with the fragment.identifier and fragment.count attributes to know which FlowFiles originated from the same incoming FlowFile and in what order the SQL FlowFiles were produced"
), @WritesAttribute(
        attribute = "<sql>.args.N.type",
        description = "The output SQL statements are parametrized in order to avoid SQL Injection Attacks. The types of the Parameters to use are stored in attributes named <sql>.args.1.type, <sql>.args.2.type, <sql>.args.3.type, and so on. The type is a number representing a JDBC Type constant. Generally, this is useful only for software to read and interpret but is added so that a processor such as PutSQL can understand how to interpret the values. The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."
), @WritesAttribute(
        attribute = "<sql>.args.N.value",
        description = "The output SQL statements are parametrized in order to avoid SQL Injection Attacks. The values of the Parameters to use are stored in the attributes named sql.args.1.value, sql.args.2.value, sql.args.3.value, and so on. Each of these attributes has a corresponding <sql>.args.N.type attribute that indicates how the value should be interpreted when inserting it into the database.The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."
)})
public class MergeTablesPostgre extends AbstractProcessor {
    // Canonical statement types returned by getStatementType() and dispatched on
    // in onTrigger().
    private static final String UPDATE_TYPE = "UPDATE";
    private static final String INSERT_TYPE = "INSERT";
    private static final String DELETE_TYPE = "DELETE";

    // JDBC connection pool controller service; connections are used only to read
    // the target table's metadata (see getSchema()).
    static final PropertyDescriptor CONNECTION_POOL = (new PropertyDescriptor.Builder()).name("JDBC Connection Pool").description("Specifies the JDBC Connection Pool to use in order to convert the JSON message to a SQL statement. The Connection Pool is necessary in order to determine the appropriate database column types.").identifiesControllerService(DBCPService.class).required(true).build();

    // Optional: JSON field carrying the CDC operation type. When unset, every
    // record is treated as an INSERT (see onTrigger()). When set, the three
    // *_Field_Value properties below define which field values map to
    // INSERT/DELETE/UPDATE.
    static final PropertyDescriptor STATEMENT_TYPE_FIELD = (new PropertyDescriptor.Builder()).name("Statement Type Field").description("The field in the json which indicates statement Type.keeping empty means generating insert sql,otherwise Insert Field Value,Delete Field Value,Update Field Value must be set at the same time").addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
    static final PropertyDescriptor STATEMENT_TYPE_INSERT = (new PropertyDescriptor.Builder()).name("Insert Field Value").description("The field value in the json which indicates insert statement Type").addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
    static final PropertyDescriptor STATEMENT_TYPE_DELETE = (new PropertyDescriptor.Builder()).name("Delete Field Value").description("The field value in the json which indicates delete statement Type").addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
    // Comma-separated list: the before-update marker (optional) followed by the
    // after-update marker, per the description's "[before update,after update |
    // after update]" format.
    static final PropertyDescriptor STATEMENT_TYPE_UPDATE = (new PropertyDescriptor.Builder()).name("Update Field Value").description("The field value in the json which indicates update statement Type,format example:[before update,after update | after update]").addValidator(StandardValidators.createListValidator(true,false,StandardValidators.NON_EMPTY_VALIDATOR)).build();

    // Columns written by the upsert generated for this table (comma-separated).
    static final PropertyDescriptor TARGET_FIELDS = (new PropertyDescriptor.Builder()).name("Target Fields").description("the fields in the json which will be used to update the table").addValidator(StandardValidators.createListValidator(true,false,StandardValidators.NON_EMPTY_VALIDATOR)).required(true).build();
    // Optional columns shared with other source tables; they get a separate
    // UPDATE statement guarded by the version field.
    static final PropertyDescriptor COMMON_FIELDS = (new PropertyDescriptor.Builder()).name("Common Fields").description("the fields int the json which will be used to update the table,but may come from other tables").addValidator(StandardValidators.createListValidator(true,false,StandardValidators.NON_EMPTY_VALIDATOR)).build();
    static final PropertyDescriptor VERSION_FIELD = (new PropertyDescriptor.Builder()).name("Version Field").description("the version field used as the where clause when updating the common fields").addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
    // Named constraint used in the generated "ON CONFLICT ON CONSTRAINT" clause.
    static final PropertyDescriptor PRIMARY_KEY_CONSTRAINT = (new PropertyDescriptor.Builder()).name("Primary Key Constraint").description("the primary key constraint name of the target table").addValidator(StandardValidators.NON_EMPTY_VALIDATOR).required(true).build();

    // Declared here but assigned in a static initializer that is outside this
    // chunk — NOTE(review): confirm the initializer defines these descriptors
    // and relationships.
    static final PropertyDescriptor TABLE_NAME;
    static final PropertyDescriptor CATALOG_NAME;
    static final PropertyDescriptor TABLE_SCHEMA_CACHE_SIZE;
    static final Relationship REL_ORIGINAL;
    static final Relationship REL_SQL;
    static final Relationship REL_FAILURE;
    // Cache of table metadata keyed by (catalog, table); sized and built in
    // onScheduled(), populated lazily in getSchema().
    private Cache<MergeTablesPostgre.SchemaKey, MergeTablesPostgre.TableSchema> schemaCache;

    // No construction-time state: the schema cache is created in onScheduled().
    public MergeTablesPostgre() {
    }

    /**
     * Lists this processor's supported properties in display order.
     *
     * @return the property descriptors shown in the processor configuration UI
     */
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        // Fixed list; the framework only reads it, so an unmodifiable view is safe.
        return Collections.unmodifiableList(Arrays.asList(
                CONNECTION_POOL,
                STATEMENT_TYPE_FIELD,
                STATEMENT_TYPE_INSERT,
                STATEMENT_TYPE_DELETE,
                STATEMENT_TYPE_UPDATE,
                TABLE_NAME,
                CATALOG_NAME,
                TARGET_FIELDS,
                COMMON_FIELDS,
                VERSION_FIELD,
                PRIMARY_KEY_CONSTRAINT,
                TABLE_SCHEMA_CACHE_SIZE));
    }

    /**
     * Declares the processor's relationships: 'original' (incoming FlowFile on
     * success), 'sql' (generated statements), and 'failure'.
     *
     * @return the set of relationships this processor supports
     */
    public Set<Relationship> getRelationships() {
        return new HashSet<>(Arrays.asList(REL_ORIGINAL, REL_SQL, REL_FAILURE));
    }

    /**
     * Builds the table-schema cache when the processor is scheduled, sized by
     * the Table Schema Cache Size property.
     *
     * @param context provides the configured cache-size property
     */
    @OnScheduled
    public void onScheduled(ProcessContext context) {
        // asInteger() already yields an int after auto-unboxing; the old
        // .intValue()/(long) dance was decompiler noise.
        int tableSchemaCacheSize = context.getProperty(TABLE_SCHEMA_CACHE_SIZE).asInteger();
        this.schemaCache = Caffeine.newBuilder().maximumSize(tableSchemaCacheSize).build();
    }

    /**
     * Maps the value of the configured statement-type field to one of the
     * canonical statement types.
     *
     * <p>The Update Field Value property is a comma-separated list in the form
     * "[before update,after update]" or just "[after update]" (see the property
     * description). Only the LAST element — the after-image marker — maps to
     * UPDATE; any other value (including the before-image marker) yields null so
     * the record is skipped by the caller. The previous implementation compared
     * the field value against the whole raw list string, so a two-element list
     * could never match and every update was silently dropped.
     *
     * @return INSERT_TYPE, DELETE_TYPE, UPDATE_TYPE, or null when the value
     *         should be ignored
     * @throws ProcessException if the JSON lacks the statement-type field
     */
    private String getStatementType(JsonNode jsonNode, String statementTypeField, String statementTypeInsert, String statementTypeDel, String statementTypeUpdate) {
        String field = StringUtils.trim(statementTypeField);
        if (!jsonNode.has(field)) {
            throw new ProcessException("not find statement type field");
        }
        String fieldValue = jsonNode.get(field).asText();
        if (StringUtils.equals(fieldValue, statementTypeInsert)) {
            return INSERT_TYPE;
        }
        if (StringUtils.equals(fieldValue, statementTypeDel)) {
            return DELETE_TYPE;
        }
        if (statementTypeUpdate != null) {
            String[] updateValues = statementTypeUpdate.split(",");
            if (StringUtils.equals(fieldValue, StringUtils.trim(updateValues[updateValues.length - 1]))) {
                return UPDATE_TYPE;
            }
        }
        // Unrecognized value (e.g. the before-update image): caller skips it.
        return null;
    }

    /**
     * Reads the incoming FlowFile as flat JSON (object or array of objects) and,
     * per element, emits one or more SQL FlowFiles to the 'sql' relationship:
     * an upsert plus a common-fields UPDATE for inserts, two UPDATEs for
     * updates, and a DELETE for deletes. On success the original FlowFile is
     * routed to 'original'; parse or conversion failures route it to 'failure'.
     *
     * @param context processor configuration
     * @param session the active process session
     * @throws ProcessException if table metadata cannot be loaded
     */
    public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }

        // JSON keys must match table column names exactly (no case translation),
        // and a JSON field that maps to no column is an error rather than skipped.
        boolean translateFieldNames = false;
        boolean ignoreUnmappedFields = false;

        String statementTypeField = context.getProperty(STATEMENT_TYPE_FIELD).getValue();
        String statementTypeInsert = context.getProperty(STATEMENT_TYPE_INSERT).getValue();
        String statementTypeDel = context.getProperty(STATEMENT_TYPE_DELETE).getValue();
        String statementTypeUpdate = context.getProperty(STATEMENT_TYPE_UPDATE).getValue();

        String targetFields = context.getProperty(TARGET_FIELDS).getValue();
        String commonFields = context.getProperty(COMMON_FIELDS).getValue();
        String versionField = context.getProperty(VERSION_FIELD).getValue();
        String pkConstraint = context.getProperty(PRIMARY_KEY_CONSTRAINT).getValue();

        // COMMON_FIELDS is optional (not required=true); a null value previously
        // caused a NullPointerException on split().
        List<String> targetFieldList = splitCommaSeparated(targetFields);
        List<String> commonFieldList = splitCommaSeparated(commonFields);

        String catalog = context.getProperty(CATALOG_NAME).evaluateAttributeExpressions(flowFile).getValue();
        String schemaName = null;
        String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
        MergeTablesPostgre.SchemaKey schemaKey = new MergeTablesPostgre.SchemaKey(catalog, tableName);

        // Fail when a required column has no matching JSON field.
        boolean failUnmappedColumns = true;
        boolean warningUnmappedColumns = false;
        // Quote table and column identifiers in the generated SQL.
        boolean escapeColumnNames = true;
        boolean quoteTableName = true;
        String attributePrefix = "sql";

        String fragmentIdentifier = UUID.randomUUID().toString();
        Set<FlowFile> created = new HashSet<>();

        // Fetch (or load into the cache) the target table's metadata.
        MergeTablesPostgre.TableSchema schema = getSchema(schemaKey, context, flowFile, catalog, schemaName, tableName, translateFieldNames);

        // Parse the FlowFile content; a single JSON object is wrapped into a
        // one-element array. null means the content was unparseable.
        ArrayNode arrayNode = getArrayNode(session, flowFile);
        if (arrayNode == null) {
            session.transfer(flowFile, REL_FAILURE);
            return;
        }

        for (int i = 0; i < arrayNode.size(); ++i) {
            JsonNode jsonNode = arrayNode.get(i);
            String statementType;
            if (StringUtils.isEmpty(statementTypeField)) {
                // No statement-type field configured: treat everything as an
                // INSERT to support both CDC and non-CDC input.
                statementType = INSERT_TYPE;
            } else {
                statementType = getStatementType(jsonNode, statementTypeField, statementTypeInsert, statementTypeDel, statementTypeUpdate);
            }
            if (StringUtils.isEmpty(statementType)) {
                // Unrecognized type (e.g. the before-image of an update): skip.
                continue;
            }
            try {
                String fqTableName = buildQualifiedTableName(schema, catalog, schemaName, tableName, quoteTableName);
                HashMap<String, String> attributes;
                String sql;
                if (INSERT_TYPE.equals(statementType)) {
                    // Upsert the target fields, then update the common fields.
                    attributes = new HashMap<>();
                    sql = this.generateInsertUpdate(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix, targetFieldList, pkConstraint);
                    newFlowfile(session, flowFile, created, sql, attributes, attributePrefix, tableName, catalog);

                    if (!commonFieldList.isEmpty()) {
                        attributes = new HashMap<>();
                        sql = this.generateUpdate(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix, commonFieldList, versionField);
                        newFlowfile(session, flowFile, created, sql, attributes, attributePrefix, tableName, catalog);
                    }
                } else if (UPDATE_TYPE.equals(statementType)) {
                    // One UPDATE for the target fields, one for the common fields.
                    attributes = new HashMap<>();
                    sql = this.generateUpdate(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix, targetFieldList, null);
                    newFlowfile(session, flowFile, created, sql, attributes, attributePrefix, tableName, catalog);

                    if (!commonFieldList.isEmpty()) {
                        attributes = new HashMap<>();
                        sql = this.generateUpdate(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix, commonFieldList, versionField);
                        newFlowfile(session, flowFile, created, sql, attributes, attributePrefix, tableName, catalog);
                    }
                } else if (DELETE_TYPE.equals(statementType)) {
                    // DELETE keyed on the primary key columns.
                    attributes = new HashMap<>();
                    sql = this.generateDelete(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
                    newFlowfile(session, flowFile, created, sql, attributes, attributePrefix, tableName, catalog);
                }
            } catch (ProcessException e) {
                this.getLogger().error("Failed to convert {} to a SQL {} statement due to {}; routing to failure", new Object[]{flowFile, statementType, e.toString()}, e);
                // Drop any partially-created SQL FlowFiles before failing.
                session.remove(created);
                session.transfer(flowFile, REL_FAILURE);
                return;
            }
        }

        FlowFile newFlowFile = FragmentAttributes.copyAttributesToOriginal(session, flowFile, fragmentIdentifier, arrayNode.size());
        session.transfer(newFlowFile, REL_ORIGINAL);
    }

    /** Splits a comma-separated property value; null (unset property) yields an empty list. */
    private static List<String> splitCommaSeparated(String value) {
        return value == null ? Collections.<String>emptyList() : Arrays.asList(value.split(","));
    }

    /** Builds the (optionally quoted) catalog.schema.table identifier. */
    private static String buildQualifiedTableName(MergeTablesPostgre.TableSchema schema, String catalog, String schemaName, String tableName, boolean quoteTableName) {
        String quote = quoteTableName ? schema.getQuotedIdentifierString() : "";
        StringBuilder sb = new StringBuilder();
        if (catalog != null) {
            sb.append(quote).append(catalog).append(quote).append('.');
        }
        if (schemaName != null) {
            sb.append(quote).append(schemaName).append(quote).append('.');
        }
        return sb.append(quote).append(tableName).append(quote).toString();
    }

    /**
     * Reads the FlowFile content as JSON and returns it as an array: a JSON
     * array is returned as-is, a single JSON object is wrapped into a
     * one-element array.
     *
     * @param session  session used to read the FlowFile content
     * @param flowFile the FlowFile to parse
     * @return the root array, or null when the content cannot be parsed (the
     *         caller routes to failure)
     */
    private ArrayNode getArrayNode(ProcessSession session, FlowFile flowFile) {
        final ObjectMapper mapper = new ObjectMapper();
        final AtomicReference<JsonNode> rootNodeRef = new AtomicReference<>(null);
        try {
            session.read(flowFile, new InputStreamCallback() {
                public void process(InputStream in) throws IOException {
                    // try-with-resources replaces the decompiled
                    // close/addSuppressed boilerplate.
                    try (InputStream bufferedIn = new BufferedInputStream(in)) {
                        rootNodeRef.set(mapper.readTree(bufferedIn));
                    }
                }
            });
        } catch (ProcessException pe) {
            this.getLogger().error("Failed to parse {} as JSON due to {}; routing to failure", new Object[]{flowFile, pe.toString()}, pe);
            return null;
        }

        JsonNode rootNode = rootNodeRef.get();
        if (rootNode == null) {
            // Empty content can parse to null; the old code then threw a
            // NullPointerException on rootNode.isArray(). Treat it as a parse
            // failure instead.
            this.getLogger().error("Failed to parse {} as JSON: no content; routing to failure", new Object[]{flowFile});
            return null;
        }
        if (rootNode.isArray()) {
            return (ArrayNode) rootNode;
        }
        ArrayNode arrayNode = new ArrayNode(JsonNodeFactory.instance);
        arrayNode.add(rootNode);
        return arrayNode;
    }

    /**
     * Packages one generated SQL statement as a new child FlowFile, stamps the
     * standard attributes (filename, mime type, table/catalog, fragment info,
     * plus the prepared-statement arg attributes already collected in
     * {@code attributes}), and transfers it to the 'sql' relationship.
     *
     * @param session         the active session
     * @param flowFile        the parent FlowFile
     * @param created         accumulator of children, removed on later failure
     * @param sql             the SQL text to write as content
     * @param attributes      arg type/value attributes for this statement
     * @param attributePrefix prefix for the table/catalog attributes
     * @param tableName       unqualified target table name
     * @param catalog         catalog name, or null when not configured
     */
    private void  newFlowfile(ProcessSession session,FlowFile flowFile, Set<FlowFile> created,String sql,HashMap attributes,String attributePrefix,String tableName,String catalog){
        String fragmentIdentifier = UUID.randomUUID().toString();
        FlowFile sqlFlowFile = session.create(flowFile);
        created.add(sqlFlowFile);

        // Write the SQL text as the child's content (UTF-8).
        sqlFlowFile = session.write(sqlFlowFile, (OutputStream out) -> out.write(sql.getBytes(StandardCharsets.UTF_8)));

        attributes.put(CoreAttributes.FILENAME.key(), fragmentIdentifier);
        attributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain");
        attributes.put(attributePrefix + ".table", tableName);
        // Each SQL FlowFile is its own single-fragment group.
        attributes.put(FragmentAttributes.FRAGMENT_ID.key(), fragmentIdentifier);
        attributes.put(FragmentAttributes.FRAGMENT_COUNT.key(), "1");
        attributes.put(FragmentAttributes.FRAGMENT_INDEX.key(), "0");
        if (catalog != null) {
            attributes.put(attributePrefix + ".catalog", catalog);
        }

        session.transfer(session.putAllAttributes(sqlFlowFile, attributes), REL_SQL);
    }

    /**
     * Returns the target table's metadata, loading it through the JDBC
     * connection pool on a cache miss and caching it under the given key.
     *
     * @param schemaKey           cache key (catalog + table)
     * @param context             provides the connection-pool service
     * @param flowFile            its attributes are passed to getConnection()
     * @param catalog             catalog name, may be null
     * @param schemaName          schema name, may be null
     * @param tableName           target table name
     * @param translateFieldNames whether column names are normalized
     * @return the cached or freshly-loaded table schema
     * @throws ProcessException wrapping any SQLException from the lookup
     */
    private MergeTablesPostgre.TableSchema getSchema(MergeTablesPostgre.SchemaKey schemaKey,ProcessContext context,FlowFile flowFile,String catalog,String schemaName,String tableName,boolean translateFieldNames){
        return this.schemaCache.get(schemaKey, key -> {
            DBCPService dbcpService = context.getProperty(CONNECTION_POOL).asControllerService(DBCPService.class);
            // try-with-resources guarantees the connection is returned to the
            // pool; replaces the decompiled close/addSuppressed boilerplate.
            try (Connection conn = dbcpService.getConnection(flowFile.getAttributes())) {
                return MergeTablesPostgre.TableSchema.from(conn, catalog, schemaName, tableName, translateFieldNames, true);
            } catch (SQLException e) {
                throw new ProcessException(e);
            }
        });
    }

    /**
     * Collects the JSON object's field names, normalized the same way column
     * names are, for membership checks against the table's columns.
     *
     * @param node                the JSON object whose field names are read
     * @param translateFieldNames whether names are normalized/translated
     * @return the set of normalized field names
     */
    private Set<String> getNormalizedColumnNames(JsonNode node, boolean translateFieldNames) {
        Set<String> normalizedFieldNames = new HashSet<>();
        Iterator<String> fieldNameItr = node.getFieldNames();
        while (fieldNameItr.hasNext()) {
            normalizedFieldNames.add(normalizeColumnName(fieldNameItr.next(), translateFieldNames));
        }
        return normalizedFieldNames;
    }

    /**
     * Generates a PostgreSQL upsert of the form
     * {@code INSERT INTO t (cols...) VALUES (?...) ON CONFLICT ON CONSTRAINT pk
     * do update set col=?, ...}. The inserted columns are the configured target
     * fields plus the table's primary-key columns; the conflict branch updates
     * only the target fields. Parameter JDBC types and values are recorded in
     * {@code attributes} as {@code <prefix>.args.N.type/.value}, numbered
     * 1-based across the INSERT list first and then the SET list.
     *
     * @return the generated SQL text
     * @throws ProcessException when a required column is missing from the JSON,
     *         a field maps to no column (and unmapped fields are not ignored),
     *         or no field maps to any column at all
     */
    private String generateInsertUpdate(JsonNode rootNode, Map<String, String> attributes, String tableName, MergeTablesPostgre.TableSchema schema, boolean translateFieldNames, boolean ignoreUnmappedFields, boolean failUnmappedColumns, boolean warningUnmappedColumns, boolean escapeColumnNames, boolean quoteTableName, String attributePrefix,List<String> fields,String pkConstraint) {
        Set<String> normalizedFieldNames = this.getNormalizedColumnNames(rootNode, translateFieldNames);

        // Columns that must appear in the JSON: target fields + primary key.
        List<String> allFields = new ArrayList<>(fields);
        allFields.addAll(schema.getPrimaryKeyColumnNames());

        for (String requiredColName : allFields) {
            String normalizedColName = normalizeColumnName(requiredColName, translateFieldNames);
            if (!normalizedFieldNames.contains(normalizedColName)) {
                String missingColMessage = "JSON does not have a value for the Required column '" + requiredColName + "'";
                if (failUnmappedColumns) {
                    this.getLogger().error(missingColMessage);
                    throw new ProcessException(missingColMessage);
                }
                if (warningUnmappedColumns) {
                    this.getLogger().warn(missingColMessage);
                }
            }
        }

        StringBuilder sqlBuilder = new StringBuilder();
        int fieldCount = 0;
        sqlBuilder.append("INSERT INTO ").append(tableName).append(" (");

        // Column list of the INSERT clause. fieldCount is post-incremented so
        // the arg attributes are 1-based.
        for (String fieldName : allFields) {
            MergeTablesPostgre.ColumnDescription desc = (MergeTablesPostgre.ColumnDescription) schema.getColumns().get(normalizeColumnName(fieldName, translateFieldNames));
            if (desc == null && !ignoreUnmappedFields) {
                throw new ProcessException("Cannot map JSON field '" + fieldName + "' to any column in the database");
            }
            if (desc == null) {
                continue;
            }
            if (fieldCount++ > 0) {
                sqlBuilder.append(", ");
            }
            if (escapeColumnNames) {
                sqlBuilder.append(schema.getQuotedIdentifierString()).append(desc.getColumnName()).append(schema.getQuotedIdentifierString());
            } else {
                sqlBuilder.append(desc.getColumnName());
            }
            int sqlType = desc.getDataType();
            attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
            JsonNode fieldNode = rootNode.get(fieldName);
            // fieldNode can be null when the column exists but the JSON field is
            // absent and unmapped-column failures are disabled; the old code then
            // threw a NullPointerException on isNull(). A JSON null leaves the
            // value attribute unset.
            if (fieldNode != null && !fieldNode.isNull()) {
                attributes.put(attributePrefix + ".args." + fieldCount + ".value", createSqlStringValue(fieldNode, desc.getColumnSize(), sqlType));
            }
        }

        sqlBuilder.append(") VALUES (");
        for (int i = 0; i < fieldCount; ++i) {
            if (i > 0) {
                sqlBuilder.append(", ");
            }
            sqlBuilder.append("?");
        }

        // Conflict branch: update only the configured target fields. Parameter
        // numbering continues after the INSERT parameters.
        sqlBuilder.append(") ON CONFLICT ON CONSTRAINT "+ pkConstraint +" do update set ");
        int updateColumnCount = 0;
        for (String fieldName : fields) {
            MergeTablesPostgre.ColumnDescription desc = (MergeTablesPostgre.ColumnDescription) schema.getColumns().get(normalizeColumnName(fieldName, translateFieldNames));
            if (desc == null && !ignoreUnmappedFields) {
                throw new ProcessException("Cannot map JSON field '" + fieldName + "' to any column in the database");
            }
            if (desc == null) {
                continue;
            }
            if (updateColumnCount++ > 0) {
                sqlBuilder.append(", ");
            }
            fieldCount++;
            if (escapeColumnNames) {
                sqlBuilder.append(schema.getQuotedIdentifierString()).append(desc.getColumnName()).append(schema.getQuotedIdentifierString());
            } else {
                sqlBuilder.append(desc.getColumnName());
            }
            sqlBuilder.append("=?");
            int sqlType = desc.getDataType();
            attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
            JsonNode fieldNode = rootNode.get(fieldName);
            if (fieldNode != null && !fieldNode.isNull()) {
                attributes.put(attributePrefix + ".args." + fieldCount + ".value", createSqlStringValue(fieldNode, desc.getColumnSize(), sqlType));
            }
        }

        if (fieldCount == 0) {
            throw new ProcessException("None of the fields in the JSON map to the columns defined by the " + tableName + " table");
        }
        return sqlBuilder.toString();
    }

    /**
     * Renders a JSON field value as the string form PutSQL expects for the
     * given JDBC type: character types are truncated to the column size,
     * boolean values bound to numeric columns become "1"/"0", and BOOLEAN
     * columns get a canonical "true"/"false". All other types pass through
     * {@code asText()} unchanged.
     *
     * @param fieldNode the JSON value node (must be non-null)
     * @param colSize   column size for truncation, or null when unknown
     * @param sqlType   the JDBC type from java.sql.Types
     * @return the string value to place in the sql.args.N.value attribute
     */
    protected static String createSqlStringValue(JsonNode fieldNode, Integer colSize, int sqlType) {
        String fieldValue = fieldNode.asText();
        // Named java.sql.Types constants replace the decompiled magic numbers.
        switch (sqlType) {
            case Types.LONGNVARCHAR: // -16
            case Types.NCHAR:        // -15
            case Types.NVARCHAR:     // -9
            case Types.LONGVARCHAR:  // -1
            case Types.CHAR:         //  1
            case Types.VARCHAR:      // 12
                // Truncate character data to the declared column size.
                if (colSize != null && fieldValue.length() > colSize) {
                    fieldValue = fieldValue.substring(0, colSize);
                }
                break;
            case Types.BIT:      // -7
            case Types.TINYINT:  // -6
            case Types.BIGINT:   // -5
            case Types.NUMERIC:  //  2
            case Types.DECIMAL:  //  3
            case Types.INTEGER:  //  4
            case Types.SMALLINT: //  5
            case Types.FLOAT:    //  6
            case Types.REAL:     //  7
            case Types.DOUBLE:   //  8
                // JSON booleans bound to numeric columns become 1/0.
                if (fieldNode.isBoolean()) {
                    fieldValue = fieldNode.asBoolean() ? "1" : "0";
                }
                break;
            case Types.BOOLEAN:  // 16
                fieldValue = Boolean.toString(Boolean.parseBoolean(fieldValue));
                break;
            case Types.DATE:      // 91
            case Types.TIME:      // 92
            case Types.TIMESTAMP: // 93
            default:
                // Pass through unchanged. (The original fell through from the
                // BOOLEAN case into these empty cases; the explicit break above
                // makes the no-op intent clear without changing the result.)
                break;
        }
        return fieldValue;
    }

    /**
     * Generates a parameterized UPDATE statement (with an optional optimistic-lock
     * version guard) for a single flat JSON record. The SET clause contains the
     * columns from {@code updateFields} (excluding key columns) plus, when
     * configured, {@code versionField}; the WHERE clause matches every primary-key
     * column and optionally adds "(version IS NULL OR version &lt; ?)".
     * Each bind parameter's JDBC type and value are recorded into
     * {@code attributes} as "&lt;attributePrefix&gt;.args.N.type" / ".value"
     * (the NiFi PutSQL convention; numbering starts at 1).
     *
     * @param rootNode flat JSON object holding the column values
     * @param attributes receives the generated ".args.N.type/.value" attributes
     * @param schema cached table schema (columns + normalized primary-key names)
     * @param translateFieldNames if true, names are upper-cased and "_" stripped before matching
     * @param ignoreUnmappedFields if false, an update field with no matching column aborts
     * @param failUnmappedColumns if true, a key column missing from the JSON aborts
     * @param warningUnmappedColumns if true, a missing key value only logs a warning
     * @param escapeColumnNames if true, wrap column names in the DB quoted-identifier string
     * @param quoteTableName not used in this method — TODO confirm it is intentional
     * @param attributePrefix prefix for the generated FlowFile attributes
     * @param updateFields columns to include in the SET clause
     * @param versionField optional version column for the optimistic-concurrency guard
     * @return the parameterized UPDATE statement
     * @throws ProcessException if the table has no primary key, or mapping fails as configured
     */
    private String generateUpdate(JsonNode rootNode, Map<String, String> attributes, String tableName, MergeTablesPostgre.TableSchema schema, boolean translateFieldNames, boolean ignoreUnmappedFields, boolean failUnmappedColumns, boolean warningUnmappedColumns, boolean escapeColumnNames, boolean quoteTableName, String attributePrefix,List<String> updateFields,String versionField) {
        // Primary-key columns double as the update keys (stored normalized in the schema).
        Set updateKeyNames = schema.getPrimaryKeyColumnNames();
        int fieldCount;

        if (updateKeyNames.isEmpty()) {
            throw new ProcessException("Table '" + tableName + "' does not have a Primary Key and no Update Keys were specified");
        } else {
            StringBuilder sqlBuilder = new StringBuilder();
            fieldCount = 0;
            sqlBuilder.append("UPDATE ");
            sqlBuilder.append(tableName);
            sqlBuilder.append(" SET ");
            // Normalized JSON field names, used to check that every key column has a value.
            Set<String> normalizedFieldNames = this.getNormalizedColumnNames(rootNode, translateFieldNames);
            Set<String> normalizedUpdateNames = new HashSet();
            Iterator fieldNames = updateKeyNames.iterator();

            String fieldName;
            String normalizedColName;
            // Pass 1: verify the JSON carries a value for every key column and collect
            // the normalized key names so they can be excluded from the SET clause.
            while(fieldNames.hasNext()) {
                fieldName = (String)fieldNames.next();
                fieldName = normalizeColumnName(fieldName, translateFieldNames);
                normalizedUpdateNames.add(fieldName);
                if (!normalizedFieldNames.contains(fieldName)) {
                    // normalizedColName is reused here as the error-message holder (decompiled artifact).
                    normalizedColName = "JSON does not have a value for the Key column '" + fieldName + "'";
                    if (failUnmappedColumns) {
                        this.getLogger().error(normalizedColName);
                        throw new ProcessException(normalizedColName);
                    }

                    if (warningUnmappedColumns) {
                        this.getLogger().warn(normalizedColName);
                    }
                }
            }

            // Pass 2: build the SET clause from the configured update fields, skipping
            // key columns. fieldCount is incremented before the attributes are written,
            // so bind-parameter numbering starts at 1.
            fieldNames = updateFields.iterator();
            String fieldValue;
            while(fieldNames.hasNext()) {
                fieldName = (String)fieldNames.next();
                fieldName = normalizeColumnName(fieldName, translateFieldNames);
                MergeTablesPostgre.ColumnDescription desc = (MergeTablesPostgre.ColumnDescription)schema.getColumns().get(fieldName);
                if (desc == null) {
                    if (!ignoreUnmappedFields) {
                        throw new ProcessException("Cannot map JSON field '" + fieldName + "' to any column in the database");
                    }
                } else if (!normalizedUpdateNames.contains(fieldName)) {
                    if (fieldCount++ > 0) {
                        sqlBuilder.append(", ");
                    }

                    if (escapeColumnNames) {
                        sqlBuilder.append(schema.getQuotedIdentifierString()).append(desc.getColumnName()).append(schema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(desc.getColumnName());
                    }

                    sqlBuilder.append(" = ?");
                    int sqlType = desc.getDataType();
                    attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
                    Integer colSize = desc.getColumnSize();
                    // NOTE(review): fieldName was normalized above; when translateFieldNames is
                    // true this lookup can miss the raw JSON key and return null, making
                    // fieldNode.isNull() throw NPE — confirm JSON keys are pre-normalized.
                    JsonNode fieldNode = rootNode.get(fieldName);
                    if (!fieldNode.isNull()) {
                        fieldValue = createSqlStringValue(fieldNode, colSize, sqlType);
                        attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue);
                    }
                    // When the JSON value is null, the "?" remains in the SQL but no ".value"
                    // attribute is written — presumably the downstream binds NULL then; verify.
                }
            }

            // Optionally append the version column to the SET clause so a successful
            // update also advances the stored version.
            if(!StringUtils.isEmpty(versionField)){
                normalizedColName = normalizeColumnName(versionField, translateFieldNames);
                MergeTablesPostgre.ColumnDescription desc = (MergeTablesPostgre.ColumnDescription)schema.getColumns().get(normalizedColName);
                if (desc != null) {
                    if (fieldCount++ > 0) {
                        sqlBuilder.append(", ");
                    }
                    if (escapeColumnNames) {
                        sqlBuilder.append(schema.getQuotedIdentifierString()).append(normalizedColName).append(schema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(normalizedColName);
                    }

                    sqlBuilder.append(" = ?");
                    int sqlType = desc.getDataType();
                    attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
                    Integer colSize = desc.getColumnSize();
                    // NOTE(review): uses the raw versionField as the JSON key (not the
                    // normalized name) and will NPE when the key is absent — confirm.
                    fieldValue = rootNode.get(versionField).asText();
                    // Truncate to the declared column size.
                    if (colSize != null && fieldValue.length() > colSize.intValue()) {
                        fieldValue = fieldValue.substring(0, colSize.intValue());
                    }

                    attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue);
                }
            }

            // WHERE clause: match every primary-key column. var29 counts WHERE
            // conditions (for " AND " separators) while fieldCount keeps the global
            // bind-parameter numbering going.
            sqlBuilder.append(" WHERE ");
            fieldNames = schema.getPrimaryKeyColumnNames().iterator();
            int var29 = 0;

            while(fieldNames.hasNext()) {
                fieldName = (String)fieldNames.next();
                normalizedColName = normalizeColumnName(fieldName, translateFieldNames);
                MergeTablesPostgre.ColumnDescription desc = (MergeTablesPostgre.ColumnDescription)schema.getColumns().get(normalizedColName);
                if (desc != null && normalizedUpdateNames.contains(normalizedColName)) {
                    if (var29++ > 0) {
                        sqlBuilder.append(" AND ");
                    }
                    ++fieldCount;
                    if (escapeColumnNames) {
                        sqlBuilder.append(schema.getQuotedIdentifierString()).append(normalizedColName).append(schema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(normalizedColName);
                    }

                    sqlBuilder.append(" = ?");
                    int sqlType = desc.getDataType();
                    attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
                    Integer colSize = desc.getColumnSize();
                    // NOTE(review): fieldName comes from the *normalized* primary-key set;
                    // rootNode.get(fieldName) will NPE when the raw JSON key differs — confirm.
                    fieldValue = rootNode.get(fieldName).asText();
                    if (colSize != null && fieldValue.length() > colSize.intValue()) {
                        fieldValue = fieldValue.substring(0, colSize.intValue());
                    }

                    attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue);
                }
            }

            // Optimistic-concurrency guard. Example of the generated shape:
            //   UPDATE "public"."t" SET "NAME" = ?, "VER" = ?
            //   WHERE "ID" = ? AND ("VER" IS NULL OR "VER" < ?)
            if(!StringUtils.isEmpty(versionField)){
                normalizedColName = normalizeColumnName(versionField, translateFieldNames);
                MergeTablesPostgre.ColumnDescription desc = (MergeTablesPostgre.ColumnDescription)schema.getColumns().get(normalizedColName);
                if (desc != null) {
                    sqlBuilder.append(" AND (");
                    if (escapeColumnNames) {
                        sqlBuilder.append(schema.getQuotedIdentifierString()).append(normalizedColName).append(schema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(normalizedColName);
                    }
                    sqlBuilder.append(" IS NULL OR ");
                    if (escapeColumnNames) {
                        sqlBuilder.append(schema.getQuotedIdentifierString()).append(normalizedColName).append(schema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(normalizedColName);
                    }
                    ++fieldCount;
                    sqlBuilder.append(" < ? )");
                    int sqlType = desc.getDataType();
                    attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
                    Integer colSize = desc.getColumnSize();
                    // NOTE(review): same raw-key lookup as above — NPE risk when the key is absent.
                    fieldValue = rootNode.get(versionField).asText();
                    if (colSize != null && fieldValue.length() > colSize.intValue()) {
                        fieldValue = fieldValue.substring(0, colSize.intValue());
                    }

                    attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue);
                }
            }

            return sqlBuilder.toString();
        }
    }

    /**
     * Generates a DELETE statement whose WHERE clause matches every primary-key
     * column of the target table. For each bound parameter the JDBC type and
     * value are written into {@code attributes} as
     * "&lt;attributePrefix&gt;.args.N.type" / ".value" (the NiFi PutSQL convention;
     * numbering starts at 1).
     *
     * @param rootNode flat JSON object holding the column values
     * @param attributes receives the generated ".args.N.type/.value" attributes
     * @param tableName table to delete from (already quoted upstream if requested)
     * @param schema cached table schema (columns + normalized primary-key names)
     * @param translateFieldNames if true, names are upper-cased and "_" stripped before matching
     * @param ignoreUnmappedFields if false, a PK name with no matching column aborts
     * @param failUnmappedColumns if true, a PK column missing from the JSON aborts
     * @param warningUnmappedColumns if true, a missing PK value only logs a warning
     * @param escapeColumnNames if true, wrap column names in the DB quoted-identifier string
     * @param quoteTableName not used in this method — kept for signature parity
     * @param attributePrefix prefix for the generated FlowFile attributes
     * @return the parameterized DELETE statement
     * @throws ProcessException when no PK column could be mapped, or mapping fails as configured
     */
    private String generateDelete(JsonNode rootNode, Map<String, String> attributes, String tableName, MergeTablesPostgre.TableSchema schema, boolean translateFieldNames, boolean ignoreUnmappedFields, boolean failUnmappedColumns, boolean warningUnmappedColumns, boolean escapeColumnNames, boolean quoteTableName, String attributePrefix) {
        Set<String> normalizedFieldNames = this.getNormalizedColumnNames(rootNode, translateFieldNames);

        // Validate up front that the JSON supplies every primary-key column.
        for (Object requiredCol : schema.getPrimaryKeyColumnNames()) {
            String requiredColName = (String) requiredCol;
            String normalizedColName = normalizeColumnName(requiredColName, translateFieldNames);
            if (!normalizedFieldNames.contains(normalizedColName)) {
                String missingColMessage = "JSON does not have a value for the Required column '" + requiredColName + "'";
                if (failUnmappedColumns) {
                    this.getLogger().error(missingColMessage);
                    throw new ProcessException(missingColMessage);
                }
                if (warningUnmappedColumns) {
                    this.getLogger().warn(missingColMessage);
                }
            }
        }

        StringBuilder sqlBuilder = new StringBuilder();
        int fieldCount = 0;
        sqlBuilder.append("DELETE FROM ");
        sqlBuilder.append(tableName);
        sqlBuilder.append(" WHERE ");

        for (Object pkCol : schema.getPrimaryKeyColumnNames()) {
            String fieldName = (String) pkCol;
            MergeTablesPostgre.ColumnDescription desc = schema.getColumns().get(normalizeColumnName(fieldName, translateFieldNames));
            if (desc == null && !ignoreUnmappedFields) {
                throw new ProcessException("Cannot map JSON field '" + fieldName + "' to any column in the database");
            }

            if (desc != null) {
                if (fieldCount++ > 0) {
                    sqlBuilder.append(" AND ");
                }

                if (escapeColumnNames) {
                    sqlBuilder.append(schema.getQuotedIdentifierString()).append(desc.getColumnName()).append(schema.getQuotedIdentifierString());
                } else {
                    sqlBuilder.append(desc.getColumnName());
                }

                sqlBuilder.append(" = ?");
                int sqlType = desc.getDataType();
                attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
                Integer colSize = desc.getColumnSize();

                // NOTE(review): fieldName comes from getPrimaryKeyColumnNames(), which
                // stores normalized names; when translateFieldNames is true this lookup
                // may not match the raw JSON key — confirm how the JSON is produced.
                JsonNode fieldNode = rootNode.get(fieldName);
                // Guard against a missing key (get() returns null) as well as an explicit
                // JSON null; the original code NPE'd when the field was absent.
                if (fieldNode != null && !fieldNode.isNull()) {
                    String fieldValue = fieldNode.asText();
                    // Truncate to the declared column size.
                    if (colSize != null && fieldValue.length() > colSize) {
                        fieldValue = fieldValue.substring(0, colSize);
                    }
                    attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue);
                }
            }
        }

        if (fieldCount == 0) {
            throw new ProcessException("None of the fields in the JSON map to the columns defined by the " + tableName + " table");
        }
        return sqlBuilder.toString();
    }

    /**
     * Normalizes a column name for case/underscore-insensitive matching: when
     * {@code translateColumnNames} is true the name is upper-cased and all
     * underscores removed; otherwise it is returned unchanged.
     *
     * <p>Uses {@code Locale.ROOT} so the result does not depend on the JVM's
     * default locale (avoids e.g. the Turkish dotless-i problem when matching
     * database identifiers).</p>
     *
     * @param colName the raw column/field name
     * @param translateColumnNames whether to apply the normalization
     * @return the normalized name
     */
    private static String normalizeColumnName(String colName, boolean translateColumnNames) {
        return translateColumnNames ? colName.toUpperCase(Locale.ROOT).replace("_", "") : colName;
    }

    static {
        // Processor property descriptors.
        TABLE_NAME = new PropertyDescriptor.Builder()
                .name("Table Name")
                .description("The name of the table that the statement should update")
                .required(true)
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .build();
        CATALOG_NAME = new PropertyDescriptor.Builder()
                .name("Catalog Name")
                .description("The name of the catalog that the statement should update. This may not apply for the database that you are updating. In this case, leave the field empty")
                .required(false)
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .build();
        TABLE_SCHEMA_CACHE_SIZE = new PropertyDescriptor.Builder()
                .name("table-schema-cache-size")
                .displayName("Table Schema Cache Size")
                .description("Specifies how many Table Schemas should be cached")
                .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
                .defaultValue("100")
                .required(true)
                .build();

        // Processor relationships.
        REL_ORIGINAL = new Relationship.Builder()
                .name("original")
                .description("When a FlowFile is converted to SQL, the original JSON FlowFile is routed to this relationship")
                .build();
        REL_SQL = new Relationship.Builder()
                .name("sql")
                .description("A FlowFile is routed to this relationship when its contents have successfully been converted into a SQL statement")
                .build();
        REL_FAILURE = new Relationship.Builder()
                .name("failure")
                .description("A FlowFile is routed to this relationship if it cannot be converted into a SQL statement. Common causes include invalid JSON content or the JSON content missing a required field (if using an INSERT statement type).")
                .build();
    }

    private static class SchemaKey {
        private final String catalog;
        private final String tableName;

        public SchemaKey(String catalog, String tableName) {
            this.catalog = catalog;
            this.tableName = tableName;
        }

        public int hashCode() {
            int result = 31 + (this.catalog == null ? 0 : this.catalog.hashCode());
            result = 31 * result + (this.tableName == null ? 0 : this.tableName.hashCode());
            return result;
        }

        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            } else if (obj == null) {
                return false;
            } else if (this.getClass() != obj.getClass()) {
                return false;
            } else {
                MergeTablesPostgre.SchemaKey other = (MergeTablesPostgre.SchemaKey)obj;
                if (this.catalog == null) {
                    if (other.catalog != null) {
                        return false;
                    }
                } else if (!this.catalog.equals(other.catalog)) {
                    return false;
                }

                if (this.tableName == null) {
                    if (other.tableName != null) {
                        return false;
                    }
                } else if (!this.tableName.equals(other.tableName)) {
                    return false;
                }

                return true;
            }
        }
    }

    private static class ColumnDescription {
        private final String columnName;
        private final int dataType;
        private final boolean required;
        private final Integer columnSize;

        private ColumnDescription(String columnName, int dataType, boolean required, Integer columnSize) {
            this.columnName = columnName;
            this.dataType = dataType;
            this.required = required;
            this.columnSize = columnSize;
        }

        public int getDataType() {
            return this.dataType;
        }

        public Integer getColumnSize() {
            return this.columnSize;
        }

        public String getColumnName() {
            return this.columnName;
        }

        public boolean isRequired() {
            return this.required;
        }

        public static MergeTablesPostgre.ColumnDescription from(ResultSet resultSet) throws SQLException {
            ResultSetMetaData md = resultSet.getMetaData();
            List<String> columns = new ArrayList();

            for(int i = 1; i < md.getColumnCount() + 1; ++i) {
                columns.add(md.getColumnName(i));
            }

            String defaultValue = resultSet.getString("COLUMN_DEF");
            String columnName = resultSet.getString("COLUMN_NAME");
            int dataType = resultSet.getInt("DATA_TYPE");
            int colSize = resultSet.getInt("COLUMN_SIZE");
            String nullableValue = resultSet.getString("IS_NULLABLE");
            boolean isNullable = "YES".equalsIgnoreCase(nullableValue) || nullableValue.isEmpty();
            String autoIncrementValue = "NO";
            if (columns.contains("IS_AUTOINCREMENT")) {
                autoIncrementValue = resultSet.getString("IS_AUTOINCREMENT");
            }

            boolean isAutoIncrement = "YES".equalsIgnoreCase(autoIncrementValue);
            boolean required = !isNullable && !isAutoIncrement && defaultValue == null;
            return new MergeTablesPostgre.ColumnDescription(columnName, dataType, required, colSize == 0 ? null : colSize);
        }
    }

    /**
     * Cached schema for one table: column descriptions keyed by normalized
     * column name, the required (NOT NULL, no default) column names, the
     * primary-key column names (stored normalized), and the database's
     * identifier-quote string.
     */
    private static class TableSchema {
        private List<String> requiredColumnNames;
        private Set<String> primaryKeyColumnNames;
        private Map<String, MergeTablesPostgre.ColumnDescription> columns = new HashMap<>();
        private String quotedIdentifierString;

        private TableSchema(List<MergeTablesPostgre.ColumnDescription> columnDescriptions, boolean translateColumnNames, Set<String> primaryKeyColumnNames, String quotedIdentifierString) {
            this.primaryKeyColumnNames = primaryKeyColumnNames;
            this.quotedIdentifierString = quotedIdentifierString;
            this.requiredColumnNames = new ArrayList<>();
            for (MergeTablesPostgre.ColumnDescription desc : columnDescriptions) {
                // Columns are keyed by their normalized name for lookup during SQL generation.
                this.columns.put(MergeTablesPostgre.normalizeColumnName(desc.columnName, translateColumnNames), desc);
                if (desc.isRequired()) {
                    this.requiredColumnNames.add(desc.columnName);
                }
            }
        }

        public Map<String, MergeTablesPostgre.ColumnDescription> getColumns() {
            return this.columns;
        }

        public List<String> getRequiredColumnNames() {
            return this.requiredColumnNames;
        }

        /** @return primary-key column names, already normalized */
        public Set<String> getPrimaryKeyColumnNames() {
            return this.primaryKeyColumnNames;
        }

        /** @return the database's identifier-quote string (from DatabaseMetaData) */
        public String getQuotedIdentifierString() {
            return this.quotedIdentifierString;
        }

        /**
         * Reads the schema of the given table from the database metadata.
         *
         * @param conn open JDBC connection
         * @param catalog catalog name, or null
         * @param schema schema pattern passed to getColumns (not to getPrimaryKeys)
         * @param tableName table whose columns/keys are read
         * @param translateColumnNames whether names are normalized for matching
         * @param includePrimaryKeys whether to also fetch the primary-key columns
         * @return the populated TableSchema
         * @throws SQLException if metadata retrieval fails
         */
        public static MergeTablesPostgre.TableSchema from(Connection conn, String catalog, String schema, String tableName, boolean translateColumnNames, boolean includePrimaryKeys) throws SQLException {
            DatabaseMetaData dmd = conn.getMetaData();

            List<MergeTablesPostgre.ColumnDescription> cols = new ArrayList<>();
            try (ResultSet colrs = dmd.getColumns(catalog, schema, tableName, "%")) {
                while (colrs.next()) {
                    cols.add(MergeTablesPostgre.ColumnDescription.from(colrs));
                }
            }

            Set<String> primaryKeyColumns = new HashSet<>();
            if (includePrimaryKeys) {
                // NOTE(review): the schema argument is deliberately not forwarded to
                // getPrimaryKeys (null is passed, matching the original) — confirm intent.
                try (ResultSet pkrs = dmd.getPrimaryKeys(catalog, (String) null, tableName)) {
                    while (pkrs.next()) {
                        // Primary-key names are stored normalized for later matching.
                        primaryKeyColumns.add(MergeTablesPostgre.normalizeColumnName(pkrs.getString("COLUMN_NAME"), translateColumnNames));
                    }
                }
            }

            return new MergeTablesPostgre.TableSchema(cols, translateColumnNames, primaryKeyColumns, dmd.getIdentifierQuoteString());
        }
    }
}
