package com.dcits.nifi;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.*;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.dbcp.DBCPService;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.flowfile.attributes.FragmentAttributes;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.JsonNodeFactory;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.sql.*;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;


@SideEffectFree
@SupportsBatching
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@Tags({"json", "sql", "database", "rdbms", "insert", "update", "delete", "relational", "flat"})
@CapabilityDescription("Converts a JSON-formatted FlowFile into an UPDATE, INSERT, or DELETE SQL statement. The incoming FlowFile is expected to be \"flat\" JSON message, meaning that it consists of a single JSON element and each field maps to a simple type. If a field maps to a JSON object, that JSON object will be interpreted as Text. If the input is an array of JSON elements, each element in the array is output as a separate FlowFile to the 'sql' relationship. Upon successful conversion, the original FlowFile is routed to the 'original' relationship and the SQL is routed to the 'sql' relationship.")
@WritesAttributes({@WritesAttribute(
        attribute = "mime.type",
        description = "Sets mime.type of FlowFile that is routed to 'sql' to 'text/plain'."
), @WritesAttribute(
        attribute = "<sql>.table",
        description = "Sets the <sql>.table attribute of FlowFile that is routed to 'sql' to the name of the table that is updated by the SQL statement. The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."
), @WritesAttribute(
        attribute = "<sql>.catalog",
        description = "If the Catalog name is set for this database, specifies the name of the catalog that the SQL statement will update. If no catalog is used, this attribute will not be added. The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."
), @WritesAttribute(
        attribute = "fragment.identifier",
        description = "All FlowFiles routed to the 'sql' relationship for the same incoming FlowFile (multiple will be output for the same incoming FlowFile if the incoming FlowFile is a JSON Array) will have the same value for the fragment.identifier attribute. This can then be used to correlate the results."
), @WritesAttribute(
        attribute = "fragment.count",
        description = "The number of SQL FlowFiles that were produced for same incoming FlowFile. This can be used in conjunction with the fragment.identifier attribute in order to know how many FlowFiles belonged to the same incoming FlowFile."
), @WritesAttribute(
        attribute = "fragment.index",
        description = "The position of this FlowFile in the list of outgoing FlowFiles that were all derived from the same incoming FlowFile. This can be used in conjunction with the fragment.identifier and fragment.count attributes to know which FlowFiles originated from the same incoming FlowFile and in what order the SQL FlowFiles were produced"
), @WritesAttribute(
        attribute = "<sql>.args.N.type",
        description = "The output SQL statements are parametrized in order to avoid SQL Injection Attacks. The types of the Parameters to use are stored in attributes named <sql>.args.1.type, <sql>.args.2.type, <sql>.args.3.type, and so on. The type is a number representing a JDBC Type constant. Generally, this is useful only for software to read and interpret but is added so that a processor such as PutSQL can understand how to interpret the values. The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."
), @WritesAttribute(
        attribute = "<sql>.args.N.value",
        description = "The output SQL statements are parametrized in order to avoid SQL Injection Attacks. The values of the Parameters to use are stored in the attributes named sql.args.1.value, sql.args.2.value, sql.args.3.value, and so on. Each of these attributes has a corresponding <sql>.args.N.type attribute that indicates how the value should be interpreted when inserting it into the database.The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."
)})
public class ConvertJSONToAllSQL extends AbstractProcessor {
    // Canonical statement-type tokens returned by getStatementType() and dispatched on in onTrigger().
    private static final String UPDATE_TYPE = "UPDATE";
    private static final String INSERT_TYPE = "INSERT";
    private static final String DELETE_TYPE = "DELETE";
    // Allowable values for the unmatched-field behavior property (JSON field with no matching DB column).
    static final AllowableValue IGNORE_UNMATCHED_FIELD = new AllowableValue("Ignore Unmatched Fields", "Ignore Unmatched Fields", "Any field in the JSON document that cannot be mapped to a column in the database is ignored");
    static final AllowableValue FAIL_UNMATCHED_FIELD = new AllowableValue("Fail", "Fail", "If the JSON document has any field that cannot be mapped to a column in the database, the FlowFile will be routed to the failure relationship");
    // Allowable values for the unmatched-column behavior property (DB column with no matching JSON field).
    static final AllowableValue IGNORE_UNMATCHED_COLUMN = new AllowableValue("Ignore Unmatched Columns", "Ignore Unmatched Columns", "Any column in the database that does not have a field in the JSON document will be assumed to not be required.  No notification will be logged");
    static final AllowableValue WARNING_UNMATCHED_COLUMN = new AllowableValue("Warn on Unmatched Columns", "Warn on Unmatched Columns", "Any column in the database that does not have a field in the JSON document will be assumed to not be required.  A warning will be logged");
    static final AllowableValue FAIL_UNMATCHED_COLUMN = new AllowableValue("Fail on Unmatched Columns", "Fail on Unmatched Columns", "A flow will fail if any column in the database that does not have a field in the JSON document.  An error will be logged");
    // DBCP connection pool used to look up column metadata, and the JSON field/values that select the statement type.
    static final PropertyDescriptor CONNECTION_POOL = (new PropertyDescriptor.Builder()).name("JDBC Connection Pool").description("Specifies the JDBC Connection Pool to use in order to convert the JSON message to a SQL statement. The Connection Pool is necessary in order to determine the appropriate database column types.").identifiesControllerService(DBCPService.class).required(true).build();
    static final PropertyDescriptor STATEMENT_TYPE_FIELD = (new PropertyDescriptor.Builder()).name("Statement Type Field").description("The field in the json which indicates statement Type").required(true).expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
    static final PropertyDescriptor STATEMENT_TYPE_INSERT = (new PropertyDescriptor.Builder()).name("Insert Field Value").description("The field value in the json which indicates insert statement Type").required(true).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
    static final PropertyDescriptor STATEMENT_TYPE_DELETE = (new PropertyDescriptor.Builder()).name("Delete Field Value").description("The field value in the json which indicates delete statement Type").required(true).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
    static final PropertyDescriptor STATEMENT_TYPE_UPDATE = (new PropertyDescriptor.Builder()).name("Update Field Value").description("The field value in the json which indicates update statement Type").required(true).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();

    // Remaining property descriptors and relationships. These are declared without initializers,
    // so they must be assigned in a static initializer block that lies outside this chunk of the file.
    static final PropertyDescriptor TABLE_NAME;
    static final PropertyDescriptor CATALOG_NAME;
    static final PropertyDescriptor SCHEMA_NAME;
    static final PropertyDescriptor TRANSLATE_FIELD_NAMES;
    static final PropertyDescriptor UNMATCHED_FIELD_BEHAVIOR;
    static final PropertyDescriptor UNMATCHED_COLUMN_BEHAVIOR;
    static final PropertyDescriptor UPDATE_KEY;
    static final PropertyDescriptor QUOTED_IDENTIFIERS;
    static final PropertyDescriptor QUOTED_TABLE_IDENTIFIER;
    static final PropertyDescriptor SQL_PARAM_ATTR_PREFIX;
    static final PropertyDescriptor TABLE_SCHEMA_CACHE_SIZE;
    static final Relationship REL_ORIGINAL;
    static final Relationship REL_SQL;
    static final Relationship REL_FAILURE;
    // Per-table schema cache keyed by (catalog, table); built in onScheduled(), consulted in onTrigger().
    private Cache<ConvertJSONToAllSQL.SchemaKey, ConvertJSONToAllSQL.TableSchema> schemaCache;

    // Default no-arg constructor; all initialization happens via property descriptors and onScheduled().
    public ConvertJSONToAllSQL() {
    }

    /**
     * Lists the supported configuration properties; the order here determines the
     * display order in the processor's configuration UI.
     *
     * @return a mutable list of all supported {@link PropertyDescriptor}s
     */
    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return new ArrayList<>(Arrays.asList(
                CONNECTION_POOL,
                STATEMENT_TYPE_FIELD,
                STATEMENT_TYPE_INSERT,
                STATEMENT_TYPE_DELETE,
                STATEMENT_TYPE_UPDATE,
                TABLE_NAME,
                CATALOG_NAME,
                SCHEMA_NAME,
                TRANSLATE_FIELD_NAMES,
                UNMATCHED_FIELD_BEHAVIOR,
                UNMATCHED_COLUMN_BEHAVIOR,
                UPDATE_KEY,
                QUOTED_IDENTIFIERS,
                QUOTED_TABLE_IDENTIFIER,
                SQL_PARAM_ATTR_PREFIX,
                TABLE_SCHEMA_CACHE_SIZE));
    }

    /**
     * @return the relationships this processor routes to: 'original' (the incoming
     *         FlowFile), 'sql' (one FlowFile per generated statement) and 'failure'
     */
    @Override
    public Set<Relationship> getRelationships() {
        // A fresh mutable set per call, matching the original behavior.
        return new HashSet<>(Arrays.asList(REL_ORIGINAL, REL_SQL, REL_FAILURE));
    }

    /**
     * Builds the bounded table-schema cache when the processor is scheduled.
     * Bounding the cache lets Caffeine evict schemas for rarely-used tables.
     *
     * @param context provides the configured Table Schema Cache Size property
     */
    @OnScheduled
    public void onScheduled(ProcessContext context) {
        // asInteger() returns an Integer; auto-unboxing makes the explicit intValue() call redundant.
        int tableSchemaCacheSize = context.getProperty(TABLE_SCHEMA_CACHE_SIZE).asInteger();
        this.schemaCache = Caffeine.newBuilder().maximumSize(tableSchemaCacheSize).build();
    }

    /**
     * Maps the value of the configured statement-type JSON field to one of the
     * canonical statement types.
     *
     * @param jsonNode            the JSON object for a single record
     * @param statementTypeField  name of the JSON field holding the statement-type marker (trimmed before use)
     * @param statementTypeInsert marker value meaning INSERT
     * @param statementTypeDel    marker value meaning DELETE
     * @param statementTypeUpdate marker value meaning UPDATE
     * @return {@code INSERT_TYPE}, {@code DELETE_TYPE} or {@code UPDATE_TYPE}, or {@code null}
     *         when the field's value matches none of the configured markers (callers skip the record)
     * @throws ProcessException if the JSON object does not contain the statement-type field
     */
    private String getStatementType(JsonNode jsonNode, String statementTypeField, String statementTypeInsert, String statementTypeDel, String statementTypeUpdate) {
        // Trim once instead of trimming on every access.
        String trimmedField = StringUtils.trim(statementTypeField);
        if (!jsonNode.has(trimmedField)) {
            throw new ProcessException("Statement Type Field '" + trimmedField + "' was not found in the JSON document");
        }
        String fieldValue = jsonNode.get(trimmedField).asText();
        if (StringUtils.equals(fieldValue, statementTypeInsert)) {
            return INSERT_TYPE;
        }
        if (StringUtils.equals(fieldValue, statementTypeDel)) {
            return DELETE_TYPE;
        }
        if (StringUtils.equals(fieldValue, statementTypeUpdate)) {
            return UPDATE_TYPE;
        }
        return null;
    }

    /**
     * Reads the FlowFile content as JSON (a single object or an array of objects),
     * generates one parameterized SQL statement per object based on each object's
     * statement-type field, routes each statement to 'sql', and routes the incoming
     * FlowFile to 'original'. Parse or conversion failures route the incoming
     * FlowFile to 'failure' (removing any SQL FlowFiles already created for it).
     */
    public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
        FlowFile flowFile = session.get();
        if (flowFile != null) {
            // Snapshot all configuration up front; expression language is evaluated against this FlowFile.
            boolean translateFieldNames = context.getProperty(TRANSLATE_FIELD_NAMES).asBoolean().booleanValue();
            boolean ignoreUnmappedFields = IGNORE_UNMATCHED_FIELD.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_FIELD_BEHAVIOR).getValue());
            String statementTypeField = context.getProperty(STATEMENT_TYPE_FIELD).getValue();
            String statementTypeInsert = context.getProperty(STATEMENT_TYPE_INSERT).getValue();
            String statementTypeDel = context.getProperty(STATEMENT_TYPE_DELETE).getValue();
            String statementTypeUpdate = context.getProperty(STATEMENT_TYPE_UPDATE).getValue();

            String updateKeys = context.getProperty(UPDATE_KEY).evaluateAttributeExpressions(flowFile).getValue();
            String catalog = context.getProperty(CATALOG_NAME).evaluateAttributeExpressions(flowFile).getValue();
            String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(flowFile).getValue();
            String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
            ConvertJSONToAllSQL.SchemaKey schemaKey = new ConvertJSONToAllSQL.SchemaKey(catalog, tableName);
            boolean failUnmappedColumns = FAIL_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());
            boolean warningUnmappedColumns = WARNING_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());
            boolean escapeColumnNames = context.getProperty(QUOTED_IDENTIFIERS).asBoolean().booleanValue();
            boolean quoteTableName = context.getProperty(QUOTED_TABLE_IDENTIFIER).asBoolean().booleanValue();
            String attributePrefix = context.getProperty(SQL_PARAM_ATTR_PREFIX).evaluateAttributeExpressions(flowFile).getValue();

            // Parse the entire content into a Jackson tree. The verbose close logic below is
            // a decompiled try-with-resources block around the BufferedInputStream.
            final ObjectMapper mapper = new ObjectMapper();
            final AtomicReference rootNodeRef = new AtomicReference((Object)null);
            try {
                session.read(flowFile, new InputStreamCallback() {
                    public void process(InputStream in) throws IOException {
                        InputStream bufferedIn = new BufferedInputStream(in);
                        Throwable var3 = null;

                        try {
                            rootNodeRef.set(mapper.readTree(bufferedIn));
                        } catch (Throwable var12) {
                            var3 = var12;
                            throw var12;
                        } finally {
                            if (bufferedIn != null) {
                                if (var3 != null) {
                                    try {
                                        bufferedIn.close();
                                    } catch (Throwable var11) {
                                        var3.addSuppressed(var11);
                                    }
                                } else {
                                    bufferedIn.close();
                                }
                            }

                        }

                    }
                });
            } catch (ProcessException var31) {
                // Content was not valid JSON: route the original FlowFile to failure and stop.
                this.getLogger().error("Failed to parse {} as JSON due to {}; routing to failure", new Object[]{flowFile, var31.toString()}, var31);
                session.transfer(flowFile, REL_FAILURE);
                return;
            }

            // Normalize the input: wrap a single JSON object into a one-element array so the
            // per-record loop below handles both shapes uniformly.
            JsonNode rootNode = (JsonNode)rootNodeRef.get();
            ArrayNode arrayNode;
            if (rootNode.isArray()) {
                arrayNode = (ArrayNode)rootNode;
            } else {
                JsonNodeFactory nodeFactory = JsonNodeFactory.instance;
                arrayNode = new ArrayNode(nodeFactory);
                arrayNode.add(rootNode);
            }

            String fragmentIdentifier = UUID.randomUUID().toString();
            // Track created SQL FlowFiles so they can be removed if a later record fails.
            Set<FlowFile> created = new HashSet();

            for(int i = 0; i < arrayNode.size(); ++i) {
                JsonNode jsonNode = arrayNode.get(i);
                HashMap attributes = new HashMap();
                String statementType = getStatementType(jsonNode,statementTypeField,statementTypeInsert,statementTypeDel,statementTypeUpdate);
                if(StringUtils.isEmpty(statementType)){
                    continue;         // Skip records whose statement-type value matches none of the configured markers.
                }
                // Primary-key metadata is only needed to build an UPDATE's WHERE clause when no explicit Update Keys are set.
                boolean includePrimaryKeys = "UPDATE".equals(statementType) && updateKeys == null;
                ConvertJSONToAllSQL.TableSchema schema;
                final String sql;
                try {
                    // NOTE(review): the cache key is only (catalog, table), yet the loaded schema depends on
                    // includePrimaryKeys, which varies per record — a schema first cached without primary-key
                    // info could later serve an UPDATE. Confirm this is intended.
                    schema = (ConvertJSONToAllSQL.TableSchema)this.schemaCache.get(schemaKey, (key) -> {
                        DBCPService dbcpService = (DBCPService)context.getProperty(CONNECTION_POOL).asControllerService(DBCPService.class);

                        try {
                            // Borrow a connection just long enough to read the table metadata
                            // (decompiled try-with-resources around the Connection).
                            Connection conn = dbcpService.getConnection(flowFile.getAttributes());
                            Throwable var10 = null;

                            ConvertJSONToAllSQL.TableSchema var11;
                            try {
                                var11 = ConvertJSONToAllSQL.TableSchema.from(conn, catalog, schemaName, tableName, translateFieldNames, includePrimaryKeys);
                            } catch (Throwable var21) {
                                var10 = var21;
                                throw var21;
                            } finally {
                                if (conn != null) {
                                    if (var10 != null) {
                                        try {
                                            conn.close();
                                        } catch (Throwable var20) {
                                            var10.addSuppressed(var20);
                                        }
                                    } else {
                                        conn.close();
                                    }
                                }

                            }

                            return var11;
                        } catch (SQLException var23) {
                            throw new ProcessException(var23);
                        }
                    });

                    // Build the fully-qualified table name: [catalog.][schema.]table
                    StringBuilder tableNameBuilder = new StringBuilder();
                    if (catalog != null) {
                        tableNameBuilder.append(catalog).append(".");
                    }

                    if (schemaName != null) {
                        tableNameBuilder.append(schemaName).append(".");
                    }

                    tableNameBuilder.append(tableName);
                    String fqTableName = tableNameBuilder.toString();
                    if ("INSERT".equals(statementType)) {
                        sql = this.generateInsert(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
                    } else if ("UPDATE".equals(statementType)) {
                        sql = this.generateUpdate(jsonNode, attributes, fqTableName, updateKeys, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
                    } else {
                        sql = this.generateDelete(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
                    }
                } catch (ProcessException var33) {
                    // Any record failing conversion fails the whole input: remove the SQL FlowFiles
                    // produced so far and route the incoming FlowFile to failure.
                    this.getLogger().error("Failed to convert {} to a SQL {} statement due to {}; routing to failure", new Object[]{flowFile, statementType, var33.toString()}, var33);
                    session.remove(created);
                    session.transfer(flowFile, REL_FAILURE);
                    return;
                }

                // Emit one SQL FlowFile per record, carrying the statement text as content
                // and the bind parameters as <prefix>.args.N.type/value attributes.
                FlowFile sqlFlowFile = session.create(flowFile);
                created.add(sqlFlowFile);
                sqlFlowFile = session.write(sqlFlowFile, new OutputStreamCallback() {
                    public void process(OutputStream out) throws IOException {
                        out.write(sql.getBytes(StandardCharsets.UTF_8));
                    }
                });
                attributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain");
                attributes.put(attributePrefix + ".table", tableName);
                // Fragment attributes are deliberately NOT overwritten here: the parent FlowFile's values
                // (typically produced by SplitJson) are inherited instead, so EnforceOrder can sort downstream.
//                attributes.put(FragmentAttributes.FRAGMENT_ID.key(), fragmentIdentifier);
//                attributes.put(FragmentAttributes.FRAGMENT_COUNT.key(), String.valueOf(arrayNode.size()));
//                attributes.put(FragmentAttributes.FRAGMENT_INDEX.key(), String.valueOf(i));
                if (catalog != null) {
                    attributes.put(attributePrefix + ".catalog", catalog);
                }

                sqlFlowFile = session.putAllAttributes(sqlFlowFile, attributes);
                session.transfer(sqlFlowFile, REL_SQL);
            }

            // Route the incoming FlowFile to 'original' with fragment attributes describing the split.
            FlowFile newFlowFile = FragmentAttributes.copyAttributesToOriginal(session, flowFile, fragmentIdentifier, arrayNode.size());
            session.transfer(newFlowFile, REL_ORIGINAL);
        }
    }

    /**
     * Collects the JSON object's field names, normalized the same way database
     * column names are, so field/column membership checks compare like-with-like.
     *
     * @param node                the JSON object whose field names to collect
     * @param translateFieldNames whether normalizeColumnName should translate names
     * @return the set of normalized field names
     */
    private Set<String> getNormalizedColumnNames(JsonNode node, boolean translateFieldNames) {
        Set<String> normalizedFieldNames = new HashSet<>();
        Iterator<String> fieldNameItr = node.getFieldNames();
        while (fieldNameItr.hasNext()) {
            normalizedFieldNames.add(normalizeColumnName(fieldNameItr.next(), translateFieldNames));
        }
        return normalizedFieldNames;
    }

    /**
     * Builds a parameterized INSERT statement for one JSON object.
     * <p>
     * Every mapped field contributes one "?" placeholder; the bind parameter's JDBC
     * type and value are recorded in {@code attributes} as
     * {@code <prefix>.args.N.type} / {@code <prefix>.args.N.value} (N is 1-based;
     * the value attribute is omitted for JSON nulls so PutSQL binds SQL NULL).
     *
     * @return the INSERT statement text
     * @throws ProcessException if a required column has no JSON value (when configured to fail),
     *                          if a JSON field maps to no column (when not ignoring), or if no
     *                          field at all maps to a column of the table
     */
    private String generateInsert(JsonNode rootNode, Map<String, String> attributes, String tableName, ConvertJSONToAllSQL.TableSchema schema, boolean translateFieldNames, boolean ignoreUnmappedFields, boolean failUnmappedColumns, boolean warningUnmappedColumns, boolean escapeColumnNames, boolean quoteTableName, String attributePrefix) {
        Set<String> normalizedFieldNames = this.getNormalizedColumnNames(rootNode, translateFieldNames);

        // Check that the JSON supplies a value for every column the schema marks as required.
        for (Object requiredObj : schema.getRequiredColumnNames()) {
            String requiredColName = (String) requiredObj;
            String normalizedColName = normalizeColumnName(requiredColName, translateFieldNames);
            if (!normalizedFieldNames.contains(normalizedColName)) {
                String missingColMessage = "JSON does not have a value for the Required column '" + requiredColName + "'";
                if (failUnmappedColumns) {
                    this.getLogger().error(missingColMessage);
                    throw new ProcessException(missingColMessage);
                }
                if (warningUnmappedColumns) {
                    this.getLogger().warn(missingColMessage);
                }
            }
        }

        StringBuilder sqlBuilder = new StringBuilder();
        int fieldCount = 0;
        sqlBuilder.append("INSERT INTO ");
        if (quoteTableName) {
            sqlBuilder.append(schema.getQuotedIdentifierString()).append(tableName).append(schema.getQuotedIdentifierString());
        } else {
            sqlBuilder.append(tableName);
        }
        sqlBuilder.append(" (");

        // Emit the column list; each mapped field also records its bind-parameter attributes.
        Iterator<String> fieldNames = rootNode.getFieldNames();
        while (fieldNames.hasNext()) {
            String fieldName = fieldNames.next();
            ConvertJSONToAllSQL.ColumnDescription desc = (ConvertJSONToAllSQL.ColumnDescription) schema.getColumns().get(normalizeColumnName(fieldName, translateFieldNames));
            if (desc == null && !ignoreUnmappedFields) {
                throw new ProcessException("Cannot map JSON field '" + fieldName + "' to any column in the database");
            }

            if (desc != null) {
                // Post-increment: fieldCount becomes the 1-based parameter index used in the attribute keys.
                if (fieldCount++ > 0) {
                    sqlBuilder.append(", ");
                }

                if (escapeColumnNames) {
                    sqlBuilder.append(schema.getQuotedIdentifierString()).append(desc.getColumnName()).append(schema.getQuotedIdentifierString());
                } else {
                    sqlBuilder.append(desc.getColumnName());
                }

                int sqlType = desc.getDataType();
                attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
                Integer colSize = desc.getColumnSize();
                JsonNode fieldNode = rootNode.get(fieldName);
                // A JSON null gets no value attribute, so downstream binds SQL NULL for this parameter.
                if (!fieldNode.isNull()) {
                    String fieldValue = createSqlStringValue(fieldNode, colSize, sqlType);
                    attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue);
                }
            }
        }

        sqlBuilder.append(") VALUES (");
        for (int i = 0; i < fieldCount; ++i) {
            if (i > 0) {
                sqlBuilder.append(", ");
            }
            sqlBuilder.append("?");
        }
        sqlBuilder.append(")");

        if (fieldCount == 0) {
            throw new ProcessException("None of the fields in the JSON map to the columns defined by the " + tableName + " table");
        }
        return sqlBuilder.toString();
    }

    /**
     * Renders a JSON field's value as the string form expected for a bind parameter
     * of the given JDBC type.
     *
     * @param fieldNode the JSON value (never a JSON null; callers skip nulls)
     * @param colSize   the column size, or null if unknown; character values longer
     *                  than this are truncated
     * @param sqlType   the JDBC type constant from {@link java.sql.Types}
     * @return the string to store in the {@code <prefix>.args.N.value} attribute
     */
    protected static String createSqlStringValue(JsonNode fieldNode, Integer colSize, int sqlType) {
        String fieldValue = fieldNode.asText();
        // Named java.sql.Types constants replace the raw JDBC type numbers the decompiler produced.
        switch (sqlType) {
            // Character types: truncate to the column size when it is known.
            case Types.LONGNVARCHAR:
            case Types.NCHAR:
            case Types.NVARCHAR:
            case Types.LONGVARCHAR:
            case Types.CHAR:
            case Types.VARCHAR:
                if (colSize != null && fieldValue.length() > colSize) {
                    fieldValue = fieldValue.substring(0, colSize);
                }
                break;
            // Numeric types: a JSON boolean is rendered as 1/0.
            case Types.BIT:
            case Types.TINYINT:
            case Types.BIGINT:
            case Types.NUMERIC:
            case Types.DECIMAL:
            case Types.INTEGER:
            case Types.SMALLINT:
            case Types.FLOAT:
            case Types.REAL:
            case Types.DOUBLE:
                if (fieldNode.isBoolean()) {
                    fieldValue = fieldNode.asBoolean() ? "1" : "0";
                }
                break;
            // BOOLEAN: normalize the text to "true"/"false". (The original fell through
            // into the empty date/time cases, which had no effect; break is equivalent.)
            case Types.BOOLEAN:
                fieldValue = Boolean.valueOf(fieldValue).toString();
                break;
            // Date/time and all other types pass through unchanged.
            case Types.DATE:
            case Types.TIME:
            case Types.TIMESTAMP:
            default:
                break;
        }

        return fieldValue;
    }

    /**
     * Builds a parameterized UPDATE statement for one JSON object.
     * <p>
     * Key columns come from the comma-separated {@code updateKeys} property, or from the
     * table's primary key when {@code updateKeys} is null. Non-key mapped fields populate
     * the SET clause; key fields populate the WHERE clause. Bind parameters are recorded
     * in {@code attributes} as {@code <prefix>.args.N.type} / {@code <prefix>.args.N.value}
     * with a single 1-based counter spanning both clauses.
     *
     * @throws ProcessException if no key columns can be determined, or if a JSON field
     *                          maps to no column while unmatched fields are not ignored
     */
    private String generateUpdate(JsonNode rootNode, Map<String, String> attributes, String tableName, String updateKeys, ConvertJSONToAllSQL.TableSchema schema, boolean translateFieldNames, boolean ignoreUnmappedFields, boolean failUnmappedColumns, boolean warningUnmappedColumns, boolean escapeColumnNames, boolean quoteTableName, String attributePrefix) {
        // Decompiled artifacts below: updateKeyNames is typed Object and cast to Set at each use,
        // and fieldCount is temporarily reused to hold the split array's length.
        Object updateKeyNames;
        int fieldCount;
        if (updateKeys == null) {
            // No explicit Update Keys: fall back to the table's primary-key columns.
            updateKeyNames = schema.getPrimaryKeyColumnNames();
        } else {
            updateKeyNames = new HashSet();
            String[] var14 = updateKeys.split(",");
            fieldCount = var14.length;

            for(int var16 = 0; var16 < fieldCount; ++var16) {
                String updateKey = var14[var16];
                ((Set)updateKeyNames).add(updateKey.trim());
            }
        }

        if (((Set)updateKeyNames).isEmpty()) {
            throw new ProcessException("Table '" + tableName + "' does not have a Primary Key and no Update Keys were specified");
        } else {
            StringBuilder sqlBuilder = new StringBuilder();
            fieldCount = 0;
            sqlBuilder.append("UPDATE ");
            if (quoteTableName) {
                sqlBuilder.append(schema.getQuotedIdentifierString()).append(tableName).append(schema.getQuotedIdentifierString());
            } else {
                sqlBuilder.append(tableName);
            }

            sqlBuilder.append(" SET ");
            Set<String> normalizedFieldNames = this.getNormalizedColumnNames(rootNode, translateFieldNames);
            Set<String> normalizedUpdateNames = new HashSet();
            Iterator fieldNames = ((Set)updateKeyNames).iterator();

            String fieldName;
            String normalizedColName;
            // Pass 1: normalize the key names and verify the JSON supplies a value for each key column.
            while(fieldNames.hasNext()) {
                fieldName = (String)fieldNames.next();
                fieldName = normalizeColumnName(fieldName, translateFieldNames);
                normalizedUpdateNames.add(fieldName);
                if (!normalizedFieldNames.contains(fieldName)) {
                    normalizedColName = "JSON does not have a value for the " + (updateKeys == null ? "Primary" : "Update") + "Key column '" + fieldName + "'";
                    if (failUnmappedColumns) {
                        this.getLogger().error(normalizedColName);
                        throw new ProcessException(normalizedColName);
                    }

                    if (warningUnmappedColumns) {
                        this.getLogger().warn(normalizedColName);
                    }
                }
            }

            fieldNames = rootNode.getFieldNames();

            String fieldValue;
            // Pass 2: build the SET clause from mapped non-key fields.
            while(fieldNames.hasNext()) {
                fieldName = (String)fieldNames.next();
                // NOTE(review): fieldName is overwritten with its normalized form here, and that
                // normalized name is later used for rootNode.get(fieldName) below. If
                // translateFieldNames alters the name, the lookup may return null and the
                // fieldNode.isNull() call would NPE — confirm against normalizeColumnName's behavior.
                fieldName = normalizeColumnName(fieldName, translateFieldNames);
                ConvertJSONToAllSQL.ColumnDescription desc = (ConvertJSONToAllSQL.ColumnDescription)schema.getColumns().get(fieldName);
                if (desc == null) {
                    if (!ignoreUnmappedFields) {
                        throw new ProcessException("Cannot map JSON field '" + fieldName + "' to any column in the database");
                    }
                } else if (!normalizedUpdateNames.contains(fieldName)) {
                    // Post-increment: fieldCount becomes the 1-based parameter index for the attribute keys.
                    if (fieldCount++ > 0) {
                        sqlBuilder.append(", ");
                    }

                    if (escapeColumnNames) {
                        sqlBuilder.append(schema.getQuotedIdentifierString()).append(desc.getColumnName()).append(schema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(desc.getColumnName());
                    }

                    sqlBuilder.append(" = ?");
                    int sqlType = desc.getDataType();
                    attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
                    Integer colSize = desc.getColumnSize();
                    JsonNode fieldNode = rootNode.get(fieldName);
                    // A JSON null gets no value attribute, so downstream binds SQL NULL.
                    if (!fieldNode.isNull()) {
                        fieldValue = createSqlStringValue(fieldNode, colSize, sqlType);
                        attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue);
                    }
                }
            }

            sqlBuilder.append(" WHERE ");
            fieldNames = rootNode.getFieldNames();
            // var29 counts WHERE-clause terms (for AND separators); fieldCount keeps numbering parameters.
            int var29 = 0;

            // Pass 3: build the WHERE clause from the key fields.
            while(fieldNames.hasNext()) {
                fieldName = (String)fieldNames.next();
                normalizedColName = normalizeColumnName(fieldName, translateFieldNames);
                ConvertJSONToAllSQL.ColumnDescription desc = (ConvertJSONToAllSQL.ColumnDescription)schema.getColumns().get(normalizedColName);
                if (desc != null && normalizedUpdateNames.contains(normalizedColName)) {
                    if (var29++ > 0) {
                        sqlBuilder.append(" AND ");
                    }

                    ++fieldCount;
                    if (escapeColumnNames) {
                        sqlBuilder.append(schema.getQuotedIdentifierString()).append(normalizedColName).append(schema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(normalizedColName);
                    }

                    sqlBuilder.append(" = ?");
                    int sqlType = desc.getDataType();
                    attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
                    Integer colSize = desc.getColumnSize();
                    // Unlike the SET clause, the original field name indexes the JSON here, and
                    // truncation is done inline rather than via createSqlStringValue.
                    fieldValue = rootNode.get(fieldName).asText();
                    if (colSize != null && fieldValue.length() > colSize.intValue()) {
                        fieldValue = fieldValue.substring(0, colSize.intValue());
                    }

                    attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue);
                }
            }

            return sqlBuilder.toString();
        }
    }

    /**
     * Builds a parameterized DELETE statement for {@code tableName} whose WHERE clause matches
     * every JSON field that maps to a table column. For each mapped field the JDBC type and the
     * (possibly truncated) value are recorded in {@code attributes} as
     * {@code <attributePrefix>.args.<n>.type} / {@code <attributePrefix>.args.<n>.value}.
     *
     * @param rootNode               flat JSON object whose fields supply the WHERE-clause values
     * @param attributes             mutable map that receives the sql.args.* attributes
     * @param tableName              target table name
     * @param schema                 cached schema used to resolve columns
     * @param translateFieldNames    if true, names are upper-cased and stripped of '_' before matching
     * @param ignoreUnmappedFields   if false, a JSON field with no matching column fails the conversion
     * @param failUnmappedColumns    if true, a required column missing from the JSON fails the conversion
     * @param warningUnmappedColumns if true, a required column missing from the JSON only logs a warning
     * @param escapeColumnNames      if true, column identifiers are wrapped in the DB quote string
     * @param quoteTableName         if true, the table identifier is wrapped in the DB quote string
     * @param attributePrefix        prefix for the generated attribute keys (normally "sql")
     * @return the DELETE statement text with '?' placeholders
     * @throws ProcessException if a required column is missing (and failUnmappedColumns is set),
     *                          an unmapped field is present (and ignoreUnmappedFields is not set),
     *                          or no JSON field maps to any column of the table
     */
    private String generateDelete(JsonNode rootNode, Map<String, String> attributes, String tableName,
                                  ConvertJSONToAllSQL.TableSchema schema, boolean translateFieldNames,
                                  boolean ignoreUnmappedFields, boolean failUnmappedColumns,
                                  boolean warningUnmappedColumns, boolean escapeColumnNames,
                                  boolean quoteTableName, String attributePrefix) {
        final Set<String> normalizedFieldNames = this.getNormalizedColumnNames(rootNode, translateFieldNames);

        // Verify that every required column (NOT NULL, no default, not auto-increment) has a JSON value.
        for (final String requiredColName : schema.getRequiredColumnNames()) {
            final String normalizedColName = normalizeColumnName(requiredColName, translateFieldNames);
            if (!normalizedFieldNames.contains(normalizedColName)) {
                final String message = "JSON does not have a value for the Required column '" + requiredColName + "'";
                if (failUnmappedColumns) {
                    this.getLogger().error(message);
                    throw new ProcessException(message);
                }
                if (warningUnmappedColumns) {
                    this.getLogger().warn(message);
                }
            }
        }

        final StringBuilder sqlBuilder = new StringBuilder();
        int fieldCount = 0;
        sqlBuilder.append("DELETE FROM ");
        if (quoteTableName) {
            sqlBuilder.append(schema.getQuotedIdentifierString()).append(tableName).append(schema.getQuotedIdentifierString());
        } else {
            sqlBuilder.append(tableName);
        }

        sqlBuilder.append(" WHERE ");
        final Iterator<String> fieldNames = rootNode.getFieldNames();
        while (fieldNames.hasNext()) {
            final String fieldName = fieldNames.next();
            final ConvertJSONToAllSQL.ColumnDescription desc =
                    schema.getColumns().get(normalizeColumnName(fieldName, translateFieldNames));
            if (desc == null && !ignoreUnmappedFields) {
                throw new ProcessException("Cannot map JSON field '" + fieldName + "' to any column in the database");
            }
            if (desc == null) {
                continue; // unmapped field, explicitly ignored
            }

            if (fieldCount++ > 0) {
                sqlBuilder.append(" AND ");
            }

            if (escapeColumnNames) {
                sqlBuilder.append(schema.getQuotedIdentifierString()).append(desc.getColumnName()).append(schema.getQuotedIdentifierString());
            } else {
                sqlBuilder.append(desc.getColumnName());
            }
            sqlBuilder.append(" = ?");

            // Record the JDBC type and value so a downstream PutSQL can bind the '?' parameter.
            // A JSON null leaves the .value attribute absent (parameter binds as SQL NULL downstream).
            final int sqlType = desc.getDataType();
            attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));
            final Integer colSize = desc.getColumnSize();
            final JsonNode fieldNode = rootNode.get(fieldName);
            if (!fieldNode.isNull()) {
                String fieldValue = fieldNode.asText();
                // Truncate to the column size so the value fits the column definition.
                if (colSize != null && fieldValue.length() > colSize) {
                    fieldValue = fieldValue.substring(0, colSize);
                }
                attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue);
            }
        }

        if (fieldCount == 0) {
            throw new ProcessException("None of the fields in the JSON map to the columns defined by the " + tableName + " table");
        }
        return sqlBuilder.toString();
    }

    /**
     * Normalizes a column/field name for matching: when translation is enabled the name is
     * upper-cased and all underscores are removed; otherwise the name is returned unchanged.
     */
    private static String normalizeColumnName(String colName, boolean translateColumnNames) {
        if (!translateColumnNames) {
            return colName;
        }
        return colName.toUpperCase().replace("_", "");
    }

    // One-time construction of the processor's property descriptors and relationships.
    static {
        TABLE_NAME = new PropertyDescriptor.Builder()
                .name("Table Name")
                .description("The name of the table that the statement should update")
                .required(true)
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .build();
        CATALOG_NAME = new PropertyDescriptor.Builder()
                .name("Catalog Name")
                .description("The name of the catalog that the statement should update. This may not apply for the database that you are updating. In this case, leave the field empty")
                .required(false)
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .build();
        SCHEMA_NAME = new PropertyDescriptor.Builder()
                .name("Schema Name")
                .description("The name of the schema that the table belongs to. This may not apply for the database that you are updating. In this case, leave the field empty")
                .required(false)
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .build();
        TRANSLATE_FIELD_NAMES = new PropertyDescriptor.Builder()
                .name("Translate Field Names")
                .description("If true, the Processor will attempt to translate JSON field names into the appropriate column names for the table specified. If false, the JSON field names must match the column names exactly, or the column will not be updated")
                .allowableValues("true", "false")
                .defaultValue("true")
                .build();
        UNMATCHED_FIELD_BEHAVIOR = new PropertyDescriptor.Builder()
                .name("Unmatched Field Behavior")
                .description("If an incoming JSON element has a field that does not map to any of the database table's columns, this property specifies how to handle the situation")
                .allowableValues(IGNORE_UNMATCHED_FIELD, FAIL_UNMATCHED_FIELD)
                .defaultValue(IGNORE_UNMATCHED_FIELD.getValue())
                .build();
        UNMATCHED_COLUMN_BEHAVIOR = new PropertyDescriptor.Builder()
                .name("Unmatched Column Behavior")
                .description("If an incoming JSON element does not have a field mapping for all of the database table's columns, this property specifies how to handle the situation")
                .allowableValues(IGNORE_UNMATCHED_COLUMN, WARNING_UNMATCHED_COLUMN, FAIL_UNMATCHED_COLUMN)
                .defaultValue(FAIL_UNMATCHED_COLUMN.getValue())
                .build();
        UPDATE_KEY = new PropertyDescriptor.Builder()
                .name("Update Keys")
                .description("A comma-separated list of column names that uniquely identifies a row in the database for UPDATE statements. If the Statement Type is UPDATE and this property is not set, the table's Primary Keys are used. In this case, if no Primary Key exists, the conversion to SQL will fail if Unmatched Column Behaviour is set to FAIL. This property is ignored if the Statement Type is INSERT")
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .required(false)
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                .build();
        QUOTED_IDENTIFIERS = new PropertyDescriptor.Builder()
                .name("jts-quoted-identifiers")
                .displayName("Quote Column Identifiers")
                .description("Enabling this option will cause all column names to be quoted, allowing you to use reserved words as column names in your tables.")
                .allowableValues("true", "false")
                .defaultValue("false")
                .build();
        QUOTED_TABLE_IDENTIFIER = new PropertyDescriptor.Builder()
                .name("jts-quoted-table-identifiers")
                .displayName("Quote Table Identifiers")
                .description("Enabling this option will cause the table name to be quoted to support the use of special characters in the table name")
                .allowableValues("true", "false")
                .defaultValue("false")
                .build();
        SQL_PARAM_ATTR_PREFIX = new PropertyDescriptor.Builder()
                .name("jts-sql-param-attr-prefix")
                .displayName("SQL Parameter Attribute Prefix")
                .description("The string to be prepended to the outgoing flow file attributes, such as <sql>.args.1.value, where <sql> is replaced with the specified value")
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                .addValidator(StandardValidators.NON_EMPTY_EL_VALIDATOR)
                .required(true)
                .defaultValue("sql")
                .build();
        TABLE_SCHEMA_CACHE_SIZE = new PropertyDescriptor.Builder()
                .name("table-schema-cache-size")
                .displayName("Table Schema Cache Size")
                .description("Specifies how many Table Schemas should be cached")
                .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
                .defaultValue("100")
                .required(true)
                .build();
        REL_ORIGINAL = new Relationship.Builder()
                .name("original")
                .description("When a FlowFile is converted to SQL, the original JSON FlowFile is routed to this relationship")
                .build();
        REL_SQL = new Relationship.Builder()
                .name("sql")
                .description("A FlowFile is routed to this relationship when its contents have successfully been converted into a SQL statement")
                .build();
        REL_FAILURE = new Relationship.Builder()
                .name("failure")
                .description("A FlowFile is routed to this relationship if it cannot be converted into a SQL statement. Common causes include invalid JSON content or the JSON content missing a required field (if using an INSERT statement type).")
                .build();
    }

    private static class SchemaKey {
        private final String catalog;
        private final String tableName;

        public SchemaKey(String catalog, String tableName) {
            this.catalog = catalog;
            this.tableName = tableName;
        }

        public int hashCode() {
            int result = 31 + (this.catalog == null ? 0 : this.catalog.hashCode());
            result = 31 * result + (this.tableName == null ? 0 : this.tableName.hashCode());
            return result;
        }

        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            } else if (obj == null) {
                return false;
            } else if (this.getClass() != obj.getClass()) {
                return false;
            } else {
                ConvertJSONToAllSQL.SchemaKey other = (ConvertJSONToAllSQL.SchemaKey)obj;
                if (this.catalog == null) {
                    if (other.catalog != null) {
                        return false;
                    }
                } else if (!this.catalog.equals(other.catalog)) {
                    return false;
                }

                if (this.tableName == null) {
                    if (other.tableName != null) {
                        return false;
                    }
                } else if (!this.tableName.equals(other.tableName)) {
                    return false;
                }

                return true;
            }
        }
    }

    /**
     * Immutable description of a single database column, built from one row of a
     * {@code DatabaseMetaData.getColumns()} result set.
     */
    private static class ColumnDescription {
        private final String columnName;
        private final int dataType;       // a java.sql.Types constant
        private final boolean required;   // NOT NULL, no default, and not auto-increment
        private final Integer columnSize; // null when the driver reported no size (0)

        private ColumnDescription(String columnName, int dataType, boolean required, Integer columnSize) {
            this.columnName = columnName;
            this.dataType = dataType;
            this.required = required;
            this.columnSize = columnSize;
        }

        public int getDataType() {
            return this.dataType;
        }

        public Integer getColumnSize() {
            return this.columnSize;
        }

        public String getColumnName() {
            return this.columnName;
        }

        public boolean isRequired() {
            return this.required;
        }

        /**
         * Reads the current row of a {@code getColumns()} result set into a ColumnDescription.
         * A column is "required" only when it is NOT NULL, has no default value, and is not
         * auto-increment.
         *
         * @throws SQLException if reading the column metadata fails
         */
        public static ConvertJSONToAllSQL.ColumnDescription from(ResultSet resultSet) throws SQLException {
            final ResultSetMetaData md = resultSet.getMetaData();
            final List<String> columns = new ArrayList<>();
            for (int i = 1; i <= md.getColumnCount(); i++) {
                columns.add(md.getColumnName(i));
            }

            final String defaultValue = resultSet.getString("COLUMN_DEF");
            final String columnName = resultSet.getString("COLUMN_NAME");
            final int dataType = resultSet.getInt("DATA_TYPE");
            final int colSize = resultSet.getInt("COLUMN_SIZE");

            // Some drivers return null for IS_NULLABLE. Treat null like "" ("unknown"), which
            // matches the existing empty-string handling and avoids an NPE; unknown nullability
            // means the column is not treated as required.
            final String nullableValue = resultSet.getString("IS_NULLABLE");
            final boolean isNullable = nullableValue == null
                    || nullableValue.isEmpty()
                    || "YES".equalsIgnoreCase(nullableValue);

            // IS_AUTOINCREMENT is optional in the JDBC spec; only read it when the driver provides it.
            String autoIncrementValue = "NO";
            if (columns.contains("IS_AUTOINCREMENT")) {
                autoIncrementValue = resultSet.getString("IS_AUTOINCREMENT");
            }
            final boolean isAutoIncrement = "YES".equalsIgnoreCase(autoIncrementValue);

            final boolean required = !isNullable && !isAutoIncrement && defaultValue == null;
            return new ConvertJSONToAllSQL.ColumnDescription(columnName, dataType, required,
                    colSize == 0 ? null : colSize);
        }
    }

    /**
     * Cached schema information for a single table: its columns (keyed by normalized name),
     * the required column names, the primary-key column names (normalized), and the JDBC
     * identifier quote string for the database.
     */
    private static class TableSchema {
        private final List<String> requiredColumnNames;
        private final Set<String> primaryKeyColumnNames;
        private final Map<String, ConvertJSONToAllSQL.ColumnDescription> columns = new HashMap<>();
        private final String quotedIdentifierString;

        private TableSchema(List<ConvertJSONToAllSQL.ColumnDescription> columnDescriptions, boolean translateColumnNames,
                            Set<String> primaryKeyColumnNames, String quotedIdentifierString) {
            this.primaryKeyColumnNames = primaryKeyColumnNames;
            this.quotedIdentifierString = quotedIdentifierString;
            this.requiredColumnNames = new ArrayList<>();
            for (final ConvertJSONToAllSQL.ColumnDescription desc : columnDescriptions) {
                // Columns are looked up by normalized name; required names keep their original form.
                this.columns.put(ConvertJSONToAllSQL.normalizeColumnName(desc.columnName, translateColumnNames), desc);
                if (desc.isRequired()) {
                    this.requiredColumnNames.add(desc.columnName);
                }
            }
        }

        public Map<String, ConvertJSONToAllSQL.ColumnDescription> getColumns() {
            return this.columns;
        }

        public List<String> getRequiredColumnNames() {
            return this.requiredColumnNames;
        }

        public Set<String> getPrimaryKeyColumnNames() {
            return this.primaryKeyColumnNames;
        }

        public String getQuotedIdentifierString() {
            return this.quotedIdentifierString;
        }

        /**
         * Reads the table's column (and optionally primary-key) metadata from the database.
         *
         * @param conn                 open JDBC connection
         * @param catalog              catalog name, or null
         * @param schema               schema name, or null
         * @param tableName            table whose metadata is read
         * @param translateColumnNames whether to normalize names (uppercase, strip '_')
         * @param includePrimaryKeys   whether to also fetch primary-key column names
         * @throws SQLException if any metadata call fails
         */
        public static ConvertJSONToAllSQL.TableSchema from(Connection conn, String catalog, String schema,
                String tableName, boolean translateColumnNames, boolean includePrimaryKeys) throws SQLException {
            final DatabaseMetaData dmd = conn.getMetaData();

            final List<ConvertJSONToAllSQL.ColumnDescription> cols = new ArrayList<>();
            try (ResultSet colrs = dmd.getColumns(catalog, schema, tableName, "%")) {
                while (colrs.next()) {
                    cols.add(ConvertJSONToAllSQL.ColumnDescription.from(colrs));
                }
            }

            final Set<String> primaryKeyColumns = new HashSet<>();
            if (includePrimaryKeys) {
                // NOTE(review): the schema argument is intentionally not passed to getPrimaryKeys
                // (null is used), matching the original behavior — confirm whether it should be.
                try (ResultSet pkrs = dmd.getPrimaryKeys(catalog, null, tableName)) {
                    while (pkrs.next()) {
                        final String colName = pkrs.getString("COLUMN_NAME");
                        primaryKeyColumns.add(ConvertJSONToAllSQL.normalizeColumnName(colName, translateColumnNames));
                    }
                }
            }

            return new ConvertJSONToAllSQL.TableSchema(cols, translateColumnNames, primaryKeyColumns,
                    dmd.getIdentifierQuoteString());
        }
    }
}
