package org.apache.nifi.processors.huawei.dli;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.huaweicloud.sdk.dli.v1.model.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.huawei.abstractprocessor.AbstractDLIProcessor;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.apache.nifi.processors.huawei.common.PropertyDescriptors.*;

/**
 * Submits SQL statements to a Huawei Cloud DLI queue as a SQL job (DDL, DCL, IMPORT,
 * QUERY or INSERT). When status checking is enabled, the processor polls the job until
 * it reaches a terminal state (FINISHED, FAILED or CANCELLED) and routes the FlowFile
 * accordingly; otherwise the FlowFile is routed to success as soon as the job has been
 * submitted.
 */
@SupportsBatching
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@Tags({"HuaweiCloud", "DLI",  "CreateSqlJob"})
@CapabilityDescription("This API is used to submit jobs to the queue by executing SQL statements. Jobs include the following types: DDL, DCL, IMPORT, QUERY, and INSERT. " +
        "Among them, the function of IMPORT is the same as that of importing data, the difference is only in the way of implementation.")
public class DLICreateSqlJob extends AbstractDLIProcessor {

    // ObjectMapper is thread-safe after configuration and expensive to construct;
    // share a single instance instead of building one per onTrigger invocation.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // Fallback polling interval (milliseconds) when the interval property is unset.
    private static final int DEFAULT_STATUS_CHECK_INTERVAL_MS = 2000;

    public static final PropertyDescriptor DLI_PROJECT_ID = new PropertyDescriptor.Builder()
            .name("project_id")
            .displayName("Project ID")
            .description("Project ID")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .required(true)
            .build();

    public static final PropertyDescriptor DLI_SQL_QUERY = new PropertyDescriptor.Builder()
            .name("sql")
            .displayName("SQL Query")
            .description("The SQL statement to be executed.")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .build();

    public static final PropertyDescriptor DLI_CURRENT_DB = new PropertyDescriptor.Builder()
            .name("currentdb")
            .displayName("Current DB")
            .description("The database where the SQL statement is executed.")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .required(false)
            // onTrigger evaluates this against FlowFile attributes, so declare EL support;
            // without it the framework returns the raw value unevaluated
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .build();

    public static final PropertyDescriptor DLI_QUEUE_NAME = new PropertyDescriptor.Builder()
            .name("queue_name")
            .displayName("Queue Name")
            .description("The queue name of the job to be submitted.")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .build();

    public static final PropertyDescriptor DLI_CONF = new PropertyDescriptor.Builder()
            .name("conf")
            .displayName("Configurations")
            .description("The configuration parameters for this job in the form of \"key=value\" in an array.")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .build();

    public static final PropertyDescriptor DLI_TAGS = new PropertyDescriptor.Builder()
            .name("tags")
            .displayName("Tags")
            .description("Tags for the job in the form of key, value map in an array.")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .build();

    public static final PropertyDescriptor JOB_STATUS_CHECK_INTERVAL = new PropertyDescriptor.Builder()
            .name("jobStatusCheckInterval")
            .displayName("Job Status Check Interval")
            .description("Amount of time that get spends between status check in milliseconds.")
            // the value is parsed with Integer.parseInt, so reject non-numeric input up front
            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
            .defaultValue("2000")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .build();

    public static final PropertyDescriptor JOB_STATUS_CHECK_ENABLE = new PropertyDescriptor.Builder()
            .name("jobStatusCheckEnable")
            .displayName("Job Status Check Enable")
            .description("Whether the user wants to check job status or not.")
            .allowableValues("true", "false")
            .defaultValue("false")
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .build();

    private static final List<PropertyDescriptor> properties = List.of(ACCESS_KEY, SECRET_KEY, DLI_PROJECT_ID, DLI_SQL_QUERY, DLI_CURRENT_DB,
            DLI_QUEUE_NAME, DLI_CONF, DLI_TAGS, DLI_REGION, JOB_STATUS_CHECK_INTERVAL, JOB_STATUS_CHECK_ENABLE);

    @Override
    public List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return properties;
    }

    /** Submits the SQL job to DLI via the SDK client provided by the abstract base class. */
    private CreateSqlJobResponse createSqlJob(CreateSqlJobRequest request) {
        return dliClient.createSqlJob(request);
    }

    /** Fetches the current status of a previously submitted SQL job. */
    private ShowSqlJobStatusResponse showSqlJobStatus(ShowSqlJobStatusRequest request) {
        return dliClient.showSqlJobStatus(request);
    }

    /**
     * Builds the job request from the FlowFile-evaluated properties, submits it, optionally
     * polls until the job completes, and routes the FlowFile to success or failure.
     */
    @Override
    public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {

        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }

        try {
            CreateSqlJobRequest createSqlJobRequest = new CreateSqlJobRequest();
            createSqlJobRequest.withBody(buildRequestBody(context, flowFile));

            CreateSqlJobResponse response = createSqlJob(createSqlJobRequest);

            // If polling is disabled, the job is considered successful once submitted.
            boolean jobSucceeded = true;
            if (isStatusCheckEnabled(context, flowFile)) {
                jobSucceeded = waitForJobCompletion(response.getJobId(), getStatusCheckInterval(context, flowFile));
            }

            // Transfer exactly once, after any polling has finished, so an exception thrown
            // mid-poll can never leave the FlowFile both transferred and re-penalized.
            if (jobSucceeded) {
                getLogger().info("Successfully executed the sql for {}", new Object[]{flowFile});
                session.transfer(flowFile, REL_SUCCESS);
                session.getProvenanceReporter().send(flowFile, response.getJobId());
            } else {
                getLogger().error("Failed to execute the sql for {}", new Object[]{flowFile});
                session.transfer(flowFile, REL_FAILURE);
            }

            getLogger().info("Successfully created the sql job for {}", new Object[]{flowFile});

        } catch (InterruptedException e) {
            // Restore the interrupt flag so the framework can observe the interruption.
            Thread.currentThread().interrupt();
            getLogger().error("Interrupted while waiting for the sql job for {}", new Object[]{flowFile}, e);
            flowFile = session.penalize(flowFile);
            session.transfer(flowFile, REL_FAILURE);
        } catch (Exception e) {
            getLogger().error("Failed to create the sql job for {} due to {}", new Object[]{flowFile, e});
            flowFile = session.penalize(flowFile);
            session.transfer(flowFile, REL_FAILURE);
        }
    }

    /**
     * Assembles the SQL job request body from the processor properties, evaluated against
     * the given FlowFile's attributes.
     *
     * @throws IOException if the optional JSON-encoded configuration or tag properties
     *                     cannot be parsed
     */
    private CreateSqlJobRequestBody buildRequestBody(ProcessContext context, FlowFile flowFile) throws IOException {

        CreateSqlJobRequestBody body = new CreateSqlJobRequestBody();
        body.setSql(context.getProperty(DLI_SQL_QUERY).evaluateAttributeExpressions(flowFile).getValue());
        body.setCurrentdb(context.getProperty(DLI_CURRENT_DB).evaluateAttributeExpressions(flowFile).getValue());
        body.setQueueName(context.getProperty(DLI_QUEUE_NAME).evaluateAttributeExpressions(flowFile).getValue());

        String configurations = context.getProperty(DLI_CONF).evaluateAttributeExpressions(flowFile).getValue();
        if (!StringUtils.isEmpty(configurations)) {
            // Expected form: a JSON array of "key=value" strings.
            @SuppressWarnings("unchecked")
            List<String> configurationList = OBJECT_MAPPER.readValue(configurations, List.class);
            body.setConf(configurationList);
        }

        String tags = context.getProperty(DLI_TAGS).evaluateAttributeExpressions(flowFile).getValue();
        if (!StringUtils.isEmpty(tags)) {
            // Expected form: a JSON object; each key/value pair becomes one SDK tag entity.
            @SuppressWarnings("unchecked")
            Map<String, String> tagsMap = OBJECT_MAPPER.readValue(tags, HashMap.class);
            List<TmsTagEntity> tmsTagEntities = new ArrayList<>(tagsMap.size());
            tagsMap.forEach((key, value) -> {
                TmsTagEntity tmsTagEntity = new TmsTagEntity();
                tmsTagEntity.setKey(key);
                tmsTagEntity.setValue(value);
                tmsTagEntities.add(tmsTagEntity);
            });
            body.setTags(tmsTagEntities);
        }

        return body;
    }

    /** Whether the user asked the processor to poll the job until completion (defaults to false). */
    private boolean isStatusCheckEnabled(ProcessContext context, FlowFile flowFile) {
        String value = context.getProperty(JOB_STATUS_CHECK_ENABLE).evaluateAttributeExpressions(flowFile).getValue();
        return Boolean.parseBoolean(value);
    }

    /** Polling interval in milliseconds, falling back to the 2000 ms default when unset. */
    private int getStatusCheckInterval(ProcessContext context, FlowFile flowFile) {
        String value = context.getProperty(JOB_STATUS_CHECK_INTERVAL).evaluateAttributeExpressions(flowFile).getValue();
        return StringUtils.isEmpty(value) ? DEFAULT_STATUS_CHECK_INTERVAL_MS : Integer.parseInt(value);
    }

    /**
     * Polls the job status until it reaches a terminal state.
     *
     * @param jobId          identifier of the submitted SQL job
     * @param intervalMillis time to wait between status checks
     * @return {@code true} if the job FINISHED, {@code false} if it FAILED or was CANCELLED
     * @throws InterruptedException if the polling thread is interrupted while sleeping
     */
    private boolean waitForJobCompletion(String jobId, int intervalMillis) throws InterruptedException {

        ShowSqlJobStatusRequest showSqlJobStatusRequest = new ShowSqlJobStatusRequest();
        showSqlJobStatusRequest.setJobId(jobId);

        while (true) {
            ShowSqlJobStatusResponse showSqlJobStatusResponse = showSqlJobStatus(showSqlJobStatusRequest);
            ShowSqlJobStatusResponse.StatusEnum status = showSqlJobStatusResponse.getStatus();

            // Return before sleeping: there is no point waiting once a terminal state is reached.
            if (status == ShowSqlJobStatusResponse.StatusEnum.FINISHED) {
                return true;
            }
            if (status == ShowSqlJobStatusResponse.StatusEnum.FAILED ||
                    status == ShowSqlJobStatusResponse.StatusEnum.CANCELLED) {
                return false;
            }

            Thread.sleep(intervalMillis);
        }
    }
}
