package com.dcits.nifi;


import org.apache.nifi.annotation.behavior.*;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.dbcp.DBCPService;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.*;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.Statement;
import java.util.*;

@SideEffectFree
@SupportsBatching
@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED)
@Tags({"postgresql","copy"})
@CapabilityDescription("copy data from postgresql with query sql")
@DynamicProperty(
        name = "the prefix of the output file name",
        value = "The query sql,the column name must be lower case",
        expressionLanguageScope = ExpressionLanguageScope.NONE,
        description = "the query will be used to copy data from postgresql"
)
@WritesAttributes({@WritesAttribute(
        attribute = "prefix",
        description = "this prefix of the output file name"
), @WritesAttribute(
        attribute = "fileNmae",
        description = "the output file name"
), @WritesAttribute(
        attribute = "directory",
        description = "the output directory"
)})
public class PostgreCopyWithQuerysqlExport extends AbstractProcessor {
    static final PropertyDescriptor CONNECTION_POOL = (new PropertyDescriptor.Builder()).name("JDBC Connection Pool").description("Specifies the JDBC Connection Pool to use in order to  execute copy").identifiesControllerService(DBCPService.class).required(true).build();
    static final PropertyDescriptor OUTPUT_DIR = new PropertyDescriptor.Builder().name("Output Directory")
            .description("the output directory of the csv file")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();


    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Success to copy data with the sql")
            .build();

    public static final Relationship REL_FAILURE = new Relationship.Builder()
            .name("fail")
            .description("Fail to copy data with the sql")
            .build();

    private List<PropertyDescriptor> descriptors;

    private Set<Relationship> relationships;

    @Override
    protected void init(final ProcessorInitializationContext context) {
        final List<PropertyDescriptor> descriptors = new ArrayList<PropertyDescriptor>();
        descriptors.add(CONNECTION_POOL);
        descriptors.add(OUTPUT_DIR);
        this.descriptors = Collections.unmodifiableList(descriptors);

        final Set<Relationship> relationships = new HashSet<Relationship>();
        relationships.add(REL_SUCCESS);
        relationships.add(REL_FAILURE);
        this.relationships = Collections.unmodifiableSet(relationships);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return this.relationships;
    }

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return descriptors;
    }

    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(String propertyDescriptorName) {
        org.apache.nifi.components.PropertyDescriptor.Builder propertyBuilder = (new org.apache.nifi.components.PropertyDescriptor.Builder()).name(propertyDescriptorName).required(false).addValidator(StandardValidators.ATTRIBUTE_KEY_PROPERTY_NAME_VALIDATOR).expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES).dynamic(true);
        return   propertyBuilder.addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
    }

    @OnScheduled
    public void onScheduled(final ProcessContext context) {

    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        final  DBCPService dbcpService = (DBCPService)context.getProperty(CONNECTION_POOL).asControllerService(DBCPService.class);
        final String outputDir = context.getProperty(OUTPUT_DIR).getValue();

        Map<PropertyDescriptor, String> properties = context.getProperties();
        properties.remove(CONNECTION_POOL);
        properties.remove(OUTPUT_DIR);
        if(properties.isEmpty()){
            return;
        }
        Object[] propertyDescriptors = properties.keySet().toArray();
        for(int i=0;i<propertyDescriptors.length;i++) {
            PropertyDescriptor propertyDescriptor = (PropertyDescriptor) propertyDescriptors[i];
            String prefix = propertyDescriptor.getName();
            String fileName = prefix + "_" + System.currentTimeMillis() + ".csv";
            String querySql = properties.get(propertyDescriptor);
            FlowFile flowFile = session.create();
            if (flowFile == null) {
                getLogger().warn("导出[" + prefix + "]时,创建FlowFile失败");
                i--;
                continue;
            }
            session.write(flowFile, outputStream -> outputStream.write(querySql.getBytes("UTF-8")));
            session.putAttribute(flowFile, "prefix", prefix);
            session.putAttribute(flowFile, "fileName", fileName);
            session.putAttribute(flowFile, "directory", outputDir);
            try (Connection conn = dbcpService.getConnection(); Statement stmt = conn.createStatement()) {
                //COPY (select "ACCESSORY_ID" accessory_id, "tableName" tablename,"DOSSIER_ID" dossier_id,
                // "ACCESSORY_NAME" accessory_name,"ACCESSORY_SAVENAME" accessory_savename,"ACCESSORY_TYPE"
                // accessory_type,"DOMAIN_ID" domain_id from ass_accessory_inner )
                // TO '/tmp/postgres/ass_accessory/ass_accessory.csv'  WITH csv;
                stmt.execute("COPY (" + querySql + ") TO '" + outputDir + File.separatorChar + fileName + "'  WITH csv");
                session.transfer(flowFile, REL_SUCCESS);
            } catch (Exception e) {
                session.transfer(flowFile, REL_FAILURE);
                getLogger().error("生成partition，查询SQL失败", e);
            }
        }
    }
}
