package com.dcits.nifi;

import org.apache.nifi.annotation.behavior.*;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.dbcp.DBCPService;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.*;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.StreamCallback;
import org.apache.nifi.processor.util.StandardValidators;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.*;

/**
 * Splits a configured SELECT into paged queries for PostgreSQL.
 *
 * <p>For each incoming (trigger) FlowFile, this processor runs a
 * {@code count(1)} over the configured query, then emits one FlowFile per page
 * whose content is {@code select * from (<query>) tmp LIMIT <pageSize> OFFSET <n>}.
 * The emitted FlowFiles carry the standard {@code fragment.identifier},
 * {@code fragment.count} and {@code fragment.index} attributes so a downstream
 * merge can reassemble the group. The trigger FlowFile itself is dropped.
 */
@SideEffectFree
@SupportsBatching
@Tags({"postgresql","page"})
@CapabilityDescription("generate postgresql page query with limit,offset")

@WritesAttributes({@WritesAttribute(
        attribute = "fragment.identifier",
        description = "the group fragment.identifier of the query sqls"
), @WritesAttribute(
        attribute = "fragment.count",
        description = "the fragment.count of the query sql"
), @WritesAttribute(
        attribute = "fragment.index",
        description = "the fragment.index of the sql"
)})
public class GeneratePgPageQuerySql extends AbstractProcessor {

    /** Connection pool used for the count(1) query. */
    static final PropertyDescriptor CONNECTION_POOL = new PropertyDescriptor.Builder()
            .name("JDBC Connection Pool")
            .description("the jdbc connection pool to query db")
            .identifiesControllerService(DBCPService.class)
            .required(true)
            .build();

    /** The base SELECT statement to be paged. Concatenated into generated SQL verbatim. */
    static final PropertyDescriptor Sql_selece_query = new PropertyDescriptor.Builder().name("SQL select query")
            .description("the query sql ")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    /**
     * Rows per page. Must be strictly positive: a zero/negative size would
     * cause a division by zero (or a never-advancing offset loop) in onTrigger,
     * so POSITIVE_INTEGER_VALIDATOR is used instead of plain INTEGER_VALIDATOR.
     */
    static final PropertyDescriptor Page_size = new PropertyDescriptor.Builder().name("Page size")
            .description("page size")
            .required(true)
            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
            .defaultValue("10000")
            .build();

    /** All generated page-query FlowFiles are routed here. */
    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("generate sqls success")
            .build();

    private List<PropertyDescriptor> descriptors;

    private Set<Relationship> relationships;

    @Override
    protected void init(final ProcessorInitializationContext context) {
        final List<PropertyDescriptor> props = new ArrayList<>();
        props.add(CONNECTION_POOL);
        props.add(Sql_selece_query);
        props.add(Page_size);
        this.descriptors = Collections.unmodifiableList(props);

        final Set<Relationship> rels = new HashSet<>();
        rels.add(REL_SUCCESS);
        this.relationships = Collections.unmodifiableSet(rels);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return this.relationships;
    }

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return descriptors;
    }

    /**
     * Accepts arbitrary dynamic properties (supporting flowfile-attribute
     * expression language) so users can attach extra configuration.
     */
    @Override
    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
        return new PropertyDescriptor.Builder()
                .name(propertyDescriptorName)
                .required(false)
                .addValidator(StandardValidators.ATTRIBUTE_KEY_PROPERTY_NAME_VALIDATOR)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                .dynamic(true)
                .build();
    }

    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        // Nothing to prepare; all work happens per-trigger in onTrigger.
    }

    /**
     * Counts the rows of the configured query and fans out one FlowFile per
     * page. On any failure the session is rolled back (via ProcessException)
     * so the trigger FlowFile stays queued and is retried.
     *
     * @throws ProcessException if the count query or FlowFile generation fails
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        FlowFile originalFlowfile = session.get();
        if (originalFlowfile == null) {
            return;
        }

        final DBCPService dbcpService = context.getProperty(CONNECTION_POOL).asControllerService(DBCPService.class);
        final String querySql = context.getProperty(Sql_selece_query).getValue();
        final int pageSize = Integer.parseInt(context.getProperty(Page_size).getValue());
        if (pageSize <= 0) {
            // Defense in depth; the validator should already reject this.
            throw new ProcessException("Page size must be a positive integer: " + pageSize);
        }

        // NOTE(review): querySql is spliced into SQL unescaped. It is an
        // operator-configured property (not flowfile content), but a bad query
        // still reaches the database as-is.
        final String countSql = "select count(1) from ( " + querySql + " ) tmp";
        final long group = System.currentTimeMillis();   // shared fragment.identifier for this batch
        final List<FlowFile> flowFiles = new ArrayList<>();
        int total = 0;
        int count = 0;
        int index = 0;
        try (Connection conn = dbcpService.getConnection();
             Statement stmt = conn.createStatement()) {
            try (ResultSet countSet = stmt.executeQuery(countSql)) {
                if (countSet.next()) {
                    total = countSet.getInt(1);
                    count = (total / pageSize) + (total % pageSize == 0 ? 0 : 1);  // ceil(total / pageSize)
                }
            }

            int offset = 0;
            while (offset < total) {
                final String sql = "select * from (" + querySql + ") tmp LIMIT " + pageSize + " OFFSET " + offset;

                // NiFi FlowFiles are immutable: every mutation returns a NEW
                // reference which MUST be kept. The original code discarded
                // the return values of putAttribute/write, so the transferred
                // FlowFiles carried neither the fragment.* attributes nor
                // (reliably) the written content.
                FlowFile flowFile = session.create(originalFlowfile);
                final Map<String, String> attributes = new HashMap<>();
                attributes.put("fragment.identifier", String.valueOf(group));
                attributes.put("fragment.count", String.valueOf(count));
                attributes.put("fragment.index", String.valueOf(index));
                attributes.put("filename", group + "" + index);
                flowFile = session.putAllAttributes(flowFile, attributes);
                flowFile = session.write(flowFile,
                        (in, out) -> out.write(sql.getBytes(StandardCharsets.UTF_8)));

                flowFiles.add(flowFile);
                offset += pageSize;
                index += 1;
            }

            session.remove(originalFlowfile);   // drop the trigger flowfile; only the page queries go downstream
            session.transfer(flowFiles, REL_SUCCESS);
        } catch (Exception e) {
            getLogger().error("生成partition，查询SQL失败", e);
            // Rethrow so the framework rolls back the whole session (discarding
            // any created FlowFiles) and requeues the original FlowFile. The
            // previous code swallowed the error and left the original FlowFile
            // unaccounted for, which fails at session commit.
            throw new ProcessException("failed to generate page query sqls", e);
        }
    }
}
