package com.dcits.nifi;

import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.*;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.*;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.*;

@Tags({"hive","partition"})
@CapabilityDescription("add partition for hive table according datetime,supporting only one partition,")
@DynamicProperty(
        name = "The hive table name",
        value = "The query sql,the column name must be lower case",
        expressionLanguageScope = ExpressionLanguageScope.NONE,
        description = "the query will be used to query data from source db,and fill the hive table"
)
@ReadsAttributes({@ReadsAttribute(attribute = "", description = "")})
@WritesAttributes({@WritesAttribute(attribute = "partitiion_directory", description = "the directory of partition created"),
        @WritesAttribute(attribute = "tableName", description = "the hive table name")})
public class AddDateTimePartitionWithQuerySql extends AbstractProcessor {

    /** Hive JDBC driver class, loaded reflectively before opening a connection. */
    private static final String driverClass = "org.apache.hive.jdbc.HiveDriver";

    /** JDBC URL of the Hive server the partitions are created on. */
    public static final PropertyDescriptor HIVE_JDBC_URL = new PropertyDescriptor
            .Builder().name("Hive Url")
            .description("the jdbc url of hive,e.g. jdbc:hive2://host:10000/dbname")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    /** Optional user name for the Hive connection. */
    public static final PropertyDescriptor HIVE_USER = new PropertyDescriptor
            .Builder().name("Hive User")
            .description("the user name of hive database")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    /** Optional password for the Hive connection. */
    public static final PropertyDescriptor HIVE_PASSWD = new PropertyDescriptor
            .Builder().name("Hive Passwd")
            .description("the passwd of hive database")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    /** Name of the single partition column shared by all configured tables. */
    public static final PropertyDescriptor HIVE_PARTITION_FIELD = new PropertyDescriptor
            .Builder().name("Partition Field")
            .description("the partition field in hive table")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    /** SimpleDateFormat pattern used to render "now" as the partition value. */
    public static final PropertyDescriptor HIVE_PARTITION_FIELD_FORMAT = new PropertyDescriptor
            .Builder().name("Partition Field Format")
            .description("the datetime format of partition,default yyyyMMdd")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .defaultValue("yyyyMMdd")
            .build();

    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Success, add partition successfully")
            .build();

    private List<PropertyDescriptor> descriptors;

    private Set<Relationship> relationships;

    @Override
    protected void init(final ProcessorInitializationContext context) {
        final List<PropertyDescriptor> descriptors = new ArrayList<>();
        descriptors.add(HIVE_JDBC_URL);
        descriptors.add(HIVE_USER);
        descriptors.add(HIVE_PASSWD);
        descriptors.add(HIVE_PARTITION_FIELD);
        descriptors.add(HIVE_PARTITION_FIELD_FORMAT);
        this.descriptors = Collections.unmodifiableList(descriptors);

        final Set<Relationship> relationships = new HashSet<>();
        relationships.add(REL_SUCCESS);
        this.relationships = Collections.unmodifiableSet(relationships);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return this.relationships;
    }

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return descriptors;
    }

    /**
     * Each dynamic property maps a Hive table name (the property name) to the query SQL
     * (the property value) that is written into the FlowFile emitted for that table.
     * NOTE(review): the descriptor declares FLOWFILE_ATTRIBUTES expression-language
     * support, but onTrigger reads the value with getValue() and never evaluates
     * expressions — confirm whether EL support is actually intended here.
     */
    @Override
    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
        return new PropertyDescriptor.Builder()
                .name(propertyDescriptorName)
                .required(false)
                .addValidator(StandardValidators.ATTRIBUTE_KEY_PROPERTY_NAME_VALIDATOR)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                .dynamic(true)
                .build();
    }

    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        // No per-schedule setup is required; the JDBC connection is opened per trigger.
    }

    /**
     * For every dynamic property (table name -> query SQL): ensure the datetime partition
     * exists on the Hive table, then emit a FlowFile whose content is the query SQL and
     * whose attributes carry the table name and the partition directory.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        // Track the *latest* version of each created FlowFile so it can be
        // transferred on success or removed on failure.
        final List<FlowFile> createdFlowFiles = new ArrayList<>();

        final String url = context.getProperty(HIVE_JDBC_URL).getValue();
        final String user = context.getProperty(HIVE_USER).getValue();
        final String pwd = context.getProperty(HIVE_PASSWD).getValue();
        final String partitionField = context.getProperty(HIVE_PARTITION_FIELD).getValue().trim();
        final String partitionFormat = context.getProperty(HIVE_PARTITION_FIELD_FORMAT).getValue();

        // Collect only the dynamic properties instead of mutating the map returned by
        // context.getProperties() (the original removed the five static descriptors,
        // which relies on that map being mutable).
        final Map<PropertyDescriptor, String> dynamicProperties = new LinkedHashMap<>();
        for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
            if (entry.getKey().isDynamic()) {
                dynamicProperties.put(entry.getKey(), entry.getValue());
            }
        }
        if (dynamicProperties.isEmpty()) {
            return;
        }

        try {
            // A fresh SimpleDateFormat per invocation: its lack of thread-safety is harmless here.
            final String partitionFieldValue = new SimpleDateFormat(partitionFormat).format(new Date());
            Class.forName(driverClass);
            // try-with-resources closes the connection and statement even when a Hive
            // statement fails (the original leaked both on every invocation).
            try (Connection con = DriverManager.getConnection(url, user, pwd);
                 Statement stmt = con.createStatement()) {
                for (final Map.Entry<PropertyDescriptor, String> entry : dynamicProperties.entrySet()) {
                    final String tableName = entry.getKey().getName();
                    final String querySql = entry.getValue();
                    FlowFile flowFile = session.create();
                    if (flowFile == null) {
                        throw new Exception("创建FlowFile失败");
                    }
                    // Hive DDL cannot be parameterized; tableName/partitionField come from the
                    // processor configuration (not FlowFile content), but they are still
                    // concatenated into SQL — keep processor configuration access restricted.
                    if (!partitionExists(stmt, tableName, partitionField, partitionFieldValue)) {
                        // The partition does not exist yet, so create it.
                        stmt.execute("ALTER TABLE " + tableName + " add PARTITION (" + partitionField + " = '" + partitionFieldValue + "')");
                    }
                    // session.write/putAttribute return a NEW FlowFile version; the original
                    // discarded it and transferred the stale pre-write reference, which NiFi
                    // rejects with "is not the most recent version".
                    flowFile = session.write(flowFile, outputStream -> outputStream.write(querySql.getBytes(StandardCharsets.UTF_8)));
                    flowFile = session.putAttribute(flowFile, "tableName", tableName);
                    // The attribute name keeps the historical misspelling ("partitiion")
                    // because downstream flows already reference it.
                    flowFile = session.putAttribute(flowFile, "partitiion_directory", partitionField + "=" + partitionFieldValue);
                    createdFlowFiles.add(flowFile);
                }
            }
            session.transfer(createdFlowFiles, REL_SUCCESS);
        } catch (Exception e) {
            // Drop the FlowFiles created during the failed invocation so none are orphaned.
            session.remove(createdFlowFiles);
            getLogger().error("生成partition，查询SQL失败", e);
        }
    }

    /**
     * Returns true when {@code tableName} already has a partition whose spec equals
     * {@code partitionField=partitionFieldValue}. "show partitions" yields one
     * "field=value" spec per row; the ResultSet is closed promptly (the original
     * leaked one per table).
     */
    private static boolean partitionExists(final Statement stmt, final String tableName,
                                           final String partitionField, final String partitionFieldValue) throws SQLException {
        final String expectedSpec = partitionField + "=" + partitionFieldValue;
        try (ResultSet res = stmt.executeQuery("show partitions " + tableName)) {
            while (res.next()) {
                if (StringUtils.equals(res.getString(1), expectedSpec)) {
                    return true;
                }
            }
        }
        return false;
    }

}
