package com.dcits.nifi;

import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.ReadsAttribute;
import org.apache.nifi.annotation.behavior.ReadsAttributes;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.*;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.time.LocalDate;
import java.util.*;

@Tags({"hive", "partition"})
@CapabilityDescription("Adds today's year/month/day partition to a Hive table if it does not already exist")
@ReadsAttributes({@ReadsAttribute(attribute = "", description = "")})
@WritesAttributes({@WritesAttribute(attribute = "partitiion_directory", description = "the directory of partition created"),
        @WritesAttribute(attribute = "tableName", description = "the hive table name")})
public class AddYearMonthDayPartition extends AbstractProcessor {

    /** Hive JDBC driver, loaded explicitly before opening a connection. */
    private static final String DRIVER_CLASS = "org.apache.hive.jdbc.HiveDriver";

    public static final PropertyDescriptor HIVE_JDBC_URL = new PropertyDescriptor
            .Builder().name("Hive Url")
            .description("the jdbc url of hive,e.g. jdbc:hive2://host:10000/dbname")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor HIVE_USER = new PropertyDescriptor
            .Builder().name("Hive User")
            .description("the user name of hive database")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor HIVE_PASSWD = new PropertyDescriptor
            .Builder().name("Hive Passwd")
            .description("the passwd of hive database")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor HIVE_TABLE = new PropertyDescriptor
            .Builder().name("Table Name")
            .description("the hive table name")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor PARTITION_YEAR = new PropertyDescriptor
            .Builder().name("Year column")
            .description("the column name of the year partition")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor PARTITION_MONTH = new PropertyDescriptor
            .Builder().name("Month column")
            .description("the column name of the month partition")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor PARTITION_DAY = new PropertyDescriptor
            .Builder().name("Day column")
            .description("the column name of the day partition")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Success, add partition successfully")
            .build();

    public static final Relationship ALREADY_EXIST = new Relationship.Builder()
            .name("existed")
            .description("the partition already exists")
            .build();

    private List<PropertyDescriptor> descriptors;

    private Set<Relationship> relationships;

    @Override
    protected void init(final ProcessorInitializationContext context) {
        final List<PropertyDescriptor> props = new ArrayList<>();
        props.add(HIVE_JDBC_URL);
        props.add(HIVE_USER);
        props.add(HIVE_PASSWD);
        props.add(HIVE_TABLE);
        props.add(PARTITION_YEAR);
        props.add(PARTITION_MONTH);
        props.add(PARTITION_DAY);
        this.descriptors = Collections.unmodifiableList(props);

        final Set<Relationship> rels = new HashSet<>();
        rels.add(REL_SUCCESS);
        rels.add(ALREADY_EXIST);
        this.relationships = Collections.unmodifiableSet(rels);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return this.relationships;
    }

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return descriptors;
    }

    /**
     * Allows operator-defined dynamic properties; their values may use
     * expression language evaluated against flowfile attributes.
     */
    @Override
    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
        return new PropertyDescriptor.Builder()
                .name(propertyDescriptorName)
                .required(false)
                .dynamic(true)
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                // both validators kept from the original definition
                .addValidator(StandardValidators.ATTRIBUTE_KEY_PROPERTY_NAME_VALIDATOR)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .build();
    }

    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        // no per-schedule setup required
    }

    /**
     * Creates today's {@code year=YYYY/month=M/day=D} partition on the configured
     * Hive table unless it already exists, then routes an empty FlowFile (carrying
     * the table name and partition path as attributes) to {@code success} or
     * {@code existed} accordingly. On any JDBC/driver error the FlowFile is
     * dropped, the error logged, and the processor yields.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        FlowFile flowFile = session.create();

        final String url = context.getProperty(HIVE_JDBC_URL).getValue();
        final String user = context.getProperty(HIVE_USER).getValue();
        final String pwd = context.getProperty(HIVE_PASSWD).getValue();

        final String tableName = context.getProperty(HIVE_TABLE).getValue();
        final String year = context.getProperty(PARTITION_YEAR).getValue();
        final String month = context.getProperty(PARTITION_MONTH).getValue();
        final String day = context.getProperty(PARTITION_DAY).getValue();

        // Partition spec for today, e.g. "year=2019/month=9/day=9" — un-padded,
        // matching the format "SHOW PARTITIONS" returns for int partition columns.
        final LocalDate today = LocalDate.now();
        final String partitionStr = year + "=" + today.getYear()
                + "/" + month + "=" + today.getMonthValue()
                + "/" + day + "=" + today.getDayOfMonth();

        // FlowFiles are immutable: putAttribute returns a NEW FlowFile that must be
        // kept, otherwise the attributes are silently lost (bug in the original).
        flowFile = session.putAttribute(flowFile, "tableName", tableName);
        // NOTE: keeps the historical misspelling "partitiion_directory" for
        // backward compatibility with existing downstream consumers.
        flowFile = session.putAttribute(flowFile, "partitiion_directory", partitionStr);

        try {
            Class.forName(DRIVER_CLASS);
            // try-with-resources closes ResultSet/Statement/Connection even on
            // failure (the original leaked all three).
            try (Connection con = DriverManager.getConnection(url, user, pwd);
                 Statement stmt = con.createStatement()) {

                // HiveQL DDL does not support bind parameters; the identifiers come
                // from operator-configured properties, not from flowfile content.
                boolean exists = false;
                try (ResultSet res = stmt.executeQuery("show partitions " + tableName)) {
                    while (res.next()) {
                        if (StringUtils.equals(res.getString(1), partitionStr)) {
                            exists = true;
                            break;
                        }
                    }
                }

                if (!exists) {
                    // Partition for today does not exist yet: create it.
                    stmt.execute("ALTER TABLE " + tableName + " add PARTITION ( "
                            + year + "=" + today.getYear() + ","
                            + month + "=" + today.getMonthValue() + ","
                            + day + "=" + today.getDayOfMonth() + ")");
                    session.transfer(flowFile, REL_SUCCESS);
                } else {
                    session.transfer(flowFile, ALREADY_EXIST);
                }
            }
        } catch (Exception e) {
            // Remove the un-routed FlowFile so the session can still commit, and
            // yield so a broken Hive connection does not busy-loop the processor.
            getLogger().error("Failed to add Hive partition " + partitionStr + " to table " + tableName, e);
            session.remove(flowFile);
            context.yield();
        }
    }
}
