package com.autodesk.akn.emr.dao;

import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClient;
import com.amazonaws.services.elasticmapreduce.model.*;
import com.amazonaws.services.elasticmapreduce.util.StepFactory;
import com.autodesk.akn.emr.web.entity.ClusterConfigurationDTO;
import com.autodesk.akn.emr.web.entity.EMR;
import com.autodesk.akn.emr.web.entity.EMRDescription;
import com.autodesk.akn.emr.web.entity.SPSSConversionParams;
import com.autodesk.akn.emr.web.entity.StepDescription;

import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * DAO wrapping the AWS Elastic MapReduce client: creates, lists, describes and
 * terminates EMR job flows, and submits a canned SPSS-to-CSV conversion job.
 *
 * <p>Credentials and proxy settings are read from {@code config.properties} on
 * the classpath; the client is pinned to region eu-west-1.
 *
 * <p>NOTE(review): this is built on the legacy {@code DescribeJobFlows} API,
 * which AWS has deprecated in favor of DescribeCluster/ListClusters — confirm
 * before upgrading the SDK.
 *
 * <p>Not thread-safe with respect to construction; the underlying AWS client
 * is initialized once in the constructor.
 */
public class EMRDaoImpl {
    private final static Logger LOGGER = Logger.getLogger(EMRDaoImpl.class.getName());

    /** Bootstrap script that tags the spawned EC2 instances. */
    public static final String TAG_EMR_INSTANCE_SH = "s3://globant-emr/tag_emr_instance.sh";
    /** Arguments passed to the tagging bootstrap script (single string, as the script expects). */
    public static final String TAG_ARGUMENTS = "--aws-credentials=s3://globant-emr/aws_credentials.sh Opco=KI Service=Test Cartesis=KI Environment=POC Username=Irfan.Sabir GST=Matt.Glace";
    /** S3 location of the SPSS converter jar run as the first job step. */
    public static final String SPSS_CONVERTER_JAR_PATH = "s3n://globant-emr/spssreader-1.0.0-jar-with-dependencies.jar";
    /** Default S3 bucket for job-flow logs. */
    public static final String S3_LOG_PATH = "s3n://globant-emr/";

    // SimpleDateFormat is not thread-safe, so a fresh instance is created per
    // call; only the pattern is shared.
    private static final String TIMESTAMP_PATTERN = "yyyy-MM-dd HH:mm:ss";

    private AmazonElasticMapReduceClient emr;

    public EMRDaoImpl() {
        initializeClient();
    }

    /**
     * Builds the EMR client from {@code config.properties} (keys:
     * {@code credentials.accessKey}, {@code credentials.secretKey},
     * {@code proxy.enabled}) and pins it to eu-west-1.
     *
     * <p>Best-effort by design: failures are logged, never propagated, so the
     * constructor cannot throw (original contract preserved).
     */
    private void initializeClient() {
        // try-with-resources: the properties stream was previously leaked.
        try (InputStream in = EMRDaoImpl.class.getClassLoader()
                .getResourceAsStream("config.properties")) {
            if (in == null) {
                // Previously this surfaced as an opaque NPE inside Properties.load.
                throw new IllegalStateException("config.properties not found on classpath");
            }
            Properties prop = new Properties();
            prop.load(in);
            String accessKey = prop.getProperty("credentials.accessKey");
            String secretKey = prop.getProperty("credentials.secretKey");
            boolean proxyNeeded = "true".equals(prop.getProperty("proxy.enabled"));

            AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
            emr = new AmazonElasticMapReduceClient(credentials);
            if (proxyNeeded) {
                setProxyConfiguration();
            }
            emr.setRegion(Region.getRegion(Regions.EU_WEST_1));
        } catch (Exception e) {
            // Log with full stack trace instead of printStackTrace().
            LOGGER.log(Level.SEVERE, "Failed to initialize EMR client", e);
        }
    }

    /** Routes client traffic through the corporate proxy (both JVM-wide and client-level). */
    private void setProxyConfiguration() {
        // Endpoint override file is needed so region resolution works behind the proxy.
        System.setProperty("com.amazonaws.regions.RegionUtils.fileOverride", "endpoints.xml");
        System.setProperty("https.proxyHost", "proxy.corp.globant.com");
        System.setProperty("https.proxyPort", "3128");
        ClientConfiguration clientConfiguration = new ClientConfiguration();
        clientConfiguration.setProxyPort(3128);
        clientConfiguration.setProxyHost("proxy.corp.globant.com");
        emr.setConfiguration(clientConfiguration);
    }

    /**
     * Calls DescribeJobFlows, optionally filtered to a single job-flow id.
     * Extracted to remove the request-building duplication that existed in
     * list(), getDescription() and getJobStatus().
     *
     * @param jobFlowId id to filter on, or {@code null} for all job flows
     */
    private DescribeJobFlowsResult describeJobFlows(String jobFlowId) {
        if (jobFlowId == null) {
            return emr.describeJobFlows();
        }
        DescribeJobFlowsRequest request = new DescribeJobFlowsRequest();
        List<String> jobFlowIds = new ArrayList<String>();
        jobFlowIds.add(jobFlowId);
        request.setJobFlowIds(jobFlowIds);
        return emr.describeJobFlows(request);
    }

    /**
     * Lists job flows as lightweight {@link EMR} DTOs (id, name, state,
     * creation time formatted as {@value #TIMESTAMP_PATTERN}).
     *
     * @param jobFlowId optional filter; {@code null} lists every job flow
     */
    public List<EMR> list(String jobFlowId) {
        List<EMR> emrList = new ArrayList<EMR>();
        DescribeJobFlowsResult res = describeJobFlows(jobFlowId);
        SimpleDateFormat timestampFormat = new SimpleDateFormat(TIMESTAMP_PATTERN);
        for (JobFlowDetail jobFlowDetail : res.getJobFlows()) {
            // Local renamed from 'emr', which shadowed the client field.
            EMR entry = new EMR(jobFlowDetail.getJobFlowId(),
                    jobFlowDetail.getName(),
                    jobFlowDetail.getExecutionStatusDetail().getState().toString(),
                    timestampFormat.format(jobFlowDetail.getExecutionStatusDetail().getCreationDateTime()));
            emrList.add(entry);
        }
        return emrList;
    }

    /** Lists all job flows (no filter). */
    public List<EMR> list() {
        return this.list(null);
    }

    /**
     * Returns detailed descriptions (instances, AMI, steps) for one job flow.
     *
     * @param jobFlowId the job flow to describe; {@code null} yields an empty
     *                  list (original contract preserved)
     */
    public List<EMRDescription> getDescription(String jobFlowId) {
        List<EMRDescription> emrDescriptionList = new ArrayList<EMRDescription>();
        if (jobFlowId == null) {
            return emrDescriptionList;
        }
        DescribeJobFlowsResult res = describeJobFlows(jobFlowId);
        SimpleDateFormat timestampFormat = new SimpleDateFormat(TIMESTAMP_PATTERN);
        for (JobFlowDetail jobFlowDetail : res.getJobFlows()) {
            emrDescriptionList.add(toDescription(jobFlowDetail, timestampFormat));
        }
        return emrDescriptionList;
    }

    /** Maps a single JobFlowDetail onto the transport DTO, including per-step name/status. */
    private EMRDescription toDescription(JobFlowDetail jobFlowDetail, SimpleDateFormat timestampFormat) {
        EMRDescription description = new EMRDescription();
        description.setJobFlowId(jobFlowDetail.getJobFlowId());
        description.setJobFlowStatus(jobFlowDetail.getExecutionStatusDetail().getState().toString());
        description.setJobFlowName(jobFlowDetail.getName());
        description.setAmiVersion(jobFlowDetail.getAmiVersion());
        description.setJobCreationTime(timestampFormat.format(jobFlowDetail.getExecutionStatusDetail().getCreationDateTime()));
        description.setDnsName(jobFlowDetail.getInstances().getMasterPublicDnsName());
        description.setInstanceNumber(jobFlowDetail.getInstances().getInstanceCount());
        description.setMasterType(jobFlowDetail.getInstances().getMasterInstanceType());
        description.setSlaveType(jobFlowDetail.getInstances().getSlaveInstanceType());
        description.setNumberOfSteps(jobFlowDetail.getSteps().size());
        description.setHadoopVersion(jobFlowDetail.getInstances().getHadoopVersion());
        for (StepDetail stepDetail : jobFlowDetail.getSteps()) {
            StepDescription stepDescription = new StepDescription();
            stepDescription.setStepStatus(stepDetail.getExecutionStatusDetail().getState());
            stepDescription.setStepName(stepDetail.getStepConfig().getName());
            description.addSteps(stepDescription);
        }
        return description;
    }

    /**
     * Launches a long-lived cluster (kept alive when idle) from the given
     * configuration, with the tagging bootstrap action and optional Pig/Hive
     * install steps.
     *
     * @return the new job-flow id, or {@code null} if the launch failed
     *         (best-effort contract preserved; the failure is logged)
     */
    public String create(ClusterConfigurationDTO clusterConfiguration) {
        String jobFlowId = null;
        try {
            // Bootstrap action: whatever script + single-string argument the caller configured.
            LOGGER.info("Bootstrap Action: " + clusterConfiguration.getBootstrapAction());
            LOGGER.info("Bootstrap Arguments: " + clusterConfiguration.getArguments());
            ScriptBootstrapActionConfig bootstrapScriptConfig = new ScriptBootstrapActionConfig();
            bootstrapScriptConfig.setPath(clusterConfiguration.getBootstrapAction());
            List<String> args = new ArrayList<String>();
            args.add(clusterConfiguration.getArguments());
            bootstrapScriptConfig.setArgs(args);
            BootstrapActionConfig bootstrapActionConfig = new BootstrapActionConfig("tagging", bootstrapScriptConfig);

            // Instance topology. Single-node clusters reuse the master type as
            // slave type (EMR requires a slave type even when count == 1).
            LOGGER.info("Instances Number: " + clusterConfiguration.getInstances());
            LOGGER.info("Master Type: " + clusterConfiguration.getMasterType());
            LOGGER.info("Slave Type: " + clusterConfiguration.getSlaveType());
            JobFlowInstancesConfig instances = new JobFlowInstancesConfig()
                    .withInstanceCount(clusterConfiguration.getInstances())
                    .withKeepJobFlowAliveWhenNoSteps(true)
                    .withMasterInstanceType(clusterConfiguration.getMasterType());
            if (clusterConfiguration.getInstances() > 1) {
                instances.withSlaveInstanceType(clusterConfiguration.getSlaveType());
            } else {
                instances.withSlaveInstanceType(clusterConfiguration.getMasterType());
            }

            LOGGER.info("Job Flow Name: " + clusterConfiguration.getJobName());
            RunJobFlowRequest request = new RunJobFlowRequest()
                    .withBootstrapActions(bootstrapActionConfig)
                    .withName(clusterConfiguration.getJobName())
                    .withLogUri(clusterConfiguration.getLogPath())
                    .withInstances(instances);

            // Optional tool-install steps; clusters fail fast on install errors.
            StepFactory stepFactory = new StepFactory("eu-west-1.elasticmapreduce");
            List<StepConfig> steps = new ArrayList<StepConfig>();
            if (clusterConfiguration.isPigEnabled()) {
                LOGGER.info("Pig Enabled");
                steps.add(new StepConfig()
                        .withName("Install Pig")
                        .withActionOnFailure(ActionOnFailure.TERMINATE_JOB_FLOW)
                        .withHadoopJarStep(stepFactory.newInstallPigStep()));
            }
            if (clusterConfiguration.isHiveEnabled()) {
                LOGGER.info("Hive Enabled");
                steps.add(new StepConfig()
                        .withName("Install Hive")
                        .withActionOnFailure(ActionOnFailure.TERMINATE_JOB_FLOW)
                        .withHadoopJarStep(stepFactory.newInstallHiveStep()));
            }
            request.withSteps(steps);

            request.withAmiVersion("latest");
            RunJobFlowResult runJobFlowResult = emr.runJobFlow(request);
            jobFlowId = runJobFlowResult.getJobFlowId();
        } catch (Exception e) {
            // Log with full stack trace instead of printStackTrace(); callers
            // detect failure via the null return (original contract).
            LOGGER.log(Level.SEVERE, "Failed to create EMR job flow", e);
        }
        return jobFlowId;
    }

    /**
     * Terminates the given job flows.
     *
     * @param jobFlowIds ids to terminate; must not be {@code null}
     */
    public void terminate(List<String> jobFlowIds) {
        TerminateJobFlowsRequest terminateJobFlowsRequest = new TerminateJobFlowsRequest();
        terminateJobFlowsRequest.setJobFlowIds(jobFlowIds);
        // Log every id; the previous get(0) threw on an empty list before
        // the request was even sent.
        LOGGER.info("Terminate job flow : " + jobFlowIds);
        emr.terminateJobFlows(terminateJobFlowsRequest);
    }

    /**
     * Launches a transient single-node cluster that runs the SPSS converter
     * jar against the given S3 input/output locations, then shuts down.
     *
     * @return the job-flow id of the launched conversion cluster
     */
    public String convertSPSSFile(SPSSConversionParams params) {
        // Fixed tagging bootstrap action (constants above).
        ScriptBootstrapActionConfig bootstrapScriptConfig = new ScriptBootstrapActionConfig();
        bootstrapScriptConfig.setPath(TAG_EMR_INSTANCE_SH);
        List<String> args = new ArrayList<String>();
        args.add(TAG_ARGUMENTS);
        bootstrapScriptConfig.setArgs(args);
        BootstrapActionConfig bootstrapActionConfig = new BootstrapActionConfig("tagging", bootstrapScriptConfig);

        Calendar cal = Calendar.getInstance();
        RunJobFlowRequest request = new RunJobFlowRequest()
                .withBootstrapActions(bootstrapActionConfig)
                .withName("SPSSConversion" + new SimpleDateFormat("yyyyMMddHHmmss").format(cal.getTime()))
                .withInstances(new JobFlowInstancesConfig()
                        .withInstanceCount(1)
                        .withEc2KeyName("IS-Kantar")
                        .withKeepJobFlowAliveWhenNoSteps(false) // transient: terminate after steps
                        .withMasterInstanceType("m1.small")
                        .withSlaveInstanceType("m1.small"));
        request.setLogUri(S3_LOG_PATH);
        request.setAmiVersion("latest");

        List<StepConfig> steps = new ArrayList<StepConfig>();

        // Step 1: run the converter jar. Argument order is the jar's expected
        // positional contract: input bucket/key, output bucket/key, folder, headersOnly.
        HadoopJarStepConfig jarConfig = new HadoopJarStepConfig(SPSS_CONVERTER_JAR_PATH);
        List<String> arguments = new ArrayList<String>();
        arguments.add(params.getInputBucket());
        arguments.add(params.getInputKey());
        arguments.add(params.getOutputBucket());
        arguments.add(params.getOutputKey());
        arguments.add(params.getFolder());
        arguments.add(params.getHeadersOnly());
        jarConfig.setArgs(arguments);
        steps.add(new StepConfig("spssconverter", jarConfig));

        // Step 2: install Pig.
        // TODO: Call the pig script into this step
        StepFactory stepFactory = new StepFactory("eu-west-1.elasticmapreduce");
        steps.add(new StepConfig()
                .withName("Install Pig")
                .withActionOnFailure(ActionOnFailure.TERMINATE_JOB_FLOW)
                .withHadoopJarStep(stepFactory.newInstallPigStep()));

        // Step 3: placeholder for the Pig script run. NOTE(review): no jar
        // step is attached (the call below is commented out), so EMR may
        // reject this step — confirm before enabling.
        StepConfig runPig = new StepConfig()
                .withName("Run Pig Script")
                .withActionOnFailure(ActionOnFailure.TERMINATE_JOB_FLOW);
                //.withHadoopJarStep(stepFactory.newRunPigScriptStep("s3n://globant-emr/pigscript.pig"));
        steps.add(runPig);

        request.setSteps(steps);
        RunJobFlowResult result = emr.runJobFlow(request);
        return result.getJobFlowId();
    }

    /**
     * Returns the execution state of the given job flow, or {@code "FAILED"}
     * when the id matches nothing (original contract preserved).
     */
    public String getJobStatus(String jobId) {
        DescribeJobFlowsResult res = describeJobFlows(jobId);
        for (JobFlowDetail jobFlowDetail : res.getJobFlows()) {
            return jobFlowDetail.getExecutionStatusDetail().getState().toString();
        }
        return "FAILED";
    }
}
