package com.autodesk.akn.emr.dao;

import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;

import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClient;
import com.amazonaws.services.elasticmapreduce.model.ActionOnFailure;
import com.amazonaws.services.elasticmapreduce.model.BootstrapActionConfig;
import com.amazonaws.services.elasticmapreduce.model.DescribeJobFlowsRequest;
import com.amazonaws.services.elasticmapreduce.model.DescribeJobFlowsResult;
import com.amazonaws.services.elasticmapreduce.model.HadoopJarStepConfig;
import com.amazonaws.services.elasticmapreduce.model.JobFlowDetail;
import com.amazonaws.services.elasticmapreduce.model.JobFlowInstancesConfig;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowRequest;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowResult;
import com.amazonaws.services.elasticmapreduce.model.ScriptBootstrapActionConfig;
import com.amazonaws.services.elasticmapreduce.model.StepConfig;
import com.amazonaws.services.elasticmapreduce.model.StepDetail;
import com.amazonaws.services.elasticmapreduce.model.TerminateJobFlowsRequest;
import com.amazonaws.services.elasticmapreduce.util.StepFactory;
import com.autodesk.akn.emr.domain.ClusterConfiguration;
import com.autodesk.akn.emr.domain.EMR;
import com.autodesk.akn.emr.domain.EMRDescription;
import com.autodesk.akn.emr.domain.StepDescription;
import com.autodesk.akn.emr.util.S3Util;

@Repository
public class EMRDao {

    /** Fat jar on S3 containing every map-reduce job launched by the log pipeline. */
    private static final String MAPRED_JAR = "s3n://com.autodesk.akn/testProcesses/BDFunctions/mapred-poc-0.0.1-SNAPSHOT-jar-with-dependencies.jar";
    /** Bootstrap script that configures Hadoop on newly created clusters. */
    public static final String CONFIGURE_HADOOP_SH = "s3://com.autodesk.akn/testProcesses/BDFunctions/configureHadoopJob/configureHadoop.sh";
    /** S3 location where EMR writes the job-flow logs. */
    public static final String S3_LOG_PATH = "s3n://com.autodesk.akn/ProcessedLogs/EMRLogs";

    private static final Logger LOGGER = Logger.getLogger(EMRDao.class.getName());

    /** Timestamp pattern used for every date returned to callers. */
    private static final String DATE_PATTERN = "yyyy-MM-dd HH:mm:ss";

    /** EMR client; initialized once in the constructor from config.properties. */
    private AmazonElasticMapReduceClient emr;

    @Autowired
    private S3Util s3Util;

    public EMRDao() {
        initializeClient();
    }

    public S3Util getS3Util() {
        return s3Util;
    }

    public void setS3Util(S3Util s3Util) {
        this.s3Util = s3Util;
    }

    /**
     * Builds the EMR client from the credentials in {@code config.properties},
     * optionally routing it through the corporate proxy, and pins it to us-east-1.
     * Best-effort by design: on any failure the error is logged and {@code emr}
     * stays {@code null} (preserves the original constructor contract).
     */
    private void initializeClient() {
        // try-with-resources: the original leaked this stream.
        try (InputStream config =
                EMRDao.class.getClassLoader().getResourceAsStream("config.properties")) {
            Properties prop = new Properties();
            prop.load(config);
            String accessKey = prop.getProperty("credentials.accessKey");
            String secretKey = prop.getProperty("credentials.secretKey");
            boolean proxyNeeded = "true".equals(prop.getProperty("proxy.enabled"));
            AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
            emr = new AmazonElasticMapReduceClient(credentials);
            if (proxyNeeded) {
                setProxyConfiguration();
            }
            emr.setRegion(Region.getRegion(Regions.US_EAST_1));
        } catch (Exception e) {
            // Log with the cause instead of printStackTrace().
            LOGGER.log(Level.SEVERE, "Could not initialize the EMR client", e);
        }
    }

    /**
     * Points both the JVM HTTPS settings and the AWS client at the corporate proxy.
     * Must be called after {@code emr} has been constructed.
     */
    private void setProxyConfiguration() {
        System.setProperty("com.amazonaws.regions.RegionUtils.fileOverride", "endpoints.xml");
        System.setProperty("https.proxyHost", "proxy.corp.globant.com");
        System.setProperty("https.proxyPort", "3128");
        ClientConfiguration clientConfiguration = new ClientConfiguration();
        clientConfiguration.setProxyPort(3128);
        clientConfiguration.setProxyHost("proxy.corp.globant.com");
        emr.setConfiguration(clientConfiguration);
    }

    /** Formats a job-flow date with the shared pattern. SimpleDateFormat is not
     *  thread-safe, so a fresh instance is created per call. */
    private static String formatDate(Date date) {
        return new SimpleDateFormat(DATE_PATTERN).format(date);
    }

    /** Describes exactly one job flow by id (shared by list/describe/status paths). */
    private DescribeJobFlowsResult describeJobFlow(String jobFlowId) {
        List<String> jobFlowIds = new ArrayList<String>();
        jobFlowIds.add(jobFlowId);
        DescribeJobFlowsRequest request = new DescribeJobFlowsRequest();
        request.setJobFlowIds(jobFlowIds);
        return emr.describeJobFlows(request);
    }

    /**
     * Lists job flows as lightweight {@link EMR} summaries.
     *
     * @param jobFlowId a single job-flow id, or {@code null} to list every flow
     *                  the service returns by default
     * @return summaries (id, name, state, creation time); never {@code null}
     */
    public List<EMR> list(String jobFlowId) {
        DescribeJobFlowsResult res = (jobFlowId != null)
                ? describeJobFlow(jobFlowId)
                : emr.describeJobFlows();
        List<EMR> emrList = new ArrayList<EMR>();
        for (JobFlowDetail detail : res.getJobFlows()) {
            emrList.add(new EMR(
                    detail.getJobFlowId(),
                    detail.getName(),
                    detail.getExecutionStatusDetail().getState().toString(),
                    formatDate(detail.getExecutionStatusDetail().getCreationDateTime())));
        }
        return emrList;
    }

    /** Lists every job flow the service returns by default. */
    public List<EMR> list() {
        return this.list(null);
    }

    /**
     * Returns the full description (instances, steps, versions) of one job flow.
     *
     * @param jobFlowId the job-flow id; {@code null} yields an empty list
     * @return one {@link EMRDescription} per matched flow; never {@code null}
     */
    public List<EMRDescription> getDescription(String jobFlowId) {
        List<EMRDescription> emrDescriptionList = new ArrayList<EMRDescription>();
        if (jobFlowId == null) {
            return emrDescriptionList;
        }
        DescribeJobFlowsResult res = describeJobFlow(jobFlowId);
        for (JobFlowDetail jobFlowDetail : res.getJobFlows()) {
            EMRDescription description = new EMRDescription();
            description.setJobFlowId(jobFlowDetail.getJobFlowId());
            description.setJobFlowStatus(jobFlowDetail.getExecutionStatusDetail().getState().toString());
            description.setJobFlowName(jobFlowDetail.getName());
            description.setAmiVersion(jobFlowDetail.getAmiVersion());
            description.setJobCreationTime(
                    formatDate(jobFlowDetail.getExecutionStatusDetail().getCreationDateTime()));
            description.setDnsName(jobFlowDetail.getInstances().getMasterPublicDnsName());
            description.setInstanceNumber(jobFlowDetail.getInstances().getInstanceCount());
            description.setMasterType(jobFlowDetail.getInstances().getMasterInstanceType());
            description.setSlaveType(jobFlowDetail.getInstances().getSlaveInstanceType());
            description.setNumberOfSteps(jobFlowDetail.getSteps().size());
            description.setHadoopVersion(jobFlowDetail.getInstances().getHadoopVersion());
            for (StepDetail stepDetail : jobFlowDetail.getSteps()) {
                StepDescription stepDescription = new StepDescription();
                stepDescription.setStepStatus(stepDetail.getExecutionStatusDetail().getState());
                stepDescription.setStepName(stepDetail.getStepConfig().getName());
                description.addSteps(stepDescription);
            }
            emrDescriptionList.add(description);
        }
        return emrDescriptionList;
    }

    /**
     * Launches a new EMR cluster described by {@code clusterConfiguration},
     * optionally installing Pig and/or Hive as initial steps.
     *
     * @param clusterConfiguration cluster name, sizes, bootstrap action and flags
     * @return the new job-flow id, or {@code null} if creation failed
     */
    public String create(ClusterConfiguration clusterConfiguration) {
        String jobFlowId = null;
        try {
            // Bootstrap action: run the user-supplied script with one argument.
            LOGGER.info("Bootstrap Action: " + clusterConfiguration.getBootstrapAction());
            LOGGER.info("Bootstrap Arguments: " + clusterConfiguration.getArguments());
            ScriptBootstrapActionConfig bootstrapScriptConfig = new ScriptBootstrapActionConfig();
            bootstrapScriptConfig.setPath(clusterConfiguration.getBootstrapAction());
            List<String> args = new ArrayList<String>();
            args.add(clusterConfiguration.getArguments());
            bootstrapScriptConfig.setArgs(args);
            BootstrapActionConfig bootstrapActionConfig =
                    new BootstrapActionConfig("tagging", bootstrapScriptConfig);

            // Instance topology; a single-node cluster reuses the master type for slaves.
            LOGGER.info("Instances Number: " + clusterConfiguration.getInstances());
            LOGGER.info("Master Type: " + clusterConfiguration.getMasterType());
            LOGGER.info("Slave Type: " + clusterConfiguration.getSlaveType());
            JobFlowInstancesConfig instances = new JobFlowInstancesConfig()
                    .withInstanceCount(clusterConfiguration.getInstances())
                    .withKeepJobFlowAliveWhenNoSteps(true)
                    .withMasterInstanceType(clusterConfiguration.getMasterType());
            if (clusterConfiguration.getInstances() > 1) {
                instances.withSlaveInstanceType(clusterConfiguration.getSlaveType());
            } else {
                instances.withSlaveInstanceType(clusterConfiguration.getMasterType());
            }

            LOGGER.info("Job Flow Name: " + clusterConfiguration.getJobName());
            RunJobFlowRequest request = new RunJobFlowRequest()
                    .withBootstrapActions(bootstrapActionConfig)
                    .withName(clusterConfiguration.getJobName())
                    .withLogUri(clusterConfiguration.getLogPath())
                    .withInstances(instances);

            // Optional Pig/Hive installation steps.
            StepFactory stepFactory = new StepFactory("eu-west-1.elasticmapreduce");
            List<StepConfig> steps = new ArrayList<StepConfig>();
            if (clusterConfiguration.isPigEnabled()) {
                LOGGER.info("Pig Enabled");
                steps.add(new StepConfig()
                        .withName("Install Pig")
                        .withActionOnFailure(ActionOnFailure.CONTINUE)
                        .withHadoopJarStep(stepFactory.newInstallPigStep()));
            }
            if (clusterConfiguration.isHiveEnabled()) {
                LOGGER.info("Hive Enabled");
                steps.add(new StepConfig()
                        .withName("Install Hive")
                        .withActionOnFailure(ActionOnFailure.CONTINUE)
                        .withHadoopJarStep(stepFactory.newInstallHiveStep()));
            }
            request.withSteps(steps);

            // Run the job flow.
            request.withAmiVersion("latest");
            RunJobFlowResult runJobFlowResult = emr.runJobFlow(request);
            jobFlowId = runJobFlowResult.getJobFlowId();
        } catch (Exception e) {
            // Preserve the best-effort contract (return null) but log the cause
            // instead of printStackTrace().
            LOGGER.log(Level.SEVERE, "Could not create the EMR cluster", e);
        }
        return jobFlowId;
    }

    /**
     * Terminates the given job flows.
     *
     * @param jobFlowIds ids of the flows to terminate; must not be {@code null}
     */
    public void terminate(List<String> jobFlowIds) {
        // Log the whole list: the original logged get(0) and threw on an empty list.
        LOGGER.info("Terminate job flows : " + jobFlowIds);
        TerminateJobFlowsRequest terminateJobFlowsRequest = new TerminateJobFlowsRequest();
        terminateJobFlowsRequest.setJobFlowIds(jobFlowIds);
        emr.terminateJobFlows(terminateJobFlowsRequest);
    }

    /**
     * Launches the full log-processing pipeline over the given S3 file: cleans
     * the workspace, spins up a single m1.large auto-terminating cluster, and
     * queues every report-building step in order.
     *
     * @param filePath S3 path (relative to the com.autodesk.akn bucket) of the logs
     * @return the job-flow id of the launched cluster
     */
    public String processLogs(String filePath) {
        // Clean the workspace directory before re-generating its contents.
        s3Util.cleanWorkspaceDirectory();
        LOGGER.info("Folder com.autodesk.akn/ProcessedLogs/" + filePath);

        // Bootstrap action that configures Hadoop for the pipeline jobs.
        ScriptBootstrapActionConfig bootstrapScriptConfig = new ScriptBootstrapActionConfig();
        bootstrapScriptConfig.setPath(CONFIGURE_HADOOP_SH);
        BootstrapActionConfig bootstrapActionConfig =
                new BootstrapActionConfig("bootstrap-actions", bootstrapScriptConfig);

        // Single-node cluster that terminates itself once all steps are done.
        RunJobFlowRequest request = new RunJobFlowRequest()
                .withBootstrapActions(bootstrapActionConfig)
                .withName("Log Processing: " + filePath)
                .withInstances(new JobFlowInstancesConfig()
                        .withInstanceCount(1)
                        .withEc2KeyName("dev-us-east-shamik-1")
                        .withKeepJobFlowAliveWhenNoSteps(false)
                        .withMasterInstanceType("m1.large")
                        .withSlaveInstanceType("m1.large"));
        request.setLogUri(S3_LOG_PATH);
        request.setAmiVersion("latest");

        // Debug step, kept available but disabled by default.
        StepFactory stepFactory = new StepFactory();
        StepConfig enabledebugging = new StepConfig()
                .withName("Enable debugging")
                .withActionOnFailure("TERMINATE_JOB_FLOW")
                .withHadoopJarStep(stepFactory.newEnableDebuggingStep());

        // Pipeline steps, in dependency order (later steps read earlier outputs).
        List<StepConfig> steps = new ArrayList<StepConfig>();
        // Uncomment the following line to enable debug
        // steps.add(enabledebugging);
        steps.add(this.findNewRegisters(filePath));
        steps.add(this.updateDatabase());
        steps.add(this.processLogFiles(filePath));
        steps.add(this.buildVolumeReport());
        steps.add(this.buildWordReport());
        steps.add(this.buildQueryReport());
        steps.add(this.buildNoResultQueryReport());
        steps.add(this.buildSessionReport());
        steps.add(this.buildSessionCountReport());
        request.setSteps(steps);

        // Run the job flow.
        RunJobFlowResult result = emr.runJobFlow(request);
        return result.getJobFlowId();
    }

    /**
     * Builds a pipeline step that runs one main class from {@link #MAPRED_JAR}.
     * All pipeline steps CONTINUE on failure so the rest of the flow still runs.
     *
     * @param id          internal step id (overwritten by {@code displayName})
     * @param displayName name shown in the EMR console
     * @param mainClass   main class inside the jar
     * @param args        program arguments (typically S3 input/output paths)
     */
    private StepConfig jarStep(String id, String displayName, String mainClass, List<String> args) {
        HadoopJarStepConfig jarConfig = new HadoopJarStepConfig(MAPRED_JAR);
        jarConfig.setArgs(args);
        jarConfig.setMainClass(mainClass);
        StepConfig stepConfig = new StepConfig(id, jarConfig);
        stepConfig.setActionOnFailure(ActionOnFailure.CONTINUE);
        stepConfig.setName(displayName);
        return stepConfig;
    }

    /**
     * Process that finds the new Registers.
     * @param filePath The S3 file path
     * @return The configured step
     */
    private StepConfig findNewRegisters(String filePath) {
        LOGGER.info("Process that finds the new Registers");
        return jarStep("findNewRegisters", "Look for new Regs Logs", "Autodesk.LookForNotFound",
                Arrays.asList(
                        "s3n://com.autodesk.akn/" + filePath,
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/NewLogs"));
    }

    /**
     * Process that updates the database, inserts new registers and sends the
     * emails for registers not found.
     * @return The configured step
     */
    private StepConfig updateDatabase() {
        LOGGER.info("Process that updates the database, inserts new "
                + "registers and sends the emails for registers not found");
        return jarStep("updateDatabase", "Check WebService and Regs to Database",
                "WebserviceChecker.CheckWebServiceProcess",
                Arrays.asList(
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/NewLogs/part-r-00000",
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/RegsToEmail"));
    }

    /**
     * Process that processes the log files.
     * @param filePath The S3 file path
     * @return The configured step
     */
    private StepConfig processLogFiles(String filePath) {
        LOGGER.info("Process that processes the log files");
        return jarStep("processLogFiles", "Parsed Logs", "Autodesk.ParseLogs",
                Arrays.asList(
                        "s3n://com.autodesk.akn/" + filePath,
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/ParsedLogs",
                        "-ip",
                        "s3n://com.autodesk.akn/testProcesses/BDFunctions/MappedIpExcluded"));
    }

    /**
     * Process that builds the volume report.
     * @return The configured step
     */
    private StepConfig buildVolumeReport() {
        LOGGER.info("Process that builds the volume report");
        return jarStep("buildVolumeReport", "Volume Filter", "Autodesk.TotalVolumeFilter",
                Arrays.asList(
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/ParsedLogs",
                        "s3n://com.autodesk.akn/ProcessedLogs/toDatabase/akn_traffic_fact"));
    }

    /**
     * Process that builds the word report.
     * @return The configured step
     */
    private StepConfig buildWordReport() {
        LOGGER.info("Process that builds the word report");
        return jarStep("buildWordReport", "WordFilter", "Autodesk.WordFilter",
                Arrays.asList(
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/ParsedLogs",
                        "s3n://com.autodesk.akn/ProcessedLogs/toDatabase/akn_word_fact",
                        "-arch",
                        "s3n://com.autodesk.akn/testProcesses/BDFunctions/excludedWords"));
    }

    /**
     * Process that builds the query report.
     * @return The configured step
     */
    private StepConfig buildQueryReport() {
        LOGGER.info("Process that builds the query report");
        return jarStep("buildQueryReport", "QueryReport", "Autodesk.TotalQueryFilter",
                Arrays.asList(
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/ParsedLogs",
                        "s3n://com.autodesk.akn/ProcessedLogs/toDatabase/akn_query_fact"));
    }

    /**
     * Process that builds the no result query report.
     * @return The configured step
     */
    private StepConfig buildNoResultQueryReport() {
        LOGGER.info("Process that builds the no result query report");
        return jarStep("buildNoResultQueryReport", "NoResultQueryReport",
                "Autodesk.NoResultQueryFilter",
                Arrays.asList(
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/ParsedLogs",
                        "s3n://com.autodesk.akn/ProcessedLogs/toDatabase/akn_noresultquery_fact"));
    }

    /**
     * Process that builds the session report step config.
     * @return The configured step
     */
    private StepConfig buildSessionReport() {
        LOGGER.info("Process that builds the session report");
        return jarStep("buildSessionReport", "SessionFilter", "Autodesk.SessionFilter",
                Arrays.asList(
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/ParsedLogs",
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/SessionFilterNotReady"));
    }

    /**
     * Process that builds the session count report step config.
     * NOTE(review): the original was the only step that did not set
     * ActionOnFailure.CONTINUE; it now matches the other pipeline steps.
     * @return The configured step
     */
    private StepConfig buildSessionCountReport() {
        return jarStep("buildSessionCountReport", "SessionFilterCounter",
                "Autodesk.SessionFilterCounter",
                Arrays.asList(
                        "s3n://com.autodesk.akn/ProcessedLogs/ExtraInformation/SessionFilterNotReady/part-r-00000",
                        "s3n://com.autodesk.akn/ProcessedLogs/toDatabase/akn_session_fact",
                        "-dayTime",
                        "s3n://com.autodesk.akn/testProcesses/BDFunctions/MappedDayMinute"));
    }

    /**
     * Returns the execution state of one job flow.
     *
     * @param jobId the job-flow id
     * @return the state of the first matching flow, or "FAILED" if none matched
     */
    public String getJobStatus(String jobId) {
        DescribeJobFlowsResult res = describeJobFlow(jobId);
        for (JobFlowDetail jobFlowDetail : res.getJobFlows()) {
            return jobFlowDetail.getExecutionStatusDetail().getState().toString();
        }
        return "FAILED";
    }
}
