package com.globant.AutomateEMR;

import java.io.File;
import java.io.IOException;

import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.dao.DataAccessException;
import org.springframework.mail.MailException;
import org.springframework.mail.SimpleMailMessage;

import com.globant.ErrorManager.ErrorLog;
import com.globant.ErrorManager.ErrorMang;
import com.globant.Exceptions.ErrorManagerException;
import com.globant.util.email.EmailSender;
import com.globant.util.log.ILogFunctions;
import com.globant.util.log.LogFunctionsImpl;
import com.globant.util.log.details.ILogDetailsDAO;
import com.globant.util.log.details.LogDetailsImpl;
import com.globant.util.log.reports.ILogReportsDAO;
import com.globant.util.log.reports.LogReportsImpl;

/**
 * Main class for the AutomateEMR project.
 * 
 * @author leandro.mora, gonzalo.zarza
 */

public class AutomateBigDataProcessing {
	
	/* 
	================================================================================================
	 Fields
	================================================================================================
	*/	
    /** Spring configuration file declaring the email-related beans. */
    private static final String     SPRING_EMAIL_BEANS_FILE = "Spring-Module_email.xml";
    /** Spring configuration file declaring the JDBC/logging beans. */
    private static final String     SPRING_LOGS_BEANS_FILE  = "Spring-Module_logs.xml";
    private static final String     BEAN_EMAIL_SENDER       = "mailClass";
    private static final String     BEAN_TEMPLATE_MESSAGE   = "templateMessage";
    private static final String     BEAN_LOG_DETAILS        = "logDetailsDAO";
    private static final String     BEAN_LOG_REPORTS        = "logReportsDAO";
    
    /** Max files tolerated in the processed-logs folder before another run is assumed active. */
    private static final int        NUMBER_OTHER_LOGS       = 2;
    /** Default number of processing attempts, used when "Number_Attemps" is missing or invalid. */
    private static final int        NUMBER_ATTEMPS          = 15;
    

	/* 
	================================================================================================
	 Methods
	================================================================================================
	*/
    /** 
     * Default constructor.
     */
	public AutomateBigDataProcessing() { }		
		
	
	/* ------------------------------------------------------------------------------------------ */

    /**
     * Main method, used to call the class from the corresponding bash script file.
     * Wires the Spring beans, then runs {@link #automateBDProcessing} until it succeeds
     * or the configured number of attempts is exhausted.
     * 
     * @param args             command line arguments; {@code args[0]} is the properties file path
     */
    public static void main(String[] args) {
        // needed objects
        ApplicationContext          context;
        EmailSender                 sender;
        SimpleMailMessage           message;
        PropertyLoader              loader;
        AutomateBigDataProcessing   automate;
        ErrorMang                   errManager;
        boolean                     completed;
        int                         attemps;
        ILogDetailsDAO              details;
        ILogReportsDAO              reports;
        ILogFunctions               logs;
        int                         acceptedAttemps;
       
        // init the property loader using the command line arguments
        loader              = new PropertyLoader(args[0]);
        // init the local object
        automate            = new AutomateBigDataProcessing();
        
        // read the configured attempt budget, falling back to the default on any problem
        // (missing property, non-numeric value, loader failure)
        try {
            acceptedAttemps = Integer.parseInt(loader.readProperty("Number_Attemps"));
        } catch (Exception e) {
            acceptedAttemps = AutomateBigDataProcessing.NUMBER_ATTEMPS;
        }

        // init the processing
        try {
            // beans to send the notification emails
            context         = new ClassPathXmlApplicationContext(AutomateBigDataProcessing.SPRING_EMAIL_BEANS_FILE);
            sender          = (EmailSender) context.getBean(AutomateBigDataProcessing.BEAN_EMAIL_SENDER);
            message         = (SimpleMailMessage) context.getBean(AutomateBigDataProcessing.BEAN_TEMPLATE_MESSAGE);
            // beans to connect to mysql through jdbc — cast to the DAO interfaces the
            // variables are declared as, not to the concrete implementations
            context         = new ClassPathXmlApplicationContext(AutomateBigDataProcessing.SPRING_LOGS_BEANS_FILE);
            details         = (ILogDetailsDAO) context.getBean(AutomateBigDataProcessing.BEAN_LOG_DETAILS);
            reports         = (ILogReportsDAO) context.getBean(AutomateBigDataProcessing.BEAN_LOG_REPORTS);
            
            // init the objects depending on local objects
            errManager      = new ErrorMang(loader, sender, message);            
            logs            = new LogFunctionsImpl(details, reports);
            
            // go processing: retry until completed, up to acceptedAttemps total attempts.
            // The do/while guarantees at least one attempt even if the configured budget is 0,
            // and fixes the previous off-by-one (one unconditional run + N retries = N+1 attempts).
            try { 
                attemps   = 0;
                completed = false;
                do {
                    completed = automate.automateBDProcessing(loader, sender, message, errManager, logs);
                    attemps++;
                } while (!completed && attemps < acceptedAttemps);
            } catch (IOException e) {
                // TODO decide what to do here! It's not possible or recommended to send emails and or log to the database
                e.printStackTrace();        
            } catch (MailException me) {
                // TODO decide what to do here! It's not possible or recommended to send emails and or log to the database
                me.printStackTrace();                
            }
            
        } catch (BeansException e){
            // TODO decide what to do here! It's not possible or recommended to send emails and or log to the database
            e.printStackTrace();
        }
        
    }
    
    /* ------------------------------------------------------------------------------------------ */
    
	/**
     * Processes at most one pending log file end to end: registers it in the database, runs
     * the big data functions over it, moves it into the processed-logs folder and invokes the
     * error manager. The loop breaks after the first valid log so exactly one file is handled
     * per call; the caller retries until this method returns {@code true}.
     * 
     * @param pLoader           properties loader
     * @param pSender           email sender
     * @param pMessage          template message
     * @param pErrManager       error manager
     * @param pLogs             log functions
     * @return                  {@code true} when a log was fully processed, {@code false} otherwise
     * @throws IOException      if cannot complete the operation because of IO problems
     * @throws MailException    if cannot send the email to notify errors
     */
    public boolean automateBDProcessing(PropertyLoader pLoader, EmailSender pSender, SimpleMailMessage pMessage, ErrorMang pErrManager, ILogFunctions pLogs) throws IOException, MailException {               
        
        ErrorLog                    errLogger   = new ErrorLog();
        FileManager                 fileManager = new FileManager();        
        AutomateBigDataFunctions    functions   = new AutomateBigDataFunctions(pLoader);
        AutomateImport              importer    = new AutomateImport(pLoader);
        File[]                      logFiles    = null;        
        boolean                     completed   = false;
        String                      logId       = null;        
        File                        logMoved    = null;

        // process...        
        try {
            System.out.println("Get Log Files");
            logFiles        = this.getLogFiles(pLoader);
            // this.isOtherProcessRunning(pLoader) is intentionally disabled: the folder-count
            // heuristic proved unreliable, so we assume no concurrent run. TODO confirm.
            boolean otherRunning = false;
            System.out.println("Start checking..");
            // getLogFiles() returns null when the input folder does not exist; check BEFORE
            // touching logFiles.length (the previous code NPE'd here and relied on the
            // NullPointerException catch below as control flow).
            if (logFiles == null || logFiles.length == 0 || otherRunning) {
                // nothing to do (or another run active): signal the caller to retry later
                throw new IOException();
            }
            System.out.println("Amount of log files:"+logFiles.length);
            System.out.println("Is other process running:"+otherRunning);

            for (int i = 0; i < logFiles.length; i++){
                System.out.println("Enter to the loop..."+logFiles[i].getName());
                if (fileManager.validateLog(logFiles[i])){
                    System.out.println("Starting.."+logFiles[i].getName());
                    // init the corresponding log records into the db
                    System.out.println("Logging..");
                    pLogs.setLogInformation(logFiles[i]);  
                    logId = fileManager.generateLogId(logFiles[i]);
                    // run the bigdata functions
                    System.out.println("Logged.Starting processing..");
                    functions.automateBigDataFunctions(logFiles[i]);                	    

                    // move the input log into the processed-logs folder
                    logMoved = fileManager.moveFile(logFiles[i], pLoader.readProperty("PathToBucket") + pLoader.readProperty("PathToInputProcessedLogs"));
                    System.out.println(logMoved.getAbsolutePath());

                    // error manager call
                    try {	                                               
                        pErrManager.automateErrorManager(pLoader, pLogs, logId, logMoved); 
                        pLogs.updateLogStatusInformation(logId, ILogFunctions.DETAIL_STATUS_COMPLETED);
                        // then update the efficiency information too
                        pLogs.updateSingleEfficiencyInformation(logId); 
                        completed = true;
                    } catch (ErrorManagerException e) {
                        e.printStackTrace();
                        // park the offending log in the not-stored folder so it does not block the next run
                        String pathFromLogs       = pLoader.readProperty("PathToBucket") + pLoader.readProperty("PathToInputProcessedLogs");
                        String pathToMoveErrorLog = pLoader.readProperty("PathToBucket") + pLoader.readProperty("PathToNotStoredLogs");
                        importer.storeErrorLog(errLogger, fileManager, pathFromLogs, pathToMoveErrorLog, logMoved);	                        
                        // update log status (failure/exception)
                        pLogs.updateLogStatusInformation(logId, ILogFunctions.DETAIL_STATUS_FAILED);	                        
                        // uses the exception type as subject and the exception info as body text                      
                        pErrManager.notifyErrors("ERROR: " + e.getClass().getSimpleName(), e.toString());
                        deleteReportsFromDatabase(pLogs, fileManager, logFiles);       
                    }

                    // we iterate and break because in this way we ensure that exactly 1 log was processed
                    break;
                }
            }                    
            
        } catch (DataAccessException e) {
            System.out.println("ERROR: DataAccessException..." );
            e.printStackTrace();
            // roll the db records back so the logs can be re-processed on the next attempt
            deleteReportsFromDatabase(pLogs, fileManager, logFiles);       
            return (false);
        } catch (IOException e1) {
            System.out.println("ERROR: There are not logs available or there is other process running. Trying again..." );
            e1.printStackTrace();
            deleteReportsFromDatabase(pLogs, fileManager, logFiles);       
            return (false);
        } catch (NullPointerException e2){
            // kept as a safety net for latent NPEs in the helpers; the logFiles null case
            // is now handled explicitly above
            System.out.println("ERROR: There are not logs available or there is other process running. Trying again..." );
            e2.printStackTrace();
            deleteReportsFromDatabase(pLogs, fileManager, logFiles);
            return (false);
        } 
        // if we reach this point, everything is ok!
        return completed;        
        
    }


    /**
     * Best-effort cleanup: deletes the database log/report records of every valid log file,
     * so that a failed batch can be re-processed from scratch on the next attempt.
     * 
     * @param pLogs        log functions used to delete the records
     * @param fileManager  used to validate files and derive their log ids
     * @param logFiles     candidate log files; may be {@code null} or empty (no-op)
     */
	private void deleteReportsFromDatabase(ILogFunctions pLogs,
			FileManager fileManager, File[] logFiles) {
		if (logFiles == null) {
			return;
		}
		for (File logFile : logFiles) {
			if (fileManager.validateLog(logFile)) {
				pLogs.deleteLogAndReportsById(fileManager.generateLogId(logFile));
			}
		}
	}    
    
	/* ------------------------------------------------------------------------------------------ */
	
    /**
     * Lists the pending input log files.
     * 
     * @param pLoader        properties loader holding the bucket/input paths
     * @return               the files in the input-logs folder, or {@code null} when the
     *                       folder does not exist ({@link File#listFiles()} contract)
     * @throws IOException   if the properties cannot be read
     */
	private File[] getLogFiles(PropertyLoader pLoader) throws IOException {
	    String    path2LogsFolder = pLoader.readProperty("PathToBucket") + pLoader.readProperty("PathToInputLogs");
	    File      logFolder       = new File(path2LogsFolder);
	    return logFolder.listFiles();
	}

	/* ------------------------------------------------------------------------------------------ */
	
    /**
     * Heuristic check for a concurrent run: counts the files sitting in the processed-logs
     * folder. (Currently unused — the call site in {@code automateBDProcessing} is disabled.)
     * 
     * @param pLoader        properties loader holding the bucket/processed paths
     * @return               {@code true} when more than {@link #NUMBER_OTHER_LOGS} files are present
     * @throws IOException   if the properties cannot be read
     */
	private boolean isOtherProcessRunning(PropertyLoader pLoader) throws IOException {
	    String    path2LogProccesedFolder = pLoader.readProperty("PathToBucket") + pLoader.readProperty("PathToInputProcessedLogs");
	    File[]    logs                    = new File(path2LogProccesedFolder).listFiles();
	    // listFiles() returns null when the folder is missing: treat that as "no other process"
	    // (the previous code dereferenced logs.length unconditionally and NPE'd)
	    return (logs != null && logs.length > AutomateBigDataProcessing.NUMBER_OTHER_LOGS);
	}
	
	/* ------------------------------------------------------------------------------------------ */
	
}