import threading
import time
import os
import pysvn
import platform
import shutil
import tarfile
import re
import ast
import subprocess
import datetime
import requests

from models import getScriptPath, Job, JobWithResults, loadConfigFile, MyTextBox, Downloader


class JobHandler(threading.Thread):
    """Daemon thread that pulls queued jobs from the shared job list and
    executes them one at a time via a JobRunner, enforcing each job's
    timeout and handling cancellation requests."""

    def __init__(self, logger, showNotifWindow, hideNotifWindow, jobList, jobsToSendList, configData,
                 svnUsername, svnPassword, statusSender):
        threading.Thread.__init__(self)
        self._isOnline = True
        self._isRunning = True
        self.daemon = True
        self._logger = logger
        self._jobList = jobList
        self._svnUsername = svnUsername
        self._svnPassword = svnPassword
        self._jobsToSendList = jobsToSendList
        self._pegasusLoggingFolder = ""
        self._runningJob = None
        # Initialized up front so run()'s except-branch and removeJob() can
        # never hit an AttributeError before the first job is picked up.
        self._job = None
        self._headerLogInfo = "[" + "JOB_HANDLER".rjust(19) + "]\t"
        self._configData = configData
        self._statusSender = statusSender
        self.showNotifWindow = showNotifWindow
        self.hideNotifWindow = hideNotifWindow
        self.start()

    def run(self):
        """Polling loop: while idle and online, pick up the next job and run
        it on a timer thread; while a job runs, watch its timeout."""
        self._logger.info(self._headerLogInfo + "Job handler started")
        while self._isRunning:
            time.sleep(0.5)
            if self._runningJob is None:
                try:
                    if self._isOnline:
                        self._job = self._jobList.use()
                        if self._job is not None:
                            self._logger.debug(self._headerLogInfo + "{} Starting job".format(self._job._id))
                            self._startTimeJob = datetime.datetime.now()
                            self._timedelta = datetime.timedelta(minutes=self._job._timeout)
                            timer = threading.Timer(0.0, self.runJob)
                            timer.start()
                except Exception as e:
                    # jobList.use() itself may have raised, in which case no
                    # job id is available yet.
                    jobId = self._job._id if self._job is not None else "?"
                    self._logger.exception(self._headerLogInfo + "{} JobHandler crash while running job: {}"
                                           .format(jobId, str(e)))
            else:
                if datetime.datetime.now() - self._startTimeJob > self._timedelta:
                    timer = threading.Timer(0, self.killRunningJob,
                                            ["Timeout expired. Stopping job. Wait for safe removal.", True])
                    timer.start()

    def runJob(self):
        """Run the current job to completion on this timer thread, then
        remove it from the queue and hide the notification window."""
        self.showNotifWindow("test")
        self._logger.info(self._headerLogInfo + "{} Starting queued job".format(self._job._id))
        # self._statusSender.setCurrentJob(self._job._id)
        self._runningJob = JobRunner(self._logger, self._job, self._jobsToSendList, self._configData, self._svnUsername,
                                     self._svnPassword, self._headerLogInfo, self._job._id)
        self._runningJob.join()
        self._runningJob = None
        self._logger.info(self._headerLogInfo + "{} Removing finished job".format(self._job._id))
        self._jobList.remove(self._job)
        self.hideNotifWindow()

    def goOffline(self):
        """Stop picking up new jobs (the current one keeps running)."""
        self._isOnline = False

    def goOnline(self):
        """Resume picking up queued jobs."""
        self._isOnline = True

    def destroy(self):
        """Shut the handler down, stopping any running job first."""
        if self._runningJob:
            self.killRunningJob("CI Agent exits", True)
        self._isRunning = False

    def getRunningJob(self):
        """Return the active JobRunner, or None when idle."""
        return self._runningJob

    def removeJob(self, job):
        """Cancel a job: stop it if it is the one running, otherwise drop it
        from the queue. Returns 0 when the running job was stopped, else the
        queue's removal result."""
        self._logger.debug(self._headerLogInfo + "{} Cancel request from Jenkins".format(job._id))
        if job == self._job:
            self._logger.debug(self._headerLogInfo + "{} Removing running job".format(job._id))
            self.killRunningJob("Job cancelled in Jenkins", False)
            ret = 0
        else:
            self._logger.debug(self._headerLogInfo + "{} Removing job from queue".format(job._id))
            ret = self._jobList.remove(job)
        return ret

    def killRunningJob(self, text, sendResults):
        """Request the running JobRunner to stop and block until runJob()
        has cleared it, then hide the notification window."""
        self.showNotifWindow("drop", text)
        self._logger.warning(self._headerLogInfo + "{} Stopping running job".format(self._job._id))
        runner = self._runningJob
        # The job may finish concurrently between the caller's check and
        # this point; only signal the runner if it is still there.
        if runner is not None:
            runner.stopJob(sendResults)
        while self._runningJob is not None:
            time.sleep(0.1)
        self.hideNotifWindow()


# ----------------------------------------------------------------------------------------------------------------------
class JobRunner(threading.Thread):
    """Worker thread executing a single Job: updates the SVN config repo,
    loads the job's config file, downloads/unpacks/copies artifacts, runs
    the job's batch file and queues the results for sending to Jenkins."""

    def __init__(self, logger, job, jobsToSendList, configData, svnUsername, svnPassword, headerLogInfo, jobId):
        threading.Thread.__init__(self)
        self.daemon = True
        self._headerLogInfo = headerLogInfo
        self._isToBeStopped = False
        self._job = job
        self._logger = logger
        self._jobsToSendList = jobsToSendList
        self._svnUsername = svnUsername
        self._svnPassword = svnPassword
        self._reportFile = ""
        self._returnCode = -1
        self._message = "{} Job finished".format(self._job._id)
        self._cancelCommandValue = False
        self._sendResults = True
        self._sendReportData = True
        self.defineListOfJobActions()
        self.prepareSVNData(configData)
        self._artifactsDirectory = os.path.join(getScriptPath(), "ARTIFACTS\\")
        self._pegasusProcess = None
        self._downloader = Downloader()
        self.start()

    def defineListOfJobActions(self):
        """Ordered pipeline of actions executed by executeJob()."""
        self._jobActions = [
            self.sendStartNotificationToJenkins,
            self.updateConfigRepository,
            self.getConfigFilePath,
            self.loadConfigFileData,
            self.setReportFile,
            self.getArtifactsFiles,
            self.runJob]

    def cancel(self):
        """pysvn cancel callback: True aborts the current SVN operation."""
        return self._isToBeStopped

    def prepareSVNData(self, configData):
        """Create the pysvn client and wire up its auth/trust/cancel callbacks."""
        self._logger.debug(self._headerLogInfo + "{} Preparing SVN data".format(self._job._id))
        self._localRepoFolder = os.path.join(getScriptPath(), "CONFIG\\")
        self._configFilesSVNRepository = configData['configFilesSVNRepository']
        self._svnClient = pysvn.Client()
        self._svnClient.callback_ssl_server_trust_prompt = self.ssl_server_trust_prompt
        self._svnClient.callback_get_login = self.getLogin
        self._svnClient.callback_cancel = self.cancel

    def stopJob(self, sendResults=True):
        """Request job termination: cancel any download, kill the running
        process and suppress the report data in the final results."""
        self._sendResults = sendResults
        self._logger.debug(self._headerLogInfo + "{} Stopping running job".format(self._job._id))
        self._isToBeStopped = True
        if self._downloader._isDownloading:
            self._downloader.cancel()
        if self._pegasusProcess is not None:
            self.killPegasusProcess()
        self._sendReportData = False
        self._message = "{} Job stopped by user".format(self._job._id)

    def killPegasusProcess(self):
        """Force-kill the Pegasus process, platform-appropriately."""
        self._logger.debug(self._headerLogInfo + "{} Killing pegasus process".format(self._job._id))
        pegasusProcName = "PegasusRCP.exe"
        if "Windows" in platform.platform():
            os.system("taskkill /f /im " + pegasusProcName)
        else:
            os.system('pkill ' + pegasusProcName)
        self._logger.debug(self._headerLogInfo + "{} Pegasus process killed".format(self._job._id))

    def getLogin(self, realm, username, may_save):
        """pysvn login callback: supply the configured credentials, never save."""
        return True, self._svnUsername, self._svnPassword, False

    def ssl_server_trust_prompt(self, trust_dict):
        """pysvn SSL trust callback: accept the certificate, do not save."""
        return True, trust_dict['failures'], False

    def run(self):
        try:
            # Exact type check kept on purpose: subclasses of Job (e.g.
            # JobWithResults) must not be executed here.
            if type(self._job) is Job:
                self.executeJob()
        except Exception as e:
            self._logger.exception(self._headerLogInfo + "{} Job Runner crash: {}".format(self._job._id, str(e)))

    def sendStartNotificationToJenkins(self):
        """Queue a 'job started' notification for the sender thread."""
        notificationMessage = JobWithResults(self._job, '{} Job started'.format(self._job._id))
        self._jobsToSendList.put(notificationMessage)

    def executeJob(self):
        """Run the action pipeline sequentially (each action on its own
        timer thread), then report the results exactly once.

        Fixed: the pipeline now stops early once _isToBeStopped is set by a
        failed action or a cancel request, instead of blindly running the
        remaining actions; is_alive() replaces the removed isAlive() alias."""
        self._run = None
        for action in self._jobActions:
            if self._isToBeStopped:
                break
            if self._run is None or not self._run.is_alive():
                self._run = threading.Timer(0.0, action)
                self._run.start()
            while self._run.is_alive():
                time.sleep(0.1)
        self.setTmsRepository()
        self.finishJobExecution()

    def setReportFile(self):
        """Remember the report file location declared by the job config."""
        self._reportFile = self._configFileData['reportFile']

    def setTmsRepository(self):
        """Record the TMS report destination if the config provides one."""
        configData = getattr(self, '_configFileData', None)
        if configData and 'tmsReportFileDestination' in configData:
            self._tmsRepo = configData["tmsReportFileDestination"]
        else:
            self._tmsRepo = None

    def updateConfigRepository(self):
        """Retry the repo update every 10s until it succeeds."""
        while not self.updateRepository():
            time.sleep(10)

    def loadConfigFileData(self):
        """Load the job's config file; on failure abort the remaining actions.

        Fixed: the failure message is now reported once, by the final
        finishJobExecution() call in executeJob() — the original invoked
        finishJobExecution() here too, which crashed on the not-yet-set
        _tmsRepo attribute and risked double reporting."""
        self._configFileData = self.loadData(self._configFilePath)
        if not self._configFileData:
            self._message = "Could not load config file: {}. Wrong format or no file. Job execution declined".format(self._configFilePath)
            self._isToBeStopped = True
        else:
            if 'executionMessage' in self._configFileData:
                self._message = "{} {}".format(self._job._id, self._configFileData['executionMessage'])

    def updateRepository(self):
        """Update the local config checkout (or check it out fresh).
        Returns True on success, False when a retry is warranted."""
        if os.path.exists(self._localRepoFolder):
            self._logger.info(self._headerLogInfo + "{} Updating config repository: {}"
                              .format(self._job._id, self._localRepoFolder))
            try:
                self._svnClient.update(self._localRepoFolder)
                self._logger.info(self._headerLogInfo + "{} Update successful".format(self._job._id))
            except Exception as e:
                self._logger.exception(self._headerLogInfo + "{} Exception while updating config repo ({}): {}"
                                       .format(self._job._id, self._configFilesSVNRepository, str(e)))
                # A stale working-copy lock manifests as a "cleanup" hint in
                # the pysvn error text; clean up and retry once inline.
                if '\'cleanup\'' in str(e):
                    self._svnClient.cleanup(self._localRepoFolder)
                    self._svnClient.update(self._localRepoFolder)
                else:
                    return False
        else:
            self._logger.info(self._headerLogInfo + "{} Checking out config repository: {}"
                              .format(self._job._id, self._configFilesSVNRepository))
            try:
                self._svnClient.checkout(self._configFilesSVNRepository, self._localRepoFolder)
                self._logger.info(self._headerLogInfo + "{} Checkout successful".format(self._job._id))
            except Exception as e:
                self._logger.exception(self._headerLogInfo + "{} Exception while checking out config repo ({}): {}"
                                       .format(self._job._id, self._configFilesSVNRepository, str(e)))
                return False
        return True

    def getConfigFilePath(self):
        """Resolve project/subproject/branch/testType folders in the local
        checkout and point _configFilePath at the job's config.yaml."""
        self._logger.debug(self._headerLogInfo + "{} Preparing config file path".format(self._job._id))
        projectFolder = self.getFolderByName(self._localRepoFolder, self._job._project)
        subprojectFolder = projectFolder
        if self._job._subproject != 'None':
            subprojectFolder = self.getFolderByName(projectFolder, self._job._subproject)
        branchFolder = self.getFolderByName(subprojectFolder, self._job._branch)
        testTypeFolder = self.getFolderByName(branchFolder, self._job._testType)
        self._logger.debug(self._headerLogInfo + "{} Config file path set to: {}"
                           .format(self._job._id, os.path.join(testTypeFolder, "config.yaml")))
        self._configFilePath = os.path.join(testTypeFolder, "config.yaml")

    def getFolderByName(self, rootFolder, folderName):
        """Return the path of rootFolder's immediate child named folderName,
        or "" when no such entry exists."""
        folder = ""
        for subDir in os.listdir(rootFolder):
            if subDir == folderName:
                folder = os.path.join(rootFolder, subDir)
                break
        return folder

    def loadData(self, configFilePath):
        """Parse the config file via models.loadConfigFile and log the outcome.
        NOTE(review): on parse failure the helper appears to return a dict
        containing an 'error' key — confirm against models.loadConfigFile."""
        self._logger.debug(self._headerLogInfo + "{} Loading config file: {}".format(self._job._id, configFilePath))
        fileContent = loadConfigFile(configFilePath)
        if 'error' in fileContent:
            self._logger.error(self._headerLogInfo + "{} Exception while loading agent config file: {}"
                               .format(self._job._id, fileContent['error']))
        else:
            self._logger.info(self._headerLogInfo + "{} Job config file load successful.".format(self._job._id))
        return fileContent

    def getArtifactsFiles(self):
        """Fetch all of the job's artifacts, retrying up to 3 times.

        Fixed: the original loop never terminated when every artifact was
        processed successfully, because the retry counter only advanced on
        failure; we now break out as soon as a full pass succeeds."""
        self._logger.debug(self._headerLogInfo + "{} Retrieving artifacts".format(self._job._id))
        tries = 0
        while tries < 3 and not self._isToBeStopped:
            anyFailed = False
            for key in self._job._artifacts:
                keyData = self._job._artifacts[key]
                if not self.processArtifacts(keyData):
                    anyFailed = True
                    tries += 1
            if not anyFailed:
                break

    def processArtifacts(self, keyData):
        """Download, optionally unpack, and copy one artifact entry.
        Returns False as soon as any step fails."""
        if not self.downloadArtifactsFiles(keyData['artifactsPath']):
            return False
        if 'artifactsToUnpack' in keyData and not self.unpackArtifacts(keyData['artifactsToUnpack']):
            return False
        if not self.copyArtifactsFiles(keyData['files'], keyData['destinationFolder']):
            return False
        return True

    def downloadArtifactsFiles(self, artifactsPath):
        """Dispatch the download to SVN, Artifactory or a plain HTTP link
        based on markers in the artifact path."""
        if 'svnroot' in artifactsPath:
            return self.getArtifactsFromSVN(artifactsPath)
        elif 'artifactory' in artifactsPath:
            return self.getArtifactsFromArtifactory(artifactsPath)
        else:
            return self.getArtifactsFromlink(artifactsPath)

    def getArtifactsFromSVN(self, artifactsPath):
        """Export artifacts from SVN into a fresh SVN_EXPORT directory.
        An optional '@NNN' suffix pins the revision; otherwise HEAD is used."""
        artifactsPath = re.sub(r'svn\+ssh://', 'https://', artifactsPath)
        self.cleanupArtifactsDirectory()
        artifactsSvnLink = artifactsPath.split('@')[0]
        self._logger.info(self._headerLogInfo + "{} Retrieving artifact: {} from svn"
                          .format(self._job._id, artifactsPath))
        if '@' in artifactsPath:
            revision = pysvn.Revision(pysvn.opt_revision_kind.number, int(artifactsPath.split('@')[1]))
        else:
            revision = pysvn.Revision(pysvn.opt_revision_kind.head)
        if not self._isToBeStopped:
            self._logger.debug(self._headerLogInfo + "{} Exporting artifacts file: {} to directory: {}"
                               .format(self._job._id, artifactsPath, self._artifactsDirectory))
            try:
                self._svnClient.export(src_url_or_path=artifactsSvnLink,
                                       dest_path=os.path.join(self._artifactsDirectory, 'SVN_EXPORT'),
                                       revision=revision)
                self._logger.info(self._headerLogInfo + "{} Artifacts file retrieved successfully".format(self._job._id))
            except Exception as e:
                self._logger.exception(self._headerLogInfo + "{} Exception while retrieving artifacts file from SVN: {}"
                                       .format(self._job._id, str(e)))
                return False
        return True

    def getArtifactsFromlink(self, artifactsPath):
        """Download one artifact file from a plain URL into the artifacts
        directory, blocking until the Downloader finishes."""
        self.cleanupArtifactsDirectory()
        self._logger.info(self._headerLogInfo + "{} Retrieving artifacts from link: {}"
                          .format(self._job._id, artifactsPath))
        artifactsFileName = artifactsPath.split('/')[-1]
        artifactsSavedFile = os.path.join(self._artifactsDirectory, artifactsFileName)
        if not self._isToBeStopped:
            self._logger.debug(self._headerLogInfo + "{} Retrieving artifacts file: {} to: {}"
                               .format(self._job._id, artifactsPath, artifactsSavedFile))
            try:
                self._downloader.download(artifactsPath, artifactsSavedFile)
                while self._downloader._isDownloading:
                    time.sleep(0.5)
                self._logger.info(self._headerLogInfo + "{} Artifacts file retrieved successfully".format(self._job._id))
            except Exception as e:
                self._logger.exception(self._headerLogInfo + "{} Exception while retrieving artifacts file: {}"
                                       .format(self._job._id, str(e)))
                return False
        return True

    def getArtifactsFromArtifactory(self, artifactsPath):
        """Download every file listed in an Artifactory directory index.

        Fixed: the original referenced bs4 (never imported -> NameError) and
        built download URLs from the undefined name 'string'. The hrefs are
        now extracted with a stdlib regex and joined onto artifactsPath."""
        self.cleanupArtifactsDirectory()
        try:
            request_response = requests.get(artifactsPath, auth=(self._svnUsername, self._svnPassword))
            # Pull the href targets out of the HTML directory listing,
            # skipping the parent-directory link.
            listOfArtifactFiles = [href for href in re.findall(r'href="([^"]+)"', request_response.text)
                                   if href != '../']
            if listOfArtifactFiles:
                self._downloader.setLoginData(artifactsPath, self._svnUsername, self._svnPassword)
            for href in listOfArtifactFiles:
                self._downloader.download(''.join([artifactsPath, href]),
                                          os.path.join(self._artifactsDirectory, href))
        except Exception as e:
            self._logger.exception(self._headerLogInfo + "{} Exception while retrieving artifacts files: {}"
                                   .format(self._job._id, str(e)))
            return False
        return True

    def cleanupArtifactsDirectory(self):
        """Recreate the artifacts directory empty."""
        if os.path.exists(self._artifactsDirectory):
            self._logger.debug(self._headerLogInfo + "{} Removing existing artifacts directory"
                               .format(self._job._id))
            self.removeFolder(self._artifactsDirectory)
        os.makedirs(self._artifactsDirectory)

    def unpackArtifacts(self, filesToUnpack):
        """Unpack every downloaded file matching the given name patterns.
        Returns False on extraction error or when the job was stopped."""
        for fileToUnpack in filesToUnpack:
            for root, subdirs, files in os.walk(self._artifactsDirectory):
                for file in files:
                    if re.match(fileToUnpack, file):
                        self._logger.info(self._headerLogInfo + "{} Unpacking file: {}".format(self._job._id, file))
                        try:
                            self.unpackFile(os.path.join(root, file))
                        except Exception as e:
                            self._logger.exception(self._headerLogInfo + "{} Exception while unpacking artifacts file: {}"
                                                   .format(self._job._id, str(e)))
                            return False
                        if self._isToBeStopped:
                            return False
        return True

    def isFileName(self, name):
        """Heuristic: treat any name containing a dot as a file name."""
        return "." in name

    def unpackFile(self, artifactFile):
        """Extract a tar archive into the artifacts directory and delete it.
        NOTE(review): extractall on an untrusted archive can write outside
        the target directory (path traversal) — archives here come from the
        project's own artifact stores, but confirm that assumption."""
        tar = tarfile.open(artifactFile)
        tar.extractall(self._artifactsDirectory)
        tar.close()
        os.remove(artifactFile)

    def copyArtifactsFiles(self, filesToCopy, destinationFolder):
        """Empty the destination folder, copy the requested artifact files
        into it, then delete the scratch artifacts directory."""
        self._logger.info(self._headerLogInfo + "{} Copying artifacts files from: {} to: {}"
                          .format(self._job._id, self._artifactsDirectory, destinationFolder))
        if os.path.exists(destinationFolder):
            for item in os.listdir(destinationFolder):
                itemPath = os.path.join(destinationFolder, item)
                if os.path.isfile(itemPath):
                    os.unlink(itemPath)
                elif os.path.isdir(itemPath):
                    shutil.rmtree(itemPath)
        for fileEntry in filesToCopy:
            self.findAndCopyFile(fileEntry, destinationFolder)
        self.removeFolder(self._artifactsDirectory)
        self._logger.info(self._headerLogInfo + "{} Artifacts files copied".format(self._job._id))

    def removeFolder(self, path):
        """Delete a folder tree, first clearing Windows read-only/hidden/
        system attributes that would make rmtree fail."""
        for root, subdirs, files in os.walk(path):
            for file in files:
                filePath = os.path.join(root, file)
                os.system("attrib -r -h -s " + filePath)
        shutil.rmtree(path)

    def findAndCopyFile(self, fileNameRegex, fileDestinationFolder):
        """Copy the first artifact file matching fileNameRegex into the
        destination folder; log a warning when none matches."""
        if not os.path.exists(fileDestinationFolder):
            self._logger.debug(self._headerLogInfo + "{} Creating destination directory: {}"
                               .format(self._job._id, fileDestinationFolder))
            os.makedirs(fileDestinationFolder)
        for rootDir, subDirs, files in os.walk(self._artifactsDirectory):
            for file in files:
                if re.match(fileNameRegex, file):
                    shutil.copyfile(os.path.join(rootDir, file), os.path.join(fileDestinationFolder, file))
                    self._logger.info(self._headerLogInfo + "{} File copied: {}".format(self._job._id, file))
                    return
        self._logger.warning(self._headerLogInfo + "{} File {} not found among artifacts files"
                             .format(self._job._id, fileNameRegex))

    def runJob(self):
        """Run the job's batch file and capture its return code."""
        if not os.path.exists(self._configFileData["batchFile"]):
            self._logger.error(self._headerLogInfo + "{} Job cannot be run, no batch file: {}, "
                               .format(self._job._id, self._configFileData["batchFile"]) +
                               "or wrong path. Removing Job from queue.")
        else:
            batchFileDirectory = os.path.dirname(self._configFileData["batchFile"]) + "\\"
            self._logger.info(self._headerLogInfo + "{} Starting job with batch file: {} in folder: {}"
                              .format(self._job._id, self._configFileData["batchFile"], batchFileDirectory))
            self._pegasusProcess = subprocess.Popen(self._configFileData["batchFile"], cwd=batchFileDirectory, shell=False,
                                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = self._pegasusProcess.communicate()
            self._logger.info(self._headerLogInfo + "{} Job finished with return code: {}"
                              .format(self._job._id, str(self._pegasusProcess.returncode)))
            self._returnCode = self._pegasusProcess.returncode
            self._pegasusProcess = None

    def finishJobExecution(self):
        """Queue the job's results for sending, unless results were suppressed.

        Fixed: _tmsRepo is read via getattr so an abort before
        setTmsRepository() ran (e.g. config load failure) no longer raises
        AttributeError while building the results message."""
        self._logger.info(self._headerLogInfo + self._message)
        jobWithResults = None
        if self._sendResults:
            self._logger.info(self._headerLogInfo + "{} Loading report file: {}".format(self._job._id, self._reportFile))
            reportFilePath = self.loadReportFilePath(self._reportFile)
            self._logger.info(self._headerLogInfo + "{} Sending job results".format(self._job._id))
            jobWithResults = JobWithResults(self._job, self._message, getattr(self, '_tmsRepo', None),
                                            self._returnCode, reportFilePath)
            self._jobsToSendList.put(jobWithResults)

    def loadReportFilePath(self, reportFile):
        """Parse the Pegasus report file (a python-literal-ish single line)
        and return the results-file path it names, or '' when unavailable."""
        retData = ''
        if self._sendReportData and os.path.exists(reportFile):
            dct = {}
            with open(reportFile, 'r') as f:
                # The report uses ()-delimited mappings and raw backslashes;
                # normalize them so ast.literal_eval can parse the line.
                line = f.readline().replace('(', '{').replace(')', '}').replace('\\\\', '\\').replace('\\', '\\\\')
                try:
                    self._logger.debug(self._headerLogInfo + "{} Parsing report file: {}"
                                       .format(self._job._id, reportFile))
                    dct = ast.literal_eval(line)
                except Exception as e:
                    self._logger.exception(self._headerLogInfo + "{} Loading report file failed. Exception: {}"
                                           .format(self._job._id, str(e)))
            if 'testresults' in dct and 'resultsfile' in dct['testresults'][0]:
                retData = dct['testresults'][0]['resultsfile']
        self._logger.info(self._headerLogInfo + "{} Proper execution report file set to: {}"
                          .format(self._job._id, retData))
        return retData
