﻿import logging
import os
import shutil
import tempfile

from orioluspith.archivers.zip import ZipArchiver
from orioluspith.packets.hashbuilder import HashBuilder
from orioluspith.packer.packer import Packer


class Checker:
    """Validates packet integrity by comparing a packet's CHECKSUMS file
    against freshly recomputed checksums."""

    def __init__(self, work_path=None):
        """Create a checker.

        work_path -- scratch directory for intermediate files; when omitted,
        a secure temporary directory is created.
        """
        self.logger = logging.getLogger("Checker")
        if not work_path:
            # mkdtemp atomically creates the directory; the legacy mktemp()
            # only returned a name (race-prone) and never created it.
            self.work_path = tempfile.mkdtemp()
        else:
            self.work_path = work_path

        if not os.path.exists(self.work_path):
            os.makedirs(self.work_path)

    def exit(self, result):
        """Remove the scratch directory (if still present) and pass
        *result* through unchanged."""
        if os.path.exists(self.work_path):
            shutil.rmtree(self.work_path)

        return result

    def check_archive_packet(self, packet_name):
        """Extract the zip archive *packet_name* into the work directory and
        verify its checksums.  Returns True when the packet is valid."""
        if not os.path.exists(packet_name):
            self.logger.error("Packet with name '%s' not found.", packet_name)
            return False

        archiver = ZipArchiver()

        # NOTE(review): two concurrent checkers sharing the same work_path
        # would collide on <work_path>/checkpacket.
        packet_path = os.path.join(self.work_path, "checkpacket")
        if os.path.exists(packet_path):
            shutil.rmtree(packet_path)
        os.makedirs(packet_path)

        archiver.extract(packet_name, packet_path)
        result = self.check_packet(packet_path)

        # check_packet() already invoked exit(); the existence guard inside
        # exit() makes this second call a harmless no-op.
        return self.exit(result)

    def check_packet(self, packet_path):
        """Recompute checksums for the extracted packet at *packet_path* and
        compare them with the packet's CHECKSUMS file.

        Returns True when every recorded checksum matches; restores the
        original CHECKSUMS file and removes the scratch directory before
        returning.
        """
        checksums_filename = os.path.join(packet_path, "CHECKSUMS")
        if not os.path.exists(checksums_filename):
            self.logger.error("File CHECKSUMS not found into packet directory.")

            return False

        # Preserve the packet's original CHECKSUMS before Packer overwrites it.
        copy_checksums_name = os.path.join(self.work_path, "CHECKSUMS")
        shutil.copy(checksums_filename, copy_checksums_name)

        packer = Packer()
        packer.write_checksums(packet_path)

        result = self.__cmp_checksums_files(checksums_filename, copy_checksums_name)

        # Restore the original CHECKSUMS file into the packet directory.
        shutil.copy(copy_checksums_name, checksums_filename)

        return self.exit(result)

    def __cmp_checksums_files(self, checksums_filename, etalon_checksums_filename):
        """Compare two CHECKSUMS files entry by entry.

        Every entry of the etalon (reference) file must be present in the
        current file with an identical checksum.  Logs each mismatch and
        returns False if any was found, True otherwise.  Extra entries that
        appear only in the current file are not reported.
        """
        result = True

        checksums_list = self.__get_checksums_list(checksums_filename)
        etalon_checksums_list = self.__get_checksums_list(etalon_checksums_filename)

        for key in etalon_checksums_list:
            if key in checksums_list:
                if etalon_checksums_list[key] != checksums_list[key]:
                    self.logger.error("Not equal checksum for file '%s' in current and  etalon checksums" % key)

                    result = False
            else:
                self.logger.error("Not found checksum for file '%s' in checksums file" % key)

                result = False

        return result

    def __get_checksums_list(self, filename):
        """Parse a CHECKSUMS file into a {relative_path: checksum} dict."""
        checksums_list = {}

        # The Python 2 file() builtin was removed in Python 3; open() inside a
        # context manager also guarantees the handle is closed on error.
        with open(filename, "r") as fd:
            for string in fd:
                (key, var) = self.__parse_checksum_string(string)
                checksums_list[key] = var

        return checksums_list

    def __parse_checksum_string(self, checksum_string):
        """Split one '<checksum>\\t<path>' line into a (path, checksum) tuple."""
        splitter = "\t"

        # find() returns -1 for a malformed line without a tab; the slices
        # below then yield (whole-line-stripped, line-minus-last-char),
        # matching the historical behaviour of this parser.
        split_index = checksum_string.find(splitter)
        var = checksum_string[:split_index]
        key = checksum_string[split_index + 1:].strip()

        return (key, var)
