# -*- coding: utf-8 -*-

import os
import zipfile
import threading
from utils import *


class Slack_Zip(threading.Thread):
    """Background worker thread that zips files collected from a set of
    source directories, exposing progress attributes for a UI to poll.

    Status codes: -1 idle, 0 processing, 1 cancelled by user, 2 finished.
    """

    def __init__(self):
        self.FileList = []       # files gathered by crawler() for the current dir
        self.PBProgress = 0      # progress percentage (0-100) for the current dir
        self.PBMax = 0           # total file count for the current dir
        self.CurrFile = ''       # file currently being written to the archive
        self.CurrDir = ''        # directory currently being processed
        self.zipName = ''        # archive file name
        self.zipPath = []        # list of source directories to zip
        self.zipExtensions = ''  # comma-separated extension filter string
        self.zipMode = ''        # zipfile mode, e.g. 'w' or 'a'
        self.Halt = False        # set by stop() to request cancellation
        self.Status = -1         # Idle
        self.Destination = ''    # directory the archive is written into
        threading.Thread.__init__(self)

    def stop(self):
        """Request cancellation; run() checks this flag between files."""
        self.Halt = True

    def run(self):
        """
        Main zipping function.

        Crawls every directory in zipPath and adds matching files to the
        archive <Destination>/<zipName>.  Sets Status to 0 while running,
        1 if cancelled via stop(), and 2 on normal completion.
        """
        self.Status = 0  # Processing
        z = os.path.join(self.Destination, self.zipName)  # Full zip file name
        zp = zipfile.ZipFile(z, self.zipMode)
        try:
            for p in self.zipPath:
                self.FileList = []
                self.CurrDir = p
                self.crawler(p, self.zipExtensions)
                self.PBMax = len(self.FileList)
                for i, zfile in enumerate(self.FileList, start=1):
                    if self.Halt:
                        self.Status = 1  # Cancelled by user
                        return  # finally-clause still closes the archive
                    # crawler() already stored full paths, so no extra
                    # join with p (re-joining doubled the prefix for
                    # relative source paths).
                    self.CurrFile = zfile
                    self.PBProgress = i * 100 / self.PBMax
                    try:
                        zp.write(zfile)
                    except IOError:
                        # Best-effort: skip files that cannot be read.
                        pass
        finally:
            # Close even on cancellation/error; the original leaked the
            # handle (and could corrupt the archive) on early return.
            zp.close()
        self.Status = 2  # Finished

    def crawler(self, spath, extensions):
        """
        Recursive path-crawling routine.

        Walks spath depth-first and appends the full path of every file
        whose name passes the extension filter (e_endsw / e_wild from
        utils) to self.FileList.  Silently does nothing for a missing path.
        """
        if os.path.exists(spath):
            for f in os.listdir(spath):
                bp = os.path.join(spath, f)
                if os.path.isfile(bp):
                    if e_endsw(f, extensions) or e_wild(extensions):
                        self.FileList.append(bp)
                elif os.path.isdir(bp):
                    self.crawler(bp, extensions)
