# -*- coding:utf-8 -*-
import os,sys
import ProgressStatus
import my_log
import hashlib,io
import platform

# True when running on Windows, where os.walk can yield mixed separators
# (e.g. c:/dd\ss); get_files_from_dir() normalizes them to "/" when set.
need_replace_sep = False

if platform.system() == "Windows":
    need_replace_sep = True
    original_print = print
    def windows_safe_print(*args, **kwargs):
        """Windows-only safe print wrapper that works around GBK console
        encoding errors by dropping characters GBK cannot represent."""
        # Round-trip each string argument through GBK with errors='ignore'
        # so the underlying print can never raise UnicodeEncodeError.
        new_args = []
        for arg in args:
            # Only strings need re-encoding; pass other types through unchanged.
            if isinstance(arg, str):
                new_args.append(arg.encode('gbk', errors='ignore').decode('gbk'))
            else:
                new_args.append(arg)
        original_print(*new_args, **kwargs)

    # Shadow the builtin print for the rest of this module (Windows only).
    print = windows_safe_print

def getdirsize(dir):
    """Return the total size in bytes of all files under *dir*, recursively.

    Files that disappear mid-walk or cannot be stat'ed (e.g. broken
    symlinks) are skipped, matching the OSError handling used elsewhere
    in this module (see remove_unique_size_of_file_in_list).
    """
    size = 0
    for root, dirs, files in os.walk(dir):
        for name in files:
            try:
                size += os.path.getsize(os.path.join(root, name))
            except OSError:
                # File vanished or is unreadable; ignore it.
                continue
    return size

def human_readablity_size(_size):
    """Format a byte count as a short human-readable string (B/K/M/G).

    Fixes two boundary bugs in the original: exact powers of 1024
    (1024, 1024**2, 1024**3) fell through every branch and were printed
    as raw bytes, and sizes of 1 TiB or more were also printed as raw
    bytes. Everything >= 1 GiB is now reported in G.
    """
    KIB = 1024
    if KIB <= _size < KIB ** 2:
        return str(round(_size / KIB, 4)) + "K"
    elif KIB ** 2 <= _size < KIB ** 3:
        return str(round(_size / KIB ** 2, 4)) + "M"
    elif _size >= KIB ** 3:
        # No T unit existed in the original; keep reporting huge sizes in G.
        return str(round(_size / KIB ** 3, 4)) + "G"
    else:
        return str(_size) + "B"

def get_files_from_dir(_dir, exculdefiles=(".", "..", "md5_info_cache.csv")):
    """Recursively collect file paths under *_dir*.

    Args:
        _dir: root directory to scan.
        exculdefiles: file names to skip; defaults also exclude this
            module's own md5 cache file. (Changed from a mutable list
            default to a tuple; callers may still pass any container.)

    Returns:
        list[str]: file paths with forward-slash separators. May return
        a partial list early if ProgressStatus reports cancellation.
    """
    filelist = []
    count = 1
    for root, dirs, files in os.walk(_dir):
        for name in files:
            if name not in exculdefiles:
                _path = root + "/" + name
                # On Windows os.walk can yield mixed separators like
                # c:/dd\ss; normalize everything to forward slashes.
                # (reading a module global needs no `global` statement)
                if need_replace_sep:
                    _path = _path.replace("\\", "/")
                try:
                    # Python-2 leftover: append() cannot raise these today;
                    # kept (merged into one clause) to preserve behavior.
                    filelist.append(_path)
                except (UnicodeDecodeError, UnicodeEncodeError):
                    my_log.debug("failed decode: " + _path + "\n")
                    continue
            count += 1
            if count % 5000 == 0:
                my_log.debug("scan file: " + str(count) + "\n")
            if not ProgressStatus.is_running():
                return filelist
    return filelist


#collapse duplicate paths: sort, then keep one copy of each entry
def remove_same_path_of_file_in_list(filelist):
    """Return *filelist* sorted with duplicate entries collapsed.

    Sorts the caller's list in place (preserving the original side
    effect); returns [] when there are fewer than two entries.
    """
    filelist.sort()

    if len(filelist) < 2:
        return []

    unique = [filelist[0]]
    for entry in filelist[1:]:
        if entry != unique[-1]:
            unique.append(entry)

    return unique


def is_sub_or_parent_dir(paths=[""], path=""):
    """Return True if *path* equals, contains, or is contained by any
    directory in *paths* (pure string-prefix comparison on "/"-terminated
    paths; no filesystem access)."""
    # Normalize everything to a trailing slash so prefix tests cannot
    # match partial component names (e.g. "/ab" vs "/a").
    if not path.endswith("/"):
        path += "/"
    normalized = [p if p.endswith("/") else p + "/" for p in paths]

    for candidate in normalized:
        if candidate == path:
            return True
        # Equal length but unequal strings can never be nested.
        if len(candidate) > len(path) and candidate.startswith(path):
            return True  # candidate is inside path
        if len(candidate) < len(path) and path.startswith(candidate):
            return True  # path is inside candidate
    return False

#--------------------------------
# compute md5 of file
#--------------------------------
import os
class DirInfoCache:
    """Per-directory cache of file md5/size/mtime strings.

    Each directory gets its own "md5_info_cache.csv" holding one line per
    file of the form "<filename>,<md5>,<size>,<mtime>". Maps are loaded
    lazily on first access and written back only for directories whose
    entries changed (tracked in ``changed_paths``).
    """
    def __init__(self):
        # directory path -> {file name: "md5,size,mtime" string}
        self.based_dir_key_cached_name_md5_size = {}
        # directories whose in-memory map differs from the csv on disk
        self.changed_paths = set()

    def __load(self, path):
        """Return the cached map for directory *path*, loading it from
        md5_info_cache.csv on first access (empty map if no csv exists)."""
        cached_infos_map = self.based_dir_key_cached_name_md5_size.get(path)
        if cached_infos_map is not None:
            return cached_infos_map
        cached_infos_map = {}
        cache_path = os.path.join(path, "md5_info_cache.csv")
        if os.path.exists(cache_path):
            with open(cache_path, "r") as f:
                for line in f:  # iterate lazily instead of readlines()
                    line = line.strip()
                    if not line or "," not in line:
                        # Skip blank/corrupt lines instead of crashing on
                        # tuple unpack (the old code raised ValueError).
                        continue
                    name, data = line.split(",", maxsplit=1)
                    cached_infos_map[name] = data
        self.based_dir_key_cached_name_md5_size[path] = cached_infos_map
        return cached_infos_map

    def save(self):
        """Write every dirty directory map back to its csv, then clear the
        dirty set. Entries whose file no longer exists are dropped."""
        for path in self.changed_paths:
            print(f"save to cache {path}")
            cached_infos_map = self.based_dir_key_cached_name_md5_size[path]
            lines = []
            for key, data in cached_infos_map.items():
                # Only persist entries whose file still exists on disk.
                if os.path.exists(os.path.join(path, key)):
                    lines.append(f"{key},{data}\n")
                else:
                    print(f"file already delete: {key}")
            cache_path = os.path.join(path, "md5_info_cache.csv")
            with open(cache_path, "w") as f:
                f.writelines(lines)
        self.changed_paths = set()

    def get(self, filepath):
        """Return the cached "md5,size,mtime" string for *filepath*,
        or None if not cached."""
        path, name = os.path.split(filepath)
        return self.__load(path).get(name)

    def add(self, filepath, data):
        """Record *data* for *filepath* and mark its directory dirty so the
        next save() flushes it."""
        path, name = os.path.split(filepath)
        self.__load(path)[name] = data
        self.changed_paths.add(path)
        print(f"add to cache {filepath}")
        
# Module-level singleton cache used by get_md5_of_file().
dir_info_cache = DirInfoCache()

def get_md5_of_file(_file, fsize=None, ftime=None):
    """Return the md5 hex digest of *_file*, consulting the directory cache.

    If the cached entry's recorded size and mtime match *fsize*/*ftime*,
    the cached md5 is returned without re-reading the file; otherwise the
    file is hashed and the cache updated.

    Returns None if the file cannot be read or if ProgressStatus reports
    the scan was cancelled mid-read.
    """
    data = dir_info_cache.get(_file)
    # Cache entries look like "md5,size,mtime"; a bare 32-char md5 with no
    # size/time attached cannot be validated, hence the length check.
    if data is not None and len(data) > 32:
        md5, fsize_old, ftime_old = data.split(",")
        if int(fsize_old) == fsize and float(ftime_old) == ftime:
            return md5
    try:
        m = hashlib.md5()
        # "with" guarantees the handle is closed even if read()/update()
        # raises — the old io.FileIO version leaked it on error paths.
        with open(_file, "rb") as f:
            while True:
                if not ProgressStatus.is_running():
                    return None
                chunk = f.read(65536)  # 64 KiB chunks vs old 1 KiB: fewer syscalls
                ProgressStatus.process(len(chunk))
                if chunk == b"":
                    break
                m.update(chunk)
        md5string = m.hexdigest()
        dir_info_cache.add(_file, f"{md5string},{fsize},{ftime}")
        return md5string
    except IOError:
        my_log.debug("Get md5 failed:" + _file)
    return None

#
#remove unique size of file in list
def remove_unique_size_of_file_in_list(filelist):
    """Keep only files whose byte size is shared with at least one other file.

    Files that cannot be stat'ed are skipped (logged via my_log). Groups
    are emitted in ascending size order; within a group the input order
    is preserved. Fewer than two stat-able files yields [].
    """
    # Group paths by on-disk size, preserving input order inside each group.
    size_groups = {}
    total = 0
    for candidate in filelist:
        try:
            size_groups.setdefault(os.path.getsize(candidate), []).append(candidate)
            total += 1
        except OSError as e:
            my_log.debug("Get file info failed:" + candidate + "\n")
            continue

    if total < 2:
        return []

    # Emit groups smallest-size first, dropping singletons.
    result = []
    for size in sorted(size_groups):
        group = size_groups[size]
        if len(group) > 1:
            result.extend(group)

    return result



if __name__ == "__main__":
    # Ad-hoc manual test: scan a hard-coded Windows directory under a
    # running ProgressStatus session. (The commented lines below are
    # Python-2 default-encoding leftovers.)
    #reload(sys)
    #sys.setdefaultencoding("utf-8")
    d = u'D:/data/InputEngine'
    ProgressStatus.start()
    l = get_files_from_dir(d)
    ProgressStatus.stop()
    pass