import os
import pandas
from hashlib import md5
import pysnooper
from utils import timefn
# Module-level accumulator frame for file metadata.  It also serves as the
# default argument of the functions below — note that a default argument is
# bound ONCE at import time, so every call without an explicit df shares
# this same object.
df=pandas.DataFrame(columns=['path', 'atime', 'ctime', 'mtime','size','md5sum'])

#@pysnooper.snoop('run.log',relative_time=True)
@timefn
def get_fileinfolist_of_root_dir(root, df=df, update_md5=False):
    """Walk *root* recursively and collect one metadata record per file.

    Parameters
    ----------
    root : str
        Directory tree to walk.
    df : pandas.DataFrame
        Frame the collected records are appended to (defaults to the
        shared module-level accumulator).
    update_md5 : bool
        When True, compute each file's MD5 digest as well.

    Returns
    -------
    pandas.DataFrame
        A NEW frame consisting of *df* plus one row per readable file
        found; *df* itself is not mutated.
    """
    info_list = []
    # Renamed the walk variable so it no longer shadows the `root` parameter.
    for dirpath, _dirs, files in os.walk(root):
        for f in files:
            # os.path.join is the portable way to build the path; the old
            # root + os.path.sep + f broke on a trailing separator.
            file_full_path = os.path.join(dirpath, f)
            file_info = get_fileinfo(file_full_path, update_md5)
            # get_fileinfo returns the sentinel string 'fail' on error.
            if isinstance(file_info, dict):
                info_list.append(file_info)
    # DataFrame.append was deprecated in pandas 1.4 and REMOVED in 2.0;
    # concat is the supported replacement and (with sort=True /
    # ignore_index=True) produces the same result here.
    return pandas.concat([df, pandas.DataFrame(info_list)],
                         ignore_index=True, sort=True)

def get_fileinfo(path, update_md5=False):
    """Return a metadata dict for *path*, or the string ``'fail'`` on error.

    Parameters
    ----------
    path : str
        File to stat.
    update_md5 : bool
        When True, also compute the file's MD5 digest via get_md5sum;
        otherwise the 'md5sum' field is left as ''.

    Returns
    -------
    dict | str
        ``{'path', 'atime', 'ctime', 'mtime', 'size', 'md5sum'}`` on
        success, ``'fail'`` when the file cannot be stat'ed.  The string
        sentinel is kept because callers test ``isinstance(..., dict)``.
    """
    try:
        # Only the stat call can raise for a missing/unreadable path;
        # keep the try body minimal and the exception narrow (the old
        # bare except even swallowed KeyboardInterrupt).
        stat = os.stat(path)
    except OSError:
        return 'fail'
    md5sum = get_md5sum(path) if update_md5 else ''
    return {'path':   path,
            'atime':  stat.st_atime,
            'ctime':  stat.st_ctime,
            'mtime':  stat.st_mtime,
            'size':   stat.st_size,
            'md5sum': md5sum}

def get_md5sum(fpath):
    """Return the hex MD5 digest of the file at *fpath*, or ``''`` on I/O error.

    The file is read in fixed-size chunks so arbitrarily large files are
    hashed in constant memory.

    Parameters
    ----------
    fpath : str
        Path of the file to hash.

    Returns
    -------
    str
        32-character lowercase hex digest, or the empty string when the
        file cannot be opened/read (callers treat '' as "no digest").
    """
    digest = md5()
    try:
        with open(fpath, 'rb') as f:
            # 64 KiB is a power-of-two buffer aligned with common
            # filesystem block sizes (the old 2024 looked like a typo
            # for a power of two).
            for chunk in iter(lambda: f.read(65536), b''):
                digest.update(chunk)
    except OSError:
        # Narrowed from a bare except: only I/O failures mean "no digest";
        # anything else should propagate.
        return ''
    return digest.hexdigest()
    
#@pysnooper.snoop('run.log',relative_time=True)
@timefn
def get_md5sum_of_same_size_files(df=df):
    """Compute MD5 digests for files that share a size and return those rows.

    Only files whose size occurs more than once can possibly be
    duplicates, so digests are computed just for that candidate set.

    Parameters
    ----------
    df : pandas.DataFrame
        Frame with 'path', 'size' and 'md5sum' columns (defaults to the
        shared module-level accumulator).

    Returns
    -------
    pandas.DataFrame
        The rows of *df* whose 'md5sum' is non-empty.

    NOTE(review): like the original, this mutates *df* in place via
    ``.loc`` assignment — by default that is the module-level frame.
    """
    # One vectorized pass replaces the original groupby + rows>1 check;
    # keep=False marks every member of each same-size group.
    same_size = df['size'].duplicated(keep=False)
    for file_full_path in df.loc[same_size, 'path'].tolist():
        # Match by path (not index) to preserve the original's behavior
        # when the same path appears on several rows.
        df.loc[df['path'] == file_full_path, 'md5sum'] = get_md5sum(file_full_path)
    return df[df['md5sum'] != '']

