
import io
import os
import re
import time
import json
import queue
import ftplib
import threading

from Module.xRayDetection.xray.logs import Logger
from Module.xRayDetection.xray.msg import send
class FTP:
    """Thin wrapper around ftplib.FTP adding automatic retry/reconnect,
    recursive directory traversal with resumable progress caching, and
    high-level upload/download/find operations driven through queues."""

    def __init__(self, host='', user='', passwd='', logger:Logger=None, port=21):
        """Store connection parameters; when *host* is non-empty a login is
        attempted immediately via connect() (which also sets self.resp)."""
        self.ftp = ftplib.FTP()
        self.ftp.encoding = 'utf-8'
        self.host = host
        self.user = user
        self.passwd = passwd
        self.logger = logger
        self.port = port
        if self.host:
            self.connect()

    def try_(self, func, msg, logging=True, try_count=4) -> dict:
        """Call *func* with up to *try_count* retries.

        On each failure the error is logged and a re-login is attempted; a
        response containing '230' (FTP "login successful") breaks out of the
        retry loop.  Returns func()'s result, or — when every attempt failed
        and try_count < 9 — a dict whose 'resp' holds the last error text.
        NOTE(review): the `try_count < 9` guard looks like a sentinel for
        callers passing a large try_count — confirm intent.
        """
        count = 0
        ret = dict(resp='')
        error = ''
        while count <= try_count:
            try:
                if logging:
                    self.logger.log(self.logger.INFO, msg)
                ret = func()
                if count > 0:
                    self.logger.log(self.logger.INFO, f'Retry {msg} sucessfully')
                break
            except Exception as e:
                count += 1
                error = str(e)
                self.logger.log(self.logger.ERROR, f'{e} {msg}', exception=True)
                if try_count > 0:
                    # Re-login once per failure; '230' means the session is
                    # usable again, so stop retrying func() here.
                    resp = self.connect(try_count=0)
                    if '230' in resp:
                        break
        if count > try_count and try_count < 9:
            ret['resp'] = error
        return ret

    def connect(self, logging=False, try_count=8):
        """(Re)connect and log in; returns the server response string
        (contains '230' on success) or an error message when credentials
        are missing.  The last response is kept on self.resp."""

        if self.host == '' or self.user == '' or self.passwd == '' or self.port == '':
            msg = f'empty ftp host:{self.host} user:{self.user} passwd:{self.passwd}'
            self.logger.log(self.logger.ERROR, msg)
            return msg

        def connect_():
            # A fresh FTP instance so a broken socket from a previous
            # session cannot leak into the new login.
            self.ftp = ftplib.FTP()
            self.ftp.encoding = 'utf-8'
            self.ftp.connect(self.host, self.port)
            resp = self.ftp.login(self.user, self.passwd)
            ret = dict(resp=resp)
            return ret

        ret = self.try_(connect_, f'connect ftp host:{self.host} user:{self.user} passwd:{self.passwd}', logging, try_count)
        resp = ret.get('resp', '')
        self.resp = resp
        return resp

    def sendcmd(self, cmd='PASV'):
        """Best-effort raw command (default PASV, used as a pre-transfer
        keep-alive); errors are printed and swallowed so a failed command
        never aborts the caller."""
        try:
            if self.ftp.welcome:
                self.ftp.sendcmd(cmd=cmd)
        except Exception as e:
            print(e)

    def mkdirs(self, path: str, logging=False):
        """Create every missing component of *path* on the server — the
        remote equivalent of os.makedirs(exist_ok=True)."""

        def mkdirs_():
            items = path.strip('/').split('/')
            path_ = ''
            ret = ''
            for i in range(len(items)):
                self.sendcmd('PASV')
                # Normalise the nlst() listing into full-looking paths so the
                # membership test below works whether the server returns bare
                # names or absolute paths.
                list_ = [('' if path_ in p else path_) + ('' if p.startswith('/') else '/') + p for p in self.ftp.nlst(path_ or '/')]
                path_ += '/' + items[i]
                if list_ == [] or path_ not in list_:
                    self.sendcmd('PASV')
                    ret = self.ftp.mkd(path_)
                    self.logger.log(self.logger.INFO, f'mkd {path_}')
            return ret

        ret = self.try_(mkdirs_, f'mkdirs {path}', logging)
        return ret

    def parse_dir(self, remote, list_):
        """Parse a UNIX-style LIST response for *remote* into dicts with
        file/permission/owner/size/name/path keys.

        NOTE(review): each LIST line is paired with list_[i] by position,
        which assumes dir() and the supplied nlst() listing enumerate
        entries in the same order — confirm against the target server.
        """
        items = []
        self.sendcmd('PASV')
        self.ftp.dir(remote, items.append)
        items = [
            dict(
                file=item[0] != 'd',  # LIST lines for directories start with 'd'
                permission=re.split(' +', item)[0],
                hard_link=int(re.split(' +', item)[1]),
                owner=re.split(' +', item)[2],
                group=re.split(' +', item)[3],
                size=int(re.split(' +', item)[4]),
                # month=re.split(' +', item)[5],
                # day=int(re.split(' +', item)[6]),
                # time=re.split(' +', item)[7],
                name=list_[i].split('/')[-1],
                path=list_[i] if remote in list_[i] else '/'.join([remote, list_[i]]),
            ) for i, item in enumerate(items)
        ]
        return items

    def listdir(self, remote, parse=False, item_limit=99, logging=False):
        """List *remote*: returns plain path strings, or parsed dicts when
        parse=True or the directory has fewer than *item_limit* entries."""

        def listdir_():
            self.sendcmd('PASV')
            items = self.ftp.nlst(remote)
            items = self.parse_dir(remote, items) if parse or len(items) < item_limit else items
            # Prefix bare names with the directory so callers always get full
            # paths.  For parsed dicts the `'path' not in items[0]` key test
            # skips this; for strings both checks are substring tests.
            if len(items) and remote not in items[0] and 'path' not in items[0]:
                items = ['/'.join([remote, item]) for item in items]
            ret = dict(items=items)
            return ret

        ret = self.try_(listdir_, f'listdir {remote}', logging)

        items = ret.get('items', [])

        return items

    def isdir(self, remote, logging=False):
        """True when cwd(*remote*) succeeds ('250' in the response).

        A 550 reply (not found / not a directory) is treated as a normal
        negative answer; other errors feed into try_'s retry logic.
        NOTE(review): the while loop keeps calling try_ while it returns a
        failure dict (or a '227' passive reply) and could spin indefinitely
        if the server never recovers — confirm acceptable.
        """
        ret = {}
        def isdir_():
            try:
                resp = ''
                resp = self.ftp.cwd(remote)
            except Exception as e:
                if not re.search('550', str(e)):
                    raise
            return resp
        while type(ret) is dict or '227' in ret:
            ret = self.try_(isdir_, f'isdir {remote}', logging)
        return '250' in ret

    def exists(self, remote):
        """Return the remote file size when the server answers SIZE, else
        the falsy '' (a failed SIZE also triggers a best-effort reconnect)."""
        try:
            resp = ''
            resp = self.ftp.size(remote)
        except Exception as e:
            self.connect()
        return resp

    def __upload(self, upload_path: str, path=None, bytes_=None, logging=True):
        """STOR a local file (*path*) or in-memory *bytes_* to *upload_path*,
        creating remote parent directories first.  Returns the server
        response string, or dict(resp='') when upload_path is empty."""

        upload_path = upload_path.replace('\\', '/')
        if not upload_path:
            self.logger.log(self.logger.ERROR, 'empty upload_path')
            return dict(resp='')

        parent = os.path.dirname(upload_path)
        def upload_():
            # NOTE(review): the source handle opened here is never closed
            # explicitly; it is reclaimed by the GC after upload_() returns.
            callback = open(path, 'rb') if path else io.BytesIO(bytes_)
            self.sendcmd('PASV')
            ret = dict(resp=self.ftp.storbinary(f'STOR {upload_path}', callback))
            return ret

        self.mkdirs(parent)
        ret = self.try_(upload_, f'upload {upload_path}', logging)
        return ret['resp']

    def __download(self, download_path: str, path=None, logging=True):
        """RETR *download_path*; when *path* is given the payload is written
        to that local file, otherwise it accumulates in the returned BytesIO
        (empty on failure or when a path was used).  Returns dict(resp='')
        when download_path is empty."""

        download_path = download_path.replace('\\', '/')
        if not download_path:
            self.logger.log(self.logger.ERROR, 'empty download_path')
            return dict(resp='')

        def download_():
            bio = io.BytesIO()
            # NOTE(review): this conditional expression exists only for its
            # makedirs side effect (the else arm is a no-op reference) and
            # duplicates the makedirs performed below before try_ runs.
            os.makedirs(os.path.dirname(path), exist_ok=True) if path else bio.write
            # NOTE(review): the 'wb' handle is never closed explicitly.
            callback = open(path, 'wb').write if path else bio.write
            self.sendcmd('PASV')
            ret = dict(bio=bio, resp=self.ftp.retrbinary(f'RETR {download_path}', callback))
            return ret

        if path:
            os.makedirs(os.path.dirname(path), exist_ok=True)
        ret = self.try_(download_, f'download {download_path}', logging)
        bio = ret.get('bio', io.BytesIO())
        return bio

    def delete_files(
        self,
        dir_='',
        items=[],
        func=None,
        percentage=1, 
        sub_queue=None,
        main_queue=None, 
    ):
        """Background janitor: once disk usage of a watched folder exceeds
        *percentage* (fraction of the partition), delete local files whose
        remote counterpart exists, then prune empty directories.  Runs until
        a stop flag arrives on *main_queue*; errors go to *sub_queue*.  When
        *func* is an upload bound method the upload source folders are
        scanned, otherwise *dir_* is."""
        import glob
        import shutil
        while True:
            time.sleep(1)
            if check_stop(main_queue).get('stop', False):
                break
            folders = items if re.search('(method|function).*?\.upload', str(func)) else [dir_]
            for folder in folders:
                for item in set(glob.glob(os.path.join(folder, '**', '**'), recursive=True)):
                    if check_stop(main_queue).get('stop', False):
                        break
                    # Skip the folder itself and anything cache-related.
                    if item == folder + os.path.sep or 'cache' in item:
                        continue
                    try:
                        usage = shutil.disk_usage(folder)
                        if usage.used / usage.total > percentage:
                            # Only remove files whose remote twin answers
                            # SIZE (i.e. that were uploaded already).
                            if os.path.isfile(item) and self.exists(item.replace(folder, dir_).replace(os.sep, '/')):
                                os.remove(item)
                            elif os.path.isdir(item) and len(os.listdir(item)) == 0:
                                os.removedirs(item)
                    except Exception as e:
                        put_queue(sub_queue, {'err': str(e)})

    def traverse(self, dir_='', cache_dir='/', func=None, extensions=[], items=[], codes=[],
                main_queue=None, sub_queue=None, sort_files=None, cache:dict=None, exclusions=[],
                roots=[], root='', folder='', secs=3600, shape=1280):
        """Recursively walk *items* (remote paths or local directories),
        calling *func* on every file and recording per-directory progress as
        'i/len' strings in *cache* (persisted with save_cache/load_cache so
        a later run can resume).  *exclusions*/*extensions* filter files;
        *folder* acts as a stop marker; non-empty *codes* switches func into
        batch (find) mode.  Returns the updated cache dict.
        """

        if not roots:
            roots = items


        for idx, item in enumerate(items):

            # Top-level entry (not a recursion step): (re)load its cache.
            if roots[idx] == items[idx]:
                root = roots[idx]

                # Derive the cache file name from the bound method's name
                # ('upload'/'download'/...) and the basename of the root.
                func_name = re.search('(method|function).*?\.(\w+)', str(func))[2]
                sep = '/' if (func_name != 'upload' and not os.path.exists(root)) else os.sep
                basename = root.split(sep)[-1]
                cache_name = f'{func_name}_cache_{basename}.json'

                cache_dir = (func_name == 'upload' and root) or cache_dir or root # TODO xray cache_dir
                local_cache_path = os.path.join(cache_dir, cache_name)
                remote_cache_path = '/'.join([root if func_name != 'upload' else dir_ or cache_dir, cache_name])

                if cache is None or cache.get('remote_cache_path', '') != remote_cache_path:
                    cache = cache or {}
                    cache_ = load_cache(
                        local_cache_path=local_cache_path, 
                        remote_cache_path=remote_cache_path,
                        ftp=self, logger=self.logger) or cache or {}
                    cache.update(cache_)
                    cache['local_cache_path'] = local_cache_path
                    cache['remote_cache_path'] = remote_cache_path

                    # Keep cache keys in traversal order for the index
                    # comparisons below.
                    cache = {key: value for (key, value) in \
                             (sorted(cache.items(), key=sort_files) if sort_files else cache.items())}

            # Stop marker: when *item* appears before *folder* in the sorted
            # cache at the same path depth, everything here was already done.
            if folder in cache and item in cache and \
                [*cache].index(item) < [*cache].index(folder) and \
                len(folder.split('/')) == len(item.split('/')):
                return cache

            # A path that exists locally is handled with os; otherwise FTP.
            isftp = not os.path.exists(item)
            files = self.listdir(item) if isftp else [os.path.join(item, f) for f in os.listdir(item)]


            len_files = len(files)
            progress = cache.get(item, '0/1').split('/')

            # check directory
            directory = []
            for file in files:
                path_ = file['path'] if type(file) is dict else file
                # NOTE(review): the conditional expression binds loosely —
                # this parses as ((no dot AND remote isdir) if isftp else
                # local isdir), so the "no dot" heuristic only applies to
                # FTP paths.  Confirm that is intended.
                if not re.search('\.', path_) and self.isdir(path_) if isftp else os.path.isdir(path_):
                    directory.append(path_)

            # Restart at 0 when subdirectories exist or the directory grew
            # since the cached run; otherwise resume at the cached index.
            i = 0 if any(directory) or (int(progress[1]) < len_files) else int(progress[0])

            cache[item] = f'{i}/{str(len_files)}'

            # Identical halves of 'i/len' (e.g. '3/3') → directory finished.
            if len(set(str(cache[item]).split('/'))) < 2:
                self.logger.log(self.logger.INFO, f'{item} {cache[item]}')
                return cache
            
            files = sorted(files, key=sort_files) if sort_files else files

            # Mark file-only directories that sort before the stop *folder*
            # as complete without reprocessing them.
            if (sorted([str(item).replace('\\', '/'), folder.replace('\\', '/')], key=sort_files).index(folder.replace('\\', '/')) \
                and item not in folder and not (any([not re.search('\.', f['path'] if type(f) is dict else f) for f in files]))):
                i = len_files
            
            cache[item] = f'{i}/{str(len_files)}'
            self.logger.log(self.logger.INFO, f'{item} {cache[item]}')

            results = []

            while i < len_files:

                if check_stop(main_queue).get('stop', False):
                    break

                file = files[i]
                file = file['path'] if type(file) is dict else file

                start = time.time()
                if (isftp and self.isdir(file)) or os.path.exists(file) and os.path.isdir(file):

                    # Subdirectory: recurse, sharing cache and roots.
                    cache = self.traverse(dir_=dir_, cache_dir=cache_dir, func=func, extensions=extensions, items=[file],
                                          main_queue=main_queue, sub_queue=sub_queue, folder=folder, secs=secs, exclusions=exclusions,
                                          sort_files=sort_files, cache=cache, roots=roots, root=root, codes=codes, shape=shape)
                else:
                    # Exclusion substrings and the extension whitelist decide
                    # whether func() is invoked for this file.
                    skip = False
                    skip = skip or ([exclude for exclude in exclusions if exclude in file] != [])
                    skip = skip or (not re.search(f"\.({'|'.join(extensions)})$", file, flags=re.IGNORECASE) if extensions else False)

                    if len(codes):
                        # Batch (find) mode: hand the whole listing to func.
                        func(codes=codes, files=files, dir_=dir_, root=root, main_queue=main_queue, sub_queue=sub_queue)
                        return cache

                    # self.logger.log(self.logger.INFO, f'Processing file {os.path.join(dir_, file)}')

                    path = file

                    obj = dict(
                        idx=i + 1,
                        path=path,
                        total=len_files,
                    )

                    obj['msg'] = f'{func.__name__} start: {path}'
                    put_queue(sub_queue, obj)

                    if check_stop(main_queue).get('stop', None):
                        return

                    if not skip:
                        results = func(path=path, dir_=dir_, root=root,
                                    i=i, len_files=len_files, files=files, results=results, obj=obj, shape=shape)

                    obj['msg'] = f'{func.__name__} finish: {path}'
                    put_queue(sub_queue, obj)

                end = time.time()

                i += 1
                cache[item] = f'{i}/{str(len_files)}'

                # Persist progress only for slow items (>1s) to limit churn;
                # find mode (codes) never saves.
                if end-start> 1 and len(codes) == 0 and 'local_cache_path' in cache and 'remote_cache_path' in cache:
                    save_cache(cache=cache, local_cache_path=cache['local_cache_path'], 
                        remote_cache_path=cache['remote_cache_path'], sort_files=sort_files, ftp=self)

        return cache

    def loop_func(self, func=None, loop=False, dir_='', items=[], cache_dir='', extensions=[], codes=[], exclusions=[],
        main_queue=None, sub_queue=None, secs=3600, sort_files=None, cache=None, folder='', percentage=1, shape=1280):
        """Drive traverse() once (loop=False) or repeatedly (loop=True),
        optionally starting the delete_files janitor when percentage < 1.
        A final put_queue(msg=None) tells consumers that work has ended.

        Note the while/else: since *loop* is never reassigned and the loop
        exits only via break, the else arm runs exactly when *loop* was
        falsy to begin with — i.e. it is the one-shot path.
        """

        sort_files = sort_files or sort_files_

        if type(items) is not list:
            items = [items]

        kwargs = dict(
           dir_=dir_,
           func=func,
           items=items,
           sub_queue=sub_queue,
           main_queue=main_queue,
           percentage=percentage,
        )
        if percentage < 1:
            threading.Thread(target=self.delete_files, kwargs=kwargs).start()

        while loop:
            try:
                start = time.time()

                cache = self.traverse(func=func, dir_=dir_, items=items, codes=codes,
                    cache_dir=cache_dir, extensions=extensions, folder=folder,
                    main_queue=main_queue, sub_queue=sub_queue, secs=secs,
                    sort_files=sort_files, cache=cache, shape=shape, exclusions=exclusions)

                end = time.time()

                if check_stop(main_queue).get('stop', False):
                    break

                # A fast pass (<99s) means little/no new work appeared: idle
                # for *secs* before polling the folders again.
                if end - start < 99:
                    self.logger.log(self.logger.INFO, 
                        f'wait {secs} seconds then check if the lastest folder is updating')

                    time.sleep(secs)

            except Exception as e:
                self.logger.log(self.logger.ERROR, f'retry {e}', exception=True)
        else:
            self.traverse(func=func, dir_=dir_, items=items, codes=codes,
                cache_dir=cache_dir, extensions=extensions, folder=folder,
                main_queue=main_queue, sub_queue=sub_queue, secs=secs,
                sort_files=sort_files, cache=cache, shape=shape, exclusions=exclusions)

        put_queue(sub_queue, dict(msg=None))

    def upload(self, items=[], dir_='', main_queue=None, sub_queue=None, folder='', exclusions=[],
        cache_dir='', extensions=[], loop=False, sort_files=None, secs=3600, percentage=1, **kwargs):
        """Upload local *items* under remote *dir_* through loop_func.

        Direct mode: passing upload_path=... in kwargs forwards straight to
        the private single-file uploader and returns its response.
        """

        if 'upload_path' in kwargs:
            ret = self.__upload(**kwargs)
            return ret

        def upload_(path='', dir_='', root='', **kwargs):

            if path == '' and dir_ == '' and root == '':
                print(kwargs)

            # Remote path = dir_ + the path relative to its traversal root,
            # normalised to forward slashes.
            upload_path = dir_ + '/'.join([path.replace(root, '').replace('\\', '/')])
            self.__upload(upload_path=upload_path, path=path)

        self.loop_func(func=upload_, loop=loop, dir_=dir_, items=items, folder=folder,
            cache_dir=cache_dir, extensions=extensions, sort_files=sort_files, exclusions=exclusions, 
            main_queue=main_queue, sub_queue=sub_queue, secs=secs, percentage=percentage)

    def download(self, items=[], dir_='', main_queue=None, sub_queue=None, folder='', exclusions=[],
        cache_dir='', extensions=[], loop=False, sort_files=None, secs=3600, percentage=1, **kwargs):
        """Download remote *items* below local *dir_* through loop_func;
        download_path=... in kwargs short-circuits to a single fetch and
        returns its BytesIO."""

        if 'download_path' in kwargs:
            ret = self.__download(**kwargs)
            return ret

        def download_(path='', dir_='', root='', **kwargs):

            if path == '' and dir_ == '' and root == '':
                print(kwargs)

            download_path = path
            # Local destination mirrors the remote layout under dir_.
            path = dir_ + path.replace(root, '').replace('/', os.path.sep)
            self.__download(download_path=download_path, path=path)

        self.loop_func(func=download_, loop=loop, dir_=dir_, items=items, folder=folder,
            cache_dir=cache_dir, extensions=extensions, sort_files=sort_files, exclusions=exclusions,
            main_queue=main_queue, sub_queue=sub_queue, secs=secs, percentage=percentage)

    def find(self, items=[], dir_='', main_queue=None, sub_queue=None, codes=[], exclusions=[],
        cache_dir='', extensions=[], loop=False, sort_files=None, secs=3600, download=True, **kwargs):
        """Search *items* for paths containing any of *codes*
        (case-insensitive) and optionally download matches under *dir_*.
        Found codes accumulate in codes_ across listing batches.

        NOTE(review): kwargs['remove'] is read unconditionally inside
        find_ — callers appear required to pass remove=...; confirm.
        """

        if len(codes) == 0:
            print(kwargs)
            return

        codes = [str(c).lower() for c in codes]
        codes_ = []

        def find_(files=[], codes=[], dir_='', root='', 
                    main_queue=None, sub_queue=None):

            import numpy

            # With remove truthy, codes already found are skipped.
            for code in [c for c in codes if not kwargs['remove'] or c not in codes_]:

                if type(files[0]) is dict and 'path' in files[0]:
                    files = [f['path'] for f in files]

                put_queue(sub_queue, dict(
                        msg=f'Finding {code} in {"/".join(files[0].split("/")[:-1])}'
                    )
                )

                # Vectorised case-insensitive substring match over all paths.
                files_ = numpy.array(files)[numpy.char.find(numpy.char.upper(files), str(code).upper()) != -1]

                if check_stop(main_queue).get('stop', None):
                    return

                if len(files_):

                    codes_.append(code)

                    put_queue(sub_queue, dict(
                            msg='',
                            codes=codes_,
                        )
                    )

                    for file in files_:
                        if check_stop(main_queue).get('stop', None):
                            return
                        if download:
                            self.__download(file, dir_ + str(file).replace(root, ''))

        self.loop_func(func=find_, loop=loop, dir_=dir_, items=items, 
            cache_dir=cache_dir, extensions=extensions, codes=codes, exclusions=exclusions,
            main_queue=main_queue, sub_queue=sub_queue, secs=secs, sort_files=sort_files)

def check_stop(q: queue.Queue):
    """Non-blocking peek at the control queue *q*.

    Returns the flags dict pulled from the queue, or {'stop': False} when
    *q* is None/empty.  A flags dict carrying a truthy 'stop' is put back
    so every other worker polling the same queue also observes it.

    Bug fix: the original used a bare ``except:`` which silently swallowed
    every error (e.g. passing a non-queue object); only the expected
    queue.Empty from the non-blocking get is caught now.
    """
    flags = dict(stop=False)

    if q:
        try:
            flags = q.get(block=False)
        except queue.Empty:
            pass

        if flags.get('stop', None):
            # Re-queue the stop flag for sibling consumers.
            q.put(flags)

    return flags

def put_queue(queue: queue.Queue, obj):
    """Put *obj* on *queue* when a queue was supplied; no-op otherwise."""
    if not queue:
        return
    queue.put(obj)

def test_func(func, dir_='', items=[], extensions=[], codes=[]):
    """Exercise *func* (an FTP upload/download/find bound method) in a
    worker thread while a logger thread prints whatever *func* posts on
    sub_queue; after a long busy-wait a stop flag is sent on main_queue so
    both threads can shut down.

    Bug fixes vs. the original:
    - the stop flag was tested with ``if c == 1e9`` inside ``while c < 1e9``,
      a condition that can never be true (the loop exits first), so the stop
      flag was never sent; it is now posted once after the countdown.
    - the logger thread's bare ``except:`` is narrowed to queue.Empty, the
      only expected error from a non-blocking get.
    """

    def log(main_queue: queue.Queue, sub_queue: queue.Queue):
        # Drain sub_queue until a stop flag shows up on main_queue.
        c = 0
        while True:
            time.sleep(0.001)

            flags = check_stop(main_queue)
            if flags.get('stop', False):
                break

            try:
                result = sub_queue.get(block=False)
                print(result)
            except queue.Empty:
                result = {}
            c += 1

    main_queue = queue.Queue()
    sub_queue = queue.Queue()

    kwargs = {
        'main_queue': main_queue,
        'sub_queue': sub_queue,
    }
    threading.Thread(target=log, kwargs=kwargs.copy()).start()

    kwargs.update({
        'items': items,
        'dir_': dir_,
        'extensions': extensions,
        'codes': codes,
    })
    threading.Thread(target=func, kwargs=kwargs).start()

    # Crude delay: count to 1e9, then tell both threads to stop.
    c = 0
    while c < 1e9:
        c += 1
    main_queue.put({
        'stop': True
    })
    print('stop send')

def test(host='', user='', passwd='',
    upload_items=[], upload_dir='',
    download_items=[], download_dir='',
    find_items=[], find_dir='',
    extensions=[], codes=[]
):
    """Smoke-test upload/download/find against a live FTP server.

    Builds an FTP client with a no-op logger, lists '/' when the login
    succeeded ('230' in the response), then launches each operation
    through test_func().
    """

    class Log:
        # Stand-in logger: every level is 0 and log() discards everything.
        FATAL = ERROR = WARN = INFO = DEBUG = NOTSET = 0
        def log(*args, **kwargs): kwargs

    ftp = FTP(
            host=host,
            user=user,
            passwd=passwd,
            logger=Log()
        )

    # Bug fix: FTP.__init__ only calls connect() — which sets .resp — when a
    # host was supplied, so ftp.resp may not exist; default to ''.
    if '230' in getattr(ftp, 'resp', ''):
        items = ftp.listdir('/')
        print(items)

    test_func(
        ftp.upload,
        items=upload_items,
        dir_=upload_dir,
        extensions=extensions
    )

    test_func(
        ftp.download,
        items=download_items,
        dir_=download_dir,
        extensions=extensions
    )

    test_func(
        ftp.find,
        items=find_items,
        dir_=find_dir,
        extensions=extensions,
        codes=codes
    )

def serve(username='', password='', homedir='', host='', port=21):
    """Run a blocking pyftpdlib FTP server for a single user rooted at
    *homedir* with full permissions ("elradfmwMT")."""
    from pyftpdlib.authorizers import DummyAuthorizer
    from pyftpdlib.handlers import FTPHandler
    from pyftpdlib.servers import FTPServer

    auth = DummyAuthorizer()
    auth.add_user(username=username, password=password, homedir=homedir, perm="elradfmwMT")
    FTPHandler.authorizer = auth
    FTPServer((host, port), FTPHandler).serve_forever()

def sort_files_(file, *args):
    if 'path' in file and len(args) > -1:
        name = str(file['path'])
    elif type(file) is list or type(file) is tuple:
        name = str(file[0])
    else:
        name = str(file)
    if re.search('\d{2,}', name):
        name = '/'.join([d.rjust(9, '0') for d in name.replace('\\', '/').split('/')])
    return name

def save_cache(cache: dict = None, local_cache_path='', remote_cache_path='', ftp: 'FTP' = None, sort_files=None, thread=False):
    """Persist *cache* as pretty-printed JSON locally and, when an FTP
    client with a host is supplied, mirror it to *remote_cache_path*.

    Args:
        cache: progress mapping to store (sorted by *sort_files* first).
        local_cache_path: destination JSON file on disk.
        remote_cache_path: destination path on the FTP server.
        ftp: connected FTP wrapper used for the remote copy.
        sort_files: key function for ordering entries (sort_files_ default).
        thread: when True the remote upload runs on a background thread.

    Fixes vs. the original: mutable default ``cache={}`` replaced by None;
    the JSON file handle is closed via a context manager; makedirs('') no
    longer crashes when the path has no directory component; the FTP
    annotation is a string forward reference so the def evaluates even
    when the class is not yet in scope.
    """

    cache = {} if cache is None else cache
    sort_files = sort_files or sort_files_

    cache = {key: value for (key, value) in \
        (sorted(cache.items(), key=sort_files) if sort_files else cache.items())}

    parent = os.path.dirname(local_cache_path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(local_cache_path, 'w', encoding='utf-8') as fp:
        json.dump(cache, fp, indent=4)

    if remote_cache_path and ftp and ftp.host:
        cache_ = cache.copy()
        upload_path = remote_cache_path
        kwargs = dict(
            upload_path=upload_path,
            bytes_=json.dumps(cache_, indent=4).encode(),
            logging=False,
        )

        if thread:
            threading.Thread(target=ftp.upload, kwargs=kwargs).start()
        else:
            ftp.upload(**kwargs)

def load_cache(local_cache_path='', remote_cache_path='', ftp: 'FTP' = None, logger: 'Logger' = None):
    """Load the traversal progress cache.

    Prefers the local JSON file; on failure falls back to downloading
    *remote_cache_path* through *ftp*; returns {} when both fail.

    Fixes vs. the original: the file handle is closed via a context
    manager; errors are only routed through *logger* when one was supplied
    (the original raised AttributeError with its own default logger=None);
    annotations are string forward references so the def evaluates even
    when FTP/Logger are not in scope.
    """

    try:
        with open(local_cache_path, encoding='utf-8') as fp:
            return json.load(fp)
    except Exception as e:
        if logger:
            logger.log(logger.ERROR, f'{e} {local_cache_path}', exception=True)

    try:
        bio = ftp.download(download_path=remote_cache_path)
        return json.loads(bio.getvalue().decode())
    except Exception as e:
        if logger:
            logger.log(logger.ERROR, f'{e} {local_cache_path}', exception=True)
        return {}

def check_folder(ftp:FTP, func=None, access_token='', cache_path='', remote_dir='', folder='', interval=60):
    """Poll *remote_dir* forever: descend into the named *folder* (or the
    last directory in sorted order) down to a leaf, and when that leaf's
    file count did not grow since the previous pass, log a warning built by
    *func* and push the same message through send()."""
    last_count = 0

    def descend(remote, previous):
        # Sorted listing; parsed dict entries mean subdirectories may exist.
        entries = sorted(ftp.listdir(remote, item_limit=999), key=sort_files_)
        has_subdirs = bool(entries) and type(entries[-1]) is dict \
            and bool([e for e in entries if not e['file']])
        if has_subdirs:
            named = [e for e in entries if folder and e['name'] == folder]
            target = named[0] if named else [e for e in entries if not e['file']][-1]
            return descend(target['path'], previous)
        total = len(entries)
        if previous == total:
            # Unchanged count since the last poll → raise the alert.
            ftp.logger.log(ftp.logger.WARN, func(remote, total))
            send(access_token=access_token, content=func(remote, total), cache_path=cache_path)
        return total

    while True:
        last_count = descend(remote_dir, last_count)
        time.sleep(interval * 60)
    
if __name__ == '__main__':

    # Manual smoke-test entry point — fill in FTP credentials and paths
    # before running.  NOTE(review): with an empty host, FTP.__init__ never
    # calls connect(), so attributes set during login (e.g. resp) may be
    # absent; confirm test() tolerates that before running as-is.
    test(
        host='',
        user='',
        passwd='',
        upload_items=[],
        upload_dir='',
        download_items=[],
        download_dir='',
        find_items=[],
        find_dir='',
        extensions=[],
        codes=[]
    )
