import os, json
import os.path as osp
import paramiko
import requests

def get_all_filepath(root_dir_path, extension_tag=None, recursive=True):
    """Recursively collect file paths under a directory, optionally filtered by extension.

    Args:
        root_dir_path: Root directory to scan.
        extension_tag: List of file-extension suffixes, e.g. ["jpg", "png", "bmp"].
            If None or empty, every file is collected.
            (Was a mutable default ``[]``; changed to ``None`` to avoid the
            shared-mutable-default pitfall — behavior is unchanged.)
        recursive: If True, descend into subdirectories.

    Returns:
        List of matching file paths (built with ``os.sep``, matching the
        original path style).
    """
    # Normalize once up front: paths are compared lowercased, so the
    # extension tags must be lowercased too for a consistent match.
    extensions = tuple(s.lower() for s in (extension_tag or []))

    paths = []
    for entry in os.listdir(root_dir_path):
        new_path = root_dir_path + os.sep + entry
        if os.path.isfile(new_path):
            # Empty tuple means "accept everything"; endswith accepts a tuple.
            if not extensions or new_path.lower().endswith(extensions):
                paths.append(new_path)
        elif os.path.isdir(new_path) and recursive:
            # Propagate all arguments (the original dropped `recursive`,
            # which only worked by accident because the default is True).
            paths.extend(get_all_filepath(new_path, extension_tag, recursive))
    return paths

def get_last_dir(root_dir_path, recursive_level=-1):
    """Recursively collect the deepest-level subdirectories of a directory.

    Args:
        root_dir_path: Root directory to scan.
        recursive_level: Maximum depth to descend; a negative value means
            unlimited, and at level 1 the current children are returned
            without further recursion.

    Returns:
        List of directory paths; if no subdirectory was found at all,
        the root directory itself is returned as the single entry.
    """
    collected = []
    for name in os.listdir(root_dir_path):
        child = root_dir_path + os.sep + name
        if not osp.isdir(child):
            continue
        if recursive_level < 0 or recursive_level > 1:
            # Still allowed to go deeper: gather the child's leaf directories.
            collected.extend(get_last_dir(child, recursive_level - 1))
        else:
            # Depth budget exhausted: treat this child as a leaf.
            collected.append(child)
    # A directory with no subdirectories is itself the "last" directory.
    if not collected:
        collected.append(root_dir_path)
    return collected

def get_file_info(data_dir, data_root, only_copy_labeled=False, via_name="via_region_data.json"):
    """Build a mapping of relative file path -> file size for files under *data_dir*.

    Args:
        data_dir: Directory whose files are inventoried.
        data_root: Root prefix stripped from each path to make it relative.
        only_copy_labeled: If True, only inventory directories that contain
            a VIA annotation file (*via_name*).
        via_name: Filename of the VIA annotation JSON used as the "labeled" marker.

    Returns:
        Dict mapping path-relative-to-*data_root* to size in bytes.
    """
    info = {}
    prefix = data_root + os.sep

    def record_dir(directory):
        # Inventory every file under *directory*, keyed by its relative path.
        for file_path in get_all_filepath(directory):
            info[file_path.replace(prefix, "")] = osp.getsize(file_path)

    if only_copy_labeled:
        # Only directories holding an annotation file count as labeled data.
        for via_path in get_all_filepath(data_dir, [via_name]):
            record_dir(osp.dirname(via_path))
    else:
        record_dir(data_dir)
    return info

def query_info(sub_dir, path="/get_info", ip="192.168.10.167", port=9966):
    """Ask the sync server for its file inventory of *sub_dir*.

    Args:
        sub_dir: Server-relative directory to query (forward-slash separated).
        path: HTTP endpoint path on the server.
        ip: Server address.
        port: Server port.

    Returns:
        The server's "data" payload (file inventory) on success, or None
        when the server reports a non-zero "ret" code.
    """
    url = "http://{}:{}{}".format(ip, port, path)
    data = {
        "query_dir": sub_dir
    }
    data = requests.post(url, data=data)
    data_json = data.json()
    if data_json["ret"] != 0:
        print('query_info data_json["ret"]: ', data_json["ret"])
        return None

    return data_json["data"]
    # NOTE: removed an unreachable debug loop that followed the return
    # statement in the original (it printed item sizes/subpaths).

def remove_data(data, path="/remove_data", ip="192.168.10.167", port=9966):
    """Ask the sync server to delete the files listed in *data*.

    Args:
        data: Server-side file inventory (relative path -> size) to remove.
        path: HTTP endpoint path on the server.
        ip: Server address.
        port: Server port.

    Returns:
        True when the server reports success ("ret" == 0), else False.
    """
    url = "http://{}:{}{}".format(ip, port, path)
    response = requests.post(url, json=data)
    payload = response.json()
    if payload["ret"] == 0:
        return True
    print('data_json["ret"]: ', payload["ret"])
    return False

def get_svr_root(path="/get_root", ip="192.168.10.167", port=9966):
    """Fetch the server-side data root directory from the sync server.

    Args:
        path: HTTP endpoint path on the server.
        ip: Server address.
        port: Server port.

    Returns:
        The server's root path string, or "" when the server reports an error.
    """
    url = "http://{}:{}{}".format(ip, port, path)
    response = requests.get(url)
    payload = response.json()
    if payload["ret"] == 0:
        return payload["svr_root"]
    print('data_json["ret"]: ', payload["ret"])
    return ""

def create_dir_svr(dirs, path="/create_dir", ip="192.168.10.167", port=9966):
    """Ask the sync server to create the given directories.

    Args:
        dirs: List of server-side directory paths to create.
        path: HTTP endpoint path on the server.
        ip: Server address.
        port: Server port.

    Returns:
        None; the server response is not inspected (fire-and-forget).
    """
    url = "http://{}:{}{}".format(ip, port, path)
    requests.post(url, json={"dirs": dirs})
    return

def copy_data(remote_sftp, data_root, lcl_info):
    """Upload the files listed in *lcl_info* to the server over SFTP.

    Args:
        remote_sftp: Connected paramiko SFTP client.
        data_root: Local root directory the relative paths are joined against.
        lcl_info: Dict of relative path -> size describing the files to upload.
    """
    svr_root = get_svr_root()
    print("svr_root: ", svr_root)

    # Pre-create every remote parent directory in one request
    # (the set drops duplicates).
    remote_dirs = {
        osp.dirname(osp.join(svr_root, rel).replace("\\", "/"))
        for rel in lcl_info
    }
    create_dir_svr(list(remote_dirs))

    for rel in lcl_info:
        src_path = osp.join(data_root, rel)
        # Remote side expects POSIX-style separators.
        dst_path = osp.join(svr_root, rel).replace("\\", "/")
        remote_sftp.put(src_path, dst_path)

def check_update(data_root, ip="192.168.10.167", user="xc", password="xc", port=22):
    """Synchronize locally labeled data under *data_root* with the remote server.

    For each second-level subdirectory: query the server inventory, diff it
    against the local labeled-file inventory, delete stale server files and
    upload new/changed local files over SFTP.

    Args:
        data_root: Local dataset root directory.
        ip: Remote host address (HTTP service and SSH share the host).
        user: SSH username.
        password: SSH password.
        port: SSH port.
    """
    remote_ssh = paramiko.SSHClient()
    remote_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    remote_ssh.connect(ip, port, user, password, timeout=10)
    try:  # ensure the SSH connection is closed (the original leaked it)
        remote_sftp = paramiko.SFTPClient.from_transport(remote_ssh.get_transport())

        sub_dirs = get_last_dir(data_root, 2)
        for sub_dir in sub_dirs:
            rel_dir = sub_dir.replace(data_root + os.sep, "").replace("\\", "/")
            svr_info = query_info(rel_dir, ip=ip)
            if svr_info is None:
                # Server-side query failed; skip rather than crash on None.
                continue
            # Only copy labeled data (directories containing a VIA json).
            lcl_info = get_file_info(sub_dir, data_root, True)
            for key in list(lcl_info.keys()):
                svr_key = key.replace("\\", "/")
                svr_size = svr_info.get(svr_key)
                # Compare with `is not None`: the original truthiness test
                # (`if svr_size and lcl_size`) treated 0-byte files as never
                # matching, so they were re-deleted and re-uploaded every run.
                if svr_size is not None and svr_size == lcl_info[key]:
                    del svr_info[svr_key]
                    del lcl_info[key]

            # Whatever remains on the server is stale; whatever remains
            # locally is new or changed.
            if svr_info:
                remove_data(svr_info)
            if lcl_info:
                copy_data(remote_sftp, data_root, lcl_info)
    finally:
        remote_ssh.close()

if __name__ == '__main__':
    # Local dataset root to synchronize with the remote server.
    data_root = r"F:\work\code\paddle\train_data\det\car\images"
    check_update(data_root)

