# -*- coding: utf-8 -*-
import os
from hdfs import InsecureClient
from Util.Process import INFO, get_config
import hashlib


class HDFS:
    """Thin wrapper around the WebHDFS ``InsecureClient``.

    Provides md5-aware synchronisation between the local filesystem and
    HDFS: files are only transferred when missing or when their content
    hash differs from the existing copy.
    """

    def __init__(self):
        # Read the namenode connection settings once instead of three times.
        namenode = get_config("config", "hadoop")['namenode']
        self.namenode_ip = namenode['ip']
        self.namenode_port = namenode['port']
        self.namenode_user = namenode['user']
        self.client = InsecureClient('http://{ip}:{port}'.format(ip=self.namenode_ip, port=self.namenode_port),
                                     user=self.namenode_user)

    def push(self, hdfs_path, local_path):
        """Upload a local file or directory tree to *hdfs_path*.

        A file already present on HDFS with an identical md5 is skipped;
        a differing remote copy is deleted and re-uploaded. Directories
        are mirrored recursively.
        """
        if self.client.status(hdfs_path, strict=False) is None:
            # Remote directory does not exist yet: create it first.
            self.client.makedirs(hdfs_path)

        if not os.path.exists(local_path):
            INFO("WARNING", f"HDFS API: local_path {local_path} does not exist.")
            return  # nothing to upload
        if os.path.isfile(local_path):
            hdfs_file = hdfs_path + "/" + os.path.basename(local_path)
            status = self.client.status(hdfs_file, strict=False)
            if status is not None:
                if self.md5(hdfs_file, "hdfs") == self.md5(local_path, "local"):
                    return  # identical copy already on HDFS — skip upload
                self.delete(hdfs_file)  # stale remote copy: replace it
            INFO("INFO", f'HDFS API: push local_file {local_path} to hdfs_path {hdfs_path}')
            self.client.upload(hdfs_path, local_path)
        elif os.path.isdir(local_path):
            # Recurse: sub-directories map to hdfs_path/<name>;
            # plain files are uploaded into the current hdfs_path.
            for item in os.listdir(local_path):
                local_item = os.path.join(local_path, item)
                if os.path.isdir(local_item):
                    self.push(hdfs_path + '/' + item, local_item)
                else:
                    self.push(hdfs_path, local_item)

    def pull(self, hdfs_path, local_path):
        """Download an HDFS file or directory tree into *local_path*.

        A local file with an identical md5 is skipped; a differing local
        copy is removed and re-downloaded. Directories are mirrored
        recursively.
        """
        status = self.client.status(hdfs_path, strict=False)  # determine path type
        if status is None:
            INFO("WARNING", f'HDFS API: hdfs_path {hdfs_path} does not exist.')
            return  # nothing to download
        if status['type'] == 'FILE':  # single file: download directly
            local_file = os.path.join(local_path, os.path.basename(hdfs_path))
            if os.path.exists(local_file):
                if self.md5(hdfs_path, "hdfs") == self.md5(local_file, "local"):
                    return  # identical local copy — skip download
                os.remove(local_file)  # stale local copy: replace it
            INFO("INFO", f'HDFS API: pull hdfs_file {hdfs_path} to local_path {local_path}')
            self.client.download(hdfs_path, local_path)
        elif status['type'] == 'DIRECTORY':
            for item in self.client.list(hdfs_path):
                hdfs_item = hdfs_path + "/" + item
                if self.client.status(hdfs_item, strict=False)['type'] == 'DIRECTORY':
                    # BUGFIX: os.mkdir raised FileExistsError when the local
                    # directory already existed (e.g. on a re-pull); makedirs
                    # with exist_ok=True makes the sync idempotent.
                    os.makedirs(os.path.join(local_path, item), exist_ok=True)
                    self.pull(hdfs_item, os.path.join(local_path, item))
                else:
                    self.pull(hdfs_item, local_path)

    def exist(self, hdfs_path):
        """Return ``(True, type)`` if *hdfs_path* exists, else ``(False, "None")``."""
        status = self.client.status(hdfs_path, strict=False)
        if status is None:
            return False, "None"
        return True, status['type']

    def delete(self, hdfs_path):
        """Recursively delete *hdfs_path* on HDFS."""
        self.client.delete(hdfs_path, recursive=True)

    def print(self, hdfs_path):
        """Print the full path of every direct child of *hdfs_path*."""
        file_list = self.client.list(hdfs_path)
        for file in file_list:
            print(hdfs_path + "/" + file)

    def md5(self, path, path_type):
        """Return the hex md5 digest of a file's contents.

        *path_type* ``"local"`` reads from the local filesystem; any other
        value reads *path* from HDFS through ``self.client``. The content is
        hashed in 4 KiB chunks so large files are not loaded into memory.
        """
        md5_hash = hashlib.md5()
        if path_type == "local":
            with open(path, 'rb') as f:
                for chunk in iter(lambda: f.read(4096), b''):
                    md5_hash.update(chunk)
        else:
            with self.client.read(path) as reader:
                for chunk in iter(lambda: reader.read(4096), b''):
                    md5_hash.update(chunk)
        return md5_hash.hexdigest()