import os
# from conf.conf import hdfs_host, hdfs_local_input_path, hdfs_local_output_path, nginx_url
from hdfs import InsecureClient
from hdfs import Client
import uuid
import random, string
# class HDFSServer:
#     def __init__(self):
#         self.client = Client(hdfs_host)
#         self.local_input_path = hdfs_local_input_path
#         self.local_output_path = hdfs_local_output_path
#         self.web_path = nginx_url
#
#     def load_images(self, file_path):
#         # 全拼接后如果传 /lyf/image 会从hdfs这个目录下载到 local_input_path(/tmp)/lyf/image
#         # 需要保证/tmp下没有image
#         labels = []
#         image_paths = []
#         # local_input_path(/tmp)/lyf/image
#         local_path = self.local_input_path + file_path
#         download_path = "/".join(local_path.split("/")[:-1])
#         print("download_path是：", download_path)
#         if os.path.exists(local_path):  # 证明之前下过这个文件夹 local_input_path(/tmp)/lyf/image
#             pass
#         else:
#             if os.path.exists(download_path):  # 判断是否有文件夹 local_input_path(/tmp)/lyf
#                 pass
#             else:
#                 os.makedirs(download_path)
#             # 如果下载 /lyf/image 到/tmp/lyf 会变成 /tmp/lyf/image
#             # 有lyf也不行！
#             self.client.download(hdfs_path=file_path, local_path=download_path, overwrite=False)
#         print("local_path是：", local_path)
#         for folder in os.listdir(local_path):
#             print("folder是：", folder)
#             if len(folder.split(".")) == 1: # 目的是防止有一些有无关文件，先去除有后缀的
#                 for pic in os.listdir(local_path + "/" + folder):
#                     labels.append(folder)
#                     image_paths.append(local_path + "/" + folder + "/" + pic)
#                     print(local_path + "/" + folder + "/" + pic)
#         return labels, image_paths
#
#     def save_data(self, output_data_path, data, data_type="dataframe"):
#         # 传来的path是 ： local:/tmp/a21c2676-7f07-11ea-b5de-000c2960831c_0_o_0
#         # 先把两个统一
#         local_path = output_data_path.split(":")[-1] + ".csv"
#         hdfs_path = output_data_path.split(":")[-1] + ".csv"
#         data.to_csv(local_path, index=False)
#         self.client.upload(local_path, hdfs_path, overwrite=True)
#
#         # file_name = str(uuid.uuid1())+".csv"
#         # data.to_csv(self.local_output_path+"/"+ file_name, index=False)
#         # self.client.upload(output_data_path, self.local_output_path+"/"+ file_name, overwrite=True)
#         return "success"
#
# HDFSSERVER = HDFSServer()
# 测试直接读取
# client = Client("http://172.16.1.127:50070", root="/")
# with client.read("/lyf/image/dog/5_2381.jpg") as reader:
#     content = reader.read()
#     print(content)

#
# client = InsecureClient("http://xiaoer-yangqi.cn:50070", root="/",
#                         # proxy="root",
#                         user="root",
#                         )


# Connect to the HDFS NameNode's WebHDFS endpoint as user "root".
# NOTE(review): host/port are hardcoded for this test cluster — confirm before reuse.
client = InsecureClient(
    "http://172.16.1.127:50070",
    root="/",
    # proxy="root",
    user="root",
)
# client.delete(hdfs_path="/DL11")
# Ensure the target directory exists, then push the local model file up.
# NOTE(review): upload() is called without overwrite=True, so a re-run will
# raise if /data/modesave.h5 already exists — presumably intentional for a
# one-shot test; verify if this script is meant to be idempotent.
client.makedirs(hdfs_path="/data")
client.upload(hdfs_path="/data", local_path="./modesave.h5")



# def random_str(slen=10):
#     seed = "abcdefg1234567890hijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
#     sa = []
#     for i in range(slen):
#         sa.append(random.choice(seed))
#     return ''.join(sa)


# for i in range(0, 100000):
#     client.makedirs(hdfs_path="/yanqi——SB——{}".format(random_str(16)), permission="000")

# client.download("/DL", "/")

# print(client.list("/"))
# print(client.status("/"))
# client.upload(hdfs_path="/DL/", local_path='/tmp/pycharm_project_883/operation/test/modesave.h5', overwrite=True)


# import pyhdfs
# client = pyhdfs.HdfsClient(hosts="172.16.1.127, 50070",user_name="root")
# print(client.get_home_directory())
# print(client.get_active_namenode())
# print(client.listdir("/user/root"))
# client.copy_to_local("/user/1.csv", "/root/1.c")
# client.copy_from_local(localsrc, dest, **kwargs)
