# Earlier experiments kept for reference:
#import os,sys
#import hdfs.client
#c1 = hdfs.client.Client("http://hadoop12:50070")

# Explicit import instead of `from hdfs import *`: only `Client` is used,
# and a wildcard import hides where the name comes from.
from hdfs import Client

# WebHDFS client pointed at the NameNode HTTP endpoint (default port 50070).
c1 = Client("http://hadoop12:50070")
#c1 = Client("http://hadoop12:50070",proxy="root")

# Disabled example calls. (Previously held in a bare triple-quoted string,
# which is a real statement that builds and discards a str object at import
# time; plain comments make the intent explicit.)
#
# print(c1.list("/"))
# c1.makedirs("/usr/hu")
# print(c1.list("/usr"))
#
# c1.rename("/usr/hu","/usr/hu1")
# print(c1.list("/usr"))
# c1.upload("/usr/hu1/","test.py")
# print(c1.list("/usr/hu1"))
#
# c1.download("/usr/hu1/test.py","e:\\test\\")
from json import dumps

# Sample records to round-trip through HDFS.
records = [
    {'name': 'foo', 'weight': 1},
    {'name': 'bar', 'weight': 2},
]

# The target file uses the .jsonl (JSON Lines) extension, so serialize one
# JSON object per line rather than a single JSON array.
# overwrite=True makes the script re-runnable: Client.write raises an
# HdfsError if the destination path already exists.
jsonl_payload = '\n'.join(dumps(record) for record in records)
c1.write('/usr/hu1/records.jsonl', data=jsonl_payload,
         encoding='utf-8', overwrite=True)

# Client.read returns a context manager and must be used in a `with` block.
with c1.read("/usr/hu1/records.jsonl") as reader:
    print(reader.read())


#c1.delete("/usr/hu1",recursive=True)
#print(c1.list("/usr"))


#print(sys.path)

#from pyhdfs import HdfsClient
#client = HdfsClient(hosts='hadoop11:50070')
#print(client.list_status('/'))