import sys
import ast
import urllib2

# Base URL of the WebHDFS (HttpFS) REST endpoint; the HDFS path is appended to it.
HTTPFS_URL = "http://hdfs.download.58corp.com/webhdfs/v1"
# Query string for the LISTSTATUS operation (directory listing), acting as user "root".
FS_PARA_LIST = "?op=LISTSTATUS&user.name=root"
# Query string for the OPEN operation (file download), acting as user "root".
FS_PARA_DL = "?op=OPEN&user.name=root"

def get_file_list(path):
        global CURL, HTTPFS_URL, FS_PARA_LIST, FS_PARA_DL
        fs_path = HTTPFS_URL + path + FS_PARA_LIST
	fs_download = HTTPFS_URL + path + FS_PARA_DL
	temp = urllib2.urlopen(fs_path).read()
	file_list = ast.literal_eval(temp)['FileStatuses']['FileStatus']
	print file_list
 	if len(file_list) == 0:
 		return
 	elif len(file_list) == 1:
 		if file_list[0]['type'] == 'FILE':
 			res = urllib2.urlopen(fs_download).read()
			f = open(file_list[0]['pathSuffix'], 'wb')
			f.write(res)
		elif file_list[0]['type'] == 'DIRECTORY':
			d_path =  path + "/" + file_list[0]['pathSuffix']
			get_file_list(d_path)
	else:
		for f in file_list:
			get_file_list(path + "/" + f['pathSuffix'])

def main(path):
    """Entry point: mirror the HDFS tree rooted at *path* locally."""
    return get_file_list(path)

if __name__ == "__main__":
	if len(sys.argv) == 2:
		path = sys.argv[1]
        	exit(main(path))
	else:
		print "Usage: python httpfs.py + hdfs directory's path. For example: ./httpfs.py /user/hadoop"
