#!/usr/bin/env python
# coding=utf-8
# /***************************************************************************************************/
# /*Script Name: checkLoadTemplate.py           	 																				    		    */
# /*Create Date: 2020-06-02                                                                         	*/
# /*Script Developed By: ssq                                                                       */
# /*Script Use : checkLoad template                                                                  */
# /*******************************Import Modules******************************************************/

import sys
import os

'''
    Check the size of a file / directory tree.
'''


def getFileSize(directory):
    """Return the size of *directory* in bytes.

    - If *directory* is a regular file, return its size.
    - If it is an existing directory, return the total size of every
      regular file found recursively beneath it (directory entries
      themselves are not counted).
    - If the path does not exist, return 0.

    :param directory: path to a file or directory
    :return: size in bytes (int)
    """
    if os.path.isfile(directory):
        return os.path.getsize(directory)
    if os.path.exists(directory):
        # Sum every file's size across the whole tree in one pass.
        return sum(
            os.path.getsize(os.path.join(root, filename))
            for root, _dirs, files in os.walk(directory, topdown=True)
            for filename in files
        )
    return 0


if __name__ == '__main__':
    # Manual smoke test of getFileSize against a sample MD5 diff log path.
    _outputhdfsDir = "/home/edw/MD5/S02_DEMAND_MANAGE/ADP2_DEMAND_MANAGE_20201001.dat.diff.log"

    # Evaluate each check once instead of repeating the filesystem calls.
    _exists = os.path.exists(_outputhdfsDir)
    _size = getFileSize(_outputhdfsDir)

    print(_exists)
    print(_size)

    # NOTE(review): an existing but empty diff log presumably indicates a
    # clean MD5 comparison — confirm against the producing job.
    if _exists and _size == 0:
        print("yes")
    else:
        print("no")
