# coding: utf-8
# NOTE: the encoding declaration must be on line 1 or 2 (PEP 263);
# it was previously placed after the docstring, where it had no effect.
'''
@Company: TWL
@Author: xue jian
@Email: xuejian@kanzhun.com
@Date: 2020-06-19 15:06:25
'''

import os, sys

def download_data_from_hdfs(get_date, database, table, outdir):
    """Download one day's partitioned table data from HDFS to local disk.

    For each known cluster_code partition, recreates the local output
    directory ``<outdir>/<get_date>/<code>`` and runs
    ``hadoop fs -getmerge`` to merge that partition's files into a single
    local file named ``out``.  Finally makes the whole date directory
    readable.

    Args:
        get_date: partition date, the value of the ``ds=`` partition key.
        database: warehouse database name (used as a path component).
        table: table name (used as a path component).
        outdir: local root directory to download into.
    """
    cluster_code = ['C1_1','C2_1','E1_1','E2_1','F1_1','F2_1','M1_1','M1_2','M2_1','Q1','Q2','S1_1','S1_2','S2_1','SX_1','T1_1','T1_2','T1_3','T1_4','T1_5','T2_1','WM_1','X1_1','X1_2','X2_1','Y1_1','Y1_2','Y2_1','S0','GP_1']
    print('cluster_code {0}'.format(cluster_code))
    # NOTE(review): not the standard '/user/hive/warehouse/<db>.db/<table>'
    # layout -- this warehouse path was deliberately overridden; confirm it
    # is still current before changing.
    path_hive = '/user/datastar/arc/six/warehouse/{0}/{1}'.format(database, table)
    path_local = outdir

    for code in cluster_code:
        tmp_com = 'rm -rf {2}/{0}/{1} && mkdir -p {2}/{0}/{1} && hadoop fs -getmerge {3}/ds={0}/cluster_code={1}/* {2}/{0}/{1}/out'.format(get_date, code, path_local, path_hive)
        print(tmp_com)
        # os.system runs synchronously in its own shell, so the former
        # trailing '&& wait' was a no-op and has been dropped.  Check the
        # exit status so a failed getmerge is no longer silent.
        status = os.system(tmp_com)
        if status != 0:
            print('WARNING: command exited with status {0}: {1}'.format(status, tmp_com))
    # The leading 'wait &&' here was likewise a no-op and has been removed.
    os.system('chmod -R +r {1}/{0}'.format(get_date, path_local))

if __name__ == '__main__':
    # CLI entry point.
    # Usage: <script> <get_date> <database> <table> <outdir>
    argv = sys.argv
    download_data_from_hdfs(argv[1], argv[2], argv[3], argv[4])