package avicit.bdp.dds.common.utils;

import avicit.bdp.common.dto.CalculateEngineConf;
import avicit.bdp.common.service.service.CalculateEngineConfigService;
import avicit.bdp.common.utils.SpringApplicationContext;
import avicit.bdp.common.utils.uploads.FileAdapterUtils;
import avicit.bdp.common.utils.uploads.impl.HDFSFileAdapter;
import avicit.bdp.core.util.json.JSONUtils;
import avicit.bdp.dds.api.dto.TableResultBean;
import avicit.bdp.dds.common.utils.parquet.ParquetUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * hdfs文件读取
 *
 * @author xugb
 * @date 2022/2/28 10:50
 */
public class HdfsUtils {
    public static final Logger logger = LoggerFactory.getLogger(HdfsUtils.class);
    private static CalculateEngineConfigService calculateEngineConfigService = SpringApplicationContext.getBean(CalculateEngineConfigService.class);

    /**
     * Reads a directory of line-delimited JSON files from HDFS and parses every
     * line into a map.
     *
     * <p>Sub-directories and Spark's {@code _SUCCESS} marker file are skipped.
     * Errors are logged and an empty (or partial) list is returned — callers
     * cannot distinguish "empty directory" from "read failure".
     *
     * @param path     HDFS directory containing the JSON part files
     * @param defineId flow-definition id used to resolve the engine/HDFS config
     * @return one map per JSON line, in file-listing order; never {@code null}
     */
    public static List<Map<String, Object>> getJsonMapList(String path, String defineId) {
        List<Map<String, Object>> list = new ArrayList<>();
        try {
            CalculateEngineConf conf = calculateEngineConfigService.getSparkConfOrDefaultByDefinitionId(defineId);
            HDFSFileAdapter fileAdapter = (HDFSFileAdapter) FileAdapterUtils.getFileAdapterByCalculateEngineConf(conf);
            FileSystem fs = fileAdapter.getFileSystem();
            FileStatus[] fileStatuses = fs.listStatus(new Path(path));
            for (FileStatus file : fileStatuses) {
                // Skip sub-directories and the job-completion marker. The original
                // compared against the misspelled "_SUCCESSS" (three S's), which
                // never matched Hadoop's "_SUCCESS" marker file.
                if (file.isDirectory() || file.getPath().getName().equalsIgnoreCase("_SUCCESS")) {
                    continue;
                }
                // try-with-resources per file: the original reassigned a single
                // is/br pair inside the loop and only closed the last one,
                // leaking a stream for every file but the last.
                try (FSDataInputStream is = fs.open(file.getPath());
                     BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
                    String line = br.readLine();
                    while (line != null) {
                        list.add(JSONUtils.toObjectMap(line));
                        line = br.readLine();
                    }
                }
            }
        } catch (Exception e) {
            // best-effort read: keep the original contract of returning what was
            // collected so far instead of propagating the failure
            logger.error(e.getMessage(), e);
        }

        return list;
    }

    /**
     * Reads the first Parquet part file found under an HDFS directory and
     * returns its rows as maps.
     *
     * <p>Only the FIRST non-directory, non-{@code _SUCCESS} entry is read —
     * presumably the output is a single part file; TODO confirm with callers.
     *
     * @param path     HDFS directory containing the Parquet output
     * @param defineId flow-definition id used to resolve the engine/HDFS config
     * @return the rows of the first part file, or an empty list if the path
     *         does not exist or contains no data file; never {@code null}
     * @throws IOException if listing or reading the files fails (logged, then rethrown)
     */
    public static List<Map<String, Object>> getParquetMapList(String path, String defineId) throws IOException {
        List<Map<String, Object>> list = new ArrayList<>();
        try {
            CalculateEngineConf conf = calculateEngineConfigService.getSparkConfOrDefaultByDefinitionId(defineId);
            HDFSFileAdapter fileAdapter = (HDFSFileAdapter) FileAdapterUtils.getFileAdapterByCalculateEngineConf(conf);
            FileSystem fs = fileAdapter.getFileSystem();
            Path dir = new Path(path);
            if (!fs.exists(dir)) {
                return list;
            }
            String filePath = "";
            FileStatus[] fileStatuses = fs.listStatus(dir);
            for (FileStatus file : fileStatuses) {
                // skip sub-directories and the job-completion marker file
                if (file.isDirectory() || file.getPath().getName().contains("_SUCCESS")) {
                    continue;
                }
                filePath = file.getPath().toString();
                break; // only the first part file is read
            }
            if (StringUtils.isNotBlank(filePath)) {
                TableResultBean bean = ParquetUtils.readParquet(filePath, null, null, null);
                list = bean.getData();
            }

        } catch (Exception e) {
            // unlike the JSON variant this method propagates the failure
            logger.error(e.getMessage(), e);
            throw e;
        }

        return list;
    }

    /**
     * Reads an HDFS text file into a single string.
     *
     * <p>Each source line is terminated with {@code "\n"} in the result (a
     * trailing newline is appended after the last line, matching the original
     * behaviour). Errors are logged and whatever was read so far is returned.
     *
     * @param fileName full HDFS path of the text file
     * @param defineId flow-definition id used to resolve the engine/HDFS config
     * @return the file content, or an empty string on failure; never {@code null}
     */
    public static String getLine(String fileName, String defineId) {
        StringBuilder sb = new StringBuilder();
        try {
            CalculateEngineConf conf = calculateEngineConfigService.getSparkConfOrDefaultByDefinitionId(defineId);
            HDFSFileAdapter fileAdapter = (HDFSFileAdapter) FileAdapterUtils.getFileAdapterByCalculateEngineConf(conf);
            FileSystem fs = fileAdapter.getFileSystem();

            // try-with-resources replaces the original finally block, which
            // closed the underlying stream BEFORE the reader wrapping it
            try (FSDataInputStream is = fs.open(new Path(fileName));
                 BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
                String line = br.readLine();
                while (line != null) {
                    sb.append(line).append('\n');
                    line = br.readLine();
                }
            }

        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }

        return sb.toString();
    }

    /**
     * Lists the immediate sub-directories of an HDFS path.
     *
     * <p>Non-recursive; plain files are ignored. Errors are logged and an
     * empty list is returned.
     *
     * @param path     HDFS directory to list
     * @param defineId flow-definition id used to resolve the engine/HDFS config
     * @return fully-qualified paths of the direct sub-directories; never {@code null}
     */
    public static List<String> getDirs(String path, String defineId) {
        List<String> list = new ArrayList<>();

        try {
            CalculateEngineConf conf = calculateEngineConfigService.getSparkConfOrDefaultByDefinitionId(defineId);
            HDFSFileAdapter fileAdapter = (HDFSFileAdapter) FileAdapterUtils.getFileAdapterByCalculateEngineConf(conf);
            FileSystem fs = fileAdapter.getFileSystem();
            FileStatus[] fileStatuses = fs.listStatus(new Path(path));
            for (FileStatus file : fileStatuses) {
                if (file.isDirectory()) {
                    list.add(file.getPath().toString());
                }
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }

        return list;
    }

}
