package com.sg.java.util;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;

import java.io.*;
import java.util.Enumeration;
import java.util.Map;
import java.util.Properties;

public class PropertiesUtils {

    /** Utility class; not meant to be instantiated. */
    private PropertiesUtils() {
    }

    /**
     * Loads a {@code .properties} file from the local filesystem and copies every
     * entry into {@code dataMap}, parsing each value as a {@link Double}.
     *
     * <p>If the file does not exist, {@code dataMap} is returned unchanged.
     *
     * @param filePath path of the local properties file
     * @param dataMap  destination map; mutated in place and also returned
     * @return the same {@code dataMap} instance passed in
     * @throws IOException           if reading the file fails
     * @throws NumberFormatException if any property value is not a valid double
     */
    public static Map<String, String> readProperties(String filePath, Map dataMap) throws IOException {
        File file = new File(filePath);
        if (file.exists()) {
            Properties sysData = new Properties();
            // try-with-resources: the original closed the stream only on the
            // happy path, leaking it when load() threw.
            try (InputStream in = new BufferedInputStream(new FileInputStream(file))) {
                sysData.load(in);
            }
            copyAsDoubles(sysData, dataMap);
        }
        return dataMap;
    }

    /**
     * Loads a {@code .properties} file from HDFS (via {@code HdfsUtils.fs}) and
     * copies every entry into {@code dataMap}, parsing each value as a
     * {@link Double}.
     *
     * @param filePath HDFS path of the properties file
     * @param dataMap  destination map; mutated in place and also returned
     * @return the same {@code dataMap} instance passed in
     * @throws IOException           if opening or reading the file fails
     * @throws NumberFormatException if any property value is not a valid double
     */
    public static Map<String, String> sparkReadProperties(String filePath, Map dataMap) throws IOException {
        Path path = new Path(filePath);
        Properties sysData = new Properties();
        // Open by Path directly: FileSystem.open takes a Path, and the former
        // getFileStatus() call was an extra NameNode round-trip.
        try (FSDataInputStream in = HdfsUtils.fs.open(path)) {
            sysData.load(in);
        }
        copyAsDoubles(sysData, dataMap);
        return dataMap;
    }

    /**
     * Copies every property into {@code dataMap}, parsing values as doubles.
     *
     * <p>NOTE(review): values are stored as {@link Double} even though the public
     * methods declare {@code Map<String, String>} — the raw {@code Map} parameter
     * hides the mismatch. Preserved as-is for caller compatibility; confirm
     * whether callers actually expect {@code Double} values.
     */
    private static void copyAsDoubles(Properties sysData, Map dataMap) {
        Enumeration<?> enumeration = sysData.propertyNames();
        while (enumeration.hasMoreElements()) {
            String key = (String) enumeration.nextElement();
            // Throws NumberFormatException for non-numeric property values.
            dataMap.put(key, Double.valueOf(sysData.getProperty(key)));
        }
    }
}
