package com.feidee.fdhadoop.hdfs;

import com.feidee.fdcommon.configuration.CustomConfiguration;
import com.feidee.fdcommon.util.ToolUtils;
import com.feidee.fdhadoop.kerberos.KerberosAuthentication;
import org.apache.commons.fileupload.FileItem;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.log4j.Logger;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;


public class HdfsUtils {
    // Log4j logger for this utility class.
    private static Logger logger = Logger.getLogger(HdfsUtils.class.getName());
    // Hard cap on the number of lines returned by readAllData when limiting is enabled.
    private static final int LIMIT_LINE = 1000000;
    // Authenticate against Kerberos once when the class is first loaded;
    // getHaConfiguration() re-authenticates on every call as well.
    static {
        KerberosAuthentication.authenticate();
    }

//    public static FileSystem fileSystem = getFileSystem();

    /**
     * Builds a Hadoop {@link Configuration} for HA HDFS access.
     * <p>
     * Re-runs Kerberos authentication (the ticket obtained at class load may
     * have expired) and forces the DistributedFileSystem implementation so the
     * {@code hdfs://} scheme always resolves.
     *
     * @return a fresh Configuration instance for HDFS clients
     */
    public static Configuration getHaConfiguration() {
        KerberosAuthentication.authenticate();
        Configuration configuration = new Configuration();
        configuration.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        return configuration;
    }

    /**
     * Collects the HDFS HA and YARN ResourceManager properties from
     * {@code CustomConfiguration} into a plain map (used e.g. to feed an
     * Oozie client).
     *
     * @return map of Hadoop property name to value
     * @throws IllegalStateException if a required property is missing
     *         (previously this surfaced as an opaque NullPointerException
     *         from {@code split} or {@code ConcurrentHashMap.put})
     */
    public static Map<String, String> getConfiguration() {
        Map<String, String> conf = new ConcurrentHashMap<String, String>();
        conf.put("fs.defaultFS", requireConf("fs.defaultFS"));
        conf.put("ha.zookeeper.quorum", requireConf("ha.zookeeper.quorum"));
        String dfsNameServices = requireConf("dfs.nameservices");
        conf.put("dfs.nameservices", dfsNameServices);

        String key = "dfs.ha.namenodes." + dfsNameServices;
        String dfsHaNameNodes = requireConf(key);
        conf.put(key, dfsHaNameNodes);
        for (String nn : dfsHaNameNodes.split(",")) {
            key = "dfs.namenode.rpc-address." + dfsNameServices + "." + nn.trim();
            conf.put(key, requireConf(key));
        }
        key = "dfs.client.failover.proxy.provider." + dfsNameServices;
        conf.put(key, requireConf(key));
        conf.put("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // YARN resource manager addresses: required by the Oozie client.
        String rmIds = requireConf("yarn.resourcemanager.ha.rm-ids");
        conf.put("yarn.resourcemanager.ha.rm-ids", rmIds);
        for (String rmId : rmIds.split(",")) {
            key = "yarn.resourcemanager.address." + rmId.trim();
            conf.put(key, requireConf(key));
        }
        return conf;
    }

    /** Looks up a configuration value, failing fast with a clear message when it is absent. */
    private static String requireConf(String key) {
        String value = CustomConfiguration.getString(key);
        if (value == null) {
            throw new IllegalStateException("Missing required configuration property: " + key);
        }
        return value;
    }

    /**
     * Reads the whole HDFS file at {@code path} into a byte array.
     *
     * @param path HDFS file path
     * @return file contents, or {@code null} if the file could not be opened;
     *         if reading fails midway, the bytes read so far are returned
     *         (preserving the original best-effort behaviour)
     */
    public static byte[] readFile2Byte(String path) {
        InputStream in = getInputStream(path);
        if (in == null) {
            logger.error("read" + path + " failed!");
            return null;
        }
        // ByteArrayOutputStream needs no close(); it is purely in-memory.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try {
            byte[] buff = new byte[1024];
            int len;
            while (-1 != (len = in.read(buff, 0, buff.length))) {
                out.write(buff, 0, len);
            }
        } catch (IOException e) {
            logger.error("read" + path + " failed! ", e);
        } finally {
            try {
                in.close();
            } catch (IOException ignored) {
                // best-effort close; nothing useful can be done here
            }
        }
        return out.toByteArray();
    }

    /**
     * Opens an input stream for the given HDFS file.
     *
     * @param filename HDFS file path
     * @return an open stream, or {@code null} if the file could not be opened
     */
    public static InputStream getInputStream(String filename) {
        try {
            return getFileSystem().open(new Path(filename));
        } catch (IOException e) {
            logger.error("open " + filename + " failed! ", e);
            return null;
        }
    }

    /**
     * Reads a UTF-8 text file from HDFS into a single string.
     * <p>
     * Line terminators are NOT preserved: lines are concatenated directly,
     * matching the original behaviour.
     *
     * @param path HDFS file path
     * @return the concatenated content, or an empty string when the file does
     *         not exist or reading fails (the failure is logged)
     */
    public static String readFile(String path) {
        StringBuilder content = new StringBuilder();
        try {
            FileSystem fileSystem = getFileSystem();
            if (fileExists(path)) {
                // try-with-resources closes the reader even if readLine throws
                // (the original leaked it on exception).
                try (BufferedReader bufferedReader = new BufferedReader(
                        new InputStreamReader(fileSystem.open(new Path(path)), "utf-8"))) {
                    String line;
                    while ((line = bufferedReader.readLine()) != null) {
                        content.append(line);
                    }
                }
            }
        } catch (IOException e) {
            logger.error("readFile Exception: ", e);
        }
        return content.toString();
    }

    /**
     * 查看HDFS文件是否存在
     */
    /**
     * Checks whether a path exists on HDFS.
     *
     * @param path HDFS path
     * @return {@code true} if the path exists; {@code false} when it does not
     *         or when the check failed (the failure is logged)
     */
    public static Boolean fileExists(String path) {
        try {
            return getFileSystem().exists(new Path(path));
        } catch (IllegalArgumentException | IOException e) {
            // Multi-catch replaces the two identical catch bodies.
            logger.error("fileExists Exception: ", e);
            return false;
        }
    }

    /**
     * Downloads an HDFS file to the local filesystem.
     *
     * @param file     HDFS source path
     * @param localdst local destination path
     * @throws IOException if opening, reading, or writing fails
     */
    public static void get(String file, String localdst) throws IOException {
        // try-with-resources guarantees the HDFS input stream is closed even
        // when the local output stream cannot be created (the original leaked
        // it in that case).
        try (InputStream input = getFileSystem().open(new Path(file));
             OutputStream output = new FileOutputStream(localdst)) {
            IOUtils.copyBytes(input, output, 4096, false);
        }
    }

    /**
     * @param file      文件路径
     * @param content   写入内容
     * @param overwrite 是否覆盖写入
     * @throws IOException
     */
    /**
     * Writes a string to an HDFS file, encoded as UTF-8.
     *
     * @param file      HDFS file path
     * @param content   content to write
     * @param overwrite whether to overwrite an existing file; when the file
     *                  does not exist it is always created
     * @throws IOException if the file cannot be created or written
     */
    public static void write(String file, String content, boolean overwrite) throws IOException {
        Path path = new Path(file);
        FileSystem fileSystem = getFileSystem();
        // Create regardless of the flag when the file does not exist yet.
        boolean flag = overwrite || !fileSystem.exists(path);
        // try-with-resources closes (and flushes) the stream even on failure.
        try (FSDataOutputStream outputStream = fileSystem.create(path, flag)) {
            // writeBytes() would truncate every char to its low byte,
            // corrupting non-ASCII content; encode explicitly as UTF-8.
            outputStream.write(content.getBytes(StandardCharsets.UTF_8));
        }
    }


    /**
     * Obtains a FileSystem handle using the HA configuration.
     * <p>
     * NOTE(review): returns {@code null} when FileSystem.get fails; most
     * callers in this class dereference the result without a null check, so a
     * failure here surfaces as a NullPointerException downstream. Also note
     * FileSystem.get typically returns a shared cached instance (unless the
     * fs cache is disabled), so callers should not close it — confirm against
     * the cluster configuration.
     *
     * @return the FileSystem, or {@code null} on failure (failure is logged)
     */
    private static FileSystem getFileSystem() {
        FileSystem fs = null;
        try {
            fs = FileSystem.get(getHaConfiguration());
        } catch (IOException e) {
            logger.error("getFileSystem Exception: ", e);
        }
        return fs;
    }

    /**
     * Lists the regular files (non-recursively) directly under a directory.
     *
     * @param dir HDFS directory path
     * @return full paths of the files found; empty when {@code dir} is
     *         null/blank or does not exist
     * @throws IOException if listing fails
     */
    public static List<String> listFiles(String dir) throws IOException {
        List<String> names = new ArrayList<String>();
        if (ToolUtils.isNotNull(dir)) {
            FileSystem fileSystem = getFileSystem();
            Path path = new Path(dir);
            if (fileSystem.exists(path)) {
                for (FileStatus stat : fileSystem.listStatus(path)) {
                    if (stat.isFile()) {
                        names.add(stat.getPath().toString());
                    }
                }
            }
            // Deliberately NOT closing the FileSystem here: FileSystem.get()
            // returns a shared cached instance, and closing it broke every
            // subsequent HDFS call made through this class.
        }
        return names;
    }

    /**
     * 将上传到hdfs
     */
    /**
     * Writes each string in {@code contentList} to an HDFS file back to back
     * (no separators are inserted between entries), encoded as UTF-8.
     *
     * @param contentList strings to write, in order
     * @param hdfsPath    target HDFS file path
     * @param overwrite   whether to overwrite an existing file
     */
    public static void putData(List<String> contentList, String hdfsPath, Boolean overwrite) {
        try {
            FileSystem fileSystem = getFileSystem();
            // try-with-resources closes the stream even when a write fails
            // (the original leaked it in that case).
            try (FSDataOutputStream output = fileSystem.create(new Path(hdfsPath), overwrite)) {
                for (String content : contentList) {
                    // Encode explicitly: the platform default charset is not
                    // guaranteed to be UTF-8 on every host.
                    byte[] contentBytes = content.getBytes(StandardCharsets.UTF_8);
                    output.write(contentBytes, 0, contentBytes.length);
                }
            }
        } catch (IllegalArgumentException | IOException e) {
            // Pass the throwable so the stack trace is not lost.
            logger.error("PutData Exception: " + e.getMessage(), e);
        }
    }

    /**
     * 创建目录
     */
    /**
     * Creates an HDFS directory (including missing parents) if it does not
     * exist yet. Failures are logged, not thrown.
     *
     * @param path HDFS directory path
     */
    public static void mkdir(String path) {
        try {
            FileSystem fileSystem = getFileSystem();
            if (!fileExists(path)) {
                fileSystem.mkdirs(new Path(path));
            }
        } catch (IllegalArgumentException | IOException e) {
            // Pass the throwable so the stack trace is not lost.
            logger.error("mkdir Exception: " + e.getMessage(), e);
        }
    }

    /**
     * 删除目录
     */
    /**
     * Deletes an HDFS path if it exists. Failures are logged, not thrown.
     *
     * @param path      HDFS path to delete
     * @param recursion pass {@code true} to delete a non-empty directory
     *                  recursively
     */
    public static void rmdir(String path, Boolean recursion) {
        try {
            FileSystem fileSystem = getFileSystem();
            if (fileExists(path)) {
                fileSystem.delete(new Path(path), recursion);
            }
        } catch (IllegalArgumentException | IOException e) {
            // Pass the throwable so the stack trace is not lost.
            logger.error("rmdir Exception: " + e.getMessage(), e);
        }
    }


    /*********************************************/
    /**
     * Write a file in hdfs
     *
     * @param uri     target file uri
     * @param content file content
     * @throws IOException
     */
    /**
     * Writes a string as a new HDFS file (UTF-8), replacing any existing file
     * at the target uri.
     *
     * @param uri     target file uri
     * @param content file content
     * @throws IOException if deleting the old file or writing the new one fails
     */
    public static void upload(String uri, String content) throws IOException {
        Path path = new Path(uri);
        FileSystem fileSystem = getFileSystem();
        if (fileSystem.exists(path)) {
            fileSystem.delete(path, true);
        }
        try (OutputStream out = fileSystem.create(path);
             InputStream rf = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8))) {
            // close=false: try-with-resources owns both streams (the original
            // passed close=true and then closed `out` a second time).
            IOUtils.copyBytes(rf, out, 4096, false);
        }
    }

    /**
     * Create a new file on HDFS and add content,
     * it should be noted that the file did not exist
     * need
     *
     * @param uri  HDFS uri
     * @param item InputStream item to upload
     * @throws IOException
     */
    /**
     * Streams the given input to a new HDFS file. The caller keeps ownership
     * of {@code item} and must close it; this method only closes the HDFS
     * output stream.
     *
     * @param uri  HDFS uri of the file to create
     * @param item stream with the content to upload
     * @throws IOException if creating or writing the file fails
     */
    public static void uploadModel(String uri, InputStream item)
            throws IOException {
        FileSystem fileSystem = getFileSystem();
        // try-with-resources closes the output even when copyBytes throws
        // (the original leaked it on failure).
        try (FSDataOutputStream out = fileSystem.create(new Path(uri))) {
            IOUtils.copyBytes(item, out, 4096, false);
        }
    }

    /**
     * Upload file to HDFS
     * need
     *
     * @param uri  target file path
     * @param item file to upload
     * @param name name of the new file
     * @throws IOException
     */
    /**
     * Uploads a multipart form file to HDFS.
     * <p>
     * NOTE(review): the destination is the plain concatenation of {@code uri}
     * and {@code name}; callers appear to be expected to include a trailing
     * "/" in {@code uri} — confirm against call sites.
     *
     * @param uri  target directory/prefix in HDFS
     * @param item uploaded file item
     * @param name name of the new file
     * @throws IOException if creating or writing the file fails
     */
    public static void uploadfile(String uri, FileItem item, String name)
            throws IOException {
        FileSystem fileSystem = getFileSystem();
        logger.info("[dstPath]" + uri + name);
        FSDataOutputStream dst = fileSystem.create(new Path(uri + name));
        // copyBytes with close=true closes both the item's stream and dst.
        IOUtils.copyBytes(item.getInputStream(), dst, 4096, true);
    }

    /**
     * Download file in HDFS
     *
     * @param uri of file to download
     * @return inputstream of the file to download
     * @throws Exception
     */
    /**
     * Opens a stream over the file at {@code uri}; when {@code uri} is a
     * directory, a stream over the first data file found inside it
     * (recursively, skipping "_SUCCESS" markers) is returned instead.
     *
     * @param uri HDFS path of the file or directory to read
     * @return an open stream, or {@code null} when nothing readable exists
     * @throws IOException if HDFS access fails
     */
    public static InputStream downInputStream(String uri) throws IOException {
        FileSystem fileSystem = getFileSystem();
        Path path = new Path(uri);
        if (!fileSystem.exists(path)) {
            return null;
        }
        return fileSystem.isDirectory(path) ? searchFile(path) : fileSystem.open(path);
    }

    /**
     * Search File in HDFS
     * need
     *
     * @param path search path
     * @return input stream
     * @throws IOException
     */
    /**
     * Depth-first search for the first readable data file under {@code path},
     * skipping "_SUCCESS" marker files.
     *
     * @param path directory to search
     * @return a stream over the first data file found, or {@code null} when
     *         the directory tree contains no data file
     * @throws IOException if HDFS access fails
     */
    private static InputStream searchFile(Path path) throws IOException {
        FileSystem fileSystem = getFileSystem();
        FileStatus[] status = fileSystem.listStatus(path);
        if (status == null || status.length == 0) {
            return null;
        }
        // (The original reassigned `path = status[0].getPath()` here; the
        // value was never read afterwards, so the dead store was removed.)
        for (FileStatus s : status) {
            if (fileSystem.isDirectory(s.getPath())) {
                InputStream in = searchFile(s.getPath());
                if (in != null) {
                    return in;
                }
            } else if (!s.getPath().getName().equals("_SUCCESS")) {
                return fileSystem.open(s.getPath());
            }
        }
        return null;
    }

    /**
     * The same method as hdfs dfs -cat
     *
     * @param uri target file uri
     * @return content string in file
     * @throws IOException
     */
    /**
     * The same method as hdfs dfs -cat.
     *
     * @param uri target file uri
     * @return content string in file (decoded with the platform charset,
     *         as before)
     * @throws IOException if the file cannot be opened or read
     */
    public static String cat(String uri) throws IOException {
        InputStream in = getInputStream(uri);
        if (in == null) {
            // getInputStream logs and returns null on failure; surface that
            // as an IOException instead of an opaque NullPointerException
            // inside copyBytes.
            throw new IOException("cat failed, cannot open " + uri);
        }
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        IOUtils.copyBytes(in, out, 4096, true);
        return out.toString();
    }

    /**
     * Return the paths of files under directory uri
     */
    /**
     * Return the paths of files under directory uri.
     *
     * @param uri HDFS directory path
     * @return the child paths, or {@code null} when {@code uri} does not
     *         exist — callers must null-check before iterating
     * @throws IOException if HDFS access fails
     */
    public static Path[] list(String uri) throws IOException {
        Path path = new Path(uri);
        FileSystem fileSystem = getFileSystem();
        if (!fileSystem.exists(path)) {
            return null;
        }
        // Reuse `path` instead of constructing a second identical Path.
        return FileUtil.stat2Paths(fileSystem.listStatus(path));
    }


    /**
     * Copy method
     *
     * @param src_uri Source file uri
     * @param dst_uri destination uri
     * @throws Exception
     */
    /**
     * Copies a file within HDFS, leaving the source in place.
     *
     * @param src_uri Source file uri
     * @param dst_uri destination uri
     * @throws Exception if the copy fails
     */
    public static void copy(String src_uri, String dst_uri) throws Exception {
        FileSystem fs = getFileSystem();
        Path source = new Path(src_uri);
        Path destination = new Path(dst_uri);
        // deleteSource=false: copy semantics, not move.
        FileUtil.copy(fs, source, fs, destination, false, fs.getConf());
    }

    /**
     * Rename a file in HDFS
     *
     * @param src target source file
     * @param dst destination file
     * @throws IOException
     */
    /**
     * Renames (moves) a file within HDFS.
     *
     * @param src target source file
     * @param dst destination file
     * @throws IOException if HDFS access fails
     */
    public static void rename(String src, String dst) throws IOException {
        FileSystem fileSystem = getFileSystem();
        // FileSystem.rename reports some failures via a false return value
        // rather than an exception; the original silently discarded it.
        if (!fileSystem.rename(new Path(src), new Path(dst))) {
            logger.error("rename " + src + " to " + dst + " failed!");
        }
    }

    /**
     * The same method as hdfs dfs -head
     * need
     *
     * @param uri target file url in HDFS
     * @param n   the position before you need
     * @return
     * @throws IOException
     */
    /**
     * The same method as hdfs dfs -head: collects up to {@code n} lines from
     * the files under {@code uri}; lines longer than 1024 characters are
     * truncated with a "(...)" marker.
     *
     * @param uri target file url in HDFS
     * @param n   maximum number of lines to return
     * @return the collected lines, each terminated with '\n'; empty when
     *         {@code uri} does not exist
     * @throws IOException if HDFS access fails
     */
    public static String head(String uri, int n) throws IOException {
        Path[] paths = list(uri);
        StringBuilder sb = new StringBuilder();
        if (paths == null) {
            // list() returns null for a missing path; the original NPE'd here.
            return sb.toString();
        }
        int line = 0;
        for (Path temp : paths) {
            if (line >= n) {
                // Already have n lines; skip the remaining files entirely
                // (the original still opened each of them).
                break;
            }
            InputStream in = downInputStream(temp.toString());
            if (in == null) {
                continue;
            }
            // try-with-resources closes the reader even if readLine throws.
            try (BufferedReader br = new BufferedReader(new InputStreamReader(in))) {
                String lineStr;
                while (line++ < n && (lineStr = br.readLine()) != null) {
                    if (lineStr.length() > 1024) {
                        lineStr = lineStr.substring(0, 1024) + "(...)";
                    }
                    sb.append(lineStr);
                    sb.append('\n');
                }
            }
        }
        return sb.toString();
    }

    /**
     * To determine whether the file directory
     * need
     *
     * @param path target file path
     * @return
     * @throws IOException
     */
    /**
     * Tells whether {@code path} refers to an existing HDFS directory.
     *
     * @param path target file path
     * @return {@code true} only when the path exists and is a directory
     * @throws IOException if HDFS access fails
     */
    public static boolean isDirectory(Path path) throws IOException {
        return getFileSystem().isDirectory(path);
    }

    /**
     * Read the file or directory size of a file under HDFS
     * need
     *
     * @param uri target File or directory path
     * @return The total size of all file sizes in the file size or directory (in KB)
     * @throws IOException
     */
    /**
     * Returns the size of a file, or the total size of all files under a
     * directory (recursively), in kB (units of 1000 bytes).
     *
     * @param uri target file or directory path
     * @return size in kB
     * @throws IOException if HDFS access fails
     */
    public static double getFileSize(String uri) throws IOException {
        Path path = new Path(uri);
        if (isDirectory(path)) {
            // Convert each child's kB result back to bytes and accumulate in
            // a double: the original accumulated into a long, so the compound
            // assignment silently truncated the fractional part of each child.
            double totalBytes = 0;
            for (Path p : list(uri)) {
                totalBytes += getFileSize(uri + "/" + p.getName()) * 1000.0;
            }
            return totalBytes / 1000.0;
        }
        FileSystem fileSystem = getFileSystem();
        FileStatus status = fileSystem.getFileStatus(path);
        return status.getLen() / 1000.0;
    }

    /**
     * Read total data on hdfs  by line
     * need
     *
     * @param uri   data path
     * @param limit if limit line
     * @return
     * @throws IOException
     */
    /**
     * Reads all lines from every file under {@code uri}.
     *
     * @param uri   data path
     * @param limit when {@code true}, stop after {@link #LIMIT_LINE} lines
     * @return the collected lines; empty when {@code uri} does not exist
     * @throws IOException if HDFS access fails
     */
    public static List<String> readAllData(String uri, Boolean limit) throws IOException {
        List<String> result = new ArrayList<String>();
        Path[] paths = list(uri);
        if (paths == null) {
            // list() returns null for a missing path; the original NPE'd here.
            return result;
        }
        // A single loop replaces the original's two near-identical branches.
        int maxLines = limit ? LIMIT_LINE : Integer.MAX_VALUE;
        for (Path temp : paths) {
            InputStream in = downInputStream(temp.toString());
            if (in == null) {
                continue;
            }
            // try-with-resources closes the reader even on early exit (the
            // original broke out of the loop before reaching close()).
            try (BufferedReader br = new BufferedReader(new InputStreamReader(in))) {
                String lineStr;
                while ((lineStr = br.readLine()) != null) {
                    if (result.size() >= maxLines) {
                        // The original kept LIMIT_LINE + 1 lines before
                        // stopping; this enforces the limit exactly.
                        return result;
                    }
                    result.add(lineStr);
                }
            }
        }
        return result;
    }

    /**
     * Read hdfs page data
     * need
     *
     * @param uri   data path
     * @param start start position
     * @param count data count
     * @return
     * @throws IOException
     */
    /**
     * Reads one page of lines from the files under {@code uri}, treating all
     * files as one concatenated sequence of lines.
     *
     * @param uri   data path
     * @param start zero-based index of the first line to return
     * @param count maximum number of lines to return
     * @return lines [start, start + count) of the concatenated data
     * @throws IOException if HDFS access fails
     */
    public static List<String> readDataForPage(String uri, int start, int count) throws IOException {
        List<String> result = new ArrayList<String>();
        Path[] paths = list(uri);
        if (paths == null) {
            // list() returns null for a missing path; the original NPE'd here.
            return result;
        }
        int index = -1;
        int end = start + count;
        for (Path temp : paths) {
            if (result.size() >= count) {
                // Page is full; skip the remaining files entirely (the
                // original still opened and read one line from each).
                break;
            }
            InputStream in = downInputStream(temp.toString());
            if (in == null) {
                continue;
            }
            // try-with-resources closes the reader even on early exit (the
            // original broke out of the loop before reaching close()).
            try (BufferedReader br = new BufferedReader(new InputStreamReader(in))) {
                String lineStr;
                while ((lineStr = br.readLine()) != null) {
                    index++;
                    if (index >= end) {
                        break;
                    }
                    if (index >= start) {
                        result.add(lineStr);
                    }
                }
            }
        }
        logger.info("Read hdfs data page, start = " + start + ";count=" + count + ";result size = " + result.size());
        return result;
    }


    /**
     * Manual smoke test: wires up a test-cluster HA configuration and
     * exercises a couple of read operations against HDFS.
     */
    public static void main(String[] args) throws IOException {
        // HA / Kerberos client configuration for the test environment.
        CustomConfiguration.setString("fs.defaultFS", "hdfs://nameservice1");
        CustomConfiguration.setString("env", "test");
        CustomConfiguration.setString("dfs.nameservices", "nameservice1");
        CustomConfiguration.setString("ha.zookeeper.quorum", "sz-7-centos186:2181,sz-7-centos187:2181,sz-7-centos188:2181");
        CustomConfiguration.setString("dfs.nameservices", "nameservice1");
        CustomConfiguration.setString("dfs.ha.namenodes.nameservice1", "namenode66,namenode111");
        CustomConfiguration.setString("dfs.namenode.rpc-address.nameservice1.namenode66", "sz-7-centos186:8020");
        CustomConfiguration.setString("dfs.namenode.rpc-address.nameservice1.namenode111", "sz-7-centos187:8020");
        CustomConfiguration.setString("dfs.client.failover.proxy.provider.nameservice1", "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        // Example invocations kept for reference:
        //HdfsUtils.get("/tmp/logs/test.txt", "D:\\Git\\fd-util\\fd-common-hadoop\\src\\main\\resources\\m.txt");
        //HdfsUtils.write("/tmp/1.txt","1234556", true);
        //System.out.println(HdfsUtils.listFiles("/1.txt").toString());
        String modelDir = CustomConfiguration.getString("lr.ssj.ssjd.loan.modelpath", "/data_team/recommend/model_online/SsjdLRPredictorModel/modeldata");
        String featureIndexPath = CustomConfiguration.getString("lr.ssj.ssjd.loan.featureindexformat", "/data_team/recommend/model_online/SsjdLRPredictorModel/ssjdLRTagInfo.json");
        String featureIndexContent = HdfsUtils.readFile(featureIndexPath);
        System.out.println(HdfsUtils.fileExists(modelDir));
        System.out.println(featureIndexContent);
    }
}