package com.zhss.dfs.server;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.sun.org.apache.bcel.internal.generic.NEW;
import com.zhss.dfs.server.model.*;
import io.grpc.internal.JsonUtil;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;

/**
 * 负责管理元数据的核心组件
 *
 * @author zhonghuashishan
 */
public class FSNamesystem {

    /**
     * Component that manages the in-memory file directory tree.
     */
    private final FSDirectory directory;
    /**
     * Component responsible for writing the edits log to disk.
     */
    private final FSEditlog editlog;

    /**
     * Largest txid currently held in the pull cache ({@link #cacheLogs}).
     */
    private Long cacheMaxTxid = 0L;

    /**
     * Txid covered by the current fsimage checkpoint.
     */
    private Long checkPointerTxid = 0L;

    /**
     * Manager of the registered datanodes.
     */
    private final DataNodeManager dataNodeManager;

    /**
     * filename -> datanodes holding a replica of that file.
     */
    private final ConcurrentHashMap<String, List<DataNodeInfo>> fileReplicateMap =
            new ConcurrentHashMap<>();

    /**
     * hostname -> replica files stored on that datanode.
     */
    private final ConcurrentHashMap<String, List<ReplicaFile>> dataNodeFileReplicaMap = new ConcurrentHashMap<>();

    /**
     * Guards compound operations on fileReplicateMap / dataNodeFileReplicaMap
     * (the maps themselves are concurrent, but multi-step updates are not).
     */
    private final ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();

    /**
     * Cached snapshot of the in-memory edit-log buffer, served to the backup
     * node across repeated pulls so the buffer is not re-parsed every time.
     */
    private final JSONArray cacheLogs = new JSONArray();

    public FSNamesystem(DataNodeManager dataNodeManager) {
        this.dataNodeManager = dataNodeManager;
        this.dataNodeManager.setFsNamesystem(this);
        this.directory = new FSDirectory();
        this.editlog = new FSEditlog();
        recoverMetaData();
    }

    /**
     * Picks a random datanode that holds a replica of the given file.
     *
     * @param fileName name of the file to read
     * @return JSON of the chosen DataNodeInfo, or null when the file is
     *         unknown or currently has no replicas
     */
    public String getDataNodeForFile(String fileName) {
        try {
            readWriteLock.readLock().lock();
            List<DataNodeInfo> replicas = fileReplicateMap.get(fileName);
            if (replicas == null || replicas.isEmpty()) {
                return null;
            }
            Random random = new Random();
            return JSONObject.toJSONString(replicas.get(random.nextInt(replicas.size())));
        } finally {
            readWriteLock.readLock().unlock();
        }
    }

    /**
     * Records that a datanode has stored a replica of a file, updating both
     * the file->nodes and host->files mappings.
     *
     * <p>Fix: the original called {@code ConcurrentHashMap.contains(fileName)},
     * which tests VALUES (Hashtable legacy), not keys — so the "already
     * present" branch was effectively never taken and each report replaced the
     * existing replica list. {@code computeIfAbsent} expresses the intent.
     *
     * @param fileName replicated file name
     * @param hostName hostname of the reporting datanode
     * @param ip       ip of the reporting datanode
     * @param fileSize size of the replica in bytes
     */
    public void informReceivedReplicate(String fileName, String hostName, String ip, Long fileSize) {
        try {
            readWriteLock.writeLock().lock();
            DataNodeInfo dataNodeInfo = dataNodeManager.getDataNode(hostName, ip);
            fileReplicateMap.computeIfAbsent(fileName, key -> new ArrayList<>()).add(dataNodeInfo);
            ReplicaFile replicaFile = new ReplicaFile(fileName, fileSize);
            dataNodeFileReplicaMap.computeIfAbsent(hostName, key -> new ArrayList<>()).add(replicaFile);
        } finally {
            readWriteLock.writeLock().unlock();
        }
    }

    /**
     * Recovers metadata on startup: loads the fsimage snapshot, restores the
     * persisted checkpoint txid, then replays edit-log files whose txids lie
     * beyond the checkpoint into the directory tree.
     */
    private void recoverMetaData() {
        String metaDataPath = "D://dfs//image.meta";
        String logDirPath = "D://dfs//log";
        String checkpointTxidPath = "D://dfs//check_pointer_txid.meta";
        try {
            File file = new File(metaDataPath);
            if (file.exists()) {
                byte[] bytes = Files.readAllBytes(Paths.get(metaDataPath));
                String metaJson = new String(bytes, StandardCharsets.UTF_8);
                directory.setDirTree(JSONObject.parseObject(metaJson, INode.class));
            }
            // Restore the checkpoint txid persisted by saveCheckPinterTxid();
            // the original never read it back, so every log entry was replayed
            // on top of the fsimage after a restart.
            File checkpointFile = new File(checkpointTxidPath);
            if (checkpointFile.exists()) {
                String txidText = new String(Files.readAllBytes(Paths.get(checkpointTxidPath)),
                        StandardCharsets.UTF_8).trim();
                if (!txidText.isEmpty()) {
                    checkPointerTxid = Long.parseLong(txidText);
                }
            }
            // Scan the on-disk edit-log files.
            File logDir = new File(logDirPath);
            File[] files = logDir.listFiles();
            if (files == null) {
                // log directory missing or unreadable — nothing to replay
                // (the original dereferenced the null array here)
                return;
            }
            for (File logFile : files) {
                // file name convention: <startTxid>-<endTxid>.<ext>
                String[] txids = logFile.getName().substring(0, logFile.getName().lastIndexOf(".")).split("-");
                Long startTxid = Long.parseLong(txids[0]);
                Long endTxid = Long.parseLong(txids[1]);
                // register the on-disk segment so fetchLogData can find it
                LogDiskRecorder diskRecorder = new LogDiskRecorder(startTxid, endTxid, logFile.getAbsolutePath());
                editlog.addDiskLogRecorder(diskRecorder);
                if (checkPointerTxid < endTxid) {
                    // replay only the operations newer than the checkpoint
                    List<String> logs = Files.readAllLines(Paths.get(logFile.getAbsolutePath()));
                    for (String log : logs) {
                        FileOperate fileOperate = JSONObject.parseObject(log, FileOperate.class);
                        if (fileOperate.getTxid() > checkPointerTxid) {
                            if ("MKDIR".equals(fileOperate.getOp())) {
                                directory.mkdir(fileOperate.getPath());
                            }
                            if ("CREATE".equals(fileOperate.getOp())) {
                                directory.createFile(fileOperate.getPath());
                            }
                        }
                    }
                    System.out.println("从磁盘读文件进行恢复....................... ");
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public Long getCheckPointerTxid() {
        return checkPointerTxid;
    }

    public void setCheckPointerTxid(Long checkPointerTxid) {
        this.checkPointerTxid = checkPointerTxid;
    }

    /**
     * Creates a directory in the in-memory tree and records the operation in
     * the edits log.
     *
     * @param fileOperate operation carrying the directory path
     * @return true on success
     * @throws Exception when the directory update fails
     */
    public Boolean mkdir(FileOperate fileOperate) throws Exception {
        this.directory.mkdir(fileOperate.getPath());
        this.editlog.logEdit(JSONObject.toJSONString(fileOperate));
        return true;
    }

    /**
     * Fetches a batch of edit-log entries for the backup node, preferring the
     * log files already flushed to disk and falling back to the in-memory
     * buffer.
     *
     * @param size        maximum number of entries to return
     * @param syncMaxTxid largest txid the backup node has already applied
     * @return batch of log entries (possibly empty, never null)
     */
    public JSONArray fetchLogData(int size, Long syncMaxTxid) {
        if (!this.editlog.getDiskLogData().isEmpty()) {
            return getLogDataFromDisk(size, syncMaxTxid);
        }
        return getLogDataFromBuffer(size, 0, syncMaxTxid);
    }

    /**
     * Reads edit-log entries from the in-memory double buffer, caching a
     * parsed snapshot so repeated pulls do not re-parse the buffer.
     *
     * @param size        total batch size requested
     * @param count       entries already collected (from disk) for this batch
     * @param syncMaxTxid largest txid already applied by the backup node
     * @return entries with txid greater than syncMaxTxid (possibly empty)
     */
    private JSONArray getLogDataFromBuffer(int size, Integer count, Long syncMaxTxid) {
        if (!cacheLogs.isEmpty() && cacheMaxTxid > syncMaxTxid) {
            // cached snapshot is still ahead of the backup node — serve from it
            return handlerBufferData(cacheLogs, count, size, syncMaxTxid);
        }
        cacheLogs.clear();
        String[] logs = this.editlog.getCurrentBuffer();
        if (logs == null || logs.length == 0) {
            return new JSONArray();
        }
        for (String log : logs) {
            JSONObject jsonObject = JSONObject.parseObject(log);
            cacheLogs.add(jsonObject);
            // entries arrive in txid order, so the last one is the max
            cacheMaxTxid = jsonObject.getLong("txid");
        }
        if (cacheMaxTxid > syncMaxTxid) {
            return handlerBufferData(cacheLogs, count, size, syncMaxTxid);
        }
        return new JSONArray();
    }

    /**
     * Copies entries with txid greater than {@code syncMaxTxid} out of the
     * cached buffer until the batch holds {@code size} entries in total.
     *
     * @param bufferedLogs cached edit-log entries, ascending by txid
     * @param count        entries already collected for this batch
     * @param size         total batch size requested
     * @param syncMaxTxid  largest txid the backup node has already applied
     */
    private JSONArray handlerBufferData(JSONArray bufferedLogs, Integer count, int size, Long syncMaxTxid) {
        JSONArray logDataList = new JSONArray();
        long alreadySynced = syncMaxTxid;
        int collected = count;
        for (Object log : bufferedLogs) {
            JSONObject jsonObject = (JSONObject) log;
            long txid = jsonObject.getLong("txid");
            if (txid <= alreadySynced) {
                continue;
            }
            logDataList.add(jsonObject);
            alreadySynced = txid;
            collected++;
            if (collected >= size) {
                break;
            }
        }
        return logDataList;
    }

    /**
     * Reads a batch of edit-log entries from the on-disk log files, topping
     * the batch up from the in-memory buffer when the disk files run out.
     *
     * <p>Fix: the original threaded boxed {@code Integer count} /
     * {@code Long syncMaxTxid} "out parameters" through the helpers; Java
     * passes the (immutable) box by value, so the consumed count and last
     * txid never propagated back and multi-file fetches re-sent or skipped
     * entries. The consumed count is now derived from the batch itself and
     * the helper returns the last consumed txid.
     *
     * @param size        maximum number of entries to return
     * @param syncMaxTxid largest txid the backup node has already applied
     * @return batch of log entries (possibly empty, never null — the original
     *         returned null on an empty recorder list)
     */
    private JSONArray getLogDataFromDisk(int size, Long syncMaxTxid) {
        JSONArray logDataList = new JSONArray();
        List<LogDiskRecorder> diskLogDataList = this.editlog.getDiskLogData();
        if (diskLogDataList.isEmpty()) {
            return logDataList;
        }
        long currentTxid = syncMaxTxid;
        // replay the segments in ascending txid order
        List<LogDiskRecorder> ordered = diskLogDataList.stream()
                .sorted(Comparator.comparingLong(LogDiskRecorder::getBeginTxid))
                .collect(Collectors.toList());
        for (LogDiskRecorder diskRecorder : ordered) {
            if (diskRecorder.getEndTxid() <= currentTxid) {
                // the backup node already has everything in this segment
                continue;
            }
            currentTxid = getDataFromFile(diskRecorder, logDataList, size, currentTxid);
            if (logDataList.size() >= size) {
                return logDataList;
            }
        }
        // disk segments exhausted but the batch is not full — take the rest
        // from the in-memory buffer
        logDataList.addAll(getLogDataFromBuffer(size, logDataList.size(), currentTxid));
        return logDataList;
    }

    /**
     * Appends entries with txid greater than {@code syncMaxTxid} from one log
     * file into {@code logDataList}, stopping once the batch reaches
     * {@code size} entries.
     *
     * <p>Fix: dropped the original {@code && syncMaxTxid > 1} guard, which
     * re-sent txid 1 to a backup node that had already applied it.
     *
     * @return the largest txid consumed so far (unchanged when nothing was read)
     */
    private long getDataFromFile(LogDiskRecorder diskRecorder, JSONArray logDataList, int size, long syncMaxTxid) {
        long lastTxid = syncMaxTxid;
        try {
            List<String> lines = Files.readAllLines(Paths.get(diskRecorder.getPath()));
            for (String line : lines) {
                JSONObject jsonObject = JSONObject.parseObject(line);
                long txid = jsonObject.getLong("txid");
                if (txid <= lastTxid) {
                    continue;
                }
                logDataList.add(jsonObject);
                lastTxid = txid;
                if (logDataList.size() >= size) {
                    return lastTxid;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return lastTxid;
    }

    /**
     * Deletes the edit-log files that are fully covered by the current
     * checkpoint txid (they are redundant once the fsimage includes them).
     *
     * @return always true
     */
    public boolean deleteLogByTxid() {
        List<LogDiskRecorder> diskLogData = this.editlog.getDiskLogData();
        // collect first, then remove — avoids mutating the list while filtering
        List<LogDiskRecorder> obsolete = diskLogData.stream()
                .filter(logDiskRecorder -> logDiskRecorder.getEndTxid() <= checkPointerTxid)
                .collect(Collectors.toList());
        for (LogDiskRecorder logDiskRecorder : obsolete) {
            File file = new File(logDiskRecorder.getPath());
            if (file.exists()) {
                file.delete();
            }
            diskLogData.remove(logDiskRecorder);
            System.out.println("成功删除之前的log文件....................");
        }
        return true;
    }

    /**
     * Forces the edits log buffer to disk.
     */
    public void flush() {
        editlog.flush();
    }

    /**
     * Persists the checkpoint txid so that {@link #recoverMetaData()} can skip
     * already-checkpointed edits after a restart.
     *
     * <p>Fix: the original wrote {@code cacheMaxTxid} (the backup-sync cache
     * marker) instead of {@code checkPointerTxid}; TRUNCATE_EXISTING prevents
     * stale trailing bytes when the new value is shorter than the old one.
     */
    public void saveCheckPinterTxid() {
        try {
            Files.write(Paths.get("D://dfs//check_pointer_txid.meta"),
                    String.valueOf(checkPointerTxid).getBytes(StandardCharsets.UTF_8),
                    StandardOpenOption.CREATE, StandardOpenOption.WRITE,
                    StandardOpenOption.TRUNCATE_EXISTING);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Registers a new file in the directory tree and logs the operation.
     *
     * @param fileOperate operation carrying the file path
     * @return false when the file name already exists (status == 1)
     */
    public Boolean createFile(FileOperate fileOperate) {
        String path = fileOperate.getPath();
        Integer status = directory.createFile(path);
        if (status == 1) {
            // duplicate file name
            return false;
        }
        editlog.logEdit(JSONObject.toJSONString(fileOperate));
        return true;
    }

    /**
     * Full report of a datanode's stored files. Files that already have two
     * replicas elsewhere are scheduled for deletion on the reporting node; the
     * rest are registered as received replicas.
     *
     * <p>Fixes: the original looked up {@code fileReplicateMap.get(filenames)}
     * (the whole JSON array string) instead of the individual file name, NPE'd
     * when the file was unknown, and {@code return}ed out of the loop so the
     * remaining reported files were silently dropped.
     *
     * @param hostname      reporting datanode hostname
     * @param ip            reporting datanode ip
     * @param filenames     JSON array of file names stored on the node
     * @param fileStoreSize total bytes stored on the node
     */
    public void reportCompleteFileInfo(String hostname, String ip, String filenames, long fileStoreSize) {
        DataNodeInfo dataNode = dataNodeManager.getDataNode(hostname, ip);
        dataNode.setFileStoreSize(dataNode.getFileStoreSize() + fileStoreSize);
        JSONArray fileNameList = JSONArray.parseArray(filenames);
        for (Object fileNameObj : fileNameList) {
            String fileName = fileNameObj.toString();
            List<DataNodeInfo> replicas = fileReplicateMap.get(fileName);
            if (replicas != null && replicas.size() >= 2) {
                // two replicas already exist elsewhere — delete this extra copy
                // and keep processing the remaining reported files
                FileReplicaTask fileReplicaTask = new FileReplicaTask(0L, fileName, null, null);
                dataNode.addDelReplicaTask(fileReplicaTask);
                continue;
            }
            informReceivedReplicate(fileName, hostname, ip, 0L);
        }
    }

    /**
     * Returns the replica files stored on the given datanode, or null when the
     * host is unknown. A read lock suffices — the original took the write lock
     * for this pure lookup.
     */
    public List<ReplicaFile> getFileReplica(String hostname) {
        try {
            readWriteLock.readLock().lock();
            return dataNodeFileReplicaMap.get(hostname);
        } finally {
            readWriteLock.readLock().unlock();
        }
    }

    /**
     * Finds a surviving datanode that can act as the copy source for a file
     * whose replica lived on a dead node, and drops the dead node from the
     * file's replica list.
     *
     * <p>Fixes: the original mutated {@code fileReplicateMap} while holding
     * only the READ lock, and NPE'd when the file had no replica entry.
     *
     * @param fileName     file that lost a replica
     * @param deadHostname hostname of the dead datanode
     * @return a surviving replica holder, or null when none exists
     */
    public DataNodeInfo getSourceDataNode(String fileName, String deadHostname) {
        try {
            readWriteLock.writeLock().lock();
            List<DataNodeInfo> replicas = fileReplicateMap.get(fileName);
            if (replicas == null || replicas.isEmpty()) {
                return null;
            }
            List<DataNodeInfo> survivors = replicas.stream()
                    .filter(dataNodeInfo -> !deadHostname.equals(dataNodeInfo.getHostname()))
                    .collect(Collectors.toList());
            fileReplicateMap.put(fileName, survivors);
            return survivors.isEmpty() ? null : survivors.get(0);
        } finally {
            readWriteLock.writeLock().unlock();
        }
    }

    /**
     * Re-allocates a datanode for an upload after the originally chosen node
     * failed: rolls back the size optimistically reserved on the failed node,
     * then returns the least-loaded node other than the failed one.
     *
     * <p>Fix: the original filter kept ONLY nodes whose hostname equals the
     * excluded one, so the failed node itself was returned; the predicate is
     * now negated.
     *
     * @param fileSize        bytes reserved for the upload on the failed node
     * @param excludeDataNode JSON {"hostname":..., "ip":...} of the failed node
     * @return JSON of the replacement DataNodeInfo, or null when none exists
     */
    public String reallocateDataNode(long fileSize, String excludeDataNode) {
        Map<String, DataNodeInfo> dataNodes = dataNodeManager.getDataNodes();
        JSONObject jsonObject = JSONObject.parseObject(excludeDataNode);
        String excludedHostname = jsonObject.getString("hostname");
        DataNodeInfo failedNode = dataNodeManager.getDataNode(excludedHostname, jsonObject.getString("ip"));
        // roll back the file size that was added for the failed upload
        failedNode.addFileStoreSize(-fileSize);
        DataNodeInfo candidate = dataNodes.values().stream()
                .filter(dataNodeInfo -> !dataNodeInfo.getHostname().equals(excludedHostname))
                .min(Comparator.comparingLong(DataNodeInfo::getFileStoreSize))
                .orElse(null);
        return candidate == null ? null : JSONObject.toJSONString(candidate);
    }

    /**
     * Chooses a datanode holding {@code fileName} other than the excluded one
     * (used when reading from the first replica failed).
     *
     * @param excludeDataNode JSON {"hostname":...} of the node to avoid
     * @param fileName        file being read
     * @return JSON of an alternative DataNodeInfo, or null when none exists
     *         (the original NPE'd on an unknown file)
     */
    public String chooseOtherDataNode(String excludeDataNode, String fileName) {
        try {
            readWriteLock.readLock().lock();
            JSONObject jsonObject = JSONObject.parseObject(excludeDataNode);
            List<DataNodeInfo> replicas = fileReplicateMap.get(fileName);
            if (replicas == null) {
                return null;
            }
            String excludedHostname = jsonObject.getString("hostname");
            for (DataNodeInfo dataNodeInfo : replicas) {
                if (!dataNodeInfo.getHostname().equals(excludedHostname)) {
                    return JSONObject.toJSONString(dataNodeInfo);
                }
            }
            return null;
        } finally {
            readWriteLock.readLock().unlock();
        }
    }

    /**
     * Rebalances replica storage across datanodes: nodes above the average
     * stored size hand replicas to nodes below it. The moved-away source
     * copies are deleted later by a delayed task so the new copies have time
     * to land.
     *
     * <p>Fixes: guards against an empty node set (the original divided by
     * zero) and deduplicates the two near-identical move loops into
     * {@link #scheduleReplicaMoves}.
     */
    public void rebalace() {
        Collection<DataNodeInfo> dataNodeInfos = dataNodeManager.getDataNodes().values();
        if (dataNodeInfos.isEmpty()) {
            return;
        }
        long sumFileSize = dataNodeInfos.stream().mapToLong(DataNodeInfo::getFileStoreSize).sum();
        long avgStoreFileSize = sumFileSize / dataNodeInfos.size();
        List<DataNodeInfo> destDataNodes = dataNodeInfos.stream()
                .filter(dataNodeInfo -> dataNodeInfo.getFileStoreSize() < avgStoreFileSize)
                .collect(Collectors.toList());
        List<DataNodeInfo> sourceDataNodes = dataNodeInfos.stream()
                .filter(dataNodeInfo -> dataNodeInfo.getFileStoreSize() > avgStoreFileSize)
                .collect(Collectors.toList());
        List<RemoveFileTask> removeFileTasks = new ArrayList<>();
        for (DataNodeInfo sourceDataNode : sourceDataNodes) {
            // excess this source node should shed (computed once, as before)
            long toMoveFileSize = sourceDataNode.getFileStoreSize() - avgStoreFileSize;
            for (DataNodeInfo destDataNode : destDataNodes) {
                if (destDataNode.getFileStoreSize() + toMoveFileSize <= avgStoreFileSize) {
                    // the whole excess fits on this destination
                    scheduleReplicaMoves(sourceDataNode, destDataNode, toMoveFileSize, removeFileTasks);
                } else if (destDataNode.getFileStoreSize() < avgStoreFileSize) {
                    // move only as much as this destination can take before
                    // it reaches the average
                    scheduleReplicaMoves(sourceDataNode, destDataNode,
                            avgStoreFileSize - destDataNode.getFileStoreSize(), removeFileTasks);
                }
            }
        }
        // delayed deletion of the moved-away source copies
        new DelayDeleteTaskThread(removeFileTasks).start();
    }

    /**
     * Plans replica moves from source to dest until the accumulated file size
     * would exceed {@code maxMoveSize}: adjusts both nodes' stored sizes,
     * queues a replication task on the destination and a delayed delete task
     * for the source copy.
     */
    private void scheduleReplicaMoves(DataNodeInfo sourceDataNode, DataNodeInfo destDataNode,
                                      long maxMoveSize, List<RemoveFileTask> removeFileTasks) {
        List<ReplicaFile> replicaFileList = dataNodeFileReplicaMap.get(sourceDataNode.getHostname());
        if (replicaFileList == null) {
            // no replica inventory for this host (original would have NPE'd)
            return;
        }
        long moveFileSize = 0L;
        for (ReplicaFile replicaFile : replicaFileList) {
            moveFileSize += replicaFile.getFileSize();
            if (moveFileSize > maxMoveSize) {
                break;
            }
            destDataNode.addFileStoreSize(replicaFile.getFileSize());
            FileReplicaTask fileReplicaTask = new FileReplicaTask(replicaFile.getFileSize(),
                    replicaFile.getFileName(), sourceDataNode, destDataNode);
            destDataNode.addReplaceTask(fileReplicaTask);
            sourceDataNode.addFileStoreSize(-replicaFile.getFileSize());
            // TODO also remove the moved file from fileReplicateMap /
            // dataNodeFileReplicaMap once the move is confirmed
            removeFileTasks.add(new RemoveFileTask(replicaFile.getFileName(), sourceDataNode));
        }
    }

    /**
     * Thread that waits 24 hours and then schedules deletion of the source
     * copies of rebalanced replicas, giving the new copies time to settle.
     */
    class DelayDeleteTaskThread extends Thread {

        // delete tasks to fire once the delay has elapsed
        List<RemoveFileTask> removeFileTasks;

        // when the delay started (millis since epoch)
        private Long startTime;

        public DelayDeleteTaskThread(List<RemoveFileTask> removeFileTasks) {
            this.removeFileTasks = removeFileTasks;
            startTime = System.currentTimeMillis();
        }

        @Override
        public void run() {
            while (true) {
                try {
                    long now = System.currentTimeMillis();
                    if (now - startTime > 1000L * 60 * 60 * 24) {
                        for (RemoveFileTask removeFileTask : removeFileTasks) {
                            FileReplicaTask fileReplicaTask =
                                    new FileReplicaTask(0L, removeFileTask.getFileName(), null, null);
                            removeFileTask.getDataNodeInfo().addDelReplicaTask(fileReplicaTask);
                        }
                        break;
                    }
                    TimeUnit.MINUTES.sleep(1L);
                } catch (InterruptedException e) {
                    // restore interrupt status and exit instead of swallowing
                    // the interrupt in an infinite loop
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
    }
}
