package com.yk.spark.service.hadoop.impl;

import com.yk.spark.model.Icon;
import com.yk.spark.model.NodeModel;
import com.yk.spark.model.cluster.ClusterModel;
import com.yk.spark.service.cluster.api.IClusterService;
import com.yk.spark.service.hadoop.api.IHadoopService;
import com.yk.spark.util.HdfsUtil;
import org.apache.hadoop.fs.FileStatus;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Created by Limit on 2017/9/8.
 */
@Service("hadoopService")
public class HadoopServiceImpl implements IHadoopService {

    /**
     * One cached {@link HdfsUtil} per cluster id. This bean is a Spring
     * singleton, so concurrent requests may hit this map at the same time;
     * a ConcurrentHashMap with atomic putIfAbsent/computeIfAbsent replaces
     * the original plain HashMap, whose lazy check-then-put initialization
     * was not thread-safe.
     */
    private final Map<String, HdfsUtil> fileSystemMap = new ConcurrentHashMap<>();

    @Resource(name = "clusterService")
    private IClusterService clusterService;

    /**
     * Lists the children of {@code current} on the given cluster's HDFS.
     *
     * @param current   node id sent by the UI; "#" denotes the virtual tree
     *                  root and is translated to the HDFS root path "/"
     * @param clusterId id of the target cluster
     * @return one {@link NodeModel} per child entry
     */
    @Override
    public List<NodeModel> queryChildren(String current, String clusterId) {
        HdfsUtil fs = fileSystemFor(clusterId);
        // "#" is the UI's virtual root; map it to the real HDFS root.
        // (Constant-first equals also avoids an NPE on a null id.)
        String parent = "#".equals(current) ? "/" : current;
        List<NodeModel> nodeModels = new ArrayList<>();
        for (FileStatus status : fs.queryChildrenFileStatus(parent)) {
            nodeModels.add(buildNode(status, current, parent));
        }
        return nodeModels;
    }

    /**
     * Creates the directory {@code current} on the cluster's HDFS.
     *
     * @param current   absolute path of the directory to create
     * @param parent    id of the parent node, used as the new node's id prefix
     * @param clusterId id of the target cluster
     * @return the node model for the created directory, or {@code null} when
     *         creation failed
     */
    @Override
    public NodeModel createNode(String current, String parent, String clusterId) {
        HdfsUtil fs = fileSystemFor(clusterId);
        if (!fs.createDir(current)) {
            return null;
        }
        return buildNode(fs.queryCurrentFileStatus(current), current, parent);
    }

    /**
     * Deletes the node at {@code current} on the cluster's HDFS.
     *
     * @param current   absolute path of the node to delete
     * @param parent    id of the parent node, used as the node's id prefix
     * @param clusterId id of the target cluster
     * @return the model of the deleted node, or {@code null} when deletion
     *         failed
     */
    @Override
    public NodeModel deleteNode(String current, String parent, String clusterId) {
        HdfsUtil fs = fileSystemFor(clusterId);
        // Capture the metadata before deleting so the caller can still
        // render the node that was removed.
        FileStatus status = fs.queryCurrentFileStatus(current);
        NodeModel model = buildNode(status, current, parent);
        return fs.deleteDir(current) ? model : null;
    }

    /**
     * Maps an HDFS {@link FileStatus} to the tree-node model consumed by
     * the UI. Directory ids are suffixed with "/" so they can serve
     * directly as the id prefix of their own children.
     */
    private NodeModel buildNode(FileStatus status, String current, String parent) {
        NodeModel model = new NodeModel();
        // NOTE(review): the parent field is set to the raw id the UI sent
        // (current), not the resolved path — presumably what the tree widget
        // expects; confirm against the front-end before changing.
        model.setParent(current);
        String name = status.getPath().getName();
        model.setText(name);
        boolean directory = status.isDirectory();
        model.setChildren(directory);
        model.setIcon((directory ? Icon.FOLDER_ICON : Icon.FILE_ICON).toString());
        model.setId(directory ? parent + name + "/" : parent + name);
        return model;
    }

    /**
     * Returns the cached {@link HdfsUtil} for {@code clusterId}, creating it
     * on first use. The very first call also warms the cache with every
     * known cluster, preserving the original eager-population behavior.
     */
    private HdfsUtil fileSystemFor(String clusterId) {
        if (this.fileSystemMap.isEmpty()) {
            for (ClusterModel model : this.clusterService.queryAll()) {
                // putIfAbsent: another thread may have won the race.
                this.fileSystemMap.putIfAbsent(model.getClusterId(), newHdfsUtil(model));
            }
        }
        // Atomic per-key fallback for clusters added after the warm-up.
        return this.fileSystemMap.computeIfAbsent(clusterId,
                id -> newHdfsUtil(this.clusterService.query(id)));
    }

    /** Builds an HdfsUtil pointing at the cluster's namenode URI. */
    private HdfsUtil newHdfsUtil(ClusterModel model) {
        return new HdfsUtil("hdfs://" + model.getIp() + ":" + model.getPort());
    }


}
