package com.ideal.hadoopadmin.service.meta.hdfs;

import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.ideal.hadoopadmin.api.better.hdfs.HadoopHDFSAPI;
import com.ideal.hadoopadmin.api.hdfs.HDFSAPI;
import com.ideal.hadoopadmin.common.entity.ResultAPI;
import com.ideal.hadoopadmin.common.framework.orm.SearchFilter;
import com.ideal.hadoopadmin.crontab.hdfs.FlushHDFSInfo;
import com.ideal.hadoopadmin.entity.cluster.ClusterUser;
import com.ideal.hadoopadmin.entity.meta.hdfs.MetaHdfsInfo;
import com.ideal.hadoopadmin.entity.meta.hdfs.MetaHdfsInfoBak;
import com.ideal.hadoopadmin.mapper.webdb.meta.MetaHdfsInfoBakMapper;
import com.ideal.hadoopadmin.service.cluster.ClusterMachineService;
import com.ideal.hadoopadmin.service.cluster.ClusterUserService;
import com.ideal.hadoopadmin.service.cluster.ParameterService;
import com.ideal.tools.ssh.common.CommonProperties;
import com.ideal.tools.ssh.context.ClusterContext;
import com.ideal.tools.ssh.entity.ContextResult;
import com.ideal.tools.ssh.entity.LinuxMachine;
import com.ideal.tools.ssh.result.LinuxResult;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Created by fwj on 16-2-26.
 * Service for the meta_hdfs_bak backup table ({@link MetaHdfsInfoBak}): CRUD on backup
 * rows, comparison against the live hdfs_info table, and creation/removal of the
 * corresponding public HDFS directories on the cluster.
 *
 * <p>NOTE(review): this singleton service relies on the static mutable state of
 * {@code ResultAPI} ({@code ResultAPI.flag}, {@code ResultAPI.messageList}) — that is
 * shared across all callers and threads; confirm concurrent use is acceptable.
 */
@Service
public class MetaHdfsInfoBakService {

    /** Fixed page size used by {@link #hdfsCompare(int, Map)}. */
    private static final int PAGE_SIZE = 5;

    @Resource
    private ParameterService parameterService;
    @Resource
    private MetaHdfsInfoBakMapper metaHdfsInfoBakMapper;
    @Resource
    private ClusterMachineService clusterMachineService;
    @Resource
    private MetaHdfsInfoService metaHdfsInfoService;
    @Resource
    private ClusterUserService clusterUserService;

    /**
     * Deletes a backup row by primary key.
     *
     * @param id primary key of the meta_hdfs_bak row to delete
     */
    public void deleteMetaHdfsInfoBak(Long id) {
        metaHdfsInfoBakMapper.deleteMetaHdfsInfoBak(id);
    }

    /**
     * Looks up a backup row by primary key.
     *
     * @param id primary key of the meta_hdfs_bak row
     * @return the matching row, or whatever the mapper returns when absent
     *         (presumably {@code null} — verify against the mapper XML)
     */
    public MetaHdfsInfoBak queryMetaHdfsInfoBakById(Long id) {
        return metaHdfsInfoBakMapper.queryMetaHdfsInfoBakById(id);
    }

    /**
     * Pages the comparison of the backup table against the live hdfs_info table.
     *
     * @param currentNum  1-based page number requested by the caller
     * @param searchParam search filters, serialized into a WHERE clause via
     *                    {@link SearchFilter#parseToString(Map)}
     * @return a {@link PageInfo} wrapping the current page of differing rows
     */
    public PageInfo hdfsCompare(int currentNum, Map<String, Object> searchParam) {
        String where = SearchFilter.parseToString(searchParam);
        // PageHelper intercepts the next mapper call on this thread.
        PageHelper.startPage(currentNum, PAGE_SIZE);
        PageHelper.setAppendWhere(where);
        List<MetaHdfsInfoBak> metaHdfsInfoBaks = metaHdfsInfoBakMapper.compareWithHdfsInfo();
        return new PageInfo<MetaHdfsInfoBak>(metaHdfsInfoBaks);
    }

    /**
     * Removes the public HDFS directory described by a backup row, and on success
     * deletes the backup row itself.
     *
     * @param hdfsInfoBakId primary key of the backup row describing the directory
     * @return HTML-formatted status messages collected from the remote execution
     *         and the local DB cleanup
     */
    public List<String> delHdfsPubDir(Long hdfsInfoBakId) {
        List<String> messageList = new ArrayList<String>();
        MetaHdfsInfoBak metaHdfsInfoBak = metaHdfsInfoBakMapper.queryMetaHdfsInfoBakById(hdfsInfoBakId);
        ClusterUser clusterUser = clusterUserService.queryClusterUserById(metaHdfsInfoBak.getClusterUserId());
        // Truncate the HDFS path at the 4th "/" — exact semantics defined by
        // MetaHdfsInfoService.ordinalSubstring; confirm there if the depth changes.
        String path = metaHdfsInfoService.ordinalSubstring(metaHdfsInfoBak.getHdfsPath(), "/", 4);
        // Legacy HDFSAPI call replaced by the HadoopHDFSAPI variant (update 2016-08-02 qinfengxia).
        List<LinuxResult> linuxResults = delHdfsPubDirAPINew(clusterUser.getUserName(), path);
        ResultAPI.initAPIResult(linuxResults);
        messageList.addAll(ResultAPI.messageList);
        if (ResultAPI.flag) {
            deleteMetaHdfsInfoBak(hdfsInfoBakId);
            messageList.add("<div>数据库信息:删除hdfsInfoBak成功!</div>");
        }
        return messageList;
    }

    /**
     * Legacy removal path via {@link HDFSAPI}; superseded by
     * {@link #delHdfsPubDirAPINew(String, String)} but kept because it is public API.
     *
     * @param userName cluster user that owns the directory
     * @param path     HDFS path prefix to remove
     * @return results of the remote commands executed last in the context
     */
    public List<LinuxResult> delHdfsPubDirAPI(String userName, String path) {
        ClusterContext context = metaHdfsInfoService.initHdfsAPI(userName, path);
        HDFSAPI.RMHDFSPubDir(context);
        return context.getContextResult().getLastResult();
    }

    /**
     * Removes a public HDFS directory via the newer {@link HadoopHDFSAPI}
     * (update 2016-08-02 qinfengxia).
     *
     * @param userName cluster user that owns the directory
     * @param path     HDFS path prefix to remove
     * @return results of the remote commands executed last in the context
     */
    public List<LinuxResult> delHdfsPubDirAPINew(String userName, String path) {
        Map<String, Object> params = metaHdfsInfoService.initHdfsAPINew(userName, path);
        ContextResult contextResult = HadoopHDFSAPI.RemoveHDFSPubDir(params);
        return contextResult.getLastResult();
    }

    /**
     * Creates the public HDFS directory for a live hdfs_info row, and on success
     * records a corresponding backup row in meta_hdfs_bak.
     *
     * @param hdfsInfoId primary key of the live hdfs_info row
     * @return HTML-formatted status messages collected from the remote execution
     *         and the local DB insert
     */
    public List<String> makeHdfsPubDir(Long hdfsInfoId) {
        List<String> messageList = new ArrayList<String>();
        MetaHdfsInfo metaHdfsInfo = metaHdfsInfoService.queryMetaHdfsInfoById(hdfsInfoId);
        String path = metaHdfsInfoService.ordinalSubstring(metaHdfsInfo.getHdfsPath(), "/", 4);
        // Legacy HDFSAPI call replaced by the HadoopHDFSAPI variant (update 2016-08-02 qinfengxia).
        List<LinuxResult> linuxResults = makeHdfsPubDirAPINew(metaHdfsInfo.getClusterUserId().getUserName(), path);
        ResultAPI.initAPIResult(linuxResults);
        messageList.addAll(ResultAPI.messageList);
        if (ResultAPI.flag) {
            MetaHdfsInfoBak metaHdfsInfoBak = new MetaHdfsInfoBak();
            metaHdfsInfoBak.setHdfsPath(metaHdfsInfo.getHdfsPath());
            metaHdfsInfoBak.setClusterUserId(metaHdfsInfo.getClusterUserId().getId());
            metaHdfsInfoBak.setHdfsGroup(metaHdfsInfo.getHdfsGroup());
            metaHdfsInfoBak.setHdfsOwner(metaHdfsInfo.getHdfsOwner());
            metaHdfsInfoBak.setHdfsPerm(metaHdfsInfo.getHdfsPerm());
            metaHdfsInfoBak.setCreateTime(System.currentTimeMillis());
            metaHdfsInfoBak.setProperties(0); // rows sourced from hdfsInfo are all public
            // NOTE(review): the original code also copied metaHdfsInfo.getNote() here,
            // but immediately overwrote it with "" — the dead copy was removed; the
            // effective behavior (empty note) is preserved. Confirm "" is intended.
            metaHdfsInfoBak.setNote("");
            saveHdfsInfoBak(metaHdfsInfoBak);
            messageList.add("<div>数据库信息:添加到hdfsInfoBak成功!</div>");
        }
        return messageList;
    }

    /**
     * Persists a new backup row.
     *
     * @param metaHdfsInfoBak fully-populated row to insert
     */
    public void saveHdfsInfoBak(MetaHdfsInfoBak metaHdfsInfoBak) {
        metaHdfsInfoBakMapper.saveMetaHdfsInfoBak(metaHdfsInfoBak);
    }

    /**
     * Legacy creation path via {@link HDFSAPI}; superseded by
     * {@link #makeHdfsPubDirAPINew(String, String)} but kept because it is public API.
     *
     * @param userName cluster user that will own the directory
     * @param path     HDFS path prefix to create
     * @return results of the remote commands executed last in the context
     */
    public List<LinuxResult> makeHdfsPubDirAPI(String userName, String path) {
        ClusterContext context = metaHdfsInfoService.initHdfsAPI(userName, path);
        HDFSAPI.MakeHDFSPubDir(context);
        return context.getContextResult().getLastResult();
    }

    /**
     * Creates a public HDFS directory via the newer {@link HadoopHDFSAPI}
     * (update 2016-08-02 qinfengxia).
     *
     * @param userName cluster user that will own the directory
     * @param path     HDFS path prefix to create
     * @return results of the remote commands executed last in the context
     */
    public List<LinuxResult> makeHdfsPubDirAPINew(String userName, String path) {
        Map<String, Object> params = metaHdfsInfoService.initHdfsAPINew(userName, path);
        ContextResult contextResult = HadoopHDFSAPI.CreateHDFSPubDir(params);
        return contextResult.getLastResult();
    }

    /**
     * Returns all rows of the comparison between the backup table and the live
     * hdfs_info table (unpaged variant of {@link #hdfsCompare(int, Map)}).
     */
    public List<MetaHdfsInfoBak> compareWithHdfsInfo() {
        return metaHdfsInfoBakMapper.compareWithHdfsInfo();
    }
}
