package com.ideal.hadoopadmin.service.meta.privilege;

import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.ideal.hadoopadmin.api.better.hive.HiveAPI;
import com.ideal.hadoopadmin.api.hdfs.HDFSAPI;
import com.ideal.hadoopadmin.api.linux.UserAPI;
import com.ideal.hadoopadmin.common.entity.Result;
import com.ideal.hadoopadmin.common.entity.ResultAPI;
import com.ideal.hadoopadmin.common.framework.orm.SearchFilter;
import com.ideal.hadoopadmin.entity.cluster.ClusterType;
import com.ideal.hadoopadmin.entity.cluster.ClusterUser;
import com.ideal.hadoopadmin.entity.meta.hdfs.MetaHdfsAccess;
import com.ideal.hadoopadmin.entity.meta.hdfs.MetaHdfsInfo;
import com.ideal.hadoopadmin.entity.meta.hdfs.MetaHdfsInfoBak;
import com.ideal.hadoopadmin.entity.meta.hive.MetaHiveAccess;
import com.ideal.hadoopadmin.entity.meta.hive.MetaHiveInfo;
import com.ideal.hadoopadmin.entity.meta.privilege.MetaAccessPrivilege;
import com.ideal.hadoopadmin.entity.meta.privilege.MetaPrivileges;
import com.ideal.hadoopadmin.mapper.webdb.meta.MetaAccessPrivilegeMapper;
import com.ideal.hadoopadmin.mapper.webdb.meta.MetaHdfsAccessMapper;
import com.ideal.hadoopadmin.mapper.webdb.meta.MetaHiveAccessMapper;
import com.ideal.hadoopadmin.service.cluster.ClusterUserService;
import com.ideal.hadoopadmin.service.cluster.ClusterUserServiceNew;
import com.ideal.hadoopadmin.service.meta.hdfs.MetaHdfsAccessService;
import com.ideal.hadoopadmin.service.meta.hdfs.MetaHdfsInfoBakService;
import com.ideal.hadoopadmin.service.meta.hdfs.MetaHdfsInfoService;
import com.ideal.hadoopadmin.service.meta.hive.MetaHiveInfoService;
import com.ideal.tools.ssh.entity.ContextResult;
import com.ideal.tools.ssh.result.LinuxResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Created by qinfengxia on 2016/8/4.
 */
@Service
@Transactional
public class MetaAccessPrivilegeService {
    private static final Logger logger = LoggerFactory.getLogger(MetaAccessPrivilegeService.class);
    @Resource
    private MetaAccessPrivilegeMapper metaAccessPrivilegeMapper;
    @Resource
    private MetaHiveInfoService metaHiveInfoService;
    @Resource
    private MetaHdfsInfoService metaHdfsInfoService;
    @Resource
    private MetaHdfsInfoBakService metaHdfsInfoBakService;
    @Resource
    private MetaHdfsAccessService metaHdfsAccessService;
    @Resource
    private MetaPrivilegesService metaPrivilegesService;
    @Resource
    private ClusterUserService clusterUserService;
    @Resource
    private MetaHdfsAccessMapper metaHdfsAccessMapper;
    @Resource
    private MetaHiveAccessMapper metaHiveAccessMapper;

    /**
     * Queries all privilege types.
     *
     * @return every privilege type known to the mapper
     */
    public List<MetaAccessPrivilege> queryMetaAccessType() {
        return metaAccessPrivilegeMapper.queryMetaAccessType();
    }

    /**
     * Looks up a single access privilege by its primary key.
     *
     * @param id privilege id
     * @return the matching privilege, or null when none exists
     */
    public MetaAccessPrivilege findById(Long id) {
        return metaAccessPrivilegeMapper.findById(id);
    }

    /**
     * Pages access privileges of the given type (10 per page, ordered by id asc).
     *
     * @param type    privilege type filter
     * @param request used to read the optional "page" request parameter
     * @return one page of matching privileges
     */
    public PageInfo pageAccessPrivilege(String type, HttpServletRequest request) {
        // Current page defaults to 1 when no "page" parameter was submitted.
        int currentPage = request.getParameter("page") == null ? 1 : Integer.parseInt(request.getParameter("page"));
        PageHelper.startPage(currentPage, 10, "id asc");
        List<MetaAccessPrivilege> metaAccessPrivilege = metaAccessPrivilegeMapper.queryMetaAccessPrivilegeByType(type);
        return new PageInfo(metaAccessPrivilege);
    }

    /**
     * Queries all privileges of the given type, without paging.
     *
     * @param type privilege type filter
     * @return matching privileges
     */
    public List<MetaAccessPrivilege> findAccessPrivilegeByType(String type) {
        return metaAccessPrivilegeMapper.queryMetaAccessPrivilegeByType(type);
    }

    /**
     * Synchronizes a cluster user's Hive privileges on a resource with the set
     * selected on the page: privileges in {@code ids} that are not yet stored
     * are granted (DB row + remote grant call), stored privileges missing from
     * {@code ids} are revoked (DB delete + remote revoke call), and the
     * hive/hdfs access records are created or removed accordingly.
     *
     * @param resourceId   hive info (resource) id
     * @param userId       cluster user id, as a string
     * @param ids          privilege ids selected on the page; may be null or empty
     * @param hiveAccessId existing hive access record id, or null/"" when none exists yet
     * @return a Result whose flag is false when any remote API call failed;
     *         its message list carries the per-privilege HTML status messages
     */
    public Result addPrivilege(Long resourceId, String userId, Long[] ids, String hiveAccessId) {
        Result result = new Result();
        result.setFlag(true);
        List<String> messageList = result.getMessageList();
        Map<String, Object> params = initParams(resourceId, userId);

        // When no access record exists yet and at least one privilege was selected,
        // create a fresh hive access record; otherwise reuse the existing one.
        Long accessId = null;
        boolean isAddHdfsAccess = false;
        if ((hiveAccessId == null || "".equals(hiveAccessId)) && ids != null && ids.length > 0) {
            MetaHiveAccess metaHiveAccess = new MetaHiveAccess();
            metaHiveAccess.setCreateTime(System.currentTimeMillis());
            metaHiveAccess.setClusterUserId(Long.valueOf(userId));
            metaHiveAccess.setHiveInfoId(resourceId);
            metaHiveInfoService.addHiveAccess(metaHiveAccess);
            accessId = metaHiveAccess.getId();
            isAddHdfsAccess = true;
        } else if (hiveAccessId != null && !"".equals(hiveAccessId)) {
            accessId = Long.valueOf(hiveAccessId);
        }

        // Diff the selected ids against the stored privileges: ids left in
        // `toAdd` must be granted; stored rows collected in `deleteList` must
        // be revoked.
        Map<Long, Long> toAdd = new HashMap<Long, Long>();
        List<Long> deleteList = new ArrayList<Long>();
        boolean isDeleteHiveAndHdfs = false;
        if (ids != null && ids.length > 0) {
            for (Long id : ids) {
                toAdd.put(id, id);
            }
            List<MetaPrivileges> privilegeses = metaPrivilegesService.findByAccessId(accessId);
            if (privilegeses != null && privilegeses.size() > 0) {
                for (MetaPrivileges p : privilegeses) {
                    if (toAdd.get(p.getPrivilegeId()) != null) {
                        // Already granted — nothing to do for this privilege.
                        toAdd.remove(p.getPrivilegeId());
                    } else {
                        deleteList.add(p.getId());
                    }
                }
            }
        } else {
            // Nothing selected: revoke everything and drop the access records too.
            deleteList = metaPrivilegesService.findIdByAccessId(accessId);
            isDeleteHiveAndHdfs = true;
        }

        if (!toAdd.isEmpty()) {
            for (Long id : toAdd.values()) {
                MetaPrivileges metaPrivileges = new MetaPrivileges();
                metaPrivileges.setAccessId(accessId);
                metaPrivileges.setPrivilegeId(id);
                metaPrivilegesService.addMetaPrivileges(metaPrivileges);
                MetaAccessPrivilege metaAccessPrivilege = metaAccessPrivilegeMapper.findById(id);
                // Concrete privilege type to grant.
                params.put(com.ideal.hadoopadmin.api.hive.HiveAPI.HIVE_PRIVILE_TYPE, metaAccessPrivilege.getPrivilegeType());
                // Remote grant call.
                ContextResult contextResult = HiveAPI.GrantHivePrivilege(params);
                ResultAPI.initAPIResult(contextResult.getLastResult());
                messageList.addAll(ResultAPI.messageList);
                if (!ResultAPI.flag) {
                    result.setFlag(false); // remote grant call failed
                }
                messageList.add("<div>数据库信息:添加" + metaAccessPrivilege.getPrivilegeType() + "权限成功!</div>");
            }
            if (isAddHdfsAccess) {
                // A brand-new hive access also needs a matching hdfs access record.
                MetaHdfsAccess metaHdfsAccess = new MetaHdfsAccess();
                metaHdfsAccess.setCreateTime(System.currentTimeMillis());
                metaHdfsAccess.setClusterUserId(Long.valueOf(userId));
                MetaHiveInfo hiveInfo = metaHiveInfoService.queryMetaHiveInfoById(resourceId);
                // NOTE(review): passes HdfsInfoBakId into a method named ...ByHiveInfoId — confirm intended.
                Long hdfsInfoId = metaHdfsInfoService.findHdfsInfoIdByHiveInfoId(hiveInfo.getHdfsInfoBakId());
                metaHdfsAccess.setHdfsInfoId(hdfsInfoId);
                metaHdfsAccessService.saveMetaHdfsAccess(metaHdfsAccess);
            }
        }

        if (deleteList != null && deleteList.size() > 0) {
            // Loop-invariant lookups hoisted out of the revoke loop (pure reads;
            // nothing inside the loop writes to these tables).
            MetaHiveInfo hiveInfo = metaHiveInfoService.queryMetaHiveInfoById(resourceId);
            Long hdfsInfoId = metaHdfsInfoService.findHdfsInfoIdByHiveInfoId(hiveInfo.getHdfsInfoBakId());
            for (Long deleteId : deleteList) {
                MetaPrivileges metaPrivileges = metaPrivilegesService.findIdById(deleteId);
                MetaAccessPrivilege metaAccessPrivilege = metaAccessPrivilegeMapper.findById(metaPrivileges.getPrivilegeId());
                // Concrete privilege type to revoke.
                params.put(com.ideal.hadoopadmin.api.hive.HiveAPI.HIVE_PRIVILE_TYPE, metaAccessPrivilege.getPrivilegeType());
                metaPrivilegesService.deleteById(deleteId);
                // Tell the API whether the user still holds other accesses on this hdfs info.
                String otherAccess = HDFSAPI.HDFS_FALSE;
                int count = metaHdfsAccessMapper.findHdfsHasOtherAcess(Long.valueOf(userId), hdfsInfoId);
                if (count > 1) {
                    otherAccess = HDFSAPI.HDFS_TRUE;
                }
                params.put(HDFSAPI.HDFS_HAS_OTHER_ACCESS, otherAccess);
                // Remote revoke call.
                ContextResult contextResult = HiveAPI.RevokeHivePrivilege(params);
                ResultAPI.initAPIResult(contextResult.getLastResult());
                messageList.addAll(ResultAPI.messageList);
                if (!ResultAPI.flag) {
                    result.setFlag(false); // remote revoke call failed
                }
                messageList.add("<div>数据库信息:删除" + metaAccessPrivilege.getPrivilegeType() + "权限成功!</div>");
            }
            if (isDeleteHiveAndHdfs) {
                // All privileges are gone: remove the hive access row and its hdfs counterpart.
                MetaHiveAccess metaHiveAccess = metaHiveAccessMapper.findById(accessId);
                metaHiveAccessMapper.deleteById(accessId);
                MetaHiveInfo deletedHiveInfo = metaHiveInfoService.queryMetaHiveInfoById(metaHiveAccess.getHiveInfoId());
                Long deletedHdfsInfoId = metaHdfsInfoService.findHdfsInfoIdByHiveInfoId(deletedHiveInfo.getHdfsInfoBakId());
                metaHdfsAccessService.deleteMetaHdfsAccess(Long.valueOf(userId), deletedHdfsInfoId);
            }
        }
        return result;
    }

    /**
     * Assembles the parameter map shared by the grant/revoke API calls.
     *
     * @param resourceId hive info (resource) id
     * @param userId     cluster user id, as a string
     * @return parameters keyed by the HDFS/Hive/User API constant names
     */
    public Map<String, Object> initParams(Long resourceId, String userId) {
        // Hive resource whose privileges are being granted/revoked.
        MetaHiveInfo hiveInfo = metaHiveInfoService.queryMetaHiveInfoById(resourceId);
        // The cluster user receiving (or losing) the privilege.
        ClusterUser needPrivileUser = clusterUserService.queryClusterUserById(Long.valueOf(userId));
        Map<String, Object> params = new HashMap<String, Object>();
        // Public table directory derived from the hdfs path — presumably the
        // prefix up to the 4th '/' (see MetaHdfsInfoService.ordinalSubstring); verify.
        String path = MetaHdfsInfoService.ordinalSubstring(hiveInfo.getHdfsInfoBak().getHdfsPath(), "/", 4);
        params.put(HDFSAPI.HDFS_PUB_TABLE_DIR, path);
        params.put(UserAPI.Cluster_User_Name, hiveInfo.getClusterUser().getUserName());
        params.put(com.ideal.hadoopadmin.api.hive.HiveAPI.HIVE_TABLE_NAME, hiveInfo.getTableName());
        params.put(com.ideal.hadoopadmin.api.hive.HiveAPI.HIVE_PRIVILE_USER, needPrivileUser.getUserName());
        return params;
    }
}
