package com.ideal.hadoopadmin.web.controller.meta.hive;

import com.github.pagehelper.PageInfo;
import com.ideal.hadoopadmin.entity.cluster.ClusterType;
import com.ideal.hadoopadmin.entity.cluster.ClusterUser;
import com.ideal.hadoopadmin.entity.meta.hdfs.MetaHdfsInfoBak;
import com.ideal.hadoopadmin.entity.meta.hive.MetaHiveAccess;
import com.ideal.hadoopadmin.entity.meta.hive.MetaHiveInfo;
import com.ideal.hadoopadmin.entity.meta.privilege.MetaAccessPrivilege;
import com.ideal.hadoopadmin.entity.system.company.SystemCompany;
import com.ideal.hadoopadmin.framework.message.WebMessageLevel;
import com.ideal.hadoopadmin.framework.web.json.JsonObject;
import com.ideal.hadoopadmin.service.cluster.ClusterTypeService;
import com.ideal.hadoopadmin.service.cluster.ClusterUserService;
import com.ideal.hadoopadmin.service.meta.hive.MetaHiveInfoService;
import com.ideal.hadoopadmin.service.meta.privilege.MetaAccessPrivilegeService;
import com.ideal.hadoopadmin.service.system.company.SystemCompanyService;
import com.ideal.hadoopadmin.web.controller.UIController;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.util.WebUtils;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.*;

/**
 * Created by fwj on 16-2-22.
 * Hive metadata management controller.
 */
@Controller
@RequestMapping(HiveController.PORTAL_PREFIX)
public class HiveController extends UIController {
    public final static String PORTAL_PREFIX = "/meta/hive";

    // One logger per class, never reassigned: static final.
    private static final Logger logger = LoggerFactory.getLogger(HiveController.class);

    // "0" marks the first load of the page.
    private static final String FIRST_TIME = "0";

    @Resource
    private MetaHiveInfoService metaHiveInfoService;
    @Resource
    private ClusterUserService clusterUserService;
    @Resource
    private SystemCompanyService systemCompanyService;
    @Resource
    private ClusterTypeService clusterTypeService;
    @Resource
    private MetaAccessPrivilegeService metaAccessPrivilegeService;

    /**
     * Hive metadata management landing page: populates the database-name,
     * company, cluster-user and cluster-type filter data plus the paged Hive
     * metadata listing.
     */
    @RequestMapping("hive_metadata_management")
    public void hiveMetadataManagement(HttpServletRequest request, Model model) {
        model.addAttribute("dbNames", metaHiveInfoService.getDbNames());
        model.addAttribute("cusPage", systemCompanyService.queryClusterCompany());

        Map<String, Object> searchParams = WebUtils.getParametersStartingWith(request, "Q_");
        if (searchParams == null) {
            // Defensive: WebUtils returns an empty map in practice, but the
            // default-filter put below must never NPE.
            searchParams = new HashMap<String, Object>();
        }

        // BUGFIX: the old code compared the parameter value with == "" --
        // reference identity, always false for request-supplied strings -- so
        // an empty filter value reached Long.valueOf("") and threw
        // NumberFormatException, and an absent key NPEd on toString().
        Long companyId = parseLongParam(searchParams, "EQ_systemCompany.id");              // tenant filter
        Long clusterTypeId = parseLongParam(searchParams, "EQ_clusterType.clusterTypeId"); // cluster-type filter
        model.addAttribute("hpPage", selectCluterUserByParams(companyId, clusterTypeId));

        // No explicit visibility filter supplied -> default to "public" resources.
        if (request.getParameter("Q_EQ_hdfsInfoBak.properties") == null) {
            searchParams.put("EQ_hdfsInfoBak.properties", MetaHdfsInfoBak.pubProperty);
        }
        model.addAttribute("page", metaHiveInfoService.queryMetaHdfsInfo(searchParams, request));

        // All cluster types, for the filter drop-down.
        model.addAttribute("clusterTypePage", clusterTypeService.findAllClusterType());
    }

    /**
     * Reads an optional numeric search parameter.
     *
     * @param searchParams parameters extracted with the "Q_" prefix stripped
     * @param key          map key to read
     * @return the parsed id, or {@code null} when the value is absent, blank
     *         or not a number
     */
    private Long parseLongParam(Map<String, Object> searchParams, String key) {
        Object raw = searchParams.get(key);
        if (raw == null) {
            return null;
        }
        String text = raw.toString().trim();
        if (text.isEmpty()) {
            return null;
        }
        try {
            return Long.valueOf(text);
        } catch (NumberFormatException e) {
            logger.warn("Ignoring non-numeric value '{}' for search parameter {}", text, key);
            return null;
        }
    }

    /**
     * Returns the cluster users matching the given tenant and/or cluster type.
     * Invoked via AJAX when the page filters change.
     *
     * @param model         unused; kept for the existing handler signature
     * @param id            tenant (company) id, may be null
     * @param clusterTypeId cluster type id, may be null
     */
    @RequestMapping("select_hpUn")
    @ResponseBody
    public JsonObject selectHpun(Model model, Long id, Long clusterTypeId) {
        return JsonObject.success(selectCluterUserByParams(id, clusterTypeId));
    }

    /**
     * Looks up cluster users filtered by tenant and/or cluster type; either
     * filter may be null, in which case it is ignored.
     * (The historical misspelling "Cluter" is kept: the method is public and
     * may have external callers.)
     *
     * @param id            tenant (company) id, may be null
     * @param clusterTypeId cluster type id, may be null
     * @return matching users, or an empty list when the lookup fails
     */
    public List<ClusterUser> selectCluterUserByParams(Long id, Long clusterTypeId) {
        try {
            if (id != null && clusterTypeId != null) {
                return clusterUserService.queryClusterUserByParams(id, clusterTypeId);
            }
            if (id != null) {
                return clusterUserService.queryClusterUserBySystemId(id);
            }
            if (clusterTypeId != null) {
                return clusterUserService.queryClusterUserByClusterTypeId(clusterTypeId);
            }
            return clusterUserService.queryClusterUser();
        } catch (Exception e) {
            // Log the full stack trace at ERROR; the old printStackTrace() +
            // logger.info(e.getMessage()) combination bypassed the log files
            // and dropped the trace.
            logger.error("Failed to query cluster users (id={}, clusterTypeId={})",
                    id, clusterTypeId, e);
            return new ArrayList<ClusterUser>();
        }
    }

    /**
     * Refreshes the cached Hive metadata from the cluster.
     */
    @RequestMapping("flush_hive")
    @ResponseBody
    public JsonObject flushHive() {
        // The legacy flush implementation was replaced by the *New API (2016-08-02).
        metaHiveInfoService.flushHiveInfoAPINew();
        return JsonObject.success();
    }

    /**
     * Opens the edit popup for one Hive metadata record.
     *
     * @param id Hive metadata record id
     */
    @RequestMapping("edit_hive_pop")
    public void editHive(Long id, Model model) {
        model.addAttribute("hiveInfo", metaHiveInfoService.queryMetaHiveInfoById(id));
    }

    /**
     * Returns the stored SQL of a Hive table, refreshing it first.
     */
    @RequestMapping("seeHiveSql")
    @ResponseBody
    public JsonObject seeHiveSql(Long hiveInfoId) {
        // Refresh the stored SQL before reading it back.
        metaHiveInfoService.updateHiveSqlAPI(hiveInfoId);
        return JsonObject.success(metaHiveInfoService.seeHiveSql(hiveInfoId));
    }

    /**
     * Opens the permission-configuration popup for a Hive resource.
     */
    @RequestMapping("config_hive_pop")
    public void configHive(Long resourceId, HttpServletRequest request) {
        request.setAttribute("resourceId", resourceId);
    }

    /**
     * Lists the users inside the resource's group (popup).
     */
    @RequestMapping("query_hive_groupuser_pop")
    public void queryGroupUser(HttpServletRequest request, Long resourceId, String group,
                               String divId, String divId_groupuser) {
        Map<String, Object> searchParams = WebUtils.getParametersStartingWith(request, "Q_");
        Map<String, Object> map =
                metaHiveInfoService.findGroupUserListByHive(group, resourceId, searchParams, request);
        request.setAttribute("page_group", map.get("inner"));
        // Fall back to the popup-specific div when none was supplied.
        request.setAttribute("divId", divId == null ? divId_groupuser : divId);
        request.setAttribute("resourceId", resourceId);
    }

    /**
     * Lists the users outside the resource's group (popup).
     */
    @RequestMapping("query_hive_otheruser_pop")
    public void queryOtherUser(HttpServletRequest request, String group, Long resourceId,
                               String divId, String divId_otheruser) {
        Map<String, Object> searchParams = WebUtils.getParametersStartingWith(request, "Q_");
        Map<String, Object> map =
                metaHiveInfoService.findGroupUserListByHive(group, resourceId, searchParams, request);
        request.setAttribute("resourceId", resourceId);
        request.setAttribute("page_other", map.get("outer"));
        // Fall back to the popup-specific div when none was supplied.
        request.setAttribute("divId", divId == null ? divId_otheruser : divId);
    }

    /**
     * Removes users from the group: deletes their rows from the hiveAccess
     * table by user id. The resource's owning user cannot be removed.
     */
    @RequestMapping("del_hive_pri_pop")
    @ResponseBody
    public JsonObject delGroupUser(Long[] hadoopUserId, Long resourceId) {
        // BUGFIX: a request with no selected user used to NPE inside
        // Arrays.asList(null) below.
        if (hadoopUserId == null || hadoopUserId.length == 0) {
            return JsonObject.alert("请选择要移除的用户", WebMessageLevel.ERROR);
        }
        MetaHiveInfo hiveInfo = metaHiveInfoService.queryMetaHiveInfoById(resourceId);
        // The owning user must keep access to its own resource.
        if (Arrays.asList(hadoopUserId).contains(hiveInfo.getClusterUser().getId())) {
            return JsonObject.alert("不能移除自身", WebMessageLevel.ERROR);
        }
        List<String> messageList = metaHiveInfoService.deleteInnerUser(hadoopUserId, resourceId);
        // messageList.toString() renders "[a, b]"; strip the brackets.
        return JsonObject.alert(StringUtils.strip(messageList.toString(), "[]"), WebMessageLevel.SUCCESS);
    }

    /**
     * Grants users access: inserts rows into the hiveAccess table.
     */
    @RequestMapping("add_pri_pop")
    @ResponseBody
    public JsonObject addGroupUser(Long[] hadoopUserId, Long resourceId) {
        List<String> messageList = metaHiveInfoService.addOuterUser(hadoopUserId, resourceId);
        // messageList.toString() renders "[a, b]"; strip the brackets.
        return JsonObject.alert(StringUtils.strip(messageList.toString(), "[]"), WebMessageLevel.SUCCESS);
    }

    /**
     * Opens the per-user privilege-configuration popup.
     */
    @RequestMapping("config_hive_privilege_pop")
    public void configHivePrivilege(Long resourceId, Long userId, HttpServletRequest request) {
        request.setAttribute("resourceId", resourceId);
        request.setAttribute("userId", userId);
    }

    /**
     * Shows a user's current Hive privileges plus the available Hive
     * privilege types.
     */
    @RequestMapping("query_hive_user_privilege_pop")
    public void queryHiveUserPrivilege(HttpServletRequest request, Long resourceId, Long userId) {
        Map<String, Object> searchParams = WebUtils.getParametersStartingWith(request, "Q_");
        PageInfo page = metaHiveInfoService.queryHiveUserPrivilege(resourceId, userId, searchParams, request);
        request.setAttribute("metaAccessPrivilege",
                metaAccessPrivilegeService.findAccessPrivilegeByType("hive"));
        request.setAttribute("page", page);
        request.setAttribute("resourceId", resourceId);
        request.setAttribute("userId", userId);
    }

    @Override
    public String getPortalPrefix() {
        return PORTAL_PREFIX;
    }
}
