package com.ideal.hadoopadmin.service.meta.hive;

import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.ideal.hadoopadmin.api.hdfs.HDFSAPI;
import com.ideal.hadoopadmin.api.hive.HiveAPI;
import com.ideal.hadoopadmin.api.linux.UserAPI;
import com.ideal.hadoopadmin.common.entity.Result;
import com.ideal.hadoopadmin.common.entity.ResultAPI;
import com.ideal.hadoopadmin.common.framework.orm.SearchFilter;
import com.ideal.hadoopadmin.crontab.hive.FlushHiveInfo;
import com.ideal.hadoopadmin.entity.cluster.ClusterUser;
import com.ideal.hadoopadmin.entity.meta.hdfs.MetaHdfsAccess;
import com.ideal.hadoopadmin.entity.meta.hive.MetaHiveAccess;
import com.ideal.hadoopadmin.entity.meta.hive.MetaHiveAccessCustom;
import com.ideal.hadoopadmin.entity.meta.hive.MetaHiveInfo;
import com.ideal.hadoopadmin.entity.report.HiveInfoCustom;
import com.ideal.hadoopadmin.mapper.webdb.cluster.ClusterUserMapper;
import com.ideal.hadoopadmin.mapper.webdb.meta.MetaHdfsAccessMapper;
import com.ideal.hadoopadmin.mapper.webdb.meta.MetaHiveAccessMapper;
import com.ideal.hadoopadmin.mapper.webdb.meta.MetaHiveInfoMapper;
import com.ideal.hadoopadmin.mapper.webdb.meta.MetaHiveSqlMapper;
import com.ideal.hadoopadmin.service.cluster.ClusterMachineService;
import com.ideal.hadoopadmin.service.cluster.ParameterService;
import com.ideal.hadoopadmin.service.meta.hdfs.MetaHdfsAccessService;
import com.ideal.hadoopadmin.service.meta.hdfs.MetaHdfsInfoBakService;
import com.ideal.hadoopadmin.service.meta.hdfs.MetaHdfsInfoService;
import com.ideal.tools.ssh.common.CommonProperties;
import com.ideal.tools.ssh.context.ClusterContext;
import com.ideal.tools.ssh.entity.ContextResult;
import com.ideal.tools.ssh.entity.LinuxMachine;
import com.ideal.tools.ssh.result.LinuxResult;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.hssf.usermodel.*;
import org.apache.poi.hssf.util.HSSFColor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.*;

/**
 * Created by fwj on 16-3-1.
 * hive元数据管理service
 */
@Service
public class MetaHiveInfoService {
    private static Logger logger = LoggerFactory.getLogger(MetaHiveInfoService.class);

    @Resource
    private MetaHiveInfoMapper metaHiveInfoMapper;
    @Resource
    private MetaHiveSqlMapper metaHiveSqlMapper;
    @Resource
    private MetaHiveAccessMapper metaHiveAccessMapper;
    @Resource
    private ClusterUserMapper clusterUserMapper;
    @Resource
    MetaHdfsInfoBakService metaHdfsInfoBakService;
    @Resource
    private MetaHdfsInfoService metaHdfsInfoService;
    @Resource
    private ClusterMachineService clusterMachineService;
    @Resource
    private ParameterService parameterService;
    @Resource
    MetaHdfsAccessMapper metaHdfsAccessMapper;
    /**
     * Lists all hive database names via the hive-info mapper.
     *
     * @return database names (as returned by the mapper)
     */
    public List<String> getDbNames() {
        return metaHiveInfoMapper.getDbNames();
    }

    /**
     * Pages through hive metadata records with the caller's search filters applied.
     * NOTE(review): despite "Hdfs" in the name, this queries hive info — name kept
     * for caller compatibility.
     *
     * @param searchParams filters parsed into a WHERE clause by SearchFilter
     * @param request      source of the "page" parameter (defaults to page 1)
     * @return PageInfo over the matching MetaHiveInfo rows, 10 per page
     */
    public PageInfo queryMetaHdfsInfo(Map<String, Object> searchParams, HttpServletRequest request){
        String pageParam = request.getParameter("page");
        int currentPage = Integer.parseInt(pageParam == null ? "1" : pageParam);
        // Build the WHERE clause from the search filters.
        String where = SearchFilter.parseToString(searchParams);
        // Start pagination; PageHelper intercepts the next mapper call.
        PageHelper.startPage(currentPage, 10);
        // Append the assembled WHERE condition.
        PageHelper.setAppendWhere(where);

        List<MetaHiveInfo> hiveInfoList = metaHiveInfoMapper.findHiveInfo();
        return new PageInfo(hiveInfoList);
    }

    /**
     * Loads a single hive metadata record by its id.
     *
     * @param id hive info primary key
     * @return the matching MetaHiveInfo (whatever the mapper returns when absent)
     */
    public MetaHiveInfo queryMetaHiveInfoById(Long id) {
        return metaHiveInfoMapper.findHiveInfoById(id);

    }

    /** Returns the stored SQL text associated with the given hive info id. */
    public String seeHiveSql(Long hiveInfoId) {
        return metaHiveSqlMapper.findSqlById(hiveInfoId);
    }

    /**
     * Looks up cluster users inside ("inner") or outside ("outer") the group that
     * owns the given hive resource, paginated 8 per page.
     *
     * @param group        "inner" or "outer"; selects which user set is fetched
     * @param resourceId   hive resource id whose owning user defines the group
     * @param searchParams filters parsed into a WHERE clause
     * @param request      source of the "page" parameter (defaults to 1)
     * @return a map keyed by the group name ("inner"/"outer") to its PageInfo
     */
    public Map<String, Object> findGroupUserListByHive(String group, Long resourceId, Map<String, Object> searchParams, HttpServletRequest request) {
        Map<String, Object> map = new HashMap<String, Object>();
        Long selfUserId = metaHiveInfoMapper.findUserIdInfoById(resourceId);
        int currentPage = Integer.parseInt(request.getParameter("page") == null ? "1" : request.getParameter("page"));
        if ("inner".equals(group)) {
            prepareGroupUserPage(currentPage, searchParams);
            // Users inside the group (excluding the resource owner).
            List<ClusterUser> innerUsers = clusterUserMapper.findInnerByGroupId(resourceId, selfUserId);
            map.put("inner", new PageInfo(innerUsers));
        }
        if ("outer".equals(group)) {
            prepareGroupUserPage(currentPage, searchParams);
            // Users outside the group.
            List<ClusterUser> outerUsers = clusterUserMapper.findOuterByGroupId(resourceId, selfUserId);
            map.put("outer", new PageInfo(outerUsers));
        }
        return map;
    }

    /** Starts PageHelper pagination (8 rows/page) with the parsed filter appended. */
    private void prepareGroupUserPage(int currentPage, Map<String, Object> searchParams) {
        String where = SearchFilter.parseToString(searchParams);
        PageHelper.startPage(currentPage, 8);
        PageHelper.setAppendWhere(where);
    }

    /** Bulk-deletes hive access records for all of the given user ids. */
    public void deleteByInnerUserIds(Long[] userId) {
        metaHiveAccessMapper.deleteByUserIds(userId);
    }

    /**
     * Removes users from a hive group: calls the remote revoke API per user and,
     * when the API reports success, deletes the local hive/hdfs access records.
     *
     * @return HTML-formatted result messages accumulated across all users
     */
    public List<String> deleteInnerUser(Long[] userId, Long hiveId) {
        List<String> messageList = new ArrayList<String>();
        for (Long id : userId) {
            // New-style API call (replaced deleteInnerAPI — update 2016-08-02 qinfengxia).
            List<LinuxResult> linuxResults = deleteInnerAPINew(id, hiveId);
            ResultAPI.initAPIResult(linuxResults);
            messageList.addAll(ResultAPI.messageList);
            // Only purge local access records when the remote revoke succeeded.
            if (ResultAPI.flag) {
                deleteByInnerUserId(id);
                metaHdfsInfoService.deleteInnerByUserId(id);
                messageList.add("<div>数据库信息:移除组内用户成功!</div>");
            }
        }
        return messageList;
    }

    /** Deletes all hive access records belonging to one user. */
    public void deleteByInnerUserId(Long userId) {
        metaHiveAccessMapper.deleteByUserId(userId);
    }

    /**
     * Revokes hive privileges through the legacy SSH-context API.
     * Context arguments set via initHiveAPI: HDFS_PUB_TABLE_DIR (table dir),
     * Cluster_User_Name (owner), HIVE_TABLE_NAME, HIVE_PRIVILE_USER (grantee),
     * plus HDFS_HAS_OTHER_ACCESS.
     *
     * @return results of the last remote command
     */
    public List<LinuxResult> deleteInnerAPI(Long userId, Long hiveId) {
        // More than one access row means other grants still cover this hdfs path.
        int accessCount = metaHdfsAccessMapper.findHdfsHasOtherAcess(userId, hiveId);
        String otherAccess = accessCount > 1 ? HDFSAPI.HDFS_TRUE : HDFSAPI.HDFS_FALSE;
        ClusterContext context = initHiveAPI(userId, hiveId, otherAccess);
        HiveAPI.revokeHivePrivilege(context);
        return context.getContextResult().getLastResult();
    }

    /**
     * Revokes hive privileges through the new-style (parameter-map) API.
     * Parameters mirror deleteInnerAPI: table dir, owner, table name, grantee,
     * plus the "has other access" flag.
     *
     * @return results of the last remote command
     */
    public List<LinuxResult> deleteInnerAPINew(Long userId, Long hiveId) {
        // More than one access row means other grants still cover this hdfs path.
        int accessCount = metaHdfsAccessMapper.findHdfsHasOtherAcess(userId, hiveId);
        String otherAccess = accessCount > 1 ? HDFSAPI.HDFS_TRUE : HDFSAPI.HDFS_FALSE;
        Map<String, Object> params = initHiveAPINew(userId, hiveId, otherAccess);
        ContextResult contextResult = com.ideal.hadoopadmin.api.better.hive.HiveAPI.RevokeHivePrivilege(params);
        return contextResult.getLastResult();
    }
    /**
     * Persists one hive access record.
     * NOTE(review): identical to addHiveAccess below — candidates for consolidation.
     */
    public void addAccessUser(MetaHiveAccess metaHiveAccess) {
        metaHiveAccessMapper.saveMetaHiveAccess(metaHiveAccess);
    }

    /**
     * Grants hive/hdfs access to users outside the owning group. For each user the
     * remote grant API is invoked; on success an access record is written for both
     * the hive table and its backing hdfs path.
     *
     * @param userIds cluster user ids to grant access to
     * @param hiveId  hive resource being shared
     * @return HTML-formatted result messages accumulated across all users
     */
    public List<String> addOuterUser(Long[] userIds, Long hiveId) {
        List<String> messageList = new ArrayList<String>();
        // The backing hdfs info id is the same for every user — look it up once.
        Long hdfsInfoId = metaHdfsInfoService.findHdfsInfoIdByHiveInfoId(hiveId);
        for (Long userId : userIds) {
            // New-style API call (replaced addOuterUserAPI).
            List<LinuxResult> linuxResults = addOuterUserAPINew(userId, hiveId);
            ResultAPI.initAPIResult(linuxResults);
            messageList.addAll(ResultAPI.messageList);
            if (ResultAPI.flag) {
                // Persist the hive access record.
                MetaHiveAccess metaHiveAccess = new MetaHiveAccess();
                metaHiveAccess.setCreateTime(System.currentTimeMillis());
                metaHiveAccess.setClusterUserId(userId);
                metaHiveAccess.setHiveInfoId(hiveId);
                addHiveAccess(metaHiveAccess);
                // Persist the matching hdfs access record.
                MetaHdfsAccess metaHdfsAccess = new MetaHdfsAccess();
                metaHdfsAccess.setCreateTime(System.currentTimeMillis());
                metaHdfsAccess.setClusterUserId(userId);
                metaHdfsAccess.setHdfsInfoId(hdfsInfoId);
                metaHdfsAccessMapper.saveMetaHdfsAccess(metaHdfsAccess);
                // Fixed copy-paste: this adds an OUTER user; the message said "组内".
                messageList.add("<div>数据库信息:添加组外用户成功!</div>");
            }
        }
        return messageList;
    }

    /** Persists one hive access record (see also the duplicate addAccessUser). */
    public void addHiveAccess(MetaHiveAccess metaHiveAccess) {
        metaHiveAccessMapper.saveMetaHiveAccess(metaHiveAccess);
    }

    /**
     * Builds a ClusterContext for the legacy hive API: resolves the grantee's user
     * name, the hive table's owner and hdfs path, and loads cluster machines plus
     * global parameters.
     *
     * @param userId grantee cluster user id
     * @param hiveId hive resource id
     * @return context primed with table dir, owner name, table name and grantee
     */
    public ClusterContext initHiveAPI(Long userId, Long hiveId) {
        // Grantee user name.
        ClusterUser grantee = clusterUserMapper.findById(userId);
        // Hive row carries the backing hdfs path and the owning user.
        MetaHiveInfo hiveInfo = metaHiveInfoMapper.findHiveInfoById(hiveId);
        // Machines and global parameters for the SSH context.
        List<LinuxMachine> machines = clusterMachineService.getMachineList(null);
        CommonProperties props = new CommonProperties(parameterService.getAllParameter());
        // Path segment derived from the hdfs path (see MetaHdfsInfoService.ordinalSubstring).
        String tableDir = MetaHdfsInfoService.ordinalSubstring(hiveInfo.getHdfsInfoBak().getHdfsPath(), "/", 4);
        props.setArgument(HDFSAPI.HDFS_PUB_TABLE_DIR, tableDir);
        props.setArgument(UserAPI.Cluster_User_Name, hiveInfo.getClusterUser().getUserName());
        props.setArgument(HiveAPI.HIVE_TABLE_NAME, hiveInfo.getTableName());
        props.setArgument(HiveAPI.HIVE_PRIVILE_USER, grantee.getUserName());
        ClusterContext context = new ClusterContext(props);
        context.setOriginalList(machines);
        return context;
    }

    /**
     * Builds the parameter map for the new-style hive API: table dir, owner user
     * name, table name and grantee user name.
     *
     * @param userId grantee cluster user id
     * @param hiveId hive resource id
     */
    public Map<String, Object> initHiveAPINew(Long userId, Long hiveId) {
        // Grantee user name.
        ClusterUser grantee = clusterUserMapper.findById(userId);
        // Hive row carries the backing hdfs path and the owning user.
        MetaHiveInfo hiveInfo = metaHiveInfoMapper.findHiveInfoById(hiveId);
        // Path segment derived from the hdfs path (see MetaHdfsInfoService.ordinalSubstring).
        String tableDir = MetaHdfsInfoService.ordinalSubstring(hiveInfo.getHdfsInfoBak().getHdfsPath(), "/", 4);
        Map<String, Object> params = new HashMap<String, Object>();
        params.put(HDFSAPI.HDFS_PUB_TABLE_DIR, tableDir);
        params.put(UserAPI.Cluster_User_Name, hiveInfo.getClusterUser().getUserName());
        params.put(HiveAPI.HIVE_TABLE_NAME, hiveInfo.getTableName());
        params.put(HiveAPI.HIVE_PRIVILE_USER, grantee.getUserName());
        return params;
    }
    /**
     * Builds a ClusterContext for the legacy hive API, additionally carrying the
     * "has other access" flag used when revoking.
     *
     * @param otherAcess HDFSAPI.HDFS_TRUE when other grants still cover the path
     */
    public ClusterContext initHiveAPI(Long userId, Long hiveId,String otherAcess) {
        // Grantee user name.
        ClusterUser grantee = clusterUserMapper.findById(userId);
        // Hive row carries the backing hdfs path and the owning user.
        MetaHiveInfo hiveInfo = metaHiveInfoMapper.findHiveInfoById(hiveId);
        // Machines and global parameters for the SSH context.
        List<LinuxMachine> machines = clusterMachineService.getMachineList(null);
        CommonProperties props = new CommonProperties(parameterService.getAllParameter());
        // Path segment derived from the hdfs path (see MetaHdfsInfoService.ordinalSubstring).
        String tableDir = MetaHdfsInfoService.ordinalSubstring(hiveInfo.getHdfsInfoBak().getHdfsPath(), "/", 4);
        props.setArgument(HDFSAPI.HDFS_HAS_OTHER_ACCESS, otherAcess);
        props.setArgument(HDFSAPI.HDFS_PUB_TABLE_DIR, tableDir);
        props.setArgument(UserAPI.Cluster_User_Name, hiveInfo.getClusterUser().getUserName());
        props.setArgument(HiveAPI.HIVE_TABLE_NAME, hiveInfo.getTableName());
        props.setArgument(HiveAPI.HIVE_PRIVILE_USER, grantee.getUserName());
        ClusterContext context = new ClusterContext(props);
        context.setOriginalList(machines);
        return context;
    }

    /**
     * Builds the parameter map for the new-style hive API, additionally carrying
     * the "has other access" flag used when revoking.
     *
     * @param otherAcess HDFSAPI.HDFS_TRUE when other grants still cover the path
     */
    public Map<String, Object> initHiveAPINew(Long userId, Long hiveId,String otherAcess) {
        // Grantee user name.
        ClusterUser grantee = clusterUserMapper.findById(userId);
        // Hive row carries the backing hdfs path and the owning user.
        MetaHiveInfo hiveInfo = metaHiveInfoMapper.findHiveInfoById(hiveId);
        // Path segment derived from the hdfs path (see MetaHdfsInfoService.ordinalSubstring).
        String tableDir = MetaHdfsInfoService.ordinalSubstring(hiveInfo.getHdfsInfoBak().getHdfsPath(), "/", 4);
        Map<String, Object> params = new HashMap<String, Object>();
        params.put(HDFSAPI.HDFS_HAS_OTHER_ACCESS, otherAcess);
        params.put(HDFSAPI.HDFS_PUB_TABLE_DIR, tableDir);
        params.put(UserAPI.Cluster_User_Name, hiveInfo.getClusterUser().getUserName());
        params.put(HiveAPI.HIVE_TABLE_NAME, hiveInfo.getTableName());
        params.put(HiveAPI.HIVE_PRIVILE_USER, grantee.getUserName());
        return params;
    }
    /** Grants hive privileges through the legacy SSH-context API. */
    public List<LinuxResult> addOuterUserAPI(Long userId, Long hiveId) {
        ClusterContext context = initHiveAPI(userId, hiveId);
        HiveAPI.grantHivePrivilege(context);
        return context.getContextResult().getLastResult();
    }

    /**
     * Grants hive privileges via the new-style API: one remote call per privilege
     * type the user holds on the hive resource.
     *
     * @return results of the last grant call, or an empty list when the user has
     *         no privilege rows (previously this dereferenced a null contextResult)
     */
    public List<LinuxResult> addOuterUserAPINew(Long userId, Long hiveId) {
        Map<String, Object> params = initHiveAPINew(userId, hiveId);
        List<Map<String, Object>> privileges = findPrivilege(userId, hiveId);
        ContextResult contextResult = null;
        for (Map<String, Object> privilege : privileges) {
            params.put("HIVE_PRIVILE_TYPE", privilege.get("privilegeType"));
            contextResult = com.ideal.hadoopadmin.api.better.hive.HiveAPI.GrantHivePrivilege(params);
        }
        // Guard: with no privilege rows no call was made and contextResult is null.
        if (contextResult == null) {
            return new ArrayList<LinuxResult>();
        }
        return contextResult.getLastResult();
    }

    /** Triggers a full hive info refresh through the legacy SSH-context API. */
    public void flushHiveInfoAPI() {
        CommonProperties props = new CommonProperties(parameterService.getAllParameter());
        ClusterContext context = new ClusterContext(props);
        context.setOriginalList(clusterMachineService.getMachineList(null));
        HiveAPI.flushHiveInfo(context);
    }

    /**
     * Triggers a hive info refresh through the new-style API (no extra arguments).
     * update20160802qinfengxia
     */
    public void flushHiveInfoAPINew() {
        Map<String, Object> params = new HashMap<String, Object>();
        com.ideal.hadoopadmin.api.better.hive.HiveAPI.FlushHiveInfo(params);
    }

    /** Refreshes the stored SQL for a single hive info record. */
    public void updateHiveSqlAPI(Long hiveInfoId) {
        HiveAPI.flushHiveInfo_sql(hiveInfoId);
    }

    /**
     * Builds a comma-separated list of all hive table names owned by a user.
     *
     * @param userId owner cluster user id
     * @return table names joined by "," (empty string when the list is empty)
     */
    public String getHiveTableNames(Long userId) {
        List<String> tableNameList = metaHiveInfoMapper.findTableNamesByUserId(userId);
        // String.join replaces the old concat-in-loop + strip-first-comma dance;
        // it also yields "" for an empty list, matching the old behavior.
        return String.join(",", tableNameList);
    }

    /**
     * First-level hive report data, paginated.
     * update20160721qinfengxia
     *
     * @param currentPage  1-based page number
     * @param defaultSize  page size
     * @param searchParams filters parsed into a WHERE clause
     * @return PageInfo over the parent report rows
     */
    public PageInfo searchReportPage(int currentPage,int defaultSize,Map<String,Object> searchParams){
        String where = SearchFilter.parseToString(searchParams);
        PageHelper.startPage(currentPage, defaultSize);
        PageHelper.setAppendWhere(where);
        List<MetaHiveInfo> rows = metaHiveInfoMapper.hiveParentList();
        return new PageInfo(rows);
    }

    /**
     * First-level hive report data grouped by user, paginated.
     * update20160918qinfengxia
     *
     * @param currentPage  1-based page number
     * @param defaultSize  page size
     * @param searchParams may contain "LIKE_userName" to filter by user name
     * @return PageInfo over the per-user rows
     */
    public PageInfo searchHiveUserPage(int currentPage,int defaultSize,Map<String,Object> searchParams){
        Map<String, Object> paraMap = new HashMap<String, Object>();
        PageHelper.startPage(currentPage, defaultSize);
        // Pass the userName filter through only when it is actually present.
        // (The old code compared against "" with ==, a no-op reference check.)
        Object rawUserName = searchParams.get("LIKE_userName");
        String userName = rawUserName == null ? null : rawUserName.toString();
        if (StringUtils.isNotBlank(userName)) {
            paraMap.put("userName", userName);
        }
        List<Map<String, Object>> hiveInfoCustoms = metaHiveInfoMapper.hiveUserParentList(paraMap);
        return new PageInfo(hiveInfoCustoms);
    }

    /**
     * Second-level hive report data (access rights) for one owner, optionally
     * narrowed to a single table name.
     * add20160721qinfengxia
     *
     * @param clusterUserId owner cluster user id
     * @param tableName     optional table-name filter (ignored when null/empty)
     * @return access rows from the mapper
     */
    public List<Map<String,Object>>  searchReportChildPage(int clusterUserId,String tableName) {
        Map<String, Object> criteria = new HashMap<String, Object>();
        if (tableName != null && !tableName.isEmpty()) {
            criteria.put("tableName", tableName);
        }
        criteria.put("clusterUserId", clusterUserId);
        return metaHiveInfoMapper.findAccessUsersList(criteria);
    }

    /**
     * Second-level hive report data grouped by user, optionally narrowed to a
     * single table name.
     * add20160918qinfengxia
     *
     * @param clusterUserId owner cluster user id
     * @param tableName     optional table-name filter (ignored when null/empty)
     * @return per-user child rows from the mapper
     */
    public List<Map<String,Object>>  searchHiveUserChildPage(int clusterUserId,String tableName) {
        Map<String, Object> criteria = new HashMap<String, Object>();
        if (tableName != null && !tableName.isEmpty()) {
            criteria.put("tableName", tableName);
        }
        criteria.put("clusterUserId", clusterUserId);
        return metaHiveInfoMapper.hiveUserChildList(criteria);
    }

    /**
     * Loads every hive metadata record (no pagination).
     *
     * @throws Exception declared for callers; nothing in this body throws it
     */
    public List<MetaHiveInfo> findAllHiveInfo() throws Exception{
        return metaHiveInfoMapper.findHiveInfo();
    }
    /**
     * Rows for the hive access report download.
     * NOTE(review): searchParam is parsed into a WHERE clause but the result is
     * never applied — findAccessUsers() ignores it, so filtering silently does
     * nothing here. Confirm intent before relying on this method's filters.
     */
    public List<HiveInfoCustom>downReport(Map<String,Object> searchParam){
        // TODO: write a dedicated SQL mirroring this query and append the where condition manually
        String where = SearchFilter.parseToString(searchParam);
       return metaHiveInfoMapper.findAccessUsers();
    }

    /**
     * Hive access rows for report download, optionally filtered by table name.
     * add20160721qinfengxia
     *
     * @param tableName optional filter (ignored when null/empty)
     */
    public  List<Map<String,Object>> queryHiveByDown(String tableName) {
        Map<String, Object> criteria = new HashMap<String, Object>();
        if (tableName != null && !tableName.isEmpty()) {
            criteria.put("tableName", tableName);
        }
        return metaHiveInfoMapper.findAccessUsersList(criteria);
    }

    /**
     * Per-user hive rows for report download, optionally filtered by user name.
     * add20160721qinfengxia
     *
     * @param userName optional filter (ignored when null/empty)
     */
    public  List<Map<String,Object>> queryHiveUserByDown(String userName) {
        Map<String, Object> criteria = new HashMap<String, Object>();
        if (userName != null && !userName.isEmpty()) {
            criteria.put("userName", userName);
        }
        return metaHiveInfoMapper.hiveUserChildList(criteria);
    }

    /**
     * Builds an HSSF workbook of hive tables with their owner and authorized users.
     * Columns: owner user, hive table name, users with access.
     * add20160721qinfengxia
     *
     * @param list rows with keys "ownerUserName", "tableName", "accessUserName"
     * @return the populated workbook
     */
    public HSSFWorkbook creatSheet(List<Map<String,Object>> list) {
        HSSFWorkbook hssfWorkbook = new HSSFWorkbook();            // workbook
        HSSFCellStyle cellStyle = hssfWorkbook.createCellStyle();  // data-cell style
        cellStyle.setWrapText(true);                               // wrap long text
        HSSFSheet hssfSheet = hssfWorkbook.createSheet();
        hssfSheet.setColumnWidth(0, 4000);
        hssfSheet.setColumnWidth(1, 10000);
        hssfSheet.setColumnWidth(2, 4000);
        hssfSheet.createRow(0);                                    // title row (left empty)
        HSSFRow headerRow = hssfSheet.createRow(1);                // header row
        HSSFCell cell1 = headerRow.createCell(0);
        HSSFCell cell2 = headerRow.createCell(1);
        HSSFCell cell3 = headerRow.createCell(2);
        cell1.setCellValue(new HSSFRichTextString("所属用户"));
        cell2.setCellValue(new HSSFRichTextString("hive表名"));
        cell3.setCellValue(new HSSFRichTextString("可访问用户"));

        HSSFCellStyle thStyle = hssfWorkbook.createCellStyle();    // header style
        thStyle.setFillPattern(HSSFCellStyle.FINE_DOTS);
        thStyle.setFillForegroundColor(new HSSFColor.GREY_25_PERCENT().getIndex());
        thStyle.setFillBackgroundColor(new HSSFColor.GREY_25_PERCENT().getIndex());
        cell1.setCellStyle(thStyle);
        cell2.setCellStyle(thStyle);
        cell3.setCellStyle(thStyle);
        for (int i = 0; i < list.size(); i++) {
            Map<String, Object> map = list.get(i);
            HSSFRow row = hssfSheet.createRow(i + 2);
            HSSFCell c1 = row.createCell(0);
            HSSFCell c2 = row.createCell(1);
            HSSFCell c3 = row.createCell(2);
            c1.setCellStyle(cellStyle);
            c2.setCellStyle(cellStyle);
            c3.setCellStyle(cellStyle);
            // Null-safe extraction: the old code NPE'd on rows with a missing value
            // (creatSheetHiveUser already guarded — made this consistent).
            c1.setCellValue(new HSSFRichTextString(cellText(map.get("ownerUserName"))));
            c2.setCellValue(new HSSFRichTextString(cellText(map.get("tableName"))));
            c3.setCellValue(new HSSFRichTextString(cellText(map.get("accessUserName"))));
        }
        return hssfWorkbook;
    }

    /** Returns the value's string form, or "" when the value is null. */
    private static String cellText(Object value) {
        return value == null ? "" : value.toString();
    }

    /**
     * Builds an HSSF workbook listing hive tables per user.
     * Columns: user name, hive table name.
     * add20160918qinfengxia
     *
     * @param list rows with keys "userName" and "tableName" (values may be null)
     * @return the populated workbook
     */
    public HSSFWorkbook creatSheetHiveUser(List<Map<String,Object>> list) {
        HSSFWorkbook hssfWorkbook = new HSSFWorkbook();            // workbook
        HSSFCellStyle cellStyle = hssfWorkbook.createCellStyle();  // data-cell style
        cellStyle.setWrapText(true);                               // wrap long text
        HSSFSheet hssfSheet = hssfWorkbook.createSheet();
        hssfSheet.setColumnWidth(0, 4000);
        hssfSheet.setColumnWidth(1, 10000);
        hssfSheet.createRow(0);                                    // title row (left empty)
        HSSFRow headerRow = hssfSheet.createRow(1);                // header row
        HSSFCell cell1 = headerRow.createCell(0);
        HSSFCell cell2 = headerRow.createCell(1);
        cell1.setCellValue(new HSSFRichTextString("用户名"));
        cell2.setCellValue(new HSSFRichTextString("hive表名"));

        HSSFCellStyle thStyle = hssfWorkbook.createCellStyle();    // header style
        thStyle.setFillPattern(HSSFCellStyle.FINE_DOTS);
        thStyle.setFillForegroundColor(new HSSFColor.GREY_25_PERCENT().getIndex());
        thStyle.setFillBackgroundColor(new HSSFColor.GREY_25_PERCENT().getIndex());
        cell1.setCellStyle(thStyle);
        cell2.setCellStyle(thStyle);
        for (int i = 0; i < list.size(); i++) {
            Map<String, Object> map = list.get(i);
            HSSFRow row = hssfSheet.createRow(i + 2);
            HSSFCell c1 = row.createCell(0);
            HSSFCell c2 = row.createCell(1);
            c1.setCellStyle(cellStyle);
            c2.setCellStyle(cellStyle);
            // Real null guard: the old 'value == ""' was a reference comparison and
            // never a reliable emptiness test; toString() of "" is "" anyway.
            Object userName = map.get("userName");
            Object tableName = map.get("tableName");
            c1.setCellValue(new HSSFRichTextString(userName == null ? "" : userName.toString()));
            c2.setCellValue(new HSSFRichTextString(tableName == null ? "" : tableName.toString()));
        }
        return hssfWorkbook;
    }
    /**
     * Lists all other cluster users together with the privilege ids they hold on
     * the given hive resource, paginated 10 per page.
     * add20160810qinfengxia
     *
     * @param resourceId hive id whose grants are inspected
     * @param userId     owner id excluded from the listing
     * @return PageInfo of user rows; when grants exist each row gains
     *         "privilegeStr" (comma-led privilege id list) and "hiveAccessId"
     */
    public PageInfo queryHiveUserPrivilege(Long resourceId, Long userId,Map<String, Object> searchParams, HttpServletRequest request) {
        String where = SearchFilter.parseToString(searchParams);
        String pageParam = request.getParameter("page");
        int currentPage = pageParam == null ? 1 : Integer.parseInt(pageParam);
        PageHelper.startPage(currentPage, 10);
        // Always exclude the resource owner from the result set.
        if (StringUtils.isNotBlank(where)) {
            where += " and clusterUser.id !=" + userId;
        } else {
            where = " clusterUser.id !=" + userId;
        }

        PageHelper.setAppendWhere(where);
        List<Map<String, Object>> clusterUsers = clusterUserMapper.findOtherClusterUserMap();
        Map<String, Object> query = new HashMap<String, Object>();
        query.put("hiveId", resourceId);
        query.put("userId", userId);
        List<Map<String, Object>> grants = metaHiveInfoMapper.findUserByHiveId(query);
        for (Map<String, Object> user : clusterUsers) {
            String privilegeStr = ",";
            if (grants != null && grants.size() > 0) {
                for (Map<String, Object> grant : grants) {
                    if (grant.get("userId").toString().equals(user.get("userId").toString())) {
                        privilegeStr += grant.get("privilegeId") + ",";
                        user.put("hiveAccessId", grant.get("hiveAccessId"));
                    }
                }
                user.put("privilegeStr", privilegeStr);
            }
        }
        return new PageInfo(clusterUsers);
    }

    /**
     * Loads the privilege rows a user holds on a hive resource.
     * add20160830qinfengxia
     *
     * @param clusterUserId grantee cluster user id
     * @param hiveInfoId    hive resource id
     * @return privilege rows from the access mapper
     */
    public List<Map<String,Object>> findPrivilege(Long clusterUserId,Long hiveInfoId){
        Map<String, Object> criteria = new HashMap<String, Object>();
        criteria.put("clusterUserId", clusterUserId);
        criteria.put("hiveInfoId", hiveInfoId);
        return metaHiveAccessMapper.findPrivilege(criteria);
    }

}
