package cn.getech.data.development.service.impl;

import cn.getech.data.development.config.properties.BdpJobConfig;
import cn.getech.data.development.config.properties.HiveConfig;
import cn.getech.data.development.constant.DataDevelopmentBizExceptionEnum;
import cn.getech.data.development.constant.UdfUsableType;
import cn.getech.data.development.entity.*;
import cn.getech.data.development.entity.permission.SysTenantDataPermission;
import cn.getech.data.development.mapper.*;
import cn.getech.data.development.mapper.permission.SysTenantDataPermissionMapper;
import cn.getech.data.development.model.dto.JobRunUdfDto;
import cn.getech.data.development.model.dto.ProcDatasDto;
import cn.getech.data.development.model.req.procinfo.ProcInfoListOwerReq;
import cn.getech.data.development.model.res.procinfo.ProcInfoListRes;
import cn.getech.data.development.model.vo.UdfJarPropertyVo;
import cn.getech.data.development.service.*;
import cn.getech.data.development.utils.*;
import cn.getech.data.intelligence.common.exception.RRException;
import cn.getech.data.intelligence.common.utils.*;
import cn.getech.system.center.entity.SysTenantUser;
import cn.getech.system.center.service.SysRoleService;
import cn.getech.system.center.service.SysTenantUserService;
import cn.getech.system.center.service.SysUserRoleService;
import cn.getech.system.center.utils.ShiroUtils;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.map.MapUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.google.common.collect.Sets;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;
import java.io.File;
import java.sql.SQLException;
import java.time.LocalDateTime;
import java.util.*;
import java.util.stream.Collectors;

/**
 * <p>
 * Service implementation for the project information table (proc_info).
 * </p>
 *
 * @author zenith
 * @since 2018-08-21
 */
@Service
@Slf4j
public class ProcInfoServiceImpl extends ServiceImpl<ProcInfoMapper, ProcInfo> implements ProcInfoService {
    // Job/HDFS configuration (UDF jar upload root, cluster settings).
    @Autowired
    BdpJobConfig bdpJobConfig;
    // NOTE(review): self-injection — presumably so internal calls go through the
    // Spring proxy (transactions/AOP); confirm before removing.
    @Autowired
    ProcInfoService procInfoService;

    @Autowired
    JobRunUdfService jobRunUdfService;
    @Resource
    JobRunUdfMapper jobRunUdfMapper;
    // Hive connection settings (permission URL used for DDL on UDFs).
    @Autowired
    HiveConfig hiveConfig;

    @Autowired
    private SysUserRoleService sysUserRoleService;


    // Mappers used to detect whether a project still references resources.
    @Resource
    private JobInfoMapper jobInfoMapper;
    @Resource
    private ProcConnectMapper procConnectMapper;
    @Resource
    private ProcTableMapper procTableMapper;
    @Autowired
    private AnalysisInfoMapper analysisInfoMapper;
    @Autowired
    private ConfConnectMapper confConnectMapper;
    @Autowired
    private TableInfoMapper tableInfoMapper;
    @Autowired
    private SysTenantDataPermissionMapper sysTenantDataPermissionMapper;
    @Autowired
    private TableFieldInfoMapper tableFieldInfoMapper;


    // Data-masking UDF bookkeeping and project-user membership.
    @Autowired
    private CusMaskUdfService cusMaskUdfService;
    @Autowired
    private ProcUserMapper procUserMapper;

    // UDF jar metadata and menu/run-history services backing list queries.
    @Autowired
    private BdpUdfJarInfoMapper bdpUdfJarInfoMapper;
    @Autowired
    private IWorkMenuService iWorkMenuService;
    @Autowired
    private RealTimeTaskMenuService realTimeTaskMenuService;
    @Autowired
    private JobRunHistoryMapper jobRunHistoryMapper;
    @Autowired
    private JobRunHistoryService jobRunHistoryService;
    @Autowired
    private JobDepRunHistoryService jobDepRunHistoryService;
    @Autowired
    private SysRoleService sysRoleService;
    @Autowired
    private SysTenantUserService  sysTenantUserService;
    // NOTE(review): injected but not used in the visible part of this file.
    @Autowired
    private RangerDataService rangerDataService;
    /**
     * Checks whether the user is an owner (admin) of the project.
     *
     * @param userId user id
     * @param procId project id
     * @return true when a proc_user row with is_owner = 1 links the two
     */
    @Override
    public boolean isProcOwer(Long userId, Long procId) {
        QueryWrapper<ProcUser> ownerQuery = new QueryWrapper<>();
        ownerQuery.eq("proc_id", procId);
        ownerQuery.eq("user_id", userId);
        ownerQuery.eq("is_owner", 1); // is_owner 1: owner/admin
        return procUserMapper.selectCount(ownerQuery) > 0;
    }

    /**
     * Pages through projects for the current tenant. Super admins see every
     * project; everyone else is filtered by the "current_per" parameter.
     *
     * @param params query/paging parameters (mutated in place)
     * @return the paged result wrapped in PageUtils
     */
    @Override
    public PageUtils selectListPage(Map<String, Object> params) {
        Long userId = ShiroUtils.getUserId();
        // Super admin: drop the per-user restriction so all projects are visible.
        if (DataPermissionUtil.isAdmin(userId.intValue())) {
            params.remove("current_per");
        }
        params.put("tenantId", ShiroUtils.getTenantId()); // tenant id
        // Paging defaults when the caller omits them: 999 rows, first page.
        params.putIfAbsent(Constant.LIMIT, 999);
        params.putIfAbsent(Constant.PAGE, 1);
        Page<ProcInfo> pageRequest = (Page<ProcInfo>) new Query<ProcInfo>().getPage(params);
        IPage<ProcInfo> result = this.baseMapper.selectListPage(pageRequest, params);
        return new PageUtils<>(result);
    }

    /** Distinct connection types used by the project (mapper delegate). */
    @Override
    public List<Map<String, Integer>> connectTypes(Integer procId) {
        return this.baseMapper.connectTypes(procId);
    }

    /** Table-type statistics for the project (mapper delegate). */
    @Override
    public List<Map<String, Object>> tableTypes(Integer procId) {
        return this.baseMapper.tableTypes(procId);
    }

    /** Job alert summaries for the project (mapper delegate). */
    @Override
    public List<Map<String, String>> jobAlerts(Integer procId) {
        return this.baseMapper.jobAlerts(procId);
    }

    /** Run statistics for the last hour (mapper delegate). */
    @Override
    public List<Map<String, Object>> last1Hour(Integer procId) {
        return this.baseMapper.last1Hour(procId);
    }

    /** Run statistics for the last six hours (mapper delegate). */
    @Override
    public List<Map<String, Object>> last6Hour(Integer procId) {
        return this.baseMapper.last6Hour(procId);
    }

    /** Run statistics for the last day (mapper delegate). */
    @Override
    public List<Map<String, Object>> last1Day(Integer procId) {
        return this.baseMapper.last1Day(procId);
    }

    /** Run statistics for the last week (mapper delegate). */
    @Override
    public List<Map<String, Object>> last1Week(Integer procId) {
        return this.baseMapper.last1Week(procId);
    }

    /** Run statistics for the last month (mapper delegate). */
    @Override
    public List<Map<String, Object>> last1Month(Integer procId) {
        return this.baseMapper.last1Month(procId);
    }

    /**
     * Checks whether the project still references any resources: data tables,
     * data-source connections, jobs, or analysis tasks. Short-circuits on the
     * first dependency found.
     *
     * @param procId project id
     * @return true when at least one dependent record exists
     */
    @Override
    public boolean checkIfRef(Integer procId) {
        // || short-circuits, so later queries only run when earlier ones are empty.
        return procTableMapper.selectCount(new QueryWrapper<ProcTable>().eq("proc_id", procId)) > 0
                || procConnectMapper.selectCount(new QueryWrapper<ProcConnect>().eq("proc_id", procId)) > 0
                || jobInfoMapper.selectCount(new QueryWrapper<JobInfo>().eq("proc_id", procId)) > 0
                // analysis tasks count as references too
                || analysisInfoMapper.selectCount(new QueryWrapper<AnalysisInfo>().eq("proc_id", procId)) > 0;
    }

    /**
     * Checks whether another project already uses this name. Null input or a
     * blank name is reported as "exists" so callers reject the save.
     *
     * @param procInfo candidate project (id set when editing an existing one)
     * @return true when the name is taken (or the input is unusable)
     */
    @Override
    public boolean checkExistByName(ProcInfo procInfo) {
        if (procInfo == null || StringUtils.isEmpty(procInfo.getName())) {
            return true;
        }
        QueryWrapper<ProcInfo> nameQuery = new QueryWrapper<ProcInfo>().eq("name", procInfo.getName());
        if (procInfo.getId() != null) {
            // Editing an existing project: its own row must not count as a duplicate.
            nameQuery.ne("id", procInfo.getId());
        }
        return this.baseMapper.selectCount(nameQuery) > 0;
    }

    /**
     * Checks whether the current user is the creator of the given project.
     *
     * @param procId project id
     * @return true when the current user created the project
     * @throws RRException when the project does not exist
     */
    @Override
    public boolean checkPermission(Integer procId) {
        ProcInfo entity = this.baseMapper.selectById(procId);
        if (entity == null) {
            throw new RRException(DataDevelopmentBizExceptionEnum.PROCINFO_PERMISSIOIN.getMessage());
        }
        // Objects.equals avoids the NPE the old unboxing comparison
        // (Integer == int) threw when create_per is null in the database.
        return Objects.equals(entity.getCreatePer(), ShiroUtils.getUserId().intValue());
    }

    /** Returns true when a project row with the given id exists. */
    @Override
    public boolean checkExistById(Integer procId) {
        ProcInfo found = this.baseMapper.selectById(procId);
        return found != null;
    }


    /**
     * Parses an uploaded UDF jar and either registers its metadata (first
     * upload) or refreshes the UDF functions that belong to it (re-upload of a
     * jar with the same original name in the same project).
     *
     * @param multipartFile the uploaded jar
     * @param procId        owning project id
     * @return per-function reload results on re-upload; {@code null} on a
     *         first-time upload (there are no functions to refresh yet)
     * @throws Exception when the jar cannot be materialized or parsed
     */
    @Override
    public Map<String, Boolean> getJarDetials(MultipartFile multipartFile, int procId) throws Exception {
        // NOTE(review): this temp file is never deleted here — confirm whether
        // FileUtils/JarParseUtil clean it up, otherwise it leaks per upload.
        File file = FileUtils.multipartFileToFile(multipartFile);
        // Tenant-configured package prefixes restrict which classes are scanned as UDFs.
        List<String> packUrls = procInfoService.getPackUrls(procId);
        if (CollectionUtil.isEmpty(packUrls)) {
            throw new RRException("请联系租户管理员前往数据字典中配置UDF_dirpath!", 500);
        }
        List<UdfJarPropertyVo> jarDetails = JarParseUtil.getJarClassAndFuncs(file, packUrls);
        Set<String> jarClassNames = new HashSet<>();
        for (UdfJarPropertyVo detail : jarDetails) {
            jarClassNames.add(detail.getJarClassName());
        }
        // Replaced System.out.println with the class logger.
        log.debug("jarClassNames:{}", jarClassNames);

        String originalFilename = multipartFile.getOriginalFilename();
        if (originalFilename == null) {
            // getOriginalFilename() may return null; the old code would NPE on trim().
            throw new RRException("上传的jar包文件名为空", 500);
        }
        String jarOriginName = originalFilename.trim();
        // Is there already a jar with this original name in the project?
        BdpUdfJarInfo udfJarInfo = bdpUdfJarInfoMapper.selectOne(new QueryWrapper<BdpUdfJarInfo>()
                .eq("jar_origin_name", jarOriginName)
                .eq("proc_id", procId));

        if (udfJarInfo != null) {
            // Re-upload: refresh the existing functions against the new jar content.
            List<JobRunUdf> procUdfFuncs = jobRunUdfMapper.selectList(new QueryWrapper<JobRunUdf>()
                    .eq("jar_id", udfJarInfo.getId()).eq("proc_id", procId));
            Map<String, Boolean> reloadResult = new HashMap<>();
            // Drop UDFs whose class vanished, push the jar to HDFS, then reload the rest.
            deleteUdfsByClassNotFound(procUdfFuncs, jarClassNames, reloadResult);
            uploadUdfJar(multipartFile, procId, udfJarInfo.getJarName());
            udfJarInfo.setUpdateTime(LocalDateTime.now());
            bdpUdfJarInfoMapper.updateById(udfJarInfo);
            reloadUdfs(procUdfFuncs, udfJarInfo.getId(), jarClassNames, reloadResult);
            return reloadResult;
        }
        // First upload: store the jar under a generated name and record the metadata.
        String jarName = UUID.randomUUID().toString() + ".jar";
        uploadUdfJar(multipartFile, procId, jarName);
        BdpUdfJarInfo newJarInfo = new BdpUdfJarInfo();
        newJarInfo.setJarName(jarName);
        newJarInfo.setJarOriginName(jarOriginName);
        newJarInfo.setUpdateTime(LocalDateTime.now());
        newJarInfo.setProcId(procId);
        bdpUdfJarInfoMapper.insert(newJarInfo);
        return null;
    }

    /**
     * Uploads a UDF jar to HDFS under {udfRoot}/{procId}/{jarName}.
     *
     * @param multipartFile jar content to upload
     * @param procId        owning project id
     * @param jarName       stored (generated) jar file name
     * @throws RRException when the upload fails for any reason
     */
    public void uploadUdfJar(MultipartFile multipartFile, int procId, String jarName) {
        HdfsUtil hdfsUtil = null;
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
            String uploadPath = String.format("%s/%s/%s", bdpJobConfig.getUdf(), procId, jarName);
            hdfsUtil.writeFile(multipartFile.getBytes(), uploadPath);
        } catch (Exception e) {
            // Log the full exception; the old code kept only the message.
            log.error("上传jar包" + multipartFile.getOriginalFilename() + "失败：", e);
            throw new RRException("上传jar包" + multipartFile.getOriginalFilename() + "失败");
        } finally {
            // Single close point; the old code could close up to twice per call.
            if (hdfsUtil != null) {
                hdfsUtil.close();
            }
        }
    }

    /**
     * Deletes a UDF jar from HDFS and removes its metadata row.
     *
     * @param bdpUdfJarInfo jar metadata (proc id and stored jar name give the HDFS path)
     * @throws RRException when the HDFS delete or DB delete fails
     */
    @Override
    public void deleteUdfJar(BdpUdfJarInfo bdpUdfJarInfo) {
        HdfsUtil hdfsUtil = null;
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
            String uploadPath = String.format("%s/%s/%s", bdpJobConfig.getUdf(),
                    bdpUdfJarInfo.getProcId(), bdpUdfJarInfo.getJarName());
            hdfsUtil.delete(uploadPath);
            // Only remove the metadata once the HDFS file is gone.
            bdpUdfJarInfoMapper.deleteById(bdpUdfJarInfo.getId());
        } catch (Exception e) {
            // Log the full exception; the old code kept only the message.
            log.error("删除hdfs上的jar包" + bdpUdfJarInfo.getJarOriginName() + "失败：", e);
            throw new RRException("删除hdfs上的jar包" + bdpUdfJarInfo.getJarOriginName() + "失败");
        } finally {
            // Single close point; the old code could close up to twice per call.
            if (hdfsUtil != null) {
                hdfsUtil.close();
            }
        }
    }

    /**
     * Disables every UDF whose implementing class is no longer present in the
     * re-uploaded jar, dropping it from Hive (best effort) and marking it
     * unusable in the database.
     *
     * @param procUdfFuncs  existing UDFs of the jar/project
     * @param jarClassNames class names found in the new jar content
     * @param reloadResult  per-function outcome map, updated in place
     */
    public void deleteUdfsByClassNotFound(List<JobRunUdf> procUdfFuncs, Set<String> jarClassNames, Map<String, Boolean> reloadResult) {
        for (JobRunUdf jobRunUdf : procUdfFuncs) {
            if (jarClassNames.contains(jobRunUdf.getJarClassName())) {
                continue; // class still present — nothing to remove
            }
            HiveTableUtil hiveTableUtil = null;
            try {
                hiveTableUtil = new HiveTableUtil(hiveConfig.getPressionUrl(), "hdfs", "hdfs");
                hiveTableUtil.execute(String.format(" drop function if exists %s ", jobRunUdf.getJarMethodName()));
                log.info("刷新udf时，由于缺失类，删除udf函数+" + jobRunUdf.getJarMethodName());
            } catch (SQLException e) {
                // Best effort: the UDF is still flagged unusable below even if the drop fails.
                log.error("刷新udf时，由于缺失类，需要删除udf函数，却删除失败" + e.getMessage());
            } finally {
                // Close exactly once; the old code leaked the connection on
                // non-SQLException failures and could close twice otherwise.
                if (hiveTableUtil != null) {
                    hiveTableUtil.close();
                }
            }
            jobRunUdf.setIsUsable(UdfUsableType.DISUSABLE.getCode());
            jobRunUdfMapper.updateById(jobRunUdf);
            reloadResult.put(jobRunUdf.getJarMethodName(), false);
        }
    }

    /**
     * Reloads or re-registers every UDF whose implementing class is still
     * present in the new jar content.
     *
     * @param procUdfFuncs  existing UDFs of the jar/project
     * @param jarId         jar id (kept for interface compatibility)
     * @param jarClassNames class names found in the new jar
     * @param reloadResult  per-function outcome map, updated in place
     * @return the same reloadResult map
     */
    public Map<String, Boolean> reloadUdfs(List<JobRunUdf> procUdfFuncs, Integer jarId, Set<String> jarClassNames, Map<String, Boolean> reloadResult) {
        for (JobRunUdf udf : procUdfFuncs) {
            if (!jarClassNames.contains(udf.getJarClassName())) {
                continue; // class missing from the jar — handled elsewhere
            }
            if (udf.getIsUsable().equals(UdfUsableType.USABLE.getCode())) {
                // Still registered in Hive: a RELOAD is enough.
                reloadUdf(udf, reloadResult);
            } else {
                // Previously disabled: register the function again.
                reRegistUdf(udf, reloadResult);
            }
        }
        return reloadResult;
    }


    /**
     * Issues a Hive RELOAD for an already-registered UDF and records the
     * outcome (usable flag in the DB plus an entry in reloadResult).
     *
     * @param jobRunUdf    the UDF to reload
     * @param reloadResult per-function outcome map, updated in place
     */
    public void reloadUdf(JobRunUdf jobRunUdf, Map<String, Boolean> reloadResult) {
        HiveTableUtil hiveTableUtil = new HiveTableUtil(hiveConfig.getPressionUrl(), "hdfs", "hdfs");
        try {
            hiveTableUtil.execute("RELOAD " + jobRunUdf.getJarMethodName());
            jobRunUdf.setIsUsable(UdfUsableType.USABLE.getCode());
            jobRunUdfMapper.updateById(jobRunUdf);
            reloadResult.put(jobRunUdf.getJarMethodName(), true);
        } catch (SQLException e) {
            log.error("重载udf函数包失败：" + e.getMessage());
            jobRunUdf.setIsUsable(UdfUsableType.DISUSABLE.getCode());
            jobRunUdfMapper.updateById(jobRunUdf);
            reloadResult.put(jobRunUdf.getJarMethodName(), false);
        } finally {
            // Single close; the old code closed twice on the error path
            // (once in the catch, once after the try/catch).
            hiveTableUtil.close();
        }
    }


    /**
     * Re-creates a previously disabled UDF in Hive from its stored jar/class
     * metadata. If the function already exists in Hive, falls back to a RELOAD.
     *
     * @param jobRunUdf    the UDF to re-register
     * @param reloadResult per-function outcome map, updated in place
     */
    public void reRegistUdf(JobRunUdf jobRunUdf, Map<String, Boolean> reloadResult) {
        HiveTableUtil hiveTableUtil = null;
        try {
            BdpUdfJarInfo bdpUdfJarInfo = bdpUdfJarInfoMapper.selectById(jobRunUdf.getJarId());
            hiveTableUtil = new HiveTableUtil(hiveConfig.getPressionUrl(), "hdfs", "hdfs");
            hiveTableUtil.execute(String.format("CREATE function %s as '%s' USING JAR 'hdfs:///%s/%s/%s'",
                    jobRunUdf.getJarMethodName(), jobRunUdf.getJarClassName(), bdpJobConfig.getUdf(),
                    jobRunUdf.getProcId(), bdpUdfJarInfo.getJarName()));
            jobRunUdf.setIsUsable(UdfUsableType.USABLE.getCode());
            jobRunUdfMapper.updateById(jobRunUdf);
            reloadResult.put(jobRunUdf.getJarMethodName(), true);
        } catch (SQLException e) {
            String message = e.getMessage();
            if (message != null && message.contains("AlreadyExistsException")) {
                // Already registered in Hive: a RELOAD is the correct recovery.
                reloadUdf(jobRunUdf, reloadResult);
            } else {
                // The old code silently ignored any other non-null failure message:
                // nothing was logged and reloadResult got no entry. Record it.
                reloadResult.put(jobRunUdf.getJarMethodName(), false);
                log.error("刷新时，重新注册udf函数失败", e);
            }
        } finally {
            // Single close point; the old code leaked the connection when the
            // CREATE succeeded was followed by a non-SQL failure, and on success
            // paths closed inside the try.
            if (hiveTableUtil != null) {
                hiveTableUtil.close();
            }
        }
    }


        @Override
    public void registUdf(UdfJarPropertyVo udfJarPropertyVo,BdpUdfJarInfo bdpUdfJarInfo){
        Integer createrId = ShiroUtils.getUserId().intValue();
        String jarName = bdpUdfJarInfo.getJarName();
        String className = udfJarPropertyVo.getJarClassName();
        ProcInfo procInfo = procInfoService.getById(udfJarPropertyVo.getProcId());
        String erroInfo ="";
        if(null == procInfo){
            throw new RRException("未查询到当前项目信息",500);
        }
        Integer procId = procInfo.getId();

        HiveTableUtil hiveTableUtil = new HiveTableUtil(hiveConfig.getPressionUrl(),"hdfs","hdfs");
        try {
            hiveTableUtil.execute(String.format("CREATE function %s as '%s' USING JAR 'hdfs:///%s/%s/%s'",udfJarPropertyVo.getJarMethodName(),className,bdpJobConfig.getUdf(),procId,jarName));
            hiveTableUtil.close();
        } catch (SQLException e) {
            if(null != hiveTableUtil){
                hiveTableUtil.close();
            }
            if (e.getMessage()==null){
                throw new RRException(e.getMessage());
            }
            else if(e.getMessage().contains("AlreadyExistsException")){
                JobRunUdf procUdfFunc2 = jobRunUdfMapper.selectOne(new QueryWrapper<JobRunUdf>().eq("jar_method_name",udfJarPropertyVo.getJarMethodName()));
                ProcUser procUser1 = null;
                ProcInfo procInfo2 = null;
                if (null!=procUdfFunc2){
                    procUser1 = procUserMapper.selectOne(new QueryWrapper<ProcUser>().eq("proc_id",procUdfFunc2.getProcId()).eq("user_id",createrId));
                    procInfo2 = procInfoService.getById(procUdfFunc2.getProcId());
                }
                log.info(String.format("方法名%s已被%s注册%s;",
                        udfJarPropertyVo.getJarMethodName(),
                        null == procUdfFunc2 ? "" : (null == procUser1 ? "其他项目" + procUdfFunc2.getProcId() : "项目[" + (null == procInfo2 ? procUdfFunc2.getProcId() : procInfo2.getName()) + "]"),
                        e.getMessage()));
                erroInfo =erroInfo + String.format("方法名%s已被%s注册;",
                        udfJarPropertyVo.getJarMethodName(),
                        null==procUdfFunc2?"":(null==procUser1?"其他项目(项目id="+procUdfFunc2.getProcId()+")":"项目["+(null==procInfo2?procUdfFunc2.getProcId():procInfo2.getName())+"]"));

            }
            else if (e.getMessage().contains("ClassNotFoundException")){
                log.info("jar包中不包含" + udfJarPropertyVo.getJarClassName() + "类;");
                erroInfo = erroInfo + "jar包中不包含" + udfJarPropertyVo.getJarClassName() + "类;";
            }
            else{
                log.info("方法" + udfJarPropertyVo.getJarMethodName() + ":" + e.getMessage() + ";");
                erroInfo = erroInfo + "方法" + udfJarPropertyVo.getJarMethodName() + ":" + e.getMessage() + ";";
            }
            throw new RRException(erroInfo);
        }


        //兼容对应的数据脱敏的udf函数注册
        packMaskUdfFunc(className,udfJarPropertyVo.getJarMethodName(),jarName);
        udfJarPropertyVo.setProcId(procId);
        udfJarPropertyVo.setJarId(bdpUdfJarInfo.getId());
        udfJarPropertyVo.setCreaterPerId(createrId);
        udfJarPropertyVo.setCreateTime(LocalDateTime.now());
        JobRunUdf jobRunUdf = new JobRunUdf();
        BeanUtil.copyProperties(udfJarPropertyVo,jobRunUdf);
        jobRunUdf.setIsAlreadyRegist(1);
        jobRunUdfMapper.insert(jobRunUdf);
    }

    /**
     * Flags the data-masking UDF that shares this implementing class as
     * registered, recording the Hive function name and the stored jar name.
     *
     * @param className     implementing class of the registered UDF
     * @param jarMethodName Hive function name just registered
     * @param jarName       stored jar file name
     */
    private void packMaskUdfFunc(String className, String jarMethodName, String jarName) {
        CusMaskUdf maskUdf = cusMaskUdfService.getOne(new QueryWrapper<CusMaskUdf>().eq("jar_class_name", className));
        if (maskUdf == null || 0 != maskUdf.getIsReginster()) {
            // No matching masking UDF, or it is already registered — nothing to do.
            return;
        }
        maskUdf.setIsReginster(1);
        maskUdf.setUdfFuncName(jarMethodName);
        maskUdf.setJarName(jarName);
        cusMaskUdfService.updateById(maskUdf);
    }

    /** Returns the UDF functions registered under the given project and jar. */
    @Override
    public List<JobRunUdf> getProcUdfFuncs(Integer procId, Integer jarId) {
        QueryWrapper<JobRunUdf> funcQuery = new QueryWrapper<JobRunUdf>()
                .eq("proc_id", procId)
                .eq("jar_id", jarId);
        return jobRunUdfMapper.selectList(funcQuery);
    }

    /** Pages through a project's UDF functions via the mapper-side query. */
    @Override
    public PageUtils getProcUdfFuncsByPage(Map<String, Object> params) {
        Page<JobRunUdf> pageRequest = (Page<JobRunUdf>) new Query<JobRunUdf>().getPage(params);
        IPage<JobRunUdf> result = jobRunUdfMapper.getProcUdfFuncsByPage(pageRequest, params);
        return new PageUtils(result);
    }

    /**
     * Deletes a UDF function: drops it from Hive and (optionally) removes its
     * DB row. If the Hive drop fails, attempts to restore the previous state
     * by re-saving the row and re-creating the function in Hive; a restore
     * failure caused by the name being registered elsewhere is reported with
     * the owning project.
     *
     * @param id           job_run_udf row id
     * @param userId       current user id (used to attribute a conflicting registration)
     * @param dbDataDelete when true, also delete the job_run_udf row
     * @throws RRException when the UDF is missing, the caller lacks permission,
     *                     or the Hive drop (and possibly the restore) fails
     */
    @Override
    public void deleteUdfFunc(Integer id,Integer userId,Boolean dbDataDelete) {
        JobRunUdf procUdfFunc = jobRunUdfMapper.selectOne(new QueryWrapper<JobRunUdf>().eq("id",id));
        if(procUdfFunc==null){
            throw new RRException("该udf函数不存在");
        }
        // Only the project creator may delete its UDFs.
        if (!procInfoService.checkPermission(procUdfFunc.getProcId())){
            throw new RRException("没有权限删除udf函数");
        }
        HiveTableUtil hiveTableUtil = null;
        try {
            hiveTableUtil = new HiveTableUtil(hiveConfig.getPressionUrl(),"hdfs","hdfs");
            hiveTableUtil.execute(String.format(" drop function %s ",procUdfFunc.getJarMethodName()));
            hiveTableUtil.close();
            if (dbDataDelete){
                jobRunUdfService.remove(new QueryWrapper<JobRunUdf>().eq("id",id));
            }
        } catch (SQLException e) {
            if(null != hiveTableUtil){
                hiveTableUtil.close();
            }
            // Drop failed — try to roll back: re-save the row and re-create the
            // function in Hive from the stored jar metadata.
            BdpUdfJarInfo bdpUdfJarInfo = bdpUdfJarInfoMapper.selectById(procUdfFunc.getJarId());
            if (null!=bdpUdfJarInfo) {
                jobRunUdfService.saveOrUpdate(procUdfFunc);
                HiveTableUtil hiveTableUtil1 = null;
                try {
                    hiveTableUtil1 = new HiveTableUtil(hiveConfig.getPressionUrl(), "hdfs", "hdfs");
                    hiveTableUtil1.execute(String.format("CREATE function %s as '%s' USING JAR 'hdfs:///%s/%s/%s'", procUdfFunc.getJarMethodName(), procUdfFunc.getJarClassName(), bdpJobConfig.getUdf(), procUdfFunc.getProcId(), bdpUdfJarInfo.getJarName()));
                    hiveTableUtil1.close();
                } catch (SQLException e1) {
                    if(null != hiveTableUtil1){
                        hiveTableUtil1.close();
                    }
                    // Restore failed because the name is registered elsewhere:
                    // identify the owning project/user for the error message.
                    if (e1.toString().contains("AlreadyExistsException")) {
                        JobRunUdf procUdfFunc2 = jobRunUdfMapper.selectOne(new QueryWrapper<JobRunUdf>().eq("jar_method_name", procUdfFunc.getJarMethodName()));
                        ProcUser procUser1 = null;
                        ProcInfo procInfo = null;
                        if (null != procUdfFunc2) {
                            procUser1 = procUserMapper.selectOne(new QueryWrapper<ProcUser>().eq("proc_id", procUdfFunc2.getProcId()).eq("user_id", userId));
                            procInfo = procInfoService.getById(procUdfFunc2.getProcId());
                        }
                        log.error(String.format("删除udf函数失败并且恢复udf失败，方法名%s已被%s注册%s",
                                procUdfFunc.getJarMethodName(),
                                null == procUdfFunc2 ? "" : (null == procUser1 ? "其他项目" + procUdfFunc2.getProcId() : "项目[" + (null == procInfo ? procUdfFunc2.getProcId() : procInfo.getName()) + "]"),
                                e1.getMessage()));
                        throw new RRException(String.format("删除udf函数失败并且恢复udf失败，方法名%s已被%s注册",
                                procUdfFunc.getJarMethodName(),
                                null == procUdfFunc2 ? "" : (null == procUser1 ? "其他项目(项目id=" + procUdfFunc2.getProcId() + ")" : "项目[" + (null == procInfo ? procUdfFunc2.getProcId() : procInfo.getName()) + "]")));
                    }
                    log.error("删除udf函数失败并且恢复udf失败" + e1.getMessage());
                    throw new RRException("删除udf函数失败并且恢复udf失败");
                }
            }
            log.info("删除udf函数失败" + e.getMessage());
            throw new RRException("删除udf函数失败");
        }
    }

    /**
     * Lists projects visible to the user in the current tenant. Super admins
     * see every project (the mapper treats a null user id as "no filter").
     *
     * @param userId user id
     * @return the visible projects
     */
    @Override
    public List<ProcInfo> listUserProcs(Long userId) {
        if (sysUserRoleService.superAdmin(userId)) {
            // No user filter for super admins.
            return this.baseMapper.listUserProcs(null, ShiroUtils.getTenantId());
        }
        return this.baseMapper.listUserProcs(userId, ShiroUtils.getTenantId());
    }

    /**
     * Lists projects visible to the user in an explicit tenant. Tenant super
     * admins see every project (null user filter passed to the mapper).
     *
     * @param userId   user id
     * @param tenantId tenant to query
     * @return the visible projects
     */
    @Override
    public List<ProcInfo> listUserProcsByTenantId(Long userId, Integer tenantId) {
        Long filterUserId = sysUserRoleService.superAdminByTenantId(userId, tenantId) ? null : userId;
        return this.baseMapper.listUserProcs(filterUserId, tenantId);
    }

    /**
     * Pages through projects with owner information, optionally filtered by
     * name and creator. Non-super-admins only see their own projects.
     *
     * @param req    paging/filter request
     * @param userId current user id
     * @return paged project/owner rows
     */
    @Override
    public PageUtils2<ProcInfoListRes> listOwer(ProcInfoListOwerReq req, Long userId) {
        Map<String, Object> params = new HashMap<>();
        if (req.getLimit() != null && req.getPage() != null) {
            // Convert page number into a row offset for the SQL LIMIT clause.
            params.put("offset", (req.getPage() - 1) * req.getLimit());
            params.put("limit", req.getLimit());
        }
        if (StringUtils.isNotEmpty(req.getName())) {
            params.put("name", req.getName());
        }
        if (StringUtils.isNotEmpty(req.getCreatePer())) {
            params.put("createPer", req.getCreatePer());
        }
        Integer tenantId = ShiroUtils.getTenantId();
        // Super admins get no user filter; others only see their own projects.
        params.put("userId", sysRoleService.isSuperAdmin(userId) ? null : userId);
        params.put("tenantId", tenantId); // tenant id
        params.put("currentUserId", userId);
        List<ProcInfoListRes> rows = baseMapper.selectProcOwer(params);
        Integer total = baseMapper.selectProcOwerCount(params);
        return new PageUtils2<ProcInfoListRes>(rows, total, req.getLimit(), req.getPage());
    }

    /**
     * Pages through the users visible to the current user in the current
     * tenant. Super admins see everyone; others only themselves.
     *
     * @param paster query parameters; "page" is rewritten in place to a row offset
     * @return paged user rows
     */
    @Override
    public PageUtils listUser(Map<String, Object> paster) {
        // Defaults: first page, 10 rows per page.
        int page = 1;
        int limit = 10;
        Object pageParam = paster.get("page");
        if (pageParam != null) {
            page = Integer.parseInt(pageParam + "");
        }
        Object limitParam = paster.get("limit");
        if (limitParam != null) {
            limit = Integer.parseInt(limitParam + "");
        }
        // The mapper expects "page" to hold the row offset, not the page number.
        paster.put("page", (page - 1) * limit);
        paster.put("limit", limit);
        Long userId = ShiroUtils.getUserId();
        Integer tenantId = ShiroUtils.getTenantId();
        // Super admins get no user filter.
        paster.put("userId", sysRoleService.isSuperAdmin(userId) ? null : userId);
        paster.put("tenantId", tenantId);
        List<Map<String, String>> rows = baseMapper.userList(paster);
        Integer total = baseMapper.userListCount(paster);
        return new PageUtils(rows, total, limit, page);
    }

    /** All UDF functions for the current context (delegates to jobRunUdfService). */
    @Override
    public List<JobRunUdfDto> listUdfFuncs() {
        return jobRunUdfService.listUdfFuncs();
    }


    /**
     * Returns every project that has at least one job or dependency run
     * history record for the current tenant.
     *
     * @return matching projects, or an empty list when there is no run history
     */
    @Override
    public List<ProcInfo> getAllProcList() {
        // runType 3 covers both manual and scheduled runs; other filters are
        // left null so the history query is unrestricted apart from the tenant.
        Map<String, Object> params = new HashMap<>();
        params.put("proc_id", null);
        params.put("state", null);
        params.put("jobLogType", null);
        params.put("queueName", null);
        params.put("name", null);
        params.put("runType", 3);
        params.put("page", 1);
        params.put("limit", 10000);
        params.put("jobStartTime", null);
        params.put("jobEndTime", null);
        params.put("tenantId", ShiroUtils.getTenantId());
        // Raw List replaced with a wildcard-typed one; elements are project ids
        // (used directly in the IN clause below) — TODO confirm the mapper's
        // declared element type.
        List<?> procIds = jobRunHistoryMapper.queryAllJobAndDepProcId(params);
        if (CollectionUtil.isEmpty(procIds)) {
            return new ArrayList<>();
        }
        return this.list(Wrappers.<ProcInfo>query().lambda().in(ProcInfo::getId, procIds));
    }

    /**
     * Returns projects filtered by task type.
     *
     * @param type 0 = offline (work menu) projects, 1 = real-time projects
     * @return the matching projects; empty list for any other type
     */
    @Override
    public List<ProcInfo> procs(Integer type) {
        List<ProcInfo> result = new ArrayList<>();
        Long userId = ShiroUtils.getUserId();
        if (Objects.equals(0, type)) {
            // Offline projects come from the work menu.
            result = iWorkMenuService.procs(userId, ShiroUtils.getTenantId());
        } else if (Objects.equals(1, type)) {
            // Real-time projects come from the real-time task menu.
            result = realTimeTaskMenuService.procs(userId, ShiroUtils.getTenantId());
        }
        return result;
    }

    /**
     * Returns the projects that appear in run-history logs of the given type,
     * merged from job-flow and job-flow-dependency histories, de-duplicated
     * and sorted by project id.
     *
     * @param type 0 = manual runs, 1 = scheduled runs
     * @return distinct projects sorted by id; empty list when there are none
     */
    @Override
    public List<ProcInfo> logProcs(Integer type) {
        Set<ProcInfo> merged = new HashSet<>();
        // Job-flow run history.
        List<ProcInfo> jobProcs = jobRunHistoryService.logProcs(type);
        if (CollectionUtil.isNotEmpty(jobProcs)) {
            merged.addAll(jobProcs);
        }
        // Job-flow dependency run history.
        List<ProcInfo> depProcs = jobDepRunHistoryService.logProcs(type);
        if (CollectionUtil.isNotEmpty(depProcs)) {
            merged.addAll(depProcs);
        }
        if (CollectionUtil.isEmpty(merged)) {
            return new ArrayList<>();
        }
        return merged.stream()
                .sorted(Comparator.comparing(ProcInfo::getId))
                .collect(Collectors.toList());
    }

    /**
     * Lists the projects the current user may see, newest id first. Admins
     * see every project in the tenant; others only the projects they are
     * linked to via proc_user.
     *
     * @return visible projects ordered by id descending
     */
    @Override
    public List<ProcInfo> seeList() {
        int userId = ShiroUtils.getUserId().intValue();
        Integer tenantId = ShiroUtils.getTenantId();
        QueryWrapper<ProcInfo> query = new QueryWrapper<ProcInfo>();
        if (DataPermissionUtil.isAdmin(userId)) {
            // Admins: everything in the current tenant.
            query.eq("tenant_id", ShiroUtils.getTenantId());
        } else {
            List<Integer> procIds = procUserMapper.procIdListByUserId(userId, tenantId);
            if (CollectionUtil.isEmpty(procIds)) {
                // No memberships: an impossible id yields an empty result set.
                query.in("id", 0);
            } else {
                query.in("id", procIds.stream().distinct().collect(Collectors.toList()));
            }
        }
        query.orderByDesc("id");
        return baseMapper.selectList(query);
    }

    @Override
    public void initProcConnectsAndTables() {
        //初始化项目关联的用户和租户的关联表数据
        List<ProcDatasDto> allUserLists = procUserMapper.listProcUsers();
        List<SysTenantUser> list = sysTenantUserService.list();
        List<SysTenantUser> insertAllLists = new ArrayList<>();
        if(CollectionUtil.isNotEmpty(allUserLists)){
            Map<Integer, List<ProcDatasDto>> usersMaps = allUserLists.stream().collect(Collectors.groupingBy(ProcDatasDto::getId));
            for (Map.Entry<Integer, List<ProcDatasDto>> entery : usersMaps.entrySet()){
                Integer userId = entery.getKey();
                List<ProcDatasDto> procDatasDtos = entery.getValue();
                if(CollectionUtil.isNotEmpty(procDatasDtos)){
                    List<Integer> tenantIds = procDatasDtos.stream().map(ProcDatasDto::getTenantId).distinct().collect(Collectors.toList());
                    List<SysTenantUser> userAndTenants = null;
                    if(CollectionUtil.isNotEmpty(list)){
                        userAndTenants = list.stream().filter(o -> Objects.equals(o.getUserId(), userId)).collect(Collectors.toList());
                    }
                    for (Integer tenantId : tenantIds) {
                        if(CollectionUtil.isEmpty(userAndTenants)){
                            insertAllLists.add(packBeanTenantUser(userId,tenantId));
                        }else{
                            SysTenantUser sysTenantUser = userAndTenants.stream().filter(o -> Objects.equals(o.getTenantId(), tenantId)).findAny().orElse(null);
                            if(sysTenantUser == null){
                                insertAllLists.add(packBeanTenantUser(userId,tenantId));
                            }
                        }
                    }
                }
            }
        }
        if(CollectionUtil.isNotEmpty(insertAllLists)){
            sysTenantUserService.saveBatch(insertAllLists);
        }

        //初始化数据源
        //查询到所有的数据源关联的项目的租户
        List<ProcDatasDto> allConnectLists = confConnectMapper.listProcConnects();
        //按照数据源id进行分组
        if(CollectionUtil.isNotEmpty(allConnectLists)){
            List<SysTenantDataPermission> initConnects = new ArrayList<>();
            Map<Integer, List<ProcDatasDto>> connectMaps = allConnectLists.stream().collect(Collectors.groupingBy(ProcDatasDto::getId));
            if(MapUtil.isNotEmpty(connectMaps)){
                for(Map.Entry<Integer, List<ProcDatasDto>> entry : connectMaps.entrySet()){
                    List<ProcDatasDto> value = entry.getValue();
                    Integer key = entry.getKey();
                    if(CollectionUtil.isNotEmpty(value)){
                        Set<Integer> tenantIds = value.stream().map(ProcDatasDto::getTenantId).collect(Collectors.toSet());
                        if(CollectionUtil.isNotEmpty(tenantIds)){
                            for (Integer tenantId : tenantIds) {
                                SysTenantDataPermission connectSysTenantDataPermission = new SysTenantDataPermission();
                                connectSysTenantDataPermission.yesValue(tenantId,key,1);
                                initConnects.add(connectSysTenantDataPermission);
                            }
                        }
                    }
                }
            }
            if(CollectionUtil.isNotEmpty(initConnects)){
                sysTenantDataPermissionMapper.insertOrUpdateByQuKey(initConnects);
            }
        }

        //初始化离线表和其下的字段
        List<ProcDatasDto> allTableLists = tableInfoMapper.listProcTables();
        List<TableFieldInfo> allTableFiledLists = tableFieldInfoMapper.listAll();
        if(CollectionUtil.isNotEmpty(allTableLists)){
            Set<String> intiCreateTable = Sets.newHashSet();
            List<SysTenantDataPermission> initTables = new ArrayList<>();
            List<SysTenantDataPermission> initTableFileds = new ArrayList<>();
            Map<Integer, List<ProcDatasDto>> tableMaps = allTableLists.stream().collect(Collectors.groupingBy(ProcDatasDto::getId));
            List<Integer> allTenantIds = allTableLists.stream().map(ProcDatasDto::getTenantId).distinct().collect(Collectors.toList());
            List<SysTenantUser> users = sysTenantUserService.list(new LambdaQueryWrapper<SysTenantUser>().in(SysTenantUser::getTenantId, allTenantIds).eq(SysTenantUser::getTenantType, 0));
            if(MapUtil.isNotEmpty(tableMaps)){
                for(Map.Entry<Integer, List<ProcDatasDto>> entry : tableMaps.entrySet()){
                    List<ProcDatasDto> value = entry.getValue();
                    Integer key = entry.getKey();
                    if(CollectionUtil.isNotEmpty(value)){
                        Set<Integer> tenantIds = value.stream().map(ProcDatasDto::getTenantId).collect(Collectors.toSet());
                        ProcDatasDto procDatasDto = value.get(0);
                        if(CollectionUtil.isNotEmpty(tenantIds)){
                            for (Integer tenantId : tenantIds) {
                                SysTenantDataPermission tableSysTenantDataPermission = new SysTenantDataPermission();
                                tableSysTenantDataPermission.yesValue(tenantId,key,2);
                                initTables.add(tableSysTenantDataPermission);
                            }
                        }
                        //查询到表下面的字段
                        if(CollectionUtil.isNotEmpty(allTableFiledLists)){
                            List<TableFieldInfo> tableFieldInfos = allTableFiledLists.stream().filter(o -> Objects.equals(o.getTableId(), key)).collect(Collectors.toList());
                            if(CollectionUtil.isNotEmpty(tableFieldInfos) && CollectionUtil.isNotEmpty(tenantIds)){
                                for (Integer tenantId : tenantIds) {
                                    //初始化这个租户下的租户管理员对应的表权限
                                    if(CollectionUtil.isNotEmpty(users)){
                                        List<SysTenantUser> userAndTenants = users.stream().filter(o -> Objects.equals(o.getTenantId(), tenantId)).collect(Collectors.toList());
                                        if(CollectionUtil.isNotEmpty(userAndTenants)){
                                            for (SysTenantUser userAndTenant : userAndTenants) {
                                                intiCreateTable.add(procDatasDto.getDbName() + "|" + procDatasDto.getTableName()
                                                        + "|" + key + "|" + userAndTenant.getUserId());
                                            }
                                        }
                                    }
                                    for (TableFieldInfo tableFieldInfo : tableFieldInfos) {
                                        SysTenantDataPermission tableFieldSysTenantDataPermission = new SysTenantDataPermission();
                                        tableFieldSysTenantDataPermission.yesValue(tenantId,tableFieldInfo.getId(),3);
                                        initTableFileds.add(tableFieldSysTenantDataPermission);
                                    }
                                }
                            }
                        }
                    }
                }
            }
            //表
            if(CollectionUtil.isNotEmpty(initTables)){
                sysTenantDataPermissionMapper.insertOrUpdateByQuKey(initTables);
            }
            //字段
            if(CollectionUtil.isNotEmpty(initTableFileds)){
                sysTenantDataPermissionMapper.insertOrUpdateByQuKey(initTableFileds);
            }
            //表的权限
            if(CollectionUtil.isNotEmpty(intiCreateTable)){
                rangerDataService.intiCreateTable(intiCreateTable);
            }
        }

    }

    /**
     * Looks up the package (jar/resource) URLs associated with the given process.
     *
     * @param procId id of the process definition to resolve packages for
     * @return the URLs recorded for that process, as returned by the mapper
     */
    @Override
    public List<String> getPackUrls(int procId) {
        // Pure delegation: the SQL in the mapper XML does all the work.
        List<String> packUrls = baseMapper.getPackUrls(procId);
        return packUrls;
    }

    /**
     * Resolves a single value keyed by the given job id.
     *
     * <p>NOTE(review): the mapper decides what string this actually is (name,
     * path, …) — confirm against the mapper XML before relying on its meaning.
     *
     * @param jobInfoId id of the job info record
     * @return the string the mapper associates with the job id, possibly null
     */
    @Override
    public String getByJobId(Integer jobInfoId) {
        // Pure delegation to the generated/base mapper.
        String result = baseMapper.getByJobId(jobInfoId);
        return result;
    }

    /**
     * Builds a new {@link SysTenantUser} association for the given user and tenant.
     *
     * <p>tenantType is fixed to 1 and isMain to 0 — presumably "ordinary
     * (non-admin) member of a non-primary tenant"; NOTE(review): confirm the
     * enum meaning of these codes against the system-center module.
     *
     * @param userId   id of the user to associate
     * @param tenantId id of the tenant the user is being attached to
     * @return a fully populated, not-yet-persisted association entity
     */
    private SysTenantUser packBeanTenantUser(Integer userId, Integer tenantId) {
        SysTenantUser tenantUser = new SysTenantUser();
        tenantUser.setUserId(userId);
        tenantUser.setTenantId(tenantId);
        tenantUser.setIsMain(0);
        tenantUser.setTenantType(1);
        return tenantUser;
    }
}
