package com.inspur.cloud.service.dataspace;

import com.inspur.cloud.dao.dataspace.KafkaResourceDao;
import com.inspur.cloud.dao.dataspace.UserDao;
import com.inspur.cloud.dao.dataspace.UserResourceRelationDao;
import com.inspur.cloud.entity.dataspace.KafkaResourceEntity;
import com.inspur.cloud.entity.dataspace.UserEntity;
import com.inspur.cloud.entity.dataspace.UserResourceRelationEntity;
import com.inspur.cloud.entity.dataspace.ao.resource.DistributeAO;
import com.inspur.cloud.entity.dataspace.common.OpEntityResult;
import com.inspur.cloud.entity.dataspace.common.PageReqParam;
import com.inspur.cloud.entity.dataspace.common.PageResult;
import com.inspur.cloud.entity.dataspace.dto.KafkaTopicInfo;
import com.inspur.cloud.entity.dataspace.dto.UserResourceRelationDTO;
import com.inspur.cloud.entity.dataspace.vo.KafkaAddVO;
import com.inspur.cloud.entity.dataspace.vo.KafkaReq;
import com.inspur.cloud.entity.dataspace.vo.KafkaVO;
import com.inspur.cloud.enums.PermissionEnum;
import com.inspur.cloud.enums.ResourceTypeEnum;
import com.inspur.cloud.enums.RoleEnum;
import com.inspur.cloud.security.AuthorizationHelper;
import com.inspur.cloud.util.StringUtil;
import com.inspur.cloud.util.TConstants;
import com.inspur.cloud.util.TimeHelper;
import com.inspur.cloud.util.UtilTenant;
import org.hibernate.query.internal.NativeQueryImpl;
import org.hibernate.transform.Transformers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.persistence.criteria.Predicate;
import java.math.BigInteger;
import java.util.*;

@Service
public class KafkaManagerService {
    @PersistenceContext
    private EntityManager em;
    @Autowired
    private KafkaResourceService kafkaResourceService;
    @Autowired
    UserResRelationSerivce userResRelationSerivce;
    @Autowired
    RangerService rangerService;
    @Autowired
    UserDao userDao;
    @Autowired
    KafkaService kafkaService;
    @Autowired
    UserResourceRelationService userResourceRelationService;


    /**
     * Pages through kafka_resource rows visible to the current user.
     * <p>
     * Admin users see every non-hidden topic; non-admins are restricted to
     * topics they hold a user-resource relation for. The optional
     * {@code resourceName} request field filters by topic name (LIKE match).
     *
     * @param kafkaReq page index/size plus an optional resourceId/resourceName filter
     * @return a {@link PageResult} of {@link KafkaVO}; code -1 when the
     *         requested parent resourceId does not exist, code 1 with an empty
     *         page when a non-admin user owns no matching relations
     */
    public PageResult<KafkaVO> showUserList(PageReqParam<KafkaReq> kafkaReq) {
        PageResult<KafkaVO> ret = new PageResult<>();
        StringBuilder sql = new StringBuilder(" SELECT h.* FROM kafka_resource h ");
        StringBuilder countSql = new StringBuilder(" SELECT count(*) from (SELECT count(*) FROM kafka_resource h ");
        StringBuilder condition = new StringBuilder(" WHERE h.status=1 ");
        KafkaReq req = kafkaReq.getReqParam();
        if (req.getResourceId() != null && req.getResourceId() != 0) {
            KafkaResourceEntity kafka = kafkaResourceService.findKafkaResourceEntityById(req.getResourceId());
            if (kafka == null) {
                ret.setCount(1L);
                ret.setMsg("当前资源ID不存在");
                ret.setCode(-1);
                return ret;
            }
            // resourceId is a validated Integer, so inlining it cannot inject SQL
            condition.append(" AND h.parent_id=").append(req.getResourceId());
        } else {
            condition.append(" AND h.parent_id=0 ");
        }
        final String resourceName = req.getResourceName();
        final boolean filterByName = !StringUtil.isEmpty(resourceName);
        if (filterByName) {
            // FIX: previously the raw user-supplied name was concatenated into the
            // SQL text ("like '%...%'"), which is a SQL-injection hole. Bind it as
            // a named parameter instead; the value is set on both queries below.
            condition.append(" AND h.topic_name like :resourceName ");
        }
        condition.append(" AND h.topic_name  not in (").append(TConstants.KAFKATOPICHIDDEN).append(") ");
        if (!AuthorizationHelper.isAdmin()) {
            // Non-admin: restrict to resources the user has an explicit KAFKA relation for.
            Specification<UserResourceRelationEntity> specification = (Specification<UserResourceRelationEntity>) (root, query, cb) -> {
                List<Predicate> list = new ArrayList<>();
                list.add(cb.equal(root.get("userId"), AuthorizationHelper.getUser().getId()));
                list.add(cb.equal(root.get("resourceType"), ResourceTypeEnum.KAFKA.name()));
                if (filterByName) {
                    list.add(cb.like(root.get("resourceContent"), "%" + resourceName + "%"));
                }
                return cb.and(list.toArray(new Predicate[0]));
            };
            List<UserResourceRelationEntity> urList = userResRelationSerivce.findAll(specification);
            if (urList == null || urList.isEmpty()) {
                ret.setCount(0L);
                ret.setMsg("查询结果为空");
                ret.setCode(1);
                return ret;
            }
            List<Integer> idList = new ArrayList<>();
            for (UserResourceRelationEntity ur : urList) {
                idList.add(ur.getResourceId());
            }
            // ids come from the database as Integers, so inlining them is safe
            String ids = StringUtil.concatIntListDistinct(idList, ",");
            condition.append(" AND h.id in ( ").append(ids).append(" )");
        }
        condition.append(" group by h.id ");
        countSql.append(condition).append(") aa");
        // MySQL pagination: LIMIT <row offset>, <row count>. The original code named
        // these :limit/:offset the wrong way round; renamed for clarity (same values).
        sql.append(condition).append(" ORDER BY h.upd_date  DESC limit :rowOffset ,:rowCount");
        Query countQuery = em.createNativeQuery(countSql.toString());
        if (filterByName) {
            countQuery.setParameter("resourceName", "%" + resourceName + "%");
        }
        BigInteger count = (BigInteger) countQuery.getSingleResult();
        Integer rowOffset = (kafkaReq.getPageIndex() - 1) * kafkaReq.getPageSize();
        Query listQuery = em.createNativeQuery(sql.toString());
        if (filterByName) {
            listQuery.setParameter("resourceName", "%" + resourceName + "%");
        }
        listQuery.setParameter("rowOffset", rowOffset);
        listQuery.setParameter("rowCount", kafkaReq.getPageSize());
        listQuery.unwrap(NativeQueryImpl.class).setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP);
        @SuppressWarnings("unchecked")
        List<Map<String, Object>> rows = listQuery.getResultList();
        List<KafkaVO> voList = new ArrayList<>(rows.size());
        for (Map<String, Object> row : rows) {
            voList.add(toKafkaVO(row));
        }
        ret.setPageList(voList);
        ret.setCount(count.longValue());
        return ret;
    }

    /**
     * Maps one native-query row (column name -> value) to a {@link KafkaVO},
     * enriching it with the current user's permissions and the list of other
     * users/tenants the resource is distributed to.
     */
    private KafkaVO toKafkaVO(Map<String, Object> row) {
        KafkaVO kafkaVO = new KafkaVO();
        kafkaVO.setReplication(row.get("replication") != null ? Integer.valueOf(row.get("replication").toString()) : 0);
        kafkaVO.setPartitionNum(row.get("partition_num") != null ? Integer.valueOf(row.get("partition_num").toString()) : 0);
        kafkaVO.setTopicName(row.get("topic_name") != null ? row.get("topic_name").toString() : "");
        kafkaVO.setResourceId(Integer.valueOf(row.get("id").toString()));
        kafkaVO.setInsDate(row.get("ins_date") != null ? TimeHelper.StringToDate(row.get("ins_date").toString()) : null);
        kafkaVO.setUpdDate(row.get("upd_date") != null ? TimeHelper.StringToDate(row.get("upd_date").toString()) : null);
        UserResourceRelationEntity userResourceRelationEntity = userResRelationSerivce.findFirstByResourceIdAndUserIdAndResourceType(kafkaVO.getResourceId(), AuthorizationHelper.getUser().getId(), ResourceTypeEnum.KAFKA.name());
        if (userResourceRelationEntity != null) {
            kafkaVO.setPowerList(userResourceRelationEntity.getAuthor());
        }
        UserResourceRelationDTO userResourceRelationDTO = userResourceRelationService.queryUserByResourceIdExceptOwner(kafkaVO.getResourceId(), ResourceTypeEnum.KAFKA, AuthorizationHelper.getUser().getId());
        kafkaVO.setTenantList(userResourceRelationDTO.getTannetList());
        kafkaVO.setUserList(userResourceRelationDTO.getUserList());
        Integer creator = row.get("creator") != null ? Integer.valueOf(row.get("creator").toString()) : null;
        // Hive tables/columns are created by backend jobs, so the creator is
        // frequently absent from the user table; fall back to an empty name.
        if (creator != null) {
            UserEntity user = userDao.findUserById(creator);
            kafkaVO.setCreator(user == null ? "" : user.getName());
        }
        return kafkaVO;
    }

    /**
     * Creates a new Kafka topic: validates uniqueness and replication factor,
     * creates the topic on the broker, persists the resource row, grants full
     * permissions in Ranger and records the owner's user-resource relation.
     *
     * @param kafkaVO topic definition (name, partitions, replication, type)
     * @param user    the user the resource belongs to
     * @return resource id on success; a failure result when the topic already
     *         exists or the replication factor exceeds the broker count
     * @throws Exception propagated from the Kafka/Ranger integrations
     */
    @Transactional(transactionManager = "transactionManagerDataspace")
    public OpEntityResult<Integer> addRes(KafkaAddVO kafkaVO, UserEntity user) throws Exception {
        OpEntityResult<Integer> opEntityResult = new OpEntityResult<>();
        KafkaResourceEntity kafkaExist = kafkaResourceService.findFirstByTopicName(kafkaVO.getTopicName());
        if (kafkaExist != null) {
            return OpEntityResult.fail("该Topic已存在，不能新增");
        }
        // Kafka cannot place more replicas than there are brokers.
        if (kafkaVO.getReplication() > getReplicationNum()) {
            return OpEntityResult.fail("副本数最大不能超过broker数");
        }
        Date now = new Date();
        KafkaResourceEntity kafka = new KafkaResourceEntity();
        kafka.setPartitionNum(kafkaVO.getPartitionNum());
        kafka.setReplication(kafkaVO.getReplication());
        kafka.setTopicName(kafkaVO.getTopicName());
        kafka.setLevel(1);
        // An admin creating a "public" resource (type==1) assigns ownership to
        // the built-in admin account; otherwise the caller owns it.
        if (RoleEnum.Admin.getValue() == user.getRole() && kafkaVO.getType() == 1) {
            kafka.setOwner(TConstants.ADMIN_ID);
        } else {
            kafka.setOwner(user.getId());
        }
        kafka.setParentId(0);
        kafka.setStatus(TConstants.RES_STATUS_AVAILABLE);
        kafka.setUpdDate(now);
        kafka.setInsDate(now);
        kafka.setCreator(AuthorizationHelper.getUser().getId());

        kafkaService.createKafkaTopic(kafka.getTopicName(), kafka.getPartitionNum(), kafka.getReplication());
        kafkaResourceService.save(kafka);
        rangerService.setPermissionInRanger(ResourceTypeEnum.KAFKA.name().toLowerCase(), kafka.getTopicName(), PermissionEnum.getAllPermissionByType(ResourceTypeEnum.KAFKA.getType()), user.getName());

        // Record the owner's relation carrying the full permission set.
        UserResourceRelationEntity ur = new UserResourceRelationEntity();
        ur.setResourceType(ResourceTypeEnum.KAFKA.name());
        ur.setResourceId(kafka.getId());
        ur.setUserId(user.getId());
        ur.setResourceLevel(kafka.getLevel());
        ur.setStatus(TConstants.RES_STATUS_AVAILABLE);
        ur.setResourceContent(kafka.getTopicName());
        ur.setUpdDate(now);
        ur.setInsDate(now);
        ur.setAuthor(PermissionEnum.getAllPermissionByType(ResourceTypeEnum.KAFKA.getType()));
        userResRelationSerivce.save(ur);
        opEntityResult.setCode(1);
        opEntityResult.setData(kafka.getId());
        opEntityResult.setMsg("新增Kafka Topic成功");
        return opEntityResult;
    }

    /**
     * Edits an existing topic. Only the partition count may change, and Kafka
     * only supports growing it — a decrease is rejected; an unchanged count is
     * a no-op that still reports success.
     *
     * @param kafkaVO carries the resourceId and the desired partition count
     * @return success with the resource id, or a failure result when the topic
     *         is missing or the partition count would shrink
     */
    @Transactional(transactionManager = "transactionManagerDataspace")
    public OpEntityResult<Integer> modifyRes(KafkaAddVO kafkaVO) {
        OpEntityResult<Integer> opEntityResult = new OpEntityResult<>();
        KafkaResourceEntity kafka = kafkaResourceService.findKafkaResourceEntityById(kafkaVO.getResourceId());
        if (kafka == null) {
            return OpEntityResult.fail("对应的Topic不存在，不能编辑");
        }
        Date now = new Date();
        if (kafkaVO.getPartitionNum() < kafka.getPartitionNum()) {
            return OpEntityResult.fail("分区数只能增加不能减少");
        } else if (kafkaVO.getPartitionNum() > kafka.getPartitionNum()) {
            kafka.setPartitionNum(kafkaVO.getPartitionNum());
            kafkaService.updateTopicPartitions(kafka.getTopicName(), kafkaVO.getPartitionNum());
            kafka.setUpdDate(now);
            kafkaResourceService.save(kafka);
        }
        opEntityResult.setCode(1);
        opEntityResult.setData(kafka.getId());
        opEntityResult.setMsg("编辑Kafka Topic成功");
        return opEntityResult;
    }

    /**
     * Deletes a topic: removes it from the broker, then drops all user-resource
     * relations, and finally the resource row itself.
     *
     * @param resourceId id of the kafka_resource row to remove
     * @return success with the deleted id, or failure when the topic is unknown
     */
    @Transactional(transactionManager = "transactionManagerDataspace")
    public OpEntityResult<Integer> delRes(Integer resourceId) {
        OpEntityResult<Integer> opEntityResult = new OpEntityResult<>();
        KafkaResourceEntity kafka = kafkaResourceService.findKafkaResourceEntityById(resourceId);
        if (kafka == null) {
            return OpEntityResult.fail("对应的Topic不存在");
        }
        kafkaService.deleteKafkaTopic(kafka.getTopicName());
        userResourceRelationService.deleteAllUserRelations(resourceId, ResourceTypeEnum.KAFKA.name()); // drop permissions first
        kafkaResourceService.deleteById(resourceId); // then the resource row
        // FIX: every sibling method marks success with code 1; this one previously
        // returned the result with the code left at its default.
        opEntityResult.setCode(1);
        opEntityResult.setData(resourceId);
        opEntityResult.setMsg("删除Kafka Topic成功");
        return opEntityResult;
    }

    /**
     * Distributes (assigns) the resource to users/tenants named in the AO.
     * An empty target list clears the current distribution.
     *
     * @param kafkaVO target resource id plus the users/tenants to distribute to
     * @return the distribution count from the relation service, or failure when
     *         the resource does not exist
     * @throws Exception propagated from the relation service
     */
    @Transactional(transactionManager = "transactionManagerDataspace")
    public OpEntityResult<Integer> distributeRes(DistributeAO kafkaVO) throws Exception {
        OpEntityResult<Integer> opEntityResult = new OpEntityResult<>();
        KafkaResourceEntity kafka = kafkaResourceService.findKafkaResourceEntityById(kafkaVO.getResourceId());
        if (kafka == null) {
            return new OpEntityResult<>(-1, "指定的资源不存在", -1);
        }
        Integer result = userResourceRelationService.distribute(kafkaVO, ResourceTypeEnum.KAFKA, false);
        opEntityResult.setCode(1);
        if (UtilTenant.empty(kafkaVO)) {
            opEntityResult.setMsg("分配已置空");
        } else {
            opEntityResult.setMsg("KAFKA资源分配成功");
        }
        opEntityResult.setData(result);
        return opEntityResult;
    }

    /**
     * Shares the resource with users/tenants named in the AO. Identical flow to
     * {@link #distributeRes(DistributeAO)} except the relation service is called
     * in "share" mode. An empty target list clears the current sharing.
     *
     * @param kafkaVO target resource id plus the users/tenants to share with
     * @return the share count from the relation service, or failure when the
     *         resource does not exist
     * @throws Exception propagated from the relation service
     */
    @Transactional(transactionManager = "transactionManagerDataspace")
    public OpEntityResult<Integer> share(DistributeAO kafkaVO) throws Exception {
        OpEntityResult<Integer> opEntityResult = new OpEntityResult<>();
        KafkaResourceEntity kafka = kafkaResourceService.findKafkaResourceEntityById(kafkaVO.getResourceId());
        if (kafka == null) {
            return new OpEntityResult<>(-1, "指定的资源不存在", -1);
        }
        Integer result = userResourceRelationService.distribute(kafkaVO, ResourceTypeEnum.KAFKA, true);
        opEntityResult.setCode(1);
        opEntityResult.setData(result);
        if (UtilTenant.empty(kafkaVO)) {
            opEntityResult.setMsg("分享已置空");
        } else {
            opEntityResult.setMsg("KAFKA资源分享成功");
        }
        return opEntityResult;
    }

    /**
     * Synchronizes the database with the live Kafka cluster:
     * <ul>
     *   <li>rows whose topic no longer exists on the broker are deleted
     *       (relations first, then the resource);</li>
     *   <li>topics present on the broker but not in the database are inserted,
     *       owned by the admin account, with full Ranger permissions;</li>
     *   <li>topics present in both have their partition/replication counts
     *       refreshed from the broker.</li>
     * </ul>
     *
     * @return failure when the broker returns no topics, otherwise a success
     *         message including the number of topics processed
     */
    @Transactional(transactionManager = "transactionManagerDataspace")
    public OpEntityResult<Integer> sync() {
        Date now = new Date();
        Collection<String> topicList = kafkaService.getKafkaTopicNameSet();
        if (topicList == null || topicList.size() <= 0) {
            return OpEntityResult.fail("获取同步数据为空");
        }
        List<KafkaResourceEntity> kafkaList = kafkaResourceService.findByParentId(0);
        List<String> topicDbList = new ArrayList<>();
        for (KafkaResourceEntity kafkaResourceEntity : kafkaList) {
            topicDbList.add(kafkaResourceEntity.getTopicName());
            if (!topicList.contains(kafkaResourceEntity.getTopicName())) {
                // The broker no longer has this topic -> purge the DB record.
                userResourceRelationService.deleteAllUserRelations(kafkaResourceEntity.getId(), ResourceTypeEnum.KAFKA.name());
                kafkaResourceService.deleteById(kafkaResourceEntity.getId());
            }
        }
        List<String> kafkaTopicList = new ArrayList<>(topicList);
        Map<String, List<KafkaTopicInfo>> infoList = kafkaService.getKafkaTopicInfo(kafkaTopicList);
        for (String topic : topicList) {
            if (!topicDbList.contains(topic)) {
                // Topic exists on the broker but not in the DB -> insert it.
                KafkaResourceEntity kafka = new KafkaResourceEntity();
                kafka.setTopicName(topic);
                List<String> kafkaTopic = new ArrayList<>();
                kafkaTopic.add(topic);
                if (infoList != null && infoList.get(topic) != null) {
                    List<KafkaTopicInfo> kinfo = infoList.get(topic);
                    if (kinfo.size() > 0) {
                        // One KafkaTopicInfo per partition; replicas come from any entry.
                        kafka.setPartitionNum(kinfo.size());
                        KafkaTopicInfo kafkaTopicInfo = kinfo.get(0);
                        if (kafkaTopicInfo != null && kafkaTopicInfo.getReplicas() != null) {
                            kafka.setReplication(kafkaTopicInfo.getReplicas().size());
                        }
                    }
                } else {
                    // No metadata available -> assume the minimal 1/1 layout.
                    kafka.setPartitionNum(1);
                    kafka.setReplication(1);
                }
                kafka.setOwner(TConstants.ADMIN_ID);
                kafka.setLevel(1);
                kafka.setParentId(0);
                kafka.setStatus(TConstants.RES_STATUS_AVAILABLE);
                kafka.setInsDate(now);
                kafka.setUpdDate(now);
                kafka.setCreator(TConstants.ADMIN_ID);
                kafkaResourceService.save(kafka);

                UserResourceRelationEntity ur = new UserResourceRelationEntity();
                ur.setResourceType(ResourceTypeEnum.KAFKA.name());
                ur.setResourceId(kafka.getId());
                ur.setUserId(TConstants.ADMIN_ID);
                ur.setResourceLevel(1);
                ur.setStatus(TConstants.RES_STATUS_AVAILABLE);
                ur.setResourceContent(kafka.getTopicName());
                ur.setUpdDate(now);
                ur.setInsDate(now);
                ur.setAuthor(PermissionEnum.getAllPermissionByType(ResourceTypeEnum.KAFKA.getType()));
                userResRelationSerivce.save(ur);

                rangerService.setPermissionInRanger(ResourceTypeEnum.KAFKA.name(), kafka.getTopicName(), PermissionEnum.getAllPermissionByType(ResourceTypeEnum.KAFKA.getType()), TConstants.ADMIN_NAME);
            } else {
                // Topic exists in both -> refresh partition and replica counts.
                KafkaResourceEntity kafkaExist = kafkaResourceService.findFirstByTopicName(topic);
                if (kafkaExist != null) {
                    if (infoList != null && infoList.get(topic) != null) {
                        List<KafkaTopicInfo> kinfo = infoList.get(topic);
                        if (kinfo.size() > 0) {
                            kafkaExist.setPartitionNum(kinfo.size());
                            KafkaTopicInfo kafkaTopicInfo = kinfo.get(0);
                            if (kafkaTopicInfo != null && kafkaTopicInfo.getReplicas() != null) {
                                kafkaExist.setReplication(kafkaTopicInfo.getReplicas().size());
                            }
                            // upd_date deliberately NOT refreshed here (kept from original).
                            kafkaResourceService.save(kafkaExist);
                        }
                    }
                }
            }
        }
        return OpEntityResult.of(1, "同步成功数据：" + topicList.size());
    }


    /**
     * Returns the number of Kafka brokers, which caps the replication factor
     * accepted by {@link #addRes(KafkaAddVO, UserEntity)}.
     *
     * @return broker count, or 0 when the broker list is unavailable
     */
    public Integer getReplicationNum() {
        List<String> brokers = kafkaService.getBrokerList();
        if (brokers != null) {
            return brokers.size();
        }
        return 0;
    }
}
