package com.quectel.core.module.scheduling.service.impl;


import cn.hutool.core.lang.func.LambdaUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.quectel.base.tuple.Tuple2;
import com.quectel.base.tuple.Tuples;
import com.quectel.constant.core.scheduling.SchedulingAuditPipelineSeqConstants;
import com.quectel.constant.global.SystemConstants;
import com.quectel.core.constants.RedisCacheConstants;
import com.quectel.core.module.scheduling.dao.SchedulingAuditPipelineDao;
import com.quectel.core.module.scheduling.dto.SchedulingAuditPipelineDto;
import com.quectel.core.module.scheduling.dto.SchedulingAuditPipelineSeqDeptPersonDto;
import com.quectel.core.module.scheduling.dto.SchedulingAuditPipelineSeqDto;
import com.quectel.core.module.scheduling.entity.SchedulingAuditPipelineEntity;
import com.quectel.core.module.scheduling.service.SchedulingAuditPipelineSeqDeptPersonService;
import com.quectel.core.module.scheduling.service.SchedulingAuditPipelineSeqService;
import com.quectel.core.module.scheduling.service.SchedulingAuditPipelineService;
import com.quectel.core.module.village.dto.VillageDto;
import com.quectel.core.module.village.service.VillageService;
import com.quectel.util.common.CopyUtils;
import com.quectel.util.kit.CacheKit;
import com.quectel.util.kit.Snowflake;
import com.quectel.util.mybatis.MyBatisPlusUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.dubbo.config.annotation.DubboService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;

import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;

/**
 * @author zhangheng
 * @email nolan.zhang@quectel.com
 * @date 2023/07/05 13:24
 */
@DubboService
public class SchedulingAuditPipelineServiceImpl implements SchedulingAuditPipelineService {

    private static final Logger LOGGER = LoggerFactory.getLogger(SchedulingAuditPipelineServiceImpl.class);

    /**
     * Cache key builder for a single pipeline.
     * Input: entity id.
     */
    private static final Function<Long, String> SCHEDULING_AUDIT_PIPELINE_CACHE_BY_ID_FUNC = id -> String.format(RedisCacheConstants.PROJECT_NAME + "SCHEDULING_AUDIT_PIPELINE_CACHE_BY_ID:%d", id);


    /**
     * Cache key builder for the rule lookup.
     * tuple[t1] village (park) id
     * tuple[t2] audit pipeline type
     */
    private static final Function<Tuple2<Long, Byte>, String> AUDIT_PIPELINE_RULE_FUNC =
            tuple2 -> String.format(RedisCacheConstants.PROJECT_NAME + "AUDIT_PIPELINE_RULE_:%d+:%d", tuple2.getT1(), tuple2.getT2());


    /**
     * Invalidates the by-id cache entry for the given dto.
     * Null-safe: a null dto (e.g. the record was already deleted) is a no-op
     * instead of an NPE.
     */
    private static final Consumer<SchedulingAuditPipelineDto> CLEAR_CACHE_FUNC = dto -> {
        if (dto != null && dto.getId() != null) {
            CacheKit.invalidRedisCache(SCHEDULING_AUDIT_PIPELINE_CACHE_BY_ID_FUNC.apply(dto.getId()));
        }
    };

    @Autowired
    private SchedulingAuditPipelineDao schedulingAuditPipelineDao;
    @Autowired
    private VillageService villageService;
    @Autowired
    private SchedulingAuditPipelineSeqService auditPipelineSeqService;
    @Autowired
    private SchedulingAuditPipelineSeqDeptPersonService auditPipelineSeqDeptPersonService;

    /**
     * Invalidates the (villageId, type) rule cache entry derived from the given
     * entity. Null-safe on both the entity and its key fields, so callers may
     * pass the result of a lookup that found nothing.
     *
     * @param entity entity whose village/type identify the rule cache key
     */
    private void clearRuleCache(SchedulingAuditPipelineEntity entity) {
        if (entity != null && entity.getVillageId() != null && entity.getType() != null) {
            CacheKit.invalidRedisCache(AUDIT_PIPELINE_RULE_FUNC.apply(Tuples.of(entity.getVillageId(), entity.getType())));
        }
    }

    /**
     * Cached variant of {@link #selectByVillageAndType(Long, Byte)}.
     * Note: absent results are cached too (with the standard expiry), so write
     * paths must invalidate the rule cache — see {@link #save}.
     *
     * @param villageId village (park) id
     * @param type      audit pipeline type
     * @return the matching pipeline, fully padded, or null if none exists
     */
    @Override
    public SchedulingAuditPipelineDto selectRuleCachetByVillageAndType(Long villageId, Byte type) {
        return CacheKit.cacheToRedis(
                () -> selectByVillageAndType(villageId, type),
                AUDIT_PIPELINE_RULE_FUNC.apply(Tuples.of(villageId, type)),
                SystemConstants.NOT_NULL_CACHE_EXPIRE_SECONDS
        );
    }

    /**
     * Loads the single pipeline configured for a village/type pair straight
     * from the database (no cache).
     *
     * @param villageId village (park) id
     * @param type      audit pipeline type
     * @return the matching pipeline, fully padded, or null if none exists
     */
    @Override
    public SchedulingAuditPipelineDto selectByVillageAndType(Long villageId, Byte type) {
        // selectOne assumes (villageId, type) is unique; MyBatis-Plus throws if
        // multiple rows match, which surfaces bad data instead of hiding it.
        SchedulingAuditPipelineEntity auditPipelineEntity = schedulingAuditPipelineDao.selectOne(new LambdaQueryWrapper<SchedulingAuditPipelineEntity>()
                .eq(SchedulingAuditPipelineEntity::getVillageId, villageId)
                .eq(SchedulingAuditPipelineEntity::getType, type));
        return paddingField(CopyUtils.copyObj(auditPipelineEntity, SchedulingAuditPipelineDto.class));
    }

    /**
     * Cached variant of {@link #selectById(Long)}.
     *
     * @param id entity id
     * @return the pipeline, fully padded, or null if none exists
     */
    @Override
    public SchedulingAuditPipelineDto selectCacheById(Long id) {
        return CacheKit.cacheToRedis(
                () -> selectById(id),
                SCHEDULING_AUDIT_PIPELINE_CACHE_BY_ID_FUNC.apply(id),
                SystemConstants.NOT_NULL_CACHE_EXPIRE_SECONDS
        );
    }

    /**
     * Loads a pipeline by primary key straight from the database (no cache).
     *
     * @param id entity id
     * @return the pipeline, fully padded, or null if none exists
     */
    @Override
    public SchedulingAuditPipelineDto selectById(Long id) {
        SchedulingAuditPipelineEntity entity = schedulingAuditPipelineDao.selectById(id);
        return paddingField(CopyUtils.copyObj(entity, SchedulingAuditPipelineDto.class));
    }

    /**
     * Queries pipelines by the dao's generic parameter map and pads each
     * result with its village, seq steps and designated persons.
     *
     * @param params dao query parameters
     * @return padded dto list (empty when nothing matches)
     */
    @Override
    public List<SchedulingAuditPipelineDto> queryList(Map<String, Object> params) {
        List<SchedulingAuditPipelineEntity> list = schedulingAuditPipelineDao.queryList(params);
        List<SchedulingAuditPipelineDto> result = CopyUtils.copyList(list, SchedulingAuditPipelineDto.class);
        result.forEach(this::paddingField);
        return result;
    }

    /**
     * Same query as {@link #queryList(Map)} but returns bare dtos without the
     * (comparatively expensive) field padding.
     *
     * @param params dao query parameters
     * @return unpadded dto list (empty when nothing matches)
     */
    @Override
    public List<SchedulingAuditPipelineDto> queryListWithoutPadding(Map<String, Object> params) {
        List<SchedulingAuditPipelineEntity> list = schedulingAuditPipelineDao.queryList(params);
        return CopyUtils.copyList(list, SchedulingAuditPipelineDto.class);
    }

    /**
     * Use with care. Padding rules: fill only objects that (1) rarely change,
     * (2) are cacheable by id, and (3) are simple — e.g. device type, owning
     * village. The seq list is the one sanctioned exception here because a
     * pipeline is meaningless without its ordered steps.
     *
     * @param dto dto to pad in place; null is tolerated
     * @return the same dto instance, for chaining
     */
    private SchedulingAuditPipelineDto paddingField(SchedulingAuditPipelineDto dto) {
        if (dto != null) {
            if (dto.getVillageId() != null) {
                VillageDto villageDto = villageService.selectCacheById(dto.getVillageId());
                if (villageDto != null) {
                    dto.setVillage(villageDto);
                }
            }

            Map<String, Object> params = new HashMap<>(1);
            params.put(LambdaUtil.getFieldName(SchedulingAuditPipelineSeqDto::getAuditPipelineId), String.valueOf(dto.getId()));
            List<SchedulingAuditPipelineSeqDto> auditPipelineSeqDtos = auditPipelineSeqService.queryList(params);
            // Steps must be presented in execution order.
            auditPipelineSeqDtos.sort(Comparator.comparing(SchedulingAuditPipelineSeqDto::getSeq));
            dto.setAuditPipelineSeqDtos(auditPipelineSeqDtos);

            if (CollectionUtils.isNotEmpty(auditPipelineSeqDtos)) {
                for (SchedulingAuditPipelineSeqDto auditPipelineSeqDto : auditPipelineSeqDtos) {
                    // Only "department designated person" steps carry an explicit person list.
                    if (SchedulingAuditPipelineSeqConstants.AUDIT_TYPE_DEPARTMENT_DESIGNATED_PERSON.equals(auditPipelineSeqDto.getAuditType())) {
                        List<SchedulingAuditPipelineSeqDeptPersonDto> personDtoList = auditPipelineSeqDeptPersonService.selectBySeqId(auditPipelineSeqDto.getId());
                        auditPipelineSeqDto.setAuditPipelineSeqDeptPersonDtos(personDtoList);
                    }
                }
            }

        }
        return dto;
    }

    /**
     * Counts pipelines matching the dao's generic parameter map.
     *
     * @param params dao query parameters
     * @return total row count
     */
    @Override
    public int queryTotal(Map<String, Object> params) {
        return schedulingAuditPipelineDao.queryTotal(params);
    }

    /**
     * Inserts a new pipeline with a snowflake id.
     * Also invalidates the (village, type) rule cache: the cached variant may
     * hold a cached "no rule" result for this pair that would otherwise mask
     * the new row until it expires.
     *
     * @param dto pipeline to persist (id is generated, not taken from dto)
     * @return the generated id
     */
    @Override
    public Long save(SchedulingAuditPipelineDto dto) {
        SchedulingAuditPipelineEntity entity = CopyUtils.copyObj(dto, SchedulingAuditPipelineEntity.class);
        entity.setId(Snowflake.nextId());
        schedulingAuditPipelineDao.insert(entity);
        clearRuleCache(entity);
        return entity.getId();
    }

    /**
     * Partial update by id (null fields untouched by MyBatis-Plus).
     * Invalidates the by-id cache and the rule cache for both the pre-update
     * and post-update (village, type) keys, in case either field changed.
     *
     * @param dto fields to update; must carry the id
     */
    @Override
    public void updateById(SchedulingAuditPipelineDto dto) {
        // Snapshot the current row first so the old rule cache key can be cleared.
        SchedulingAuditPipelineEntity before = schedulingAuditPipelineDao.selectById(dto.getId());
        SchedulingAuditPipelineEntity entity = CopyUtils.copyObj(dto, SchedulingAuditPipelineEntity.class);

        schedulingAuditPipelineDao.updateById(entity);

        CLEAR_CACHE_FUNC.accept(dto);
        clearRuleCache(before);
        clearRuleCache(entity);
    }

    /**
     * Full-column update by id (null fields are written as null).
     * Invalidates the same caches as {@link #updateById}.
     *
     * @param dto full replacement state; must carry the id
     */
    @Override
    public void updateAllColumnById(SchedulingAuditPipelineDto dto) {
        SchedulingAuditPipelineEntity before = schedulingAuditPipelineDao.selectById(dto.getId());
        SchedulingAuditPipelineEntity entity = CopyUtils.copyObj(dto, SchedulingAuditPipelineEntity.class);

        MyBatisPlusUtils.updateAllColumnById(entity, schedulingAuditPipelineDao);

        CLEAR_CACHE_FUNC.accept(dto);
        clearRuleCache(before);
        clearRuleCache(entity);
    }

    /**
     * Deletes a pipeline by id and invalidates its caches.
     * Fixes two issues with the previous version: a missing row no longer
     * NPEs (the old code dereferenced the result of selectById without a null
     * check), and the cache is now cleared AFTER the delete so a concurrent
     * read cannot repopulate it with the soon-to-be-deleted row.
     *
     * @param id entity id; unknown ids are a no-op apart from cache cleanup
     */
    @Override
    public void deleteById(Long id) {
        // Snapshot before delete so the rule cache key can still be derived.
        SchedulingAuditPipelineEntity entity = schedulingAuditPipelineDao.selectById(id);

        schedulingAuditPipelineDao.deleteById(id);

        CacheKit.invalidRedisCache(SCHEDULING_AUDIT_PIPELINE_CACHE_BY_ID_FUNC.apply(id));
        clearRuleCache(entity);
    }

    /**
     * Deletes a batch of pipelines in one transaction; any failure rolls back
     * the whole batch.
     *
     * @param ids entity ids to delete
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void deleteBatch(Long[] ids) {
        for (Long id : ids) {
            deleteById(id);
        }
    }
}
