package com.meida.module.arc.provider.service.impl;

import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.meida.common.base.entity.EntityMap;
import com.meida.common.base.utils.FlymeUtils;
import com.meida.common.mybatis.base.service.impl.BaseServiceImpl;
import com.meida.common.mybatis.model.ResultBody;
import com.meida.common.mybatis.query.CriteriaQuery;
import com.meida.common.mybatis.query.CriteriaSave;
import com.meida.common.mybatis.vo.JoinBean;
import com.meida.common.utils.ApiAssert;
import com.meida.common.utils.JsonUtils;
import com.meida.module.arc.client.entity.*;
import com.meida.module.arc.client.enums.ArchiveEnumInteger;
import com.meida.module.arc.client.enums.CategoryTypeEnum;
import com.meida.module.arc.client.vo.ArcRefJson;
import com.meida.module.arc.client.vo.ForEachIndex;
import com.meida.module.arc.provider.mapper.ArcBatchRefMapper;
import com.meida.module.arc.provider.service.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import java.lang.reflect.Field;
import java.util.*;
import java.util.stream.Collectors;

/**
 * 批量关联接口实现类.
 *
 * <p>Maintains the field-mapping configuration between a source archive
 * category (e.g. project/案卷) and a target (child) category, and performs
 * batch parent/child linking of archive records based on that mapping.</p>
 *
 * @author flyme
 * @date 2021-11-29
 */
@Service
@Transactional(rollbackFor = Exception.class)
@DS("sharding")
public class ArcBatchRefServiceImpl extends BaseServiceImpl<ArcBatchRefMapper, ArcBatchRef> implements ArcBatchRefService {

    @Autowired
    private ArcCategoryService arcCategoryService;

    @Autowired
    private ArcBatchService arcBatchService;

    @Autowired
    @Lazy
    private ArcInfoService arcInfoService;

    @Autowired
    private ArcFieldService arcFieldService;

    @Autowired
    private ArcBatchRecordService arcBatchRecordService;

    /**
     * Master datasource URL; used to pick the JSON-extraction SQL syntax
     * for expand-column queries (MySQL vs PostgreSQL/highgo).
     */
    @Value("${spring.datasource.dynamic.datasource.master.url:mysql}")
    private String dataSourceUrl;

    /** Hook before insert; no extra validation needed for this entity. */
    @Override
    public ResultBody beforeAdd(CriteriaSave cs, ArcBatchRef abr, EntityMap extra) {
        return ResultBody.ok();
    }

    /** Hook before paged list: newest configurations first. */
    @Override
    @Transactional(propagation = Propagation.NOT_SUPPORTED, readOnly = true)
    public ResultBody beforePageList(CriteriaQuery<ArcBatchRef> cq, ArcBatchRef abr, EntityMap requestMap) {
        cq.orderByDesc("abr.createTime");
        return ResultBody.ok();
    }

    /**
     * Replaces the field-mapping configuration of a batch-relation.
     *
     * <p>Validates that source and target categories exist and belong to the
     * same 全宗 (qzId), deletes the previous mapping rows for the batch, then
     * saves the new mapping list in the given order (seq = index).</p>
     *
     * @param var1 expects keys {@code arcBatchId} and {@code batchRefs}
     *             (JSON array of {@link ArcRefJson})
     */
    @Override
    public ResultBody saveOrUpdate(Map var1) {
        Object arcBatchId = var1.get("arcBatchId");
        Object batchRefs = var1.get("batchRefs");
        ApiAssert.isNotEmpty("批量关联接口不能为空", arcBatchId);
        ApiAssert.isNotEmpty("批量关联配置不能为空", batchRefs);
        ArcBatch batch = this.arcBatchService.getById(Long.parseLong(arcBatchId.toString()));
        ApiAssert.isNotEmpty("批量关联接口不能为空", batch);
        ArcCategory srcCat = this.arcCategoryService.getById(batch.getSrcCategoryId());
        ArcCategory targetCat = this.arcCategoryService.getById(batch.getTargetCategoryId());
        ApiAssert.isNotEmpty("源门类不能为空", srcCat);
        ApiAssert.isNotEmpty("目标门类不能为空", targetCat);
        // Both categories must belong to the same 全宗 for linking to make sense.
        if (FlymeUtils.isEmpty(srcCat.getQzId()) || !srcCat.getQzId().equals(targetCat.getQzId())) {
            return ResultBody.failed("源门类和目标门类全宗不一致");
        }

        // 删除已有源门类配置，增加新的配置 (full replace, not merge)
        CriteriaQuery<ArcBatchRef> refQuery = new CriteriaQuery<ArcBatchRef>(ArcBatchRef.class);
        refQuery.lambda().eq(ArcBatchRef::getArcBatchId, batch.getBatchId());
        this.remove(refQuery);

        List<ArcRefJson> configMap = JsonUtils.json2list(batchRefs.toString(), ArcRefJson.class);

        List<ArcBatchRef> saveList = new ArrayList<>();
        for (int i = 0; i < configMap.size(); i++) {
            ArcRefJson json = configMap.get(i);
            ArcBatchRef obj = new ArcBatchRef();
            obj.setDeleted(0);
            obj.setQzId(srcCat.getQzId());
            obj.setSrcType(srcCat.getType());
            obj.setTargetType(targetCat.getType());
            obj.setSrcCategoryId(srcCat.getCategoryId());
            obj.setTargetCategoryId(targetCat.getCategoryId());
            obj.setSrcCategoryName(srcCat.getCnName());
            obj.setTargetCategoryName(targetCat.getCnName());
            obj.setSrcFieldId(json.getSrcFieldId());
            obj.setTargetFieldId(json.getTargetFieldId());
            // Preserve the submitted order on both sides of the mapping.
            obj.setSrcSeq(i);
            obj.setTargetSeq(i);
            obj.setArcBatchId(batch.getBatchId());
            saveList.add(obj);
        }
        this.saveBatch(saveList);
        return ResultBody.ok();
    }

    /**
     * Resolves the default target (child) category for a source category:
     * the first child category (lowest seq) one level below the source.
     *
     * @param var1 expects key {@code srcCategoryId}
     */
    @Override
    public ResultBody targetCategoryInfo(Map var1) {
        Object srcCategoryId = var1.get("srcCategoryId");
        ApiAssert.isNotEmpty("源门类id不能为空", srcCategoryId);
        ArcCategory srcCategory = this.arcCategoryService.getById(Long.parseLong(srcCategoryId.toString()));
        ApiAssert.isNotEmpty("没有查询到源门类", srcCategory);

        CategoryTypeEnum type = CategoryTypeEnum.matchCode(srcCategory.getType());
        ApiAssert.isNotEmpty("源门类类型不能为空", type);
        CriteriaQuery<ArcCategory> targetCategoryQuery = new CriteriaQuery<ArcCategory>(ArcCategory.class);
        switch (type) {
            case CATEGORY_TYPE_1:
                //TODO 目前只查询下级第一条门类字段,等待业务沟通
                targetCategoryQuery.lambda().eq(ArcCategory::getType, CategoryTypeEnum.CATEGORY_TYPE_2.getCode());
                break;
            case CATEGORY_TYPE_2:
                //TODO 目前只查询下级第一条门类字段,等待业务沟通
                targetCategoryQuery.lambda().eq(ArcCategory::getType, CategoryTypeEnum.CATEGORY_TYPE_3.getCode());
                break;
            default:
                // Only 项目/案卷 can have linkable children.
                ApiAssert.failure("源门类类型只能为项目或案卷");
                break;
        }

        targetCategoryQuery.lambda().eq(ArcCategory::getParentId, srcCategory.getCategoryId())
                .orderByAsc(ArcCategory::getSeq);
        targetCategoryQuery.limit(1);
        ArcCategory targetCategory = this.arcCategoryService.getOne(targetCategoryQuery);
        ApiAssert.isNotEmpty("找不到目标门类", targetCategory);
        return ResultBody.ok(targetCategory);
    }

    /**
     * Returns the human-readable field names ("+"-joined) on both sides of
     * the batch-relation configured for the given target category.
     *
     * @param var1 expects key {@code targetCategoryId}
     */
    @Override
    public ResultBody batchRefField(Map var1) {
        Object targetCategoryId = var1.get("targetCategoryId");
        ApiAssert.isNotEmpty("目标门类id不能为空", targetCategoryId);

        ArcBatch arcBatch = requireSingleBatch(Long.parseLong(targetCategoryId.toString()));
        List<ArcBatchRef> arcBatcheRefs = requireBatchRefs(arcBatch.getBatchId());

        List<Long> srcFieldList = arcBatcheRefs.stream()
                .map(ArcBatchRef::getSrcFieldId).collect(Collectors.toList());
        List<Long> targetFieldList = arcBatcheRefs.stream()
                .map(ArcBatchRef::getTargetFieldId).collect(Collectors.toList());

        List<ArcField> targetBatchRefFields = requireFields(targetFieldList, "批量关联目标门类字段为空");
        List<ArcField> srcBatchRefFields = requireFields(srcFieldList, "批量关联源门类字段为空");
        if (targetBatchRefFields.size() != srcBatchRefFields.size()) {
            ApiAssert.failure("批量关联源门类与目标门类字段数量不一致，无法关联");
        }
        Map<String, Object> result = new HashMap<>();
        result.put("srcFieldNames", srcBatchRefFields.stream()
                .map(ArcField::getFieldCnName).collect(Collectors.joining("+")));
        result.put("targetFieldNames", targetBatchRefFields.stream()
                .map(ArcField::getFieldCnName).collect(Collectors.joining("+")));
        return ResultBody.ok(result);
    }

    /**
     * Performs the batch linking: for every target (child) archive record,
     * finds the single source (parent) record whose configured fields match,
     * sets the child's parentId/relevanceNo, bumps the parent's childCount,
     * and finally writes one {@link ArcBatchRecord} summarising the run.
     *
     * @param var1 expects key {@code targetCategoryId}; optional
     *             {@code arcInfoIds} (comma-separated) restricts the run to
     *             specific unlinked records
     */
    @Override
    public ResultBody batchRef(Map var1) {
        Object targetCategoryId = var1.get("targetCategoryId");
        Object arcInfoIds = var1.get("arcInfoIds");
        ApiAssert.isNotEmpty("目标门类id不能为空", targetCategoryId);
        // One relevanceNo per run, shared by every record linked in this batch.
        String relevanceNo = DateUtil.format(new Date(), "yyyyMMddHHmmssSSS");
        Map<Long, Integer> parentCountMap = new HashMap<Long, Integer>();
        ForEachIndex childCount = new ForEachIndex();

        ArcBatch arcBatch = requireSingleBatch(Long.parseLong(targetCategoryId.toString()));
        List<ArcBatchRef> arcBatcheRefs = requireBatchRefs(arcBatch.getBatchId());

        List<Long> srcFieldList = arcBatcheRefs.stream()
                .map(ArcBatchRef::getSrcFieldId).collect(Collectors.toList());
        List<Long> targetFieldList = arcBatcheRefs.stream()
                .map(ArcBatchRef::getTargetFieldId).collect(Collectors.toList());

        // 查询目标门类信息: only records without a parent (not yet linked).
        List<ArcInfo> targetArcInfos;
        LambdaQueryWrapper<ArcInfo> targetArcInfoQueryWrapper = new LambdaQueryWrapper<>();
        targetArcInfoQueryWrapper.eq(ArcInfo::getCategoryId, Long.parseLong(targetCategoryId.toString())).isNull(ArcInfo::getParentId);

        if (ObjectUtil.isNotEmpty(arcInfoIds)) {
            targetArcInfoQueryWrapper.in(ArcInfo::getArcInfoId,
                    Arrays.stream(arcInfoIds.toString().split(","))
                            .map(Long::parseLong)
                            .collect(Collectors.toList()));
            targetArcInfos = this.arcInfoService.list(targetArcInfoQueryWrapper);
            ApiAssert.isNotEmpty("档案信息为空或档案已关联", targetArcInfos);
        } else {
            targetArcInfos = this.arcInfoService.list(targetArcInfoQueryWrapper);
        }

        List<ArcField> targetLoaded = requireFields(targetFieldList, "批量关联目标门类字段为空");
        List<ArcField> srcLoaded = requireFields(srcFieldList, "批量关联源门类字段为空");
        if (targetLoaded.size() != srcLoaded.size()) {
            ApiAssert.failure("批量关联源门类与目标门类字段数量不一致，无法关联");
        }

        // Re-align both field lists to the configured mapping order: an SQL
        // IN(...) query gives no ordering guarantee, so pairing the raw
        // lists by index could match the wrong src/target field pair.
        Map<Long, ArcField> srcFieldMap = srcLoaded.stream()
                .collect(Collectors.toMap(ArcField::getFieldId, f -> f, (a, b) -> a));
        Map<Long, ArcField> targetFieldMap = targetLoaded.stream()
                .collect(Collectors.toMap(ArcField::getFieldId, f -> f, (a, b) -> a));
        List<ArcField> srcBatchRefFields = new ArrayList<>();
        List<ArcField> targetBatchRefFields = new ArrayList<>();
        for (ArcBatchRef ref : arcBatcheRefs) {
            ArcField src = srcFieldMap.get(ref.getSrcFieldId());
            ArcField target = targetFieldMap.get(ref.getTargetFieldId());
            if (src != null && target != null) {
                srcBatchRefFields.add(src);
                targetBatchRefFields.add(target);
            }
        }

        //循环每一条档案信息
        targetArcInfos.forEach(item -> {
            //基于批量关联字段映射，查询档案
            QueryWrapper<ArcInfo> srcQueryWrapper = new QueryWrapper<ArcInfo>();
            srcQueryWrapper.lambda().eq(ArcInfo::getCategoryId, arcBatch.getSrcCategoryId());
            Map<String, Object> targetExpandMap = null;
            for (int i = 0; i < targetBatchRefFields.size(); i++) {
                if (ArchiveEnumInteger.IS_TRUE.getCode().equals(targetBatchRefFields.get(i).getIsBlob())) {
                    // Blob field: value lives in the JSON "expand" column; parse lazily once.
                    if (targetExpandMap == null && FlymeUtils.isNotEmpty(item.getExpand())) {
                        targetExpandMap = JsonUtils.jsonToBean(item.getExpand(), Map.class);
                    }
                    if (targetExpandMap == null || FlymeUtils.isEmpty(targetExpandMap.get(targetBatchRefFields.get(i).getFieldName()))) {
                        genBatchRefSrcQuery(srcQueryWrapper, srcBatchRefFields.get(i), null);
                    } else {
                        genBatchRefSrcQuery(srcQueryWrapper, srcBatchRefFields.get(i), targetExpandMap.get(targetBatchRefFields.get(i).getFieldName()));
                    }
                } else {
                    // Regular column: read the value off the entity itself.
                    Map<String, Object> targetArcInfoMap = JsonUtils.beanToMap(item);
                    genBatchRefSrcQuery(srcQueryWrapper, srcBatchRefFields.get(i), targetArcInfoMap.get(targetBatchRefFields.get(i).getFieldName()));
                }
            }
            List<ArcInfo> srcArcInfoList = this.arcInfoService.list(srcQueryWrapper);
            if (FlymeUtils.isNotEmpty(srcArcInfoList) && srcArcInfoList.size() > 1) {
                //匹配到多个结果 — ambiguous match is intentionally skipped (no link
                // is made); TODO confirm desired handling with business.
            }

            if (FlymeUtils.isNotEmpty(srcArcInfoList) && srcArcInfoList.size() == 1) {
                //匹配成功
                ArcInfo parent = srcArcInfoList.get(0);
                //1更新上级的childCount (parent re-queried each iteration, so the
                // read value reflects earlier increments in this run)
                LambdaUpdateWrapper<ArcInfo> srcUpdateWrapper = new LambdaUpdateWrapper<ArcInfo>();
                srcUpdateWrapper.eq(ArcInfo::getCategoryId, parent.getCategoryId())
                        .eq(ArcInfo::getArcInfoId, parent.getArcInfoId())
                        .set(ArcInfo::getChildCount,
                                FlymeUtils.isEmpty(parent.getChildCount()) ? 1 : parent.getChildCount() + 1);
                this.arcInfoService.update(srcUpdateWrapper);
                parentCountMap.merge(parent.getArcInfoId(), 1, Integer::sum);
                //2更新下级的parentId和批量关联号relevanceNo
                LambdaUpdateWrapper<ArcInfo> targetUpdateWrapper = new LambdaUpdateWrapper<ArcInfo>();
                targetUpdateWrapper.eq(ArcInfo::getCategoryId, item.getCategoryId())
                        .eq(ArcInfo::getArcInfoId, item.getArcInfoId())
                        .set(ArcInfo::getParentId, parent.getArcInfoId())
                        .set(ArcInfo::getRelevanceNo, relevanceNo);
                this.arcInfoService.update(targetUpdateWrapper);
                childCount.indexPlus();
            }

        });
        //3增加批量关联记录 (only when at least one child was linked)
        if (childCount.getIndex() > 0) {
            ArcBatchRecord record = new ArcBatchRecord();

            record.setArcBatchId(arcBatch.getBatchId());
            record.setBatchNo(relevanceNo);
            record.setBatchTime(new Date());
            record.setParentFieldName(srcBatchRefFields.stream()
                    .map(ArcField::getFieldCnName).collect(Collectors.joining("+")));
            record.setChildFieldName(targetBatchRefFields.stream()
                    .map(ArcField::getFieldCnName).collect(Collectors.joining("+")));
            ForEachIndex parentCount = new ForEachIndex();

            parentCountMap.forEach((key, value) -> {
                parentCount.indexPlusNum(value);
            });
            record.setParentCount(parentCount.getIndex());
            record.setChildCount(childCount.getIndex());
            this.arcBatchRecordService.save(record);

        }

        return ResultBody.ok();
    }

    /**
     * Loads the single {@link ArcBatch} configured for a target category.
     * Fails when none exists, or when more than one exists (the original
     * code asserted {@code isNotEmpty} on an already-non-empty list there,
     * so the duplicate-config error could never fire).
     */
    private ArcBatch requireSingleBatch(long targetCategoryId) {
        LambdaQueryWrapper<ArcBatch> arcBatchLambdaQueryWrapper = new LambdaQueryWrapper<>();
        arcBatchLambdaQueryWrapper.eq(ArcBatch::getTargetCategoryId, targetCategoryId);
        List<ArcBatch> arcBatches = this.arcBatchService.list(arcBatchLambdaQueryWrapper);
        ApiAssert.isNotEmpty("该目标门类没有配置批量关联", arcBatches);
        if (arcBatches.size() > 1) {
            ApiAssert.failure("该目标门类存在多条批量关联信息，请保留一条批量关联信息");
        }
        return arcBatches.get(0);
    }

    /**
     * Loads the field-mapping rows of a batch and validates that every row
     * has both a source and a target field id.
     */
    private List<ArcBatchRef> requireBatchRefs(Long batchId) {
        LambdaQueryWrapper<ArcBatchRef> arcBatchRefLambdaQueryWrapper = new LambdaQueryWrapper<>();
        arcBatchRefLambdaQueryWrapper.eq(ArcBatchRef::getArcBatchId, batchId);
        List<ArcBatchRef> refs = this.list(arcBatchRefLambdaQueryWrapper);
        ApiAssert.isNotEmpty("该目标门类批量关联信息没有配置字段", refs);
        //校验批量关联配置 源门类字段与目标门类字段是否都不为空
        for (ArcBatchRef ref : refs) {
            if (FlymeUtils.isEmpty(ref.getSrcFieldId()) || FlymeUtils.isEmpty(ref.getTargetFieldId())) {
                ApiAssert.failure("该目标门类批量关联信息字段映射有问题");
            }
        }
        return refs;
    }

    /** Loads field definitions by id, failing with {@code emptyMessage} when none found. */
    private List<ArcField> requireFields(List<Long> fieldIds, String emptyMessage) {
        LambdaQueryWrapper<ArcField> fieldQueryWrapper = new LambdaQueryWrapper<>();
        fieldQueryWrapper.in(ArcField::getFieldId, fieldIds);
        List<ArcField> fields = arcFieldService.list(fieldQueryWrapper);
        ApiAssert.isNotEmpty(emptyMessage, fields);
        return fields;
    }

    /**
     * Appends one equality (or IS NULL) condition for a source field to the
     * parent-lookup query. Blob fields are matched inside the JSON "expand"
     * column; regular fields are matched against their own column, with the
     * value coerced to the entity field's Java type when known.
     */
    private void genBatchRefSrcQuery(QueryWrapper<ArcInfo> srcQueryWrapper, ArcField srcField, Object value) {
        if (ArchiveEnumInteger.IS_TRUE.getCode().equals(srcField.getIsBlob())) {
            if (FlymeUtils.isEmpty(value)) {
                srcQueryWrapper.isNull(getExpandColumn(srcField.getFieldName()));
            } else {
                srcQueryWrapper.eq(getExpandColumn(srcField.getFieldName()), value.toString());
            }

        } else {
            Field[] declaredFields = ArcInfo.class.getDeclaredFields();
            Map<String, Field> declaredFieldMap =
                    Arrays.stream(declaredFields).collect(Collectors.toMap(Field::getName, f -> f));
            String fieldType = null;
            if (declaredFieldMap.containsKey(srcField.getFieldName())) {
                fieldType = declaredFieldMap.get(srcField.getFieldName()).getType().getSimpleName();
            }
            if (FlymeUtils.isEmpty(fieldType)) {
                // Field not declared on the entity: fall back to the configured data type.
                fieldType = srcField.getDataType().intValue() == 1 ? "String" : "Integer";
            }
            if (FlymeUtils.isEmpty(value)) {
                srcQueryWrapper.isNull(srcField.getFieldName());
            } else {
                srcQueryWrapper.eq(srcField.getFieldName(), arcInfoService.changeFieldValueType(fieldType, value));
            }
        }

    }

    /**
     * Hook before entity-map listing: selects the mapping columns and joins
     * ArcField twice (as srcField / targetField) to expose the Chinese
     * names and data types of both sides.
     */
    @Override
    public ResultBody beforeListEntityMap(CriteriaQuery<ArcBatchRef> cq, ArcBatchRef t, EntityMap requestMap) {
        ApiAssert.isNotEmpty("批量关联接口ID不能为空", t.getArcBatchId());
        cq.eq(ArcBatchRef.class, "arcBatchId", t.getArcBatchId());
        cq.select(ArcBatchRef.class, "srcFieldId");
        cq.select(ArcBatchRef.class, "targetFieldId");
        cq.select(ArcBatchRef.class, "arcBatchId");
        cq.select(ArcBatchRef.class, "batchRefId");

        JoinBean srcjoin = cq.createJoin(ArcField.class);
        srcjoin.setMainField("srcFieldId");
        srcjoin.setJoinField("fieldId");
        srcjoin.setJoinAlias("srcField");
        cq.addSelect("srcField.fieldCnName as srcFieldCnName");
        cq.addSelect("srcField.dataType as srcDataType");

        JoinBean targetJoin = cq.createJoin(ArcField.class);
        targetJoin.setMainField("targetFieldId");
        targetJoin.setJoinField("fieldId");
        targetJoin.setJoinAlias("targetField");
        cq.addSelect("targetField.fieldCnName as targetFieldCnName");
        cq.addSelect("targetField.dataType as targetDataType");

        return ResultBody.ok();
    }

    /**
     * Builds the SQL expression that extracts {@code column} from the JSON
     * "expand" column, choosing the syntax of the active datasource.
     */
    private String getExpandColumn(String column) {
        //人大金仓数据源
        if (FlymeUtils.isNotEmpty(dataSourceUrl) && dataSourceUrl.indexOf("postgresql") > -1) {
            return "json_extract_path_text(expand::JSON,'" + column + "')";
            //瀚高数据源
        } else if (FlymeUtils.isNotEmpty(dataSourceUrl) && dataSourceUrl.indexOf("highgo") > -1) {
            return "json_extract_path_text(expand::JSON,'" + column + "')";
        } else {//mysql数据源
            return "json_extract(expand,\"$." + column + "\")";
        }
    }
}
