package com.meida.module.arc.provider.service.impl;

import cn.hutool.core.util.ObjectUtil;
import com.baomidou.dynamic.datasource.annotation.DS;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.meida.common.base.utils.FlymeUtils;
import com.meida.common.mybatis.model.ResultBody;
import com.meida.common.utils.DateUtil;
import com.meida.common.utils.RedisUtils;
import com.meida.module.arc.client.entity.ArcDestory;
import com.meida.module.arc.client.entity.ArcDestorySetting;
import com.meida.module.arc.client.entity.ArcInfo;
import com.meida.module.arc.client.enums.ArcDestoryStatusEnum;
import com.meida.module.arc.client.enums.ArchiveEnumInteger;
import com.meida.module.arc.provider.service.ArcDestoryService;
import com.meida.module.arc.provider.service.ArcInfoService;
import com.meida.module.arc.provider.service.ArcOriginalService;
import com.meida.module.arc.provider.service.SyncService;
import com.meida.module.file.provider.oss.client.MioClient;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.AsyncResult;
import org.springframework.stereotype.Service;

import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Future;
import java.util.stream.Collectors;

/**
 * <b>功能名：SyncServiceImpl</b><br>
 * <b>说明：</b><br>
 * <b>著作权：</b> Copyright (C) 2021 HUIFANEDU  CORPORATION<br>
 * <b>修改履历：</b>
 *
 * @author 2022-02-24 jiabing
 */
@Service
@DS("sharding")
@Slf4j
public class SyncServiceImpl implements SyncService {

    @Autowired
    @Lazy
    private ArcInfoService arcInfoService;

    @Autowired
    @Lazy
    private ArcDestoryService arcDestoryService;

    @Autowired
    @Lazy
    private ArcOriginalService arcOriginalService;

    @Autowired
    private RedisUtils redisUtils;

    @Autowired
    private MioClient mioClient;

    /**
     * Defensively copies the caller-supplied query parameters and stashes the calling
     * user's id in Redis keyed by the current (worker) thread id, so that downstream
     * data-permission filters running on this @Async thread can resolve the user.
     *
     * @param map caller-supplied parameters; may be {@code null}
     * @return a mutable copy of {@code map}, or {@code null} when {@code map} is null
     */
    private Map<Object, Object> copyParamsAndStashUser(Map map) {
        if (map == null) {
            // Previously this was checked AFTER "new HashMap()" so the check was dead
            // code and a null map NPE'd at putAll(); fail soft here instead.
            return null;
        }
        Map<Object, Object> param = new HashMap<>(map);
        Object userId = param.get("tempUserId_");
        if (userId != null) {
            // 60s TTL: just long enough for the query on this thread to complete.
            redisUtils.set(String.valueOf(Thread.currentThread().getId()), userId.toString(), 60);
        }
        return param;
    }

    /**
     * Asynchronously checks whether the given category contains any archive matching
     * the full-text query parameters.
     *
     * @param categoryId category to probe
     * @param queryStr   full-text query string (currently unused; kept for interface compatibility)
     * @param map        query parameters, including the temp user id under "tempUserId_"
     * @return the categoryId when at least one record matches, otherwise 0L; 0L on any error
     */
    @Override
    @Async("arcInfoQueryExecutor")
    public Future<Long> categoryHasArcQuery(Long categoryId, String queryStr, Map map) {
        try {
            Map<Object, Object> param = copyParamsAndStashUser(map);
            if (param == null) {
                return new AsyncResult<>(0L);
            }
            // Probe with the smallest possible page: only the total count matters.
            param.put("page", 1);
            param.put("limit", 1);
            param.put("categoryId", categoryId);
            ResultBody resultBody = arcInfoService.pageList(param);
            long total = 0L;
            if (FlymeUtils.isNotEmpty(resultBody.getData())) {
                Map data = (Map) resultBody.getData();
                total = Long.parseLong(data.get("total").toString());
            }
            return new AsyncResult<>(total > 0 ? categoryId : 0L);
        } catch (Exception e) {
            log.error("全文检索门类id=" + categoryId + "查询异常", e);
            return new AsyncResult<>(0L);
        }
    }

    /**
     * Scheduled-style task: permanently removes original files of destroyed archives
     * in the fonds (全宗) configured by {@code setting}, when thorough destruction is
     * enabled and the configured auto-destroy time slot matches now.
     *
     * @param setting per-fonds destruction settings (thorough flag, hold-back days,
     *                auto-destroy type/time)
     */
    @Override
    @Async("arcInfoQueryExecutor")
    public void destoryArcTask(ArcDestorySetting setting) {
        try {
            if (!ArchiveEnumInteger.IS_TRUE.getCode().equals(setting.getIsThorough())) {
                return;
            }
            Integer holdBackDay = setting.getHoldBackDay();
            Integer autoDestoryTime = setting.getAutoDestoryTime();
            // 自动清除类型 1=按月 2=按天 (1=monthly, 2=daily)
            Integer autoDestoryType = setting.getAutoDestoryType();
            if (!FlymeUtils.allNotNull(holdBackDay, autoDestoryTime, autoDestoryType)) {
                return;
            }
            boolean canDel = false;
            if (1 == autoDestoryType.intValue()
                    && autoDestoryTime.equals(Integer.parseInt(DateUtil.date2Str(new Date(), "MM")))) {
                canDel = true;
            }
            // NOTE(review): type 2 is documented as "by day" but compares against the
            // current HOUR ("HH") — confirm whether this is intentional (hourly slot
            // within the day) or should compare "dd".
            if (2 == autoDestoryType.intValue()
                    && autoDestoryTime.equals(Integer.parseInt(DateUtil.date2Str(new Date(), "HH")))) {
                canDel = true;
            }
            if (!canDel) {
                return;
            }
            // Select destroyed (non-thorough) records within this fonds.
            // NOTE(review): gt() keeps records verified AFTER (now - holdBackDay days),
            // i.e. the most recent ones. For a hold-back period one would expect lt()
            // (records whose retention window has expired) — confirm against the spec.
            LambdaQueryWrapper<ArcDestory> queryWrapper = new LambdaQueryWrapper<>();
            queryWrapper.eq(ArcDestory::getStatus, ArcDestoryStatusEnum.ARC_STATUS_3)
                    .eq(ArcDestory::getDestoryType, ArchiveEnumInteger.IS_FALSE.getCode())
                    .eq(ArcDestory::getQzId, setting.getQzId())
                    .gt(ArcDestory::getVerifyTime,
                            DateUtil.date2Str(new Date(System.currentTimeMillis() - holdBackDay * 1000 * 60 * 60 * 24L),
                                    "yyyyMMdd"));
            List<ArcDestory> arcDestories = this.arcDestoryService.list(queryWrapper);
            if (FlymeUtils.isEmpty(arcDestories)) {
                return;
            }
            // Group archive ids per category as a comma-joined string.
            // (The previous Collectors.toMap had no merge function, so a duplicate
            // arcInfoId threw IllegalStateException and aborted the whole cleanup.)
            Map<Long, String> categoryIdToArcIds = arcDestories.stream()
                    .collect(Collectors.groupingBy(ArcDestory::getCategoryId,
                            Collectors.mapping(d -> d.getArcInfoId().toString(),
                                    Collectors.joining(","))));
            // Remove the original files batch-per-category.
            categoryIdToArcIds.forEach((categoryId, arcInfoIds) -> {
                Map<String, Object> param = new HashMap<>();
                param.put("categoryId", categoryId);
                param.put("arcInfoIds", arcInfoIds);
                arcOriginalService.removeOriginal(param);
            });
        } catch (Exception e) {
            log.error("定时清除销毁库原文异常", e);
        }
    }

    /**
     * Asynchronously counts archives of a category stored in the given storeroom.
     *
     * @param categoryId category id
     * @param storeId    storeroom id
     * @return the archive count (0L when none)
     */
    @Override
    @Async("arcInfoQueryExecutor")
    public Future<Long> getCategoryHasStoreRoom(Long categoryId, Long storeId) {
        LambdaQueryWrapper<ArcInfo> queryWrapper = new LambdaQueryWrapper<>();
        queryWrapper.eq(ArcInfo::getCategoryId, categoryId)
                .eq(ArcInfo::getStoreRoomId, storeId);
        Long count = this.arcInfoService.count(queryWrapper);
        // BUGFIX: the empty branch previously returned new AsyncResult(0) — an Integer
        // smuggled into a Future<Long> through the raw type, causing a
        // ClassCastException in callers. Must be the long literal 0L.
        return new AsyncResult<>(FlymeUtils.isEmpty(count) ? 0L : count);
    }

    /**
     * Asynchronously deletes the given object keys from OSS (MinIO). No-op for an
     * empty or null list.
     *
     * @param objects object keys to remove
     */
    @Override
    @Async("arcInfoQueryExecutor")
    public void removeOriginalOss(List<String> objects) {
        if (FlymeUtils.isNotEmpty(objects)) {
            mioClient.removeObjects(objects);
        }
    }

    /**
     * Asynchronously checks whether the given category has any archive in the
     * recycle bin (isRecycle = 1) with the status supplied in {@code map}.
     *
     * @param categoryId category to probe
     * @param o          unused placeholder kept for interface compatibility
     * @param map        query parameters ("status", "tempUserId_")
     * @return the categoryId when a match exists, otherwise 0L; 0L on any error
     */
    @Override
    @Async("arcInfoQueryExecutor")
    public Future<Long> categoryHasRecycleArcQuery(Long categoryId, Object o, Map map) {
        try {
            Map<Object, Object> param = copyParamsAndStashUser(map);
            if (param == null) {
                return new AsyncResult<>(0L);
            }
            long total = arcInfoService.count(new QueryWrapper<ArcInfo>()
                    .eq("categoryId", categoryId)
                    .eq("isRecycle", 1)
                    .eq("status", param.get("status")));
            return new AsyncResult<>(total > 0 ? categoryId : 0L);
        } catch (Exception e) {
            log.error("全文检索门类id=" + categoryId + "查询异常", e);
            return new AsyncResult<>(0L);
        }
    }

    /**
     * Checks whether the given category has any live archive (not recycled, not
     * destroyed), optionally filtered by status when present in {@code map}.
     * <p>
     * NOTE(review): unlike its sibling query methods this one carries no
     * {@code @Async("arcInfoQueryExecutor")} annotation and runs synchronously on the
     * caller's thread — confirm whether that is intentional.
     *
     * @param categoryId category to probe
     * @param o          unused placeholder kept for interface compatibility
     * @param map        query parameters (optional "status", "tempUserId_")
     * @return the categoryId when a match exists, otherwise 0L; 0L on any error
     */
    @Override
    public Future<Long> categoryHasCollectArcQuery(Long categoryId, Object o, Map map) {
        try {
            Map<Object, Object> param = copyParamsAndStashUser(map);
            if (param == null) {
                return new AsyncResult<>(0L);
            }
            long total = arcInfoService.count(new QueryWrapper<ArcInfo>()
                    .eq("categoryId", categoryId)
                    .eq("isRecycle", 0)
                    .eq("isDestory", 0)
                    .eq(ObjectUtil.isNotNull(param.get("status")), "status", param.get("status")));
            return new AsyncResult<>(total > 0 ? categoryId : 0L);
        } catch (Exception e) {
            log.error("全文检索门类id=" + categoryId + "查询异常", e);
            return new AsyncResult<>(0L);
        }
    }
}
