package com.gaswell.controller;

import cn.hutool.core.date.DateUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.gaswell.aop.annotation.JHPermission;
import com.gaswell.aop.annotation.RedisCache;
import com.gaswell.aop.annotation.UserInfo;
import com.gaswell.common.log.LogAnnotation;
import com.gaswell.entity.Qba01;
import com.gaswell.entity.Qba01M;
import com.gaswell.entity.Qfc01;
import com.gaswell.pojo.ReciveCycleData;
import com.gaswell.service.*;
import com.gaswell.utils.DateUtils;
import com.gaswell.utils.ObjectUtils;
import com.gaswell.utils.RedisEnumeration;
import com.gaswell.utils.StatusDetect;
import com.gaswell.vo.*;
import com.xiaow.utils.redis.RedisUtils;
import io.netty.handler.codec.redis.RedisEncoder;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.apache.commons.lang.StringUtils;
import org.apache.ibatis.session.ResultContext;
import org.apache.ibatis.session.ResultHandler;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.web.bind.annotation.*;

import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.stream.Collectors;

/**
 * @ClassName Qba01MController
 * @Author xiaow
 * @DATE 2023/2/21 15:32
 **/


@RestController
@RequestMapping("/qba01m")
@Api(tags = "采气井日数据mysql 前端控制器   mysql数据库")
public class Qba01MController {

    @Autowired
    private IQba01Service qba01Service;
    @Autowired
    private IQba01MService qba01mService;

    @Autowired
    RedisTemplate redisTemplate;

    @Autowired
    RedisUtils redisUtils;

    /**
     * Queries daily gas-well data from MySQL for the given period, runs the per-date
     * diagnosis on worker threads, flags rows with missing/zero key measurements,
     * sorts by date and caches the result list (plus paging metadata) in Redis for one day.
     * Subsequent identical queries are served straight from the cache.
     *
     * @param start  period start date (string form expected by the service layer)
     * @param end    period end date
     * @param type   query type passed through to the service
     * @param jh     well number filter; defaults to a single space (no filter)
     * @param sort   1 = ascending by date, anything else = descending
     * @param current/size paging; -1 means "no paging"
     * @param sfsjh  pass-through flag, defaults to 1
     * @param token  auth token header, also used by the service for permission scoping
     * @param ywbhs/zwbhs optional business/org code filters
     * @return Result carrying the diagnosed rows, total row count and current page
     */
    @GetMapping("getByPropertiesDirect")
    @ApiOperation(value = "直接查询mysql中的气井日数据并作诊断 多线程")
    @LogAnnotation(module = "采气井日数据mysql", operator = "直接查询mysql中的气井日数据并作诊断 多线程")
    @JHPermission
    public Result getByPropertiesDirectUpdate(String start, String end, String type,
                                              @RequestParam(required = false, defaultValue = " ") String jh,
                                              int sort,
                                              @RequestParam(required = false, defaultValue = "-1") int current,
                                              @RequestParam(required = false, defaultValue = "-1") int size,
                                              @RequestParam(required = false, defaultValue = "1") Integer sfsjh,
                                              @RequestHeader("token") String token,
                                              @RequestParam(required = false) List<String> ywbhs,
                                              @RequestParam(required = false) List<String> zwbhs) throws ExecutionException, InterruptedException {
        String redisKey = RedisEnumeration.getQba01inspection(start, end, type, jh, sort, current, size, sfsjh);
        // Cache hit: serve the stored list directly.
        if (redisTemplate.hasKey(redisKey)) {
            return cachedResult(redisKey);
        }

        // Shared result container filled by the per-date worker threads; must be thread-safe.
        List<Qba01> finalList = Collections.synchronizedList(new LinkedList<>());
        // Daily records grouped by date string — one diagnosis task per date.
        Map<String, List<Qba01>> dateQba01Map = new HashMap<>();
        List<CompletableFuture> futureList = new ArrayList<>();

        // Fetch raw daily records plus paging metadata from the service.
        Map map = qba01Service.selectByMutliProperties(start, end, type, jh,
                sort, current, size, sfsjh, token, ywbhs, zwbhs);
        List<Qba01> list = (List<Qba01>) map.get("list");
        Long total = (Long) map.get("total");
        Long currentPage = (Long) map.get("current_page");

        // Group records by date.
        for (Qba01 qba01 : list) {
            dateQba01Map.computeIfAbsent(qba01.getRq().toString(), k -> new ArrayList<>()).add(qba01);
        }

        // Launch one async diagnosis task per date, then wait for all of them.
        for (Map.Entry<String, List<Qba01>> entry : dateQba01Map.entrySet()) {
            futureList.add(qba01mService.getJiYeAndDDThreadGroupByDate(entry.getValue(), entry.getKey(), finalList));
        }
        CompletableFuture.allOf(futureList.toArray(new CompletableFuture[0])).join();

        // Mark rows whose key measurements are missing or zero as abnormal.
        for (Qba01 qba01M : finalList) {
            qba01M.setDataStatus("数据正常");
            if (qba01M.getJkwd() == null || qba01M.getJkwd().equals("0")
                    || qba01M.getPjty() == null || qba01M.getPjty().equals("0")
                    || qba01M.getPjyy() == null || qba01M.getPjyy().equals("0")
                    || qba01M.getRcsl() == null || qba01M.getRcsl().equals("0")
                    || qba01M.getRcql() == null || qba01M.getRcql().equals("0")) {
                qba01M.setDataStatus("数据异常");
            }
        }

        // sort == 1 -> ascending by date, otherwise descending.
        Comparator<Qba01> byDate = Comparator.comparing(Qba01::getRq);
        finalList.sort(sort == 1 ? byDate : byDate.reversed());

        cacheResult(redisKey, finalList, total, currentPage);
        return new Result(true, 200, "success", finalList, total.intValue(), currentPage.intValue());
    }

    /**
     * Single-threaded variant: queries one page of daily data, computes the liquid-loading
     * ("积液") and water/sulfur ("带点") diagnosis inline for each row, then caches and
     * returns the page. Same caching contract as the multi-threaded endpoint.
     *
     * Parameters mirror {@link #getByPropertiesDirectUpdate}.
     */
    @GetMapping("getByPropertiesDirectSingleThread")
    @ApiOperation(value = "直接查询mysql中的气井日数据并作诊断")
    @LogAnnotation(module = "采气井日数据mysql", operator = "直接查询mysql中的气井日数据并作诊断 改进版")
    @JHPermission
    public Result getByPropertiesDirectSingleThread(String start, String end, String type,
                                                    @RequestParam(required = false, defaultValue = " ") String jh,
                                                    int sort,
                                                    @RequestParam(required = false, defaultValue = "-1") int current,
                                                    @RequestParam(required = false, defaultValue = "-1") int size,
                                                    @RequestParam(required = false, defaultValue = "1") Integer sfsjh,
                                                    @RequestHeader("token") String token,
                                                    @RequestParam(required = false) List<String> ywbhs,
                                                    @RequestParam(required = false) List<String> zwbhs) throws ExecutionException, InterruptedException {
        String redisKey = RedisEnumeration.getQba01inspection(start, end, type, jh, sort, current, size, sfsjh);
        if (redisTemplate.hasKey(redisKey)) {
            return cachedResult(redisKey);
        }

        // Fetch one page of diagnosed-view rows directly from the service.
        IPage page = qba01Service.getByPropertiesDirectSingleThread(DateUtils.stringToDate(start),
                end, type, jh, sort, current, size, sfsjh, token, ywbhs, zwbhs);
        Long total = page.getTotal();
        Long currentPage = page.getCurrent();
        List records = page.getRecords();

        ArrayList<Qba01Vo2> qba01Vo2s = new ArrayList<>(records.size());
        for (Object record : records) {
            Qba01Vo2 qba01 = (Qba01Vo2) record;
            // Well parameters fall back to typical defaults when the record lacks them:
            // g = gas relative density, d = tubing diameter, vg = critical velocity factor.
            double g = qba01.getG() == null ? 0.6 : qba01.getG().doubleValue();
            double d = qba01.getD() == null ? 62 : qba01.getD().doubleValue();
            double vg = qba01.getVg() == null ? 2.5 : qba01.getVg().doubleValue();

            // Missing measurements are treated as 0 for the diagnosis input.
            ReciveCycleData cycleData = new ReciveCycleData()
                    .setJkwd(qba01.getJkwd() == null ? 0 : qba01.getJkwd().doubleValue())
                    .setTgyl(qba01.getPjty() == null ? 0 : qba01.getPjty().doubleValue())
                    .setYgyl(qba01.getPjyy() == null ? 0 : qba01.getPjyy().doubleValue())
                    .setWsyl(qba01.getWsyl() == null ? 0 : qba01.getWsyl().doubleValue())
                    .setCqill(qba01.getRcql() == null ? 0 : qba01.getRcql().doubleValue());
            qba01.setJy(StatusDetect.isJiYe(cycleData, d, g, vg))
                    .setDd(StatusDetect.isShuiHeWu(cycleData, g));
            qba01Vo2s.add(qba01);
        }

        cacheResult(redisKey, qba01Vo2s, total, currentPage);
        return new Result(true, 200, "success", qba01Vo2s, total.intValue(), currentPage.intValue());
    }

    /**
     * Aggregates the last 30 days of daily data for the caller's scope.
     * Caching is handled by the {@code @RedisCache} aspect (keyed on {@code depart}).
     *
     * @param depart organisational unit (cache key component, resolved by the aspect)
     * @param ywbhs  optional business code filters
     * @param zwbhs  optional org code filters
     * @param current/size unused here; kept for interface compatibility.
     *               defaultValue added because a primitive int with required=false
     *               and no default causes a binder error when the param is omitted.
     * @return Result with one Qba01Count per well/unit
     */
    @GetMapping("countMonthData")
    @ApiOperation(value = "查询对应depart的30天的日数据并作统计")
    @LogAnnotation(module = "采气井日数据mysql", operator = "查询对应depart的30天的日数据并作统计")
    @JHPermission
    @UserInfo
    @RedisCache(keyprefix = RedisEnumeration.QBA01COUNT, argsIndexs = {0})
    public Result countMonthData(@RequestParam(required = false) String depart,
                                 @RequestParam(required = false) List<String> ywbhs,
                                 @RequestParam(required = false) List<String> zwbhs,
                                 @RequestParam(required = false, defaultValue = "-1") int current,
                                 @RequestParam(required = false, defaultValue = "-1") int size) {
        // Window start: 30 days before now.
        Date start = new Date(System.currentTimeMillis() - DateUtils.DAY * 30);
        LambdaQueryWrapper<Qba01> wrapper = new LambdaQueryWrapper<>();
        wrapper.ge(Qba01::getRq, start);
        // Total rows in the window, used by the service to split the counting work.
        int total = qba01Service.count(wrapper);
        // Handles the case where Redis has not been refreshed yet: compute on demand
        // (multi-threaded inside the service) and let the caching aspect store it.
        List<Qba01Count> result = qba01mService.countQba01(ywbhs, zwbhs, total, start);
        return new Result(true, 200, "yes", result, result.size(), 1);
    }

    /**
     * Writes a result list to Redis under a mutex lock, retrying while another
     * thread holds the lock. Exits immediately if the key is already populated
     * (including when another thread populated it while this one was blocked).
     */
    public void pushQba01(String key, List result) {
        if (redisTemplate.hasKey(key)) {
            return;
        }
        boolean locked = false;
        try {
            // Acquire the mutex, then re-check the key (double-checked caching).
            locked = redisUtils.tryLock(RedisEnumeration.MUTEXLOCK + key);
            if (locked) {
                if (!redisTemplate.hasKey(key)) {
                    redisTemplate.opsForList().rightPushAll(key, result);
                }
            } else {
                // Lock held elsewhere: back off briefly and retry.
                Thread.sleep(50);
                pushQba01(key, result);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: only release the lock if this thread actually acquired it.
            // Previously unlock ran unconditionally, so a thread that failed tryLock
            // could release a lock held by another thread.
            if (locked) {
                redisUtils.unlock(RedisEnumeration.MUTEXLOCK + key);
            }
        }
    }

    /**
     * Reads a cached result: the last element of the Redis list is the paging
     * metadata map; everything before it is the data rows.
     */
    private Result cachedResult(String redisKey) {
        List range = redisTemplate.opsForList().range(redisKey, 0, -1);
        Map meta = (Map) range.get(range.size() - 1);
        return new Result(true, 200, "success", range.subList(0, range.size() - 1),
                (int) meta.get("totalData"), (int) meta.get("totalPage"));
    }

    /**
     * Caches data rows followed by a paging-metadata map, with a 1-day TTL.
     * Skips caching entirely for empty results: rightPushAll rejects an empty
     * collection, which previously turned an empty query into a 500.
     */
    private void cacheResult(String redisKey, List<?> data, Long total, Long currentPage) {
        if (data.isEmpty()) {
            return;
        }
        HashMap<String, Integer> meta = new HashMap<>();
        meta.put("totalData", total.intValue());
        meta.put("totalPage", currentPage.intValue());
        redisTemplate.opsForList().rightPushAll(redisKey, data);
        redisTemplate.opsForList().rightPush(redisKey, meta);
        redisTemplate.expire(redisKey, Duration.ofDays(1));
    }
}
