package com.smsc.headend.task.engine.service.task.impl;

import cn.hutool.json.JSONUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.smsc.headend.common.enums.DeviceType;
import com.smsc.headend.module.asset.entity.ComEntity;
import com.smsc.headend.module.report.entity.LogScheduleTask;
import com.smsc.headend.module.report.entity.LogScheduleTaskDevice;
import com.smsc.headend.module.task.consts.TaskKafkaTopic;
import com.smsc.headend.module.task.dto.Task;
import com.smsc.headend.task.engine.mapper.LogScheduleTaskMapper;
import com.smsc.headend.task.engine.service.FeignAssetManagementService;
import com.smsc.headend.task.engine.service.asset.AssetService;
import com.smsc.headend.task.engine.service.task.LogScheduleTaskDeviceService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.concurrent.TimeUnit;


/**
 * Schedule-task device log service.
 * <p>
 * Publishes per-device task status records to Kafka and decides whether a
 * scheduled-read window is already stale ("latest").
 *
 * @author wjh
 * @date 2022/3/4 13:45.
 */
@Service
public class LogScheduleTaskDeviceServiceImpl implements LogScheduleTaskDeviceService {

    // NOTE(review): left as a raw type on purpose — parameterizing the field
    // (e.g. KafkaTemplate<String, byte[]>) changes Spring's generic-aware bean
    // resolution and could break injection; confirm the configured template's
    // generics before tightening.
    @Autowired
    KafkaTemplate kafkaTemplate;
    @Autowired
    LogScheduleTaskMapper logScheduleTaskMapper;
    @Autowired
    AssetService assetService;

    /**
     * Caches task start time (tvStart) keyed by messageId so each device-level
     * message does not need its own DB lookup; entries idle out after 30 minutes.
     */
    private final Cache<String, Long> logDeviceTaskStartTimeCache =
            CacheBuilder.newBuilder().expireAfterAccess(30, TimeUnit.MINUTES).build();

    /**
     * Builds a {@link LogScheduleTaskDevice} status record for the task's target
     * device and publishes it as UTF-8 JSON bytes to
     * {@link TaskKafkaTopic#LOG_SCHEDULE_TASK_DEVICE_DATA_SAVE}.
     * Returns silently when the originating schedule-task row (and hence the
     * creation time) cannot be resolved.
     *
     * @param task       the task whose device status is being reported
     * @param status     status code to record
     * @param statusDesc status description code to record
     */
    @Override
    public void sendMessage(Task task, Integer status, Integer statusDesc) {
        Long createTime = getTaskCreateTime(task.getMessageId());
        if (createTime == null) {
            // No schedule-task row to correlate this device log with — drop it.
            return;
        }
        Long deviceId;
        Integer deviceType;
        if (null == task.getMeterId()) {
            // No meter target: report against the task's communication device.
            // Fetched lazily here — the com entity is not needed for meter tasks.
            ComEntity comEntity = assetService.getComById(task.getComId());
            deviceId = comEntity.getDeviceId();
            deviceType = comEntity.getComDeviceType().intValue();
        } else {
            deviceId = task.getMeterId();
            deviceType = DeviceType.METER.getId().intValue();
        }
        LogScheduleTaskDevice logScheduleTaskDevice = new LogScheduleTaskDevice();
        logScheduleTaskDevice.setMessageId(task.getMessageId());
        logScheduleTaskDevice.setDeviceType(deviceType);
        logScheduleTaskDevice.setDeviceId(deviceId);
        logScheduleTaskDevice.setStatus(status);
        logScheduleTaskDevice.setStatusDesc(statusDesc);
        logScheduleTaskDevice.setTvCreate(createTime);
        // tvEnd in epoch seconds, matching the tvStart/tvCreate unit.
        logScheduleTaskDevice.setTvEnd(System.currentTimeMillis() / 1000);
        // Explicit UTF-8 — getBytes() without a charset used the platform default,
        // which corrupts non-ASCII JSON payloads on non-UTF-8 hosts.
        kafkaTemplate.send(TaskKafkaTopic.LOG_SCHEDULE_TASK_DEVICE_DATA_SAVE,
                JSONUtil.toJsonStr(logScheduleTaskDevice).getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Resolves the task's start time (epoch seconds, per tvStart) for the given
     * messageId, consulting the local cache first and falling back to the DB.
     *
     * @param messageId schedule-task message id
     * @return the task's tvStart, or {@code null} when no matching row exists
     *         (or the row carries no start time)
     */
    private Long getTaskCreateTime(String messageId) {
        Long cachedTv = logDeviceTaskStartTimeCache.getIfPresent(messageId);
        if (cachedTv != null) {
            return cachedTv;
        }
        // Typed wrapper (was a raw LambdaQueryWrapper — unchecked warning).
        LambdaQueryWrapper<LogScheduleTask> queryWrapper =
                Wrappers.<LogScheduleTask>lambdaQuery().eq(LogScheduleTask::getMessageId, messageId);
        LogScheduleTask logScheduleTask = logScheduleTaskMapper.selectOne(queryWrapper);
        if (null == logScheduleTask) {
            return null;
        }
        Long tvStart = logScheduleTask.getTvStart();
        if (tvStart != null) {
            // Guard: Guava Cache.put rejects null values with an NPE.
            logDeviceTaskStartTimeCache.put(messageId, tvStart);
        }
        return tvStart;
    }

    /**
     * Decides whether a scheduled-read window is effectively over.
     * <p>
     * For scheduled reads, when {@code endTime < startTime + interval} the task
     * is considered a no-op: it ends immediately and its result is saved as
     * success. The 60-second allowance compensates for the 60 s added to the
     * start time when the schedule progress was generated.
     *
     * @param startDate window start; {@code null} yields {@code false}
     * @param endDate   window end; {@code null} yields {@code false}
     * @param interval  read interval in seconds; {@code null} yields {@code false}
     * @return {@code true} when the window cannot fit another interval
     */
    @Override
    public boolean isLatest(Date startDate, Date endDate, Integer interval) {
        if (null == startDate || null == endDate || null == interval) {
            return false;
        }
        // Primitive arithmetic (was boxed Long) in epoch seconds.
        long startTime = startDate.getTime() / 1000;
        long endTime = endDate.getTime() / 1000;
        return startTime - 60 + interval > endTime;
    }
}
