package com.fanxuankai.boot.upload;

import cn.hutool.crypto.digest.MD5;
import cn.hutool.extra.spring.SpringUtil;
import com.alibaba.fastjson.JSON;
import com.fanxuankai.boot.upload.autoconfigure.UploadProperties;
import com.fanxuankai.boot.upload.dao.FileDao;
import com.fanxuankai.boot.upload.dao.JobDao;
import com.fanxuankai.boot.upload.dao.ReadItemDao;
import com.fanxuankai.boot.upload.dao.WriteItemDao;
import com.fanxuankai.boot.upload.domain.File;
import com.fanxuankai.boot.upload.domain.Job;
import com.fanxuankai.boot.upload.domain.ReadItem;
import com.fanxuankai.boot.upload.domain.WriteItem;
import com.fanxuankai.boot.upload.mapper.ReadItemMapper;
import com.fanxuankai.commons.extra.spring.util.GenericTypeUtils;
import com.fanxuankai.commons.util.DateUtils;
import com.fanxuankai.commons.util.IdUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * @author fanxuankai
 */
@Component
public class Uploader implements ApplicationRunner {
    private static final Logger LOGGER = LoggerFactory.getLogger(Uploader.class);
    @Resource
    private JobDao jobDao;
    @Resource
    private FileDao fileDao;
    @Resource
    private WriteItemDao writeItemDao;
    @Resource
    private ReadItemMapper readItemMapper;
    @Resource
    private ReadItemDao readItemDao;
    @Resource
    private UploadProperties properties;
    @Resource
    private ScheduledThreadPoolExecutor scheduled;
    // Registered handlers keyed by UploadHandler#getName(); populated once in run().
    private Map<String, UploadHandler<Object, Object>> handlerCache;

    /**
     * Persists the uploaded file and its parsed items as a new job, then schedules
     * asynchronous processing of the items.
     *
     * @param multipartFile the uploaded file
     * @param handler       name of a registered {@link UploadHandler}
     * @return the id of the created job
     * @throws NullPointerException if no handler is registered under {@code handler}
     */
    @Transactional(rollbackFor = {Exception.class})
    public Long createJob(MultipartFile multipartFile, String handler) {
        // Fail fast with a descriptive message instead of an anonymous NPE in newJob().
        UploadHandler<Object, Object> uploadHandler = Objects.requireNonNull(
                handlerCache.get(handler), "no upload handler registered: " + handler);
        Job job = newJob(uploadHandler, handler);
        List<ReadItem> items = null;
        boolean failed = false;
        try {
            File file = convertFile(multipartFile);
            job.setFileId(file.getId());
            items = readItems(multipartFile, uploadHandler, job);
            fileDao.save(file);
            readItemDao.saveBatch(items);
        } catch (Exception e) {
            String message = "文件读取异常";
            job.setStatus(Status.FAILED.name());
            job.setMessage(message);
            failed = true;
            LOGGER.error(message + job.getId(), e);
        }
        // The job row is saved in both outcomes so the failure is visible to clients.
        jobDao.save(job);
        uploadHandler.onCreatedJob(job);
        if (failed) {
            uploadHandler.onFailedJob(job);
        } else {
            handle(job, items);
        }
        return job.getId();
    }

    /**
     * @param handlerName candidate handler name
     * @return whether a handler with the given name is registered
     */
    boolean canHandle(String handlerName) {
        return handlerCache.containsKey(handlerName);
    }

    /**
     * Builds a new PROCESSING job for the given handler with a fresh id,
     * start time and expected end time derived from the handler's timeout.
     */
    private Job newJob(UploadHandler<?, ?> uploadHandler, String name) {
        Date now = new Date();
        Job job = new Job();
        job.setId(IdUtils.nextId());
        job.setTimeout(uploadHandler.timeout());
        job.setCreateTime(now);
        job.setExpectEndTime(DateUtils.plusMillisecond(now, uploadHandler.timeout()));
        job.setStartTime(now);
        job.setStatus(Status.PROCESSING.name());
        job.setHandler(name);
        return job;
    }

    /**
     * Parses the uploaded file with the handler's {@link Reader} and converts each
     * parsed item into a numbered {@link ReadItem} (content stored as JSON).
     *
     * @throws IOException if the file cannot be read
     */
    private List<ReadItem> readItems(MultipartFile multipartFile, UploadHandler<Object, Object> uploadHandler,
                                     Job job) throws IOException {
        // Resolve the handler's concrete input type ("I") so the reader can bind rows to it.
        Class<Object> itemType = GenericTypeUtils.getGenericType(uploadHandler.getClass(),
                UploadHandler.class, "I");
        Reader<Object> reader = uploadHandler.reader();
        List<?> readItems = reader.readItems(multipartFile, itemType);
        List<ReadItem> items = new ArrayList<>(readItems.size());
        for (int i = 0; i < readItems.size(); i++) {
            ReadItem item = new ReadItem();
            item.setId(IdUtils.nextId());
            item.setNumber(i);
            item.setContent(JSON.toJSONString(readItems.get(i)));
            item.setJobId(job.getId());
            items.add(item);
        }
        return items;
    }

    /**
     * Converts the multipart upload into a persistable {@link File} entity,
     * including an MD5 digest of the payload.
     *
     * @throws IOException if the payload cannot be read
     */
    private File convertFile(MultipartFile multipartFile) throws IOException {
        // Read the payload exactly once; getBytes() may copy or re-read the underlying resource.
        byte[] bytes = multipartFile.getBytes();
        File file = new File();
        file.setId(IdUtils.nextId());
        file.setName(multipartFile.getOriginalFilename());
        file.setSize(multipartFile.getSize());
        file.setContent(bytes);
        file.setMime(multipartFile.getContentType());
        file.setMd5(MD5.create().digestHex(bytes));
        return file;
    }

    /**
     * Asynchronously processes a job's unprocessed items, then either completes
     * the job (through the Spring proxy, so the transaction applies) or marks it failed.
     *
     * @param notProcessed items to process, or {@code null} to load them from the database
     */
    private void handle(Job job, List<ReadItem> notProcessed) {
        scheduled.execute(() -> {
            UploadHandler<Object, Object> uploadHandler = handlerCache.get(job.getHandler());
            // Only handle items not yet processed; orElseGet keeps the DB query lazy so
            // it does not run when the caller already supplied the items.
            List<ReadItem> readItems = Optional.ofNullable(notProcessed)
                    .orElseGet(() -> readItemMapper.listNotProcessed(job.getId()));
            List<Object> output = new ArrayList<>(readItems.size());
            List<WriteItem> writeItems = new ArrayList<>(readItems.size());
            Class<?> itemType = GenericTypeUtils.getGenericType(uploadHandler.getClass(), UploadHandler.class, "I");
            boolean anyFailed = false;
            for (ReadItem readItem : readItems) {
                try {
                    Object process = uploadHandler.process(JSON.parseObject(readItem.getContent(), itemType));
                    output.add(process);
                    WriteItem writeItem = new WriteItem();
                    writeItem.setId(IdUtils.nextId());
                    writeItem.setNumber(readItem.getNumber());
                    writeItem.setJobId(readItem.getJobId());
                    writeItem.setReadItemId(readItem.getId());
                    writeItem.setContent(JSON.toJSONString(process));
                    writeItems.add(writeItem);
                } catch (Exception e) {
                    LOGGER.error("数据处理异常" + readItem.getId(), e);
                    readItemDao.processFailed(readItem.getId(), e.getMessage());
                    anyFailed = true;
                    if (uploadHandler.strategy() == Strategy.ALL_SUCCESS) {
                        // Under ALL_SUCCESS a single failure fails the whole job.
                        break;
                    }
                }
            }
            // BUG FIX: the old success flag was reset to true on every successful item,
            // so the job outcome depended on whether the *last* item happened to succeed.
            // Complete when nothing failed, or when the strategy tolerates partial
            // success and at least one item was processed successfully.
            boolean complete = !anyFailed
                    || (uploadHandler.strategy() != Strategy.ALL_SUCCESS && !writeItems.isEmpty());
            if (complete) {
                // Call through the Spring proxy so @Transactional on complete(...) is honored.
                SpringUtil.getBean(Uploader.class).complete(job, uploadHandler, output, writeItems);
            } else {
                if (jobDao.failed(job.getId())) {
                    uploadHandler.onFailedJob(job);
                }
            }
        });
    }

    /**
     * Marks the job completed, hands the processed output to the handler and
     * persists the write items in one transaction.
     * <p>Public on purpose: proxy-based Spring AOP ignores {@code @Transactional}
     * on non-public methods, so the previous package-private visibility ran
     * this method without a transaction.
     */
    @Transactional(rollbackFor = {Exception.class})
    public void complete(Job job, UploadHandler<Object, Object> uploadHandler,
                         List<Object> output, List<WriteItem> writeItems) {
        // jobDao.complete acts as an idempotency guard against concurrent schedulers.
        if (jobDao.complete(job.getId())) {
            uploadHandler.write(output);
            writeItemDao.saveBatch(writeItems);
            uploadHandler.onCompletedJob(job);
        }
    }

    /**
     * Bootstraps the handler cache and the two background schedules:
     * re-queueing timed-out jobs, and dispatching newly uploaded jobs.
     */
    @Override
    @SuppressWarnings("unchecked")
    public void run(ApplicationArguments args) {
        // toMap throws on duplicate handler names, which surfaces misconfiguration at startup.
        handlerCache = SpringUtil.getBeansOfType(UploadHandler.class)
                .values()
                .stream()
                .collect(Collectors.toMap(UploadHandler::getName, o -> o));
        // Timed-out jobs re-enter the processing flow.
        scheduled.scheduleWithFixedDelay(() -> jobDao.listTimeout()
                        .stream()
                        .filter(jobDao::resetTimeout)
                        .filter(jobDao::lock)
                        .forEach(job -> handle(job, null)),
                0, properties.getTimeoutInterval(), TimeUnit.MILLISECONDS);
        // Dispatch pending (uploaded) jobs.
        scheduled.scheduleWithFixedDelay(() -> {
                    Set<String> skipHandlers = new HashSet<>();
                    for (Job job : jobDao.listUploaded()) {
                        // Skip-check before any other work for handlers already known to be busy.
                        if (skipHandlers.contains(job.getHandler())) {
                            continue;
                        }
                        UploadHandler<Object, Object> uploadHandler = handlerCache.get(job.getHandler());
                        if (!jobDao.lock(job)) {
                            // A non-parallel handler that is locked blocks its remaining jobs this round.
                            if (!uploadHandler.parallel()) {
                                skipHandlers.add(job.getHandler());
                            }
                            continue;
                        }
                        handle(job, null);
                    }
                },
                0, properties.getProcessInterval(), TimeUnit.MILLISECONDS);
    }
}
