package com.bupt.ilink.service.studyaboutTeacherCourse;
import com.bupt.ilink.entity.Studyspace.StudyCourseFile;
import com.bupt.ilink.entity.Studyspace.StudyFileSource;
import com.bupt.ilink.mapper.StudyCourseFileMapper;
import com.bupt.ilink.mapper.StudyFileSourceMapper;
import com.bupt.ilink.service.OssService;
import com.bupt.ilink.utils.R;
import com.bupt.ilink.utils.SnowflakeIdGenerator;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

//@Service  //如果使用这个注解，会报错，因为这个类被spring管理了，需要bean注入，而基础的数据结构比如int不能简单的由构造函数参数注入，需要@Value 注解 和 配置类来注入
@Slf4j
public class uploadFileSource {
//    private final BlockingQueue<Runnable> uploadTaskQueue = new LinkedBlockingQueue<>();
    // 定义线程池，最大并发处理50个任务,并设置线程闲置的存活时间60秒，这里使用最大200个的有界队列，
//    private final ExecutorService executorService = new ThreadPoolExecutor(
//            20, 50, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(200)
//    );
    private final ThreadPoolExecutor taskExecutor;
//    private final BlockingQueue<Runnable> uploadTaskQueue;

    private final OssService ossService;

    private final StudyFileSourceMapper studyFileSourceMapper;

    private final StudyCourseFileMapper studyCourseFileMapper;

//    @Autowired
//    private RedisLockService redisLockService;

    public uploadFileSource(OssService ossService,
                            StudyFileSourceMapper studyFileSourceMapper,
                            StudyCourseFileMapper studyCourseFileMapper,
                            int corePoolSize,
                            int maxPoolSize,
                            long keepAliveSeconds,
                            int queueCapacity
    ) {
        this.ossService = ossService;
        this.studyFileSourceMapper = studyFileSourceMapper;
        this.studyCourseFileMapper = studyCourseFileMapper;
        // 动态传入配置参数
        this.taskExecutor = new ThreadPoolExecutor(corePoolSize, maxPoolSize, keepAliveSeconds, TimeUnit.SECONDS, new LinkedBlockingQueue<>( queueCapacity));
//        this.uploadTaskQueue = threadPoolConfig.uploadTaskQueue();
    }

    //上传文档
    public R uploadFileTask(MultipartFile file, Long courseFileId, Long courseId) {
        try {
            // 获取原始文件名
            String originalFilename = file.getOriginalFilename();

            // 获取文件扩展名
            String fileExtension = "";
            if (originalFilename != null && originalFilename.contains(".")) {
                fileExtension = originalFilename.substring(originalFilename.lastIndexOf("."));
            } else {
                return R.failure("文件解析扩展名出现错误：" + originalFilename);
            }

            // 生成新文件名，使用雪花算法生成的ID
            SnowflakeIdGenerator idGenerator = new SnowflakeIdGenerator(1, 1);
            Long id = idGenerator.nextId();
            String newFilename = id + fileExtension;

            // 获取对应的文件路径，用于关联和存储上传
            StudyCourseFile studyCourseFile = studyCourseFileMapper.selectById(courseFileId);
            if (studyCourseFile == null) {
                return R.failure("文件夹ID无效");
            }
            String filePath = studyCourseFile.getFilePath();

            // 构建并保存资源信息到数据库
            StudyFileSource courseSource = new StudyFileSource();
            courseSource.setFilesourceId(id);
            courseSource.setFileName(file.getOriginalFilename());
            courseSource.setFileId(courseFileId);
            courseSource.setCourseId(courseId);
            courseSource.setFilesourcePath(filePath + newFilename);
            studyFileSourceMapper.insert(courseSource);

            // 更新 Redis 状态为 "待处理"
//            redisTemplate.opsForHash().put("fileUploadStatus", courseSource.getFilesourceId().toString(), "待处理");

            // 将上传任务提交到线程池执行
            taskExecutor.execute(() -> processFileUpload(file, filePath, newFilename, courseSource));

            return R.ok("文件已加入队列，稍后处理: " + file.getOriginalFilename());
        } catch (Exception e) {
            return R.failure("文件上传失败: " + file.getOriginalFilename(),e.getMessage());
        }
    }
    // 处理文件上传
    private void processFileUpload(MultipartFile file, String filePath, String newFilename, StudyFileSource courseSource) {
        try {
            boolean uploadSuccess = ossService.uploadFile(file, filePath, newFilename);
            if (uploadSuccess) {
                courseSource.setOssWriteStatus(true);
                boolean updateSuccess =studyFileSourceMapper.updateById(courseSource)>0;
                if (!updateSuccess) {
                    log.error("课程:"+courseSource.getCourseId()+"中的文件上传成功，但更新数据库失败: " + file.getOriginalFilename());
                    //需要进行回退处理删除
                    ossService.deleteFile(newFilename,filePath);
                    log.info("课程:"+courseSource.getCourseId()+"中的文件上传成功，但更新数据库失败，已进行OSS删除回退处理: " + file.getOriginalFilename());
                }else {
                    log.info("课程:"+courseSource.getCourseId()+"中的文件上传成功: " + file.getOriginalFilename());
                }
                // 更新 Redis 状态为 "上传成功"
//                redisTemplate.opsForHash().put("fileUploadStatus", courseSource.getFilesourceId().toString(), "上传成功");
            } else {
                log.error("课程:"+courseSource.getCourseId()+"中的文件上传失败: " + file.getOriginalFilename());
                // 更新 Redis 状态为 "上传失败"
//                redisTemplate.opsForHash().put("fileUploadStatus", courseSource.getFilesourceId().toString(), "上传失败");
            }

        } catch (Exception e) {
            // 更新 Redis 状态为 "上传失败"
//            redisTemplate.opsForHash().put("fileUploadStatus", courseSource.getFilesourceId().toString(), "上传失败");
            log.error("课程:"+courseSource.getCourseId()+"中的文件上传中出现异常: " + file.getOriginalFilename(), e);
        }
    }



}

