package com.xiangxiao.rpan.storage.local;
import cn.hutool.core.io.FileUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ttl.TransmittableThreadLocal;
import com.google.common.collect.Lists;
import com.xiangxiao.rpan.storage.constant.GlobalConsts;
import com.xiangxiao.rpan.storage.core.*;
import com.xiangxiao.rpan.storage.exception.StorageExcetion;
import com.xiangxiao.rpan.storage.factory.ApplicationContextFactory;
import com.xiangxiao.rpan.storage.threadpool.AsyncTaskWithThreadPool;
import com.xiangxiao.rpan.storage.utils.FileTransfer;
import com.xiangxiao.rpan.utils.DateUtil;
import com.xiangxiao.rpan.utils.UUIDUtil;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

/**
 * @author xiangxiao
 * @email 573768011@qq.com
 * @date 2023/8/18 15:34
 * 存储方式一--本机服务器直接存储 (storage strategy 1: store directly on the local server)
 */
@Component(value = "localStorageProcessor")
@ConditionalOnProperty(prefix = "rpan.storage.processor", name = "type", havingValue = "com.xiangxiao.rpan.storage.local.LocalStorageProcessor")
public class LocalStorageProcessor implements StorageProcessor {

  /** Separator used inside generated chunk file names: {uuid}__,__{chunkNumber}. */
  private static final String COMMON_SEPARATOR = "__,__";

  @Autowired
  @Qualifier(value = "localStorageConfig")
  private LocalStorageConfig localStorageConfig;

  @Autowired
  @Qualifier(value = "cacheManager")
  private CacheManager cacheManager;

  // "transfer" selects the cache-based stream copy; any other value copies directly.
  @Value("${rpan.storage.method.type}")
  private String storageMethod;

  @Resource(name = "asyncTaskExecutor")
  private ThreadPoolTaskExecutor asyncTaskExecutor;

  @Autowired
  @Qualifier(value = "transferByCacheManager")
  private TransferByCacheManager transferByCacheManager;

  @Autowired
  @Qualifier(value = "transferByDirectManager")
  private TransferByDirectManager transferByDirectManager;

  // identifier -> temp-file paths of the chunks received so far for that upload.
  // This is a singleton Spring bean hit by concurrent upload requests, so the map
  // must be thread-safe (the original plain HashMap could be corrupted under load).
  private final Map<String, List<String>> storeChunkPathsMap =
      Collections.synchronizedMap(new HashMap<>());

  /**
   * Joins a root prefix and a suffix with the platform file separator.
   *
   * @param filePrefix root directory
   * @param suffix     file name (or relative path) under the root
   * @return {@code filePrefix + File.separator + suffix}
   */
  public String generateFilePath(String filePrefix, String suffix) {
    return filePrefix + File.separator + suffix;
  }

  /**
   * Builds the target path of a single chunk file:
   * {@code {chunksFolder}/{identifier}/{uuid}__,__{chunkNumber}}.
   *
   * @param chunksFolder root folder that holds all chunk directories
   * @param identifier   upload identifier (typically the file's MD5)
   * @param chunkNumber  1-based index of this chunk
   * @return absolute path for the chunk file
   */
  public String generateChunkFilePath(String chunksFolder, String identifier, int chunkNumber) {
    String lastDir = UUIDUtil.getUUID() + COMMON_SEPARATOR + chunkNumber;
    return chunksFolder + File.separator + identifier + File.separator + lastDir;
  }

  /**
   * Stores a complete (non-chunked) file and returns its target path.
   *
   * <p>NOTE(review): the actual copy was disabled (commented out) in the original
   * source, so today this method only computes and returns the destination path.
   * Re-enable the transferByCacheManager/transferByDirectManager copy once the
   * single-file upload flow is finalized.
   *
   * @param file   uploaded file (currently unused — see note above)
   * @param suffix file name under the configured root path
   * @return the destination path the file would be stored at
   */
  @Override
  public String store(MultipartFile file, String suffix) throws IOException {
    return this.generateFilePath(localStorageConfig.getRootFilePath(), suffix);
  }

  /**
   * Computes the byte size of one chunk and delegates conversion of that chunk
   * into a temporary file, returning the temp file's path.
   *
   * <p>NOTE(review): the last-chunk size is derived from {@code file.getSize()};
   * this assumes the size reported here relates to the whole upload — confirm
   * against the caller's contract.
   */
  private String convertSingleChunkFile(MultipartFile file, Integer chunkNumber, Integer totalChunks) throws IOException {
    long blockSize;
    if (chunkNumber < totalChunks) {
      // every chunk except the last has the fixed split size
      blockSize = GlobalConsts.FILE_SPLIT_SIZE;
    } else {
      // last chunk: whatever remains after the preceding fixed-size chunks
      blockSize = file.getSize() - GlobalConsts.FILE_SPLIT_SIZE * (chunkNumber - 1);
    }
    return transferByCacheManager.convertMuitlFileToStream(file, chunkNumber, totalChunks, blockSize);
  }

  /**
   * Stores one chunk of a large, chunk-uploaded file.
   *
   * <p>The chunk is converted to a temp file on the async executor, copied into the
   * chunk directory, and its temp path is recorded (in memory and in the cache under
   * {@code FILE_DATA_KEY:identifier}) so {@link #mergeChunks} can assemble the file.
   *
   * @return the chunk file's path
   * @throws StorageExcetion if the conversion or copy fails
   */
  @Override
  public String storeWithChunk(MultipartFile file, String identifier, Integer totalChunks, Integer chunkNumber,
                               Long totalSize, Long chunkSize, String redisFileChunkKey) throws IOException {
    String cacheFileDateKey = GlobalConsts.FILE_DATA_KEY + ":" + identifier;
    String chunkFilePath = this.generateChunkFilePath(localStorageConfig.getChunksPath(), identifier, chunkNumber);
    if (chunkNumber == 1) {
      // first chunk of a new upload: reset any stale paths for this identifier
      storeChunkPathsMap.put(identifier, new ArrayList<>());
    }
    // computeIfAbsent avoids the NPE the original get() allowed when a chunk with
    // chunkNumber > 1 arrives and no list was registered (e.g. after a restart)
    List<String> storeChunkPaths = storeChunkPathsMap.computeIfAbsent(identifier, k -> new ArrayList<>());
    // convert the chunk to a temp file off-thread while we prepare the target file
    Future<String> future = asyncTaskExecutor.submit(() -> convertSingleChunkFile(file, chunkNumber, totalChunks));

    File chunkFile = new File(chunkFilePath);
    if (!chunkFile.getParentFile().exists()) {
      chunkFile.getParentFile().mkdirs();
    }
    chunkFile.createNewFile();

    try {
      String tempPath = future.get(); // single blocking call (original called get() twice)
      File srcFile = new File(tempPath);

      // try-with-resources: the original leaked both streams on any exception
      try (FileInputStream inputStream = new FileInputStream(srcFile);
           FileOutputStream outputStream = new FileOutputStream(chunkFile, true)) {
        if (!StringUtils.isBlank(this.storageMethod) && this.storageMethod.equals("transfer")) {
          transferByCacheManager.writeFileToStream(inputStream, outputStream);
        } else {
          transferByDirectManager.copyFileToStorage(srcFile, chunkFile);
        }
      }
      storeChunkPaths.add(tempPath);
      // delete removes the file immediately (unlike deleteOnExit, which waits for
      // JVM shutdown); the chunk's bytes live on in the recorded temp file
      chunkFile.delete();

      storeChunkPathsMap.put(identifier, storeChunkPaths);
      cacheManager.put(cacheFileDateKey, JSON.toJSONString(storeChunkPaths), GlobalConsts.ONE_DAY_LONG);
      return chunkFilePath;
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // restore interrupt status before rethrowing
      throw new StorageExcetion("文件分片上传失败");
    } catch (Exception e) {
      e.printStackTrace();
      throw new StorageExcetion("文件分片上传失败");
    }
  }

  /**
   * Merges all previously-stored chunks of an upload into the final file.
   *
   * <p>The chunk paths are read from the cache under {@code FILE_DATA_KEY:md5} and
   * concatenated in list order; each chunk temp file is deleted after being copied.
   *
   * @param md5        upload identifier used as the cache key suffix
   * @param chunkCount total number of chunks (currently unused; the cached list drives the merge)
   * @param suffix     final file name under the configured root path
   * @return the merged file's path
   * @throws StorageExcetion if the merge fails
   */
  @Override
  public String mergeChunks(String md5, int chunkCount, String suffix) throws IOException {
    String uploadPath = this.generateFilePath(localStorageConfig.getRootFilePath(), suffix);
    File destFile = new File(uploadPath);
    if (!destFile.getParentFile().exists()) {
      destFile.getParentFile().mkdirs();
    }
    destFile.createNewFile();

    String cacheFileDateKey = GlobalConsts.FILE_DATA_KEY + ":" + md5;
    // try-with-resources: the original leaked the output stream when a copy failed
    try (OutputStream out = new BufferedOutputStream(new FileOutputStream(destFile), 1024)) {
      byte[] buffer = new byte[1024];
      Object cached = cacheManager.get(cacheFileDateKey); // read once (original read twice)
      if (cached != null) {
        List<String> chunkPaths = JSONArray.parseArray(cached.toString(), String.class);
        for (String chunkPath : chunkPaths) {
          File chunkFile = new File(chunkPath);
          try (InputStream in = new BufferedInputStream(new FileInputStream(chunkFile), 1024)) {
            int len;
            while ((len = in.read(buffer)) > 0) {
              out.write(buffer, 0, len);
            }
          }
          // the chunk's bytes are now in the merged file; drop the temp file
          chunkFile.delete();
        }
      }
      cacheManager.delete(cacheFileDateKey);
      return uploadPath;
    } catch (Exception e) {
      e.printStackTrace();
      throw new StorageExcetion("文件合并失败");
    }
  }

  /** Not implemented for local storage; always returns {@code null}. */
  @Override
  public String download(String absoluteUrl, HttpServletResponse response) throws IOException {
    return null;
  }

  /** Not implemented for local storage; always returns {@code null}. */
  @Override
  public String downloadWithChunk(byte[] bytes, HttpServletResponse response) throws IOException {
    return null;
  }
}