package com.ruoyi.yey.FastDfs;/*
 *@title FastDfsServiceImpl
 *@description
 *@author 24844
 *@version 1.0
 *@create 2024/1/9 14:36
 */

import cn.hutool.core.lang.Console;
import cn.hutool.http.HttpUtil;
import com.ruoyi.common.core.web.domain.AjaxResult;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.time.Instant;
import java.util.HashMap;
import java.util.Map;
import java.util.StringJoiner;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;

@Service
public class FastDfsServiceImpl   {

    /** HTTP endpoints of the FastDFS REST gateway (go-fastdfs style). */
    private static final String UPLOAD_PATH = "http://localhost:9999/group1/upload";
    private static final String DELETE_PATH = "http://localhost:9999/group1/delete";
    private static final String INFO_PATH = "http://localhost:9999/group1/info";
    /** Local staging directory used by the breakpoint-resume copy. */
    private static final String FILE_PATH = "D:\\new\\";


    // Number of worker threads used for the breakpoint-resume transfer.
    private static final Integer THREAD_NUM = 2;



    /**
     * Uploads a multipart file: stages it on local disk, runs the
     * breakpoint-resume copy into {@link #FILE_PATH}, POSTs it to the
     * FastDFS gateway, then cleans up both local copies.
     *
     * @param file the uploaded multipart file
     * @return the gateway's JSON response wrapped in an AjaxResult, or
     *         {@code AjaxResult.error()} on any I/O failure
     */
    public AjaxResult Upload(MultipartFile file) {
        String result = "";

        try {

            File file1 = new File(file.getOriginalFilename());
            Path path = Paths.get(FILE_PATH + file.getOriginalFilename());

            // try-with-resources: the original code leaked this stream on every call.
            try (FileOutputStream fos = new FileOutputStream(file1)) {
                fos.write(file.getBytes());
            }

            this.breakpointResume(file1, FILE_PATH);

            Map<String, Object> params = new HashMap<>();
            params.put("file", file1);
            params.put("path", "test");
            params.put("output", "json");
            String resp = HttpUtil.post(UPLOAD_PATH, params);
            Console.log("resp: {}", resp);
            result = resp;

            // Remove both the staged copy and the temp file in the working
            // directory (the original only removed the staged copy).
            Files.deleteIfExists(path);
            Files.deleteIfExists(file1.toPath());

        } catch (IOException e) {
            e.printStackTrace();
            return AjaxResult.error();
        }

        return AjaxResult.success(result);
    }



    /**
     * Breakpoint-resume copy: splits the source file into {@link #THREAD_NUM}
     * ranges, copies each range on its own thread, and persists per-thread
     * offsets in a {@code .log} sidecar file so an interrupted copy can be
     * resumed from the recorded offsets.
     *
     * <p>NOTE(review): the {@code finally} block deletes the log file even
     * when the copy failed, which defeats resuming across process restarts —
     * preserved as-is because callers may rely on the cleanup.
     *
     * @param dataFile  source file to copy
     * @param targetStr destination directory; the file name is appended
     * @throws IOException if staging files cannot be created
     */
    public void breakpointResume(File dataFile, String targetStr) throws IOException {

        System.out.println("方法被调用");
        targetStr += dataFile.getName();
        if (!dataFile.exists()){
            dataFile.createNewFile();
        }

        long length = dataFile.length();
        // Per-thread chunk size, rounded UP. The original used
        // Math.ceil(length / THREAD_NUM), where the integer division truncates
        // BEFORE the ceil runs, so the round-up was a no-op and the file tail
        // could be mis-partitioned. Integer ceiling division fixes that.
        long part = (length + THREAD_NUM - 1) / THREAD_NUM;
        // Latch the main thread waits on until every worker finishes.
        CountDownLatch countDownLatch = new CountDownLatch(THREAD_NUM);
        Instant beginTime = Instant.now();
        // Sidecar log file recording each thread's absolute write offset.
        File logFile = new File(targetStr + ".log");
        String[] splitData = null; // non-null => resume from recorded offsets
        try {
            if (logFile.exists()) {
                // A log file exists: read the saved offsets and resume.
                try (BufferedReader reader = new BufferedReader(new FileReader(logFile))) {
                    String data = reader.readLine();
                    // Guard: an empty log file yields null and would NPE on split().
                    if (data != null && !data.isEmpty()) {
                        splitData = data.split(",");
                    }
                }
            } else {
                // No log file yet: start a fresh transfer.
                logFile.createNewFile();
            }
            Map<Integer, Long> maps = new ConcurrentHashMap<>();
            for (int i = 0; i < THREAD_NUM; i++) {
                final int k = i;
                System.out.println("线程正在执行任务：" + k);
                String[] finalData = splitData;
                String finalTargetStr = targetStr;
                new Thread(() -> {
                    // try-with-resources closes all three streams even when one
                    // constructor throws; the original finally-block only closed
                    // them when ALL THREE were non-null, leaking on partial init.
                    try (RandomAccessFile inFile = new RandomAccessFile(dataFile, "r");
                         RandomAccessFile outFile = new RandomAccessFile(finalTargetStr, "rw");
                         RandomAccessFile rafLog = new RandomAccessFile(logFile, "rw")) {
                        // Start offset: either this thread's slice boundary or the
                        // offset recorded in the log file when resuming.
                        long start = finalData == null ? k * part : Long.parseLong(finalData[k]);
                        inFile.seek(start);
                        outFile.seek(start);
                        byte[] bytes = new byte[1024 * 10]; // 10 KiB read buffer
                        long allLen = 0;
                        int len;
                        while ((len = inFile.read(bytes)) != -1) {
                            allLen += len;
                            // Record this thread's absolute offset for resume.
                            maps.put(k, start + allLen);
                            outFile.write(bytes, 0, len);
                            // Serialize all thread offsets as CSV and overwrite
                            // the log file in place.
                            StringJoiner stringJoiner = new StringJoiner(",");
                            maps.forEach((key, value) -> stringJoiner.add(String.valueOf(value)));
                            rafLog.seek(0);
                            rafLog.write(stringJoiner.toString().getBytes("UTF-8"));
                            // Stop once this thread has reached the start of the
                            // next thread's slice ((k + 1) * part).
                            if (start + allLen >= (k + 1) * part) {
                                break;
                            }
                        }
                    } catch (IOException e) {
                        e.printStackTrace();
                    } finally {
                        countDownLatch.countDown(); // always release the latch
                    }
                }).start();
            }

            // Block until every worker thread has counted down.
            countDownLatch.await();
            Instant endTime = Instant.now();
            System.out.println("总耗时：" + (Duration.between(beginTime, endTime).toMillis()) + "毫秒");
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of silently swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (logFile.exists()){
                logFile.delete();
            }
        }

    }

    /**
     * Deletes a file from the FastDFS gateway.
     *
     * @param file the remote path of the file to delete
     * @return the gateway's JSON response wrapped in an AjaxResult, or
     *         {@code AjaxResult.error()} on failure
     */
    public AjaxResult Delete(String file) {
        String result = "";
        try {


            Map<String, Object> params = new HashMap<>();
            // Bug fix: the original ignored the 'file' argument and always sent
            // the hard-coded test value "86501729", so only that one path could
            // ever be deleted.
            params.put("path", file);
            params.put("output", "json");
            String resp = HttpUtil.post(DELETE_PATH, params);
            Console.log("resp: {}", resp);
            result = resp;
        } catch (Exception e) {
            e.printStackTrace();
            return AjaxResult.error();
        }

        return AjaxResult.success(result);
    }


}
