package com.bishe.cyh.compont;

import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.bishe.cyh.config.DirectRabbitConfig;
import com.bishe.cyh.mapper.TaskMapper;
import com.bishe.cyh.mapper.UploadMapper;
import com.bishe.cyh.mode.Dataji;
import com.bishe.cyh.mode.SparkMode;
import com.bishe.cyh.mode.TaskSubmit;
import com.bishe.cyh.spark.SparkTask;
import com.bishe.cyh.utils.HDFSUtils;
import com.bishe.cyh.utils.ZipUtils;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.io.File;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * @Author: Poppin
 * @Date: 2022/1/12 12:52
 * @Version: 1.0
 */
/**
 * Asynchronous background workers for dataset lifecycle operations:
 * archive decompression plus HDFS upload, Spark task execution driven by a
 * RabbitMQ queue, and HDFS deletion. Each operation reports its outcome by
 * updating a {@code status} column on the corresponding database row
 * (1 = success, 2 = failed, 3 = delete failed — as used by the code below).
 */
@Service
public class Decompression {

    private static final Logger LOG = Logger.getLogger(Decompression.class.getName());

    private final UploadMapper uploadMapper;
    private final TaskMapper taskMapper;
    // NOTE(review): injected but not referenced by any method in this class —
    // confirm whether it is still needed before removing it and its ctor param.
    private final RabbitTemplate rabbitTemplate;

    @Autowired
    public Decompression(UploadMapper uploadMapper, TaskMapper taskMapper, RabbitTemplate rabbitTemplate) {
        this.rabbitTemplate = rabbitTemplate;
        this.uploadMapper = uploadMapper;
        this.taskMapper = taskMapper;
    }

    /**
     * Decompresses {@code srcPath} into {@code dest}, uploads the result to
     * HDFS, and marks the upload row with status 1 and the extracted size
     * (in MiB). On any failure the row is marked status 2 instead; the
     * exception is logged rather than rethrown, since this runs on an
     * {@code @Async} thread with no caller to propagate to.
     *
     * @param srcPath path of the archive to decompress
     * @param dest    local directory to extract into; also the source copied to HDFS
     * @param id      primary key ({@code id}) of the upload row to update
     * @throws IOException declared for signature compatibility; in practice all
     *                     exceptions are caught and reflected in the status column
     */
    @Async
    public void decompression(String srcPath, String dest, Integer id) throws IOException {
        try {
            ZipUtils.decompress(srcPath, dest);
            // Total extracted size, converted from bytes to MiB.
            Integer total = Math.toIntExact(ZipUtils.getTotalSizeOfFilesInDir(new File(dest)) / 1024 / 1024);
            HDFSUtils.CopyFromLocalFile(dest);
            UpdateWrapper<Dataji> updateWrapper = new UpdateWrapper<>();
            updateWrapper.eq("id", id)
                    .set("status", 1)
                    .set("fileSize", total);
            uploadMapper.update(null, updateWrapper);
        } catch (Exception e) {
            // Previously swallowed silently; log so failed uploads are diagnosable.
            LOG.log(Level.WARNING, "Decompression/upload failed for id=" + id, e);
            UpdateWrapper<Dataji> updateWrapper = new UpdateWrapper<>();
            updateWrapper.eq("id", id)
                    .set("status", 2);
            uploadMapper.update(null, updateWrapper);
        }
    }

    /**
     * RabbitMQ consumer: runs the Spark task described by the queued JSON
     * message and records the outcome on the matching task row
     * (status 1 = success, 2 = failed).
     *
     * @param json serialized {@link SparkMode} message taken from the queue
     */
    @RabbitListener(queues = {DirectRabbitConfig.Queueu})
    public void Spark(String json) {
        SparkMode sparkMode = JSONObject.parseObject(json, SparkMode.class);
        try {
            SparkTask.Task(sparkMode.getDatachoice(), sparkMode.getA(), sparkMode.getSnowid());
            UpdateWrapper<TaskSubmit> updateWrapper = new UpdateWrapper<>();
            updateWrapper.eq("jgid", sparkMode.getSnowid())
                    .set("status", 1);
            taskMapper.update(null, updateWrapper);
        } catch (Exception e) {
            // Previously swallowed silently; log so failed Spark runs are diagnosable.
            LOG.log(Level.WARNING, "Spark task failed for jgid=" + sparkMode.getSnowid(), e);
            UpdateWrapper<TaskSubmit> updateWrapper = new UpdateWrapper<>();
            updateWrapper.eq("jgid", sparkMode.getSnowid())
                    .set("status", 2);
            taskMapper.update(null, updateWrapper);
        }
    }

    /**
     * Deletes {@code srcPath} from HDFS and removes its upload row. If the
     * delete fails the row is kept and marked status 3 so the failure remains
     * visible.
     *
     * @param srcPath HDFS path, matched against the row's {@code newname} column
     */
    @Async
    public void Delete(String srcPath) {
        try {
            HDFSUtils.Delete(srcPath);
            // Was a raw QueryWrapper; parameterized for type safety, consistent
            // with the UpdateWrapper<Dataji> usage elsewhere in this class.
            QueryWrapper<Dataji> queryWrapper = new QueryWrapper<>();
            queryWrapper.eq("newname", srcPath);
            uploadMapper.delete(queryWrapper);
        } catch (Exception e) {
            // Previously swallowed silently; log so failed deletes are diagnosable.
            LOG.log(Level.WARNING, "HDFS delete failed for path=" + srcPath, e);
            UpdateWrapper<Dataji> updateWrapper = new UpdateWrapper<>();
            updateWrapper.eq("newname", srcPath)
                    .set("status", 3);
            uploadMapper.update(null, updateWrapper);
        }
    }
}
