package com.webank.wedatasphere.linkis.jobhistory.conf;

import com.webank.wedatasphere.linkis.common.conf.CommonVars;
import com.webank.wedatasphere.linkis.common.conf.CommonVars$;
import com.webank.wedatasphere.linkis.common.io.Fs;
import com.webank.wedatasphere.linkis.common.io.FsPath;
import com.webank.wedatasphere.linkis.jobhistory.dao.TaskMapper;
import com.webank.wedatasphere.linkis.jobhistory.entity.ClearCountInfo;
import com.webank.wedatasphere.linkis.jobhistory.entity.ClearPathInfo;
import com.webank.wedatasphere.linkis.storage.FSFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Scheduled housekeeping job that periodically purges expired job-history
 * logs and result files from HDFS and marks the matching task records deleted.
 *
 * @author zys
 * @date 2021/5/21 10:17
 */
@Configuration
public class LogClearConfigration {
    /** One day in milliseconds — interval between two cleanup runs. */
    private static final long PERIOD_DAY = 24L * 60 * 60 * 1000;
    /** Retention period in days for job logs/results (config key, default 30). */
    public static final CommonVars JSON_ROOT_PATH = CommonVars$.MODULE$.apply("wds.linkis.jobhistory.logs.keep.days",30);
    /** Date pattern used to build the cutoff date passed to the mappers. */
    public static final String DATE_FORMAT = "yyyy-MM-dd";
    private final Logger LOGGER = LoggerFactory.getLogger(getClass());
    @Autowired TaskMapper mapper;

    /**
     * Schedules the daily cleanup task: first run at the next midnight,
     * then once every 24 hours.
     *
     * @return the {@link Timer} driving the cleanup, exposed as a bean so the
     *         container keeps a reference to it for the application lifetime
     */
    @Bean
    public Timer clearLog() {
        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.DATE, 1);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        // SimpleDateFormat is not thread-safe, so keep it local instead of a shared field
        SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        LOGGER.info("首次清理时间为：{}", timestampFormat.format(calendar.getTime()));
        Timer timer = new Timer("linkis-jobhistory-log-clear");
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                cleanLogs();
            }
        }, calendar.getTime(), PERIOD_DAY);
        return timer;
    }

    /**
     * Purges logs and result files of tasks older than the configured
     * retention period, then marks the corresponding records as deleted.
     * Only tasks in a terminal state (Succeed/Cancelled/Failed) are touched.
     * When every task of a given day is deletable the whole day folder is
     * removed; otherwise only the individual files are removed.
     */
    private void cleanLogs() {
        SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        LOGGER.info("{}:开始清理日志-----", timestampFormat.format(new Date()));
        Fs fs = FSFactory.getFs("hdfs");
        List<String> type = Arrays.asList("Succeed", "Cancelled", "Failed");
        SimpleDateFormat dayFormat = new SimpleDateFormat(DATE_FORMAT);
        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.DATE, -Integer.parseInt(JSON_ROOT_PATH.getValue().toString()));
        String timeBefore = dayFormat.format(calendar.getTime());
        LOGGER.info("清理的日志日期为:{}以前", timeBefore);
        // total task count per day vs. count of terminal-state tasks per day
        List<ClearCountInfo> listCountAll = mapper.selectLogAllPathCount(timeBefore);
        List<ClearCountInfo> listCountDelete = mapper.selectLogPathCount(timeBefore, type);
        Map<String, Long> countAllMap = listCountAll.stream()
                .collect(Collectors.toMap(ClearCountInfo::getUpdatedTime, ClearCountInfo::getCount));
        try {
            fs.init(new HashMap<>());
            for (ClearCountInfo info : listCountDelete) {
                List<ClearPathInfo> clearPathInfos = mapper.selectLogPath(info.getUpdatedTime(), type);
                List<Long> ids;
                if (info.getCount().equals(countAllMap.getOrDefault(info.getUpdatedTime(), -1L))) {
                    // deletable count equals total count: the whole folders can go
                    ids = deleteFolders(fs, clearPathInfos);
                } else {
                    // only part of the day's tasks are deletable: remove individual files
                    ids = deleteFiles(fs, clearPathInfos);
                }
                // NOTE(review): original code passes null for an empty id list; contract
                // kept as-is — verify how TaskMapper.updateDelete handles null
                mapper.updateDelete(ids.isEmpty() ? null : ids);
            }
            for (ClearCountInfo clearInfo : listCountDelete) {
                LOGGER.info("清理完成，日期：{}---结果集数量：{}", clearInfo.getUpdatedTime(), clearInfo.getCount());
            }
        } catch (IOException e) {
            // keep the cause in the application log instead of printStackTrace()
            LOGGER.error("Failed to clean job history logs", e);
        } finally {
            if (fs != null) {
                try {
                    fs.close();
                } catch (IOException e) {
                    LOGGER.error("Failed to close fs after log cleanup", e);
                }
            }
        }
    }

    /**
     * Deletes the enclosing log directory and result directory of every task,
     * skipping directories already removed within this batch.
     *
     * @return ids of all processed task records
     */
    private List<Long> deleteFolders(Fs fs, List<ClearPathInfo> clearPathInfos) throws IOException {
        Set<String> deletedFolders = new HashSet<>();
        List<Long> ids = new ArrayList<>();
        for (ClearPathInfo pathInfo : clearPathInfos) {
            // strip the trailing file segment to obtain the enclosing directory
            // NOTE(review): assumes getLogPath()/getResultLocation() are non-null — confirm upstream
            String folderLog = pathInfo.getLogPath().replaceAll("/[\\d]*\\.log$", "");
            String folderResult = pathInfo.getResultLocation().replaceAll("/[a-zA-Z]*/[\\d]*$", "");
            ids.add(pathInfo.getId());
            deleteIfExists(fs, folderLog, deletedFolders);
            deleteIfExists(fs, folderResult, deletedFolders);
        }
        return ids;
    }

    /**
     * Deletes the individual log file and result file of every task.
     *
     * @return ids of all processed task records
     */
    private List<Long> deleteFiles(Fs fs, List<ClearPathInfo> clearPathInfos) throws IOException {
        List<Long> ids = new ArrayList<>();
        for (ClearPathInfo pathInfo : clearPathInfos) {
            FsPath logPath = new FsPath(pathInfo.getLogPath());
            FsPath resultPath = new FsPath(pathInfo.getResultLocation());
            if (fs.exists(logPath)) {
                fs.delete(logPath);
            }
            if (fs.exists(resultPath)) {
                fs.delete(resultPath);
            }
            ids.add(pathInfo.getId());
        }
        return ids;
    }

    /**
     * Deletes {@code folder} if it exists and was not already removed in this
     * batch; records successful deletions in {@code deletedFolders}.
     */
    private void deleteIfExists(Fs fs, String folder, Set<String> deletedFolders) throws IOException {
        if (deletedFolders.contains(folder)) {
            return;
        }
        FsPath path = new FsPath(folder);
        if (fs.exists(path)) {
            fs.delete(path);
            deletedFolders.add(folder);
        }
    }

}
