package com.atguigu.dga.governance.assessor.quality;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.dga.constant.DgaConstant;
import com.atguigu.dga.governance.assessor.Assessor;
import com.atguigu.dga.governance.bean.AssessParam;
import com.atguigu.dga.governance.bean.GovernanceAssessDetail;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.math.BigDecimal;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.ParseException;
import java.time.temporal.ValueRange;
import java.util.Date;

/**
 * ClassName: TableProduceDataAssessor
 * Package: com.atguigu.dga.governance.assessor.quality
 * Description:
 *
 * @Author:
 * @Create: 2024/3/19 08:50
 * @Version: 1.0
 */
@Component("TABLE_PRODUCE_DATA")
public class TableProduceDataAssessor extends Assessor {

    // Base HDFS URI used to open the FileSystem, injected from application config.
    @Value("${hdfs.uris}")
    private String hdfsUris;

    /**
     * Assesses whether the data volume produced on the assessed day for a daily-partitioned
     * table deviates too far from the recent historical average.
     * <p>
     * The "current" partition is the day before the assessment date (dt=assessDate-1). Its
     * size is compared against the average size of the previous {@code days} partitions
     * (metric params). The score is set to zero when the current size exceeds
     * {@code avg * (100 + upper_limit)%} or falls below {@code avg * lower_limit%}.
     *
     * @param assessParam             table metadata, assessment date and metric parameters
     * @param governanceAssessDetail  result holder; score/problem/comment are written here
     * @throws ParseException       if the assessment date cannot be parsed as yyyy-MM-dd
     * @throws URISyntaxException   if the configured HDFS URI is malformed
     * @throws IOException          on HDFS access failure
     * @throws InterruptedException if FileSystem.get is interrupted
     */
    @Override
    public void checkProblem(AssessParam assessParam, GovernanceAssessDetail governanceAssessDetail) throws ParseException, URISyntaxException, IOException, InterruptedException {

        // 1. Only daily-partitioned (day-lifecycle) tables are assessed; skip everything else.
        if (!DgaConstant.LIFECYCLE_TYPE_DAY.equals(assessParam.getTableMetaInfo().getTableMetaInfoExtra().getLifecycleType())) {
            return;
        }

        // 2. Metric parameters: look-back window (days) and percentage thresholds.
        String metricParamsJson = assessParam.getGovernanceMetric().getMetricParamsJson();
        JSONObject paramJsonObj = JSON.parseObject(metricParamsJson);
        Integer paramDays = paramJsonObj.getInteger("days");
        Integer paramUpperLimit = paramJsonObj.getInteger("upper_limit");
        Integer paramLowerLimit = paramJsonObj.getInteger("lower_limit");

        // 3. The partition produced "today" is dated one day before the assessment date.
        Date assessDt = DateUtils.parseDate(assessParam.getAssessDate(), "yyyy-MM-dd");
        Date beforeOneDay = DateUtils.addDays(assessDt, -1);
        String beforeOneDayStr = DateFormatUtils.format(beforeOneDay, "yyyy-MM-dd");

        String tableFsPath = assessParam.getTableMetaInfo().getTableFsPath();
        // NOTE(review): dev-environment host rewrite (port-forwarded namenode);
        // presumably removed/externalized for production — confirm.
        tableFsPath = tableFsPath.replace("hadoop102:8020", "localhost:18020");
        String currTablePartitionPath = tableFsPath + "/dt=" + beforeOneDayStr;

        FileSystem fs = FileSystem.get(new URI(hdfsUris),
                new Configuration(), assessParam.getTableMetaInfo().getTableFsOwner());

        Long currPartitionTableSize = calcTablePartitionData(currTablePartitionPath, fs);
        // BUGFIX: calcTablePartitionData returns null when the partition path does not
        // exist; the original code auto-unboxed it later and threw NPE. Record the fact
        // and stop — no meaningful volume comparison is possible without today's data.
        if (currPartitionTableSize == null) {
            governanceAssessDetail.setAssessComment("当日分区不存在：" + currTablePartitionPath);
            return;
        }

        // 5. Accumulate sizes of the previous `days` partitions, counting only those
        //    that actually exist on HDFS.
        long beforeDaysTotalDataSize = 0L;
        long realBeforeDays = 0L;

        for (int i = 1; i <= paramDays; i++) {
            Date beforeNDay = DateUtils.addDays(beforeOneDay, -i);
            String beforeNDayStr = DateFormatUtils.format(beforeNDay, "yyyy-MM-dd");
            String beforeTablePartitionPath = tableFsPath + "/dt=" + beforeNDayStr;
            Long beforePartitionDataSize = calcTablePartitionData(beforeTablePartitionPath, fs);
            if (beforePartitionDataSize != null) {
                // Partition exists: add its size and count it toward the average.
                beforeDaysTotalDataSize += beforePartitionDataSize;
                realBeforeDays += 1;
            }
        }

        // 6. Compare against the historical average. The total>0 guard also guarantees
        //    realBeforeDays>0, so the division below cannot be by zero.
        if (beforeDaysTotalDataSize > 0L) {
            long beforeDaysAvgDataSize = beforeDaysTotalDataSize / realBeforeDays;

            // Over avg * (100 + upper_limit)% -> over-production problem, score zero.
            if (currPartitionTableSize > beforeDaysAvgDataSize * (paramUpperLimit + 100L) / 100) {
                governanceAssessDetail.setAssessScore(BigDecimal.ZERO);
                governanceAssessDetail.setAssessProblem("超过前" + paramDays + "天平均产出数据量的" + paramUpperLimit + "%");
            }
            // Under avg * lower_limit% -> under-production problem, score zero.
            if (currPartitionTableSize < beforeDaysAvgDataSize * paramLowerLimit / 100) {
                governanceAssessDetail.setAssessScore(BigDecimal.ZERO);
                governanceAssessDetail.setAssessProblem("低于前" + paramDays + "天平均产出数据量的" + paramLowerLimit + "%");
            }

            // Always record the raw numbers for the assessment report.
            governanceAssessDetail.setAssessComment("实际存在前" + realBeforeDays + "天,当日产出数据量：" +
                    currPartitionTableSize + ",平均产出数据量：" + beforeDaysAvgDataSize);
        }
    }

    /**
     * Returns the total byte size of all files under the given partition path
     * (recursing into sub-directories), or {@code null} when the path does not exist.
     */
    private Long calcTablePartitionData(String tablePartitionPath, FileSystem fs) throws IOException {
        // Rewrite the namenode host BEFORE the existence check so both operations hit
        // the same address (the original rewrote it only after the exists() call).
        tablePartitionPath = tablePartitionPath.replace("hadoop102:8020", "localhost:18020");
        if (!fs.exists(new Path(tablePartitionPath))) {
            // Missing partition is signalled as null so callers can distinguish
            // "no partition" from "empty partition" (0 bytes).
            return null;
        }
        FileStatus[] fileStatuses = fs.listStatus(new Path(tablePartitionPath));
        // Recursively sum file sizes under the partition directory.
        return sumPartitionDataSize(fileStatuses, fs, 0L);
    }

    /**
     * Recursive helper: folds the byte sizes of all files reachable from the given
     * statuses into {@code totalSize} and returns the accumulated total.
     */
    private Long sumPartitionDataSize(FileStatus[] fileStatuses, FileSystem fs, Long totalSize) throws IOException {
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isFile()) {
                totalSize += fileStatus.getLen();
            } else {
                // Directory: list its contents and descend.
                String subPathStr = fileStatus.getPath().toString().replace("hadoop102:8020", "localhost:18020");
                FileStatus[] subFileStatuses = fs.listStatus(new Path(subPathStr));
                // BUGFIX: accumulate and CONTINUE the loop. The original `return`ed here,
                // silently skipping every sibling entry listed after the first
                // sub-directory and under-counting the partition size.
                totalSize = sumPartitionDataSize(subFileStatuses, fs, totalSize);
            }
        }
        return totalSize;
    }
}
