package com.atguigu.dga.assess.assess.quality;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.dga.assess.assess.AssessorTemplate;
import com.atguigu.dga.assess.bean.AssessParam;
import com.atguigu.dga.assess.bean.GovernanceAssessDetail;
import com.atguigu.dga.assess.bean.GovernanceMetric;
import com.atguigu.dga.assess.bean.PartitionSize;
import com.atguigu.dga.config.MetaConstant;
import com.atguigu.dga.meta.bean.TableMetaInfo;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.net.URI;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/*
仅按日分区的表参与本指标考评。

考评日期前一天（assessDate - 1）分区的产出数据量，与再往前 {days} 天的平均产出量比较：
若超过平均量的 (100 + {upper_limit})%，或低于平均量的 (100 - {lower_limit})%，则给 0 分；否则给满分 10 分。
 */
@Component("TABLE_PRODUCT_VOLUME_MONITOR")
public class TableDataVolumeCheck extends AssessorTemplate {

    /**
     * HDFS NameNode URI. Now configurable via the 'hdfs.uri' property;
     * the default keeps the previously hard-coded address so existing
     * deployments behave identically.
     */
    @Value("${hdfs.uri:hdfs://hadoop102:8020}")
    private String hdfsUri;

    /** HDFS user the client connects as. */
    @Value("${hdfs.admin}")
    private String hdfsAdmin;

    /**
     * Partition column name. The team's development standard mandates that
     * daily partitioned tables use 'dt' as the partition field, so this is
     * a plain constant rather than an injected literal @Value("dt").
     */
    private static final String PARTITION_FIELD = "dt";

    /**
     * Scores a daily partitioned table on the stability of its data volume.
     * <p>
     * The partition assessed on day D is D-1 (yesterday's output). Its size
     * is compared against the average size of the previous {@code days}
     * partitions; if it falls outside the
     * [avg * (100 - lower_limit)%, avg * (100 + upper_limit)%] band,
     * the table scores zero via {@link #assessZeroScore}.
     * Non-daily tables, tables missing yesterday's partition, and tables
     * with no historical partitions to compare against are skipped silently.
     *
     * @param param  assessment context (metric params JSON, assess date, table meta)
     * @param detail result holder; receives the comment and, on violation, a zero score
     * @throws Exception on HDFS access or date-parsing failure
     */
    @Override
    protected void assess(AssessParam param, GovernanceAssessDetail detail) throws Exception {
        // Parse the metric parameter JSON once instead of three times.
        GovernanceMetric metric = param.getMetric();
        JSONObject metricParams = JSON.parseObject(metric.getMetricParamsJson());
        Integer days = metricParams.getInteger("days");
        Integer upperLimit = metricParams.getInteger("upper_limit");
        Integer lowerLimit = metricParams.getInteger("lower_limit");
        String assessDate = param.getAssessDate();

        // Only daily partitioned tables are assessed by this metric.
        TableMetaInfo tableMetaInfo = param.getTableMetaInfo();
        String lifecycleType = tableMetaInfo.getTableMetaInfoExtra().getLifecycleType();
        if (!MetaConstant.LIFECYCLE_TYPE_DAY.equals(lifecycleType)) {
            return;
        }

        String tableFsPath = tableMetaInfo.getTableFsPath();
        // assessDate is ISO yyyy-MM-dd, so java.time handles it directly.
        String latestDt = LocalDate.parse(assessDate).minusDays(1).toString();

        // try-with-resources closes the FileSystem even on exception
        // (the original leaked it whenever assess threw before close()).
        try (FileSystem hdfs = FileSystem.get(new URI(hdfsUri), new Configuration(), hdfsAdmin)) {
            // Existing partitions, newest first: yesterday (if present),
            // then up to `days` older ones. Missing partitions are skipped.
            List<PartitionSize> stats = listPartitionFiles(hdfs, days, tableFsPath, assessDate);

            // Need yesterday's partition AND at least one historical partition.
            // `size() < 2` also fixes the divide-by-zero that the original
            // `size() < 1` guard allowed (division by stats.size() - 1 below),
            // and the dt check fixes treating an older partition as "current"
            // when yesterday's partition is missing.
            if (stats.size() < 2 || !latestDt.equals(stats.get(0).getDt())) {
                return;
            }

            long currentDataSize = stats.get(0).getSize();
            // Average over the historical partitions only (indexes 1..end).
            long sum = stats.stream()
                    .skip(1)
                    .mapToLong(PartitionSize::getSize)
                    .sum();
            BigDecimal avgDataSize = BigDecimal.valueOf(sum)
                    .divide(BigDecimal.valueOf(stats.size() - 1), 2, RoundingMode.HALF_UP);
            // Allowed band: avg * (100 - lowerLimit)% .. avg * (100 + upperLimit)%.
            BigDecimal upperSize = avgDataSize
                    .multiply(BigDecimal.valueOf(100L + upperLimit))
                    .movePointLeft(2);
            BigDecimal lowerSize = avgDataSize
                    .multiply(BigDecimal.valueOf(100L - lowerLimit))
                    .movePointLeft(2);

            // Fixed: the original hard-coded "3" here regardless of `days`.
            detail.setAssessComment(" 当天分区的数据量是: " + currentDataSize
                    + ",过去" + days + "天的平均数据量是:" + avgDataSize);

            BigDecimal current = BigDecimal.valueOf(currentDataSize);
            // compareTo only guarantees the sign of the result, so test
            // < 0 / > 0 rather than == -1 / == 1.
            if (current.compareTo(lowerSize) < 0 || current.compareTo(upperSize) > 0) {
                // %% emits a literal percent sign in the formatted message.
                String msgTemplate = "当前表当日的产出 %s,超过 过去 %s天 的上限%s%% 或 低于%s%% 的下限。";
                String msg = String.format(msgTemplate, currentDataSize, days, upperLimit, lowerLimit);
                assessZeroScore(detail, msg, param, false);
            }
        }
    }

    /**
     * Collects the sizes of the table's partitions for yesterday and the
     * {@code days} days before it (days + 1 candidates total). Partitions
     * whose directory does not exist are skipped, so the result may be
     * shorter; when present, yesterday's partition is always element 0.
     *
     * @param hdfs        open HDFS client
     * @param days        number of historical days to look back
     * @param tableFsPath table root directory on HDFS
     * @param assessDate  assessment date, format yyyy-MM-dd
     * @return one PartitionSize per existing partition, newest first
     */
    private List<PartitionSize> listPartitionFiles(FileSystem hdfs, Integer days, String tableFsPath, String assessDate) throws Exception {
        List<PartitionSize> stats = new ArrayList<>(days + 1);
        LocalDate assessDay = LocalDate.parse(assessDate);
        for (int i = 0; i <= days; i++) {
            // i = 0 -> yesterday; i = n -> n + 1 days before the assess date.
            String dateStr = assessDay.minusDays(1L + i).toString();
            // By convention the daily partition directory is "dt=yyyy-MM-dd".
            Path path = new Path(tableFsPath, PARTITION_FIELD + "=" + dateStr);
            if (hdfs.exists(path)) {
                long size = statPartitionDataSize(hdfs.listStatus(path), hdfs);
                stats.add(new PartitionSize(dateStr, size));
            }
        }
        return stats;
    }

    /**
     * Recursively sums the byte length of every regular file under the given
     * statuses, descending into subdirectories. Uses a primitive accumulator
     * to avoid the per-iteration boxing of the original {@code Long}.
     */
    private long statPartitionDataSize(FileStatus[] fileStatuses, FileSystem hdfs) throws Exception {
        long size = 0L;
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isFile()) {
                size += fileStatus.getLen();
            } else {
                size += statPartitionDataSize(hdfs.listStatus(fileStatus.getPath()), hdfs);
            }
        }
        return size;
    }
}
