package com.atguigu.dga.assessor.quality;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.dga.assessor.Assessor;
import com.atguigu.dga.governance.bean.AssessParam;
import com.atguigu.dga.governance.bean.GovernanceAssessDetail;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.math.BigDecimal;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Date;
import java.util.List;

@Component("PRODUCE_DATA_SIZE")
public class ProduceDataSizeAssessor extends Assessor {

    /** Base URI of the HDFS cluster (e.g. {@code hdfs://namenode:8020}), injected from config. */
    @Value("${hdfs.uri}")
    private String hdfsUri;

    /**
     * Flags tables whose newly produced partition is abnormally large or small compared
     * with the average daily volume of the preceding {@code days} partitions.
     * <p>
     * Metric parameters (from {@code metricParamsJson}): {@code days} — size of the
     * historical window; {@code upper_limit}/{@code lower_limit} — allowed deviation
     * above/below the average, in percent. Non-partitioned tables are skipped.
     * On violation the score is zeroed and a problem message is recorded on
     * {@code governanceAssessDetail}.
     *
     * @param governanceAssessDetail result holder; mutated only when a violation is found
     * @param assessParam            assessment context (metric params, table metadata, date)
     * @throws Exception on HDFS access failure, bad date format, or malformed metric params
     */
    @Override
    public void checkProblem(GovernanceAssessDetail governanceAssessDetail, AssessParam assessParam) throws Exception {
        // Metric parameters: window size and allowed deviation percentages.
        JSONObject params = JSONObject.parseObject(assessParam.getGovernanceMetric().getMetricParamsJson());
        int days = Integer.parseInt(params.getString("days"));
        int upperLimit = Integer.parseInt(params.getString("upper_limit"));
        int lowerLimit = Integer.parseInt(params.getString("lower_limit"));

        String tableFsPath = assessParam.getTableMetaInfo().getTableFsPath();
        String tableFsOwner = assessParam.getTableMetaInfo().getTableFsOwner();

        // Non-partitioned tables cannot be assessed by this metric -> skip.
        // The null/empty guard must run BEFORE parsing: parseArray(null) returns null
        // and the original code only avoided an NPE on size() by accident of ordering.
        String partitionColNameJson = assessParam.getTableMetaInfo().getPartitionColNameJson();
        if (partitionColNameJson == null || partitionColNameJson.trim().isEmpty()) {
            return;
        }
        List<JSONObject> partitionCols = JSONObject.parseArray(partitionColNameJson, JSONObject.class);
        if (partitionCols == null || partitionCols.isEmpty()) {
            return;
        }

        // First partition column is assumed to be the daily date partition — TODO confirm.
        String partitionColName = partitionCols.get(0).getString("name");

        // The run on assessment day D evaluates the partition holding data for D-1.
        Date assessDay = DateUtils.parseDate(assessParam.getAssessDate(), "yyyy-MM-dd");
        Date dataDay = DateUtils.addDays(assessDay, -1);

        // newInstance() (instead of the cached get()) gives us a private FileSystem we
        // may safely close without invalidating instances cached for other callers.
        // The original code opened one FileSystem per partition and never closed any.
        FileSystem fileSystem = FileSystem.newInstance(new URI(hdfsUri), new Configuration(), tableFsOwner);
        try {
            long currentSize =
                    partitionDataSize(fileSystem, partitionPath(tableFsPath, partitionColName, dataDay));

            long totalDataSize = 0L;
            long totalDays = 0L; // number of historical partitions that actually held data
            // BUG FIX: the original loop ran i = 1 .. days-1 and therefore averaged over
            // only days-1 historical partitions; i <= days covers the full window.
            for (int i = 1; i <= days; i++) {
                Date day = DateUtils.addDays(dataDay, -i);
                long size = partitionDataSize(fileSystem, partitionPath(tableFsPath, partitionColName, day));
                if (size > 0) { // an empty or missing partition does not count toward the average
                    totalDataSize += size;
                    totalDays++;
                }
            }

            // No history -> nothing to compare against; also guards the division below.
            if (totalDataSize > 0 && totalDays > 0) {
                long avgSize = totalDataSize / totalDays;
                // Deviation of the current partition from the historical average, in percent
                // (integer arithmetic, truncated toward zero — consistent with the original).
                long deviationPct = (currentSize - avgSize) * 100 / avgSize;
                if (deviationPct > upperLimit) {
                    governanceAssessDetail.setAssessScore(BigDecimal.ZERO);
                    governanceAssessDetail.setAssessProblem(
                            "当日产出的数据量,超出了前" + days + "天数据量的" + deviationPct + "%");
                } else if (deviationPct < -lowerLimit) {
                    governanceAssessDetail.setAssessScore(BigDecimal.ZERO);
                    governanceAssessDetail.setAssessProblem(
                            "当日产出的数据量,低于了前" + days + "天数据量的" + -deviationPct + "%");
                }
            }
        } finally {
            fileSystem.close(); // release the dedicated FileSystem opened above
        }
    }

    /** Builds the HDFS path of the daily partition, e.g. {@code {base}/dt=2023-06-14}. */
    private String partitionPath(String tableFsPath, String partitionColName, Date day) {
        return tableFsPath + "/" + partitionColName + "=" + DateFormatUtils.format(day, "yyyy-MM-dd");
    }

    /**
     * Total bytes stored under the given partition path, or 0 if the path does not exist.
     * <p>
     * Uses {@link FileSystem#getContentSummary}, which aggregates sizes server-side in a
     * single call, replacing the original client-side recursive listStatus traversal.
     *
     * @throws IOException on HDFS access failure
     */
    private Long partitionDataSize(FileSystem fileSystem, String partitionPath) throws IOException {
        Path path = new Path(partitionPath);
        if (!fileSystem.exists(path)) {
            return 0L;
        }
        return fileSystem.getContentSummary(path).getLength();
    }
}
