package com.seaboxdata.dao;

import com.seaboxdata.entity.HiveDataInfo;
import com.seaboxdata.entity.TableConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.net.URI;
import java.util.List;
import java.util.Locale;

/**
 * DAO that reports HDFS storage usage for Hive table directories on a
 * Kerberos-secured cluster.
 *
 * <p>NOTE(review): a single {@link FileSystem} handle is cached in a mutable field
 * and closed/nulled after each public call, so this bean is NOT thread-safe —
 * concurrent callers could close each other's connection. Confirm single-threaded
 * usage or guard with synchronization.
 *
 * @create 2020-12-30 16:20
 **/
@Component
public class HdfsDao {

    /** HDFS namenode URI, e.g. {@code hdfs://host:8020}. */
    @Value("${hdfs.url}")
    private String hdfsUrl;

    /** Kerberos principal used for the keytab login. */
    @Value("${hdfs.principal}")
    private String principal;

    /** Path to krb5.conf (overridden with a fixed local path on Windows). */
    @Value("${krb5.path}")
    private String krb5Path;

    /** Path to the keytab file for {@link #principal}. */
    @Value("${hdfs.keytab.path}")
    private String keyTabPath;

    /** Hive database whose table directories are inspected. */
    @Value("${hive.database}")
    private String database;

    /** HDFS base path of the Hive warehouse; table dirs live at {@code base + db + ".db/" + table}. */
    @Value("${hive.base.path}")
    private String hiveBasePath;

    /** Lazily created HDFS client; closed and nulled after each public call so it is re-created. */
    private FileSystem fs;

    /**
     * Returns a Kerberos-authenticated {@link FileSystem}, creating and caching it
     * on first use.
     *
     * @return the file system, or {@code null} if the Kerberos login or connection failed
     */
    public FileSystem getFileSystem() {
        if (this.fs == null) {
            Configuration conf = new Configuration();
            // Local development on Windows uses fixed paths for the Kerberos config files.
            if (System.getProperty("os.name").toLowerCase(Locale.ROOT).startsWith("win")) {
                this.krb5Path = "F:/dfjx/keytab/krb5.conf";
                this.keyTabPath = "F:/dfjx/keytab/hdfs.headless.keytab";
            }
            System.setProperty("java.security.krb5.conf", krb5Path);
            System.setProperty("sun.security.krb5.debug", "false");
            conf.set("hadoop.security.authentication", "Kerberos");
            UserGroupInformation.setConfiguration(conf);
            conf.set("keytab.file", keyTabPath);
            conf.set("kerberos.principal", principal);
            try {
                UserGroupInformation.loginUserFromKeytab(principal, keyTabPath);
                this.fs = FileSystem.get(URI.create(hdfsUrl), conf);
            } catch (IOException e) {
                e.printStackTrace();
                return null;
            }
        }
        return this.fs;
    }

    /**
     * Returns the raw space consumed under {@code path} (bytes, including
     * replication, per {@code ContentSummary.getSpaceConsumed()}).
     *
     * @param path absolute HDFS path
     * @return the consumed space in bytes as a decimal string; {@code "0"} on failure
     */
    public String getTotalStorage(String path) {
        long spaceConsumed = 0;
        FileSystem fileSystem = getFileSystem();
        if (fileSystem != null) {
            try {
                spaceConsumed = fileSystem.getContentSummary(new Path(path)).getSpaceConsumed();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                // Always release the cached handle, even when the summary call fails;
                // the original leaked it (and kept a stale cache) on exception.
                closeQuietly(fileSystem);
            }
        }
        return Long.toString(spaceConsumed);
    }

    /**
     * Fills each {@link HiveDataInfo} with a human-readable storage size for the
     * corresponding {@link TableConfig}'s table directory. The two lists are
     * matched by index; entries beyond the shorter list are skipped (the original
     * would have thrown {@code IndexOutOfBoundsException}). On a per-table failure
     * the info's note is appended with an error marker and processing continues.
     *
     * @param tableConfigs  table configurations, index-aligned with {@code hiveDataInfos}
     * @param hiveDataInfos result holders to populate via {@code setStorageOccupied}
     */
    public void setDataInfos(List<TableConfig> tableConfigs, List<HiveDataInfo> hiveDataInfos) {
        FileSystem fileSystem = getFileSystem();
        if (fileSystem == null) {
            return;
        }
        int count = Math.min(tableConfigs.size(), hiveDataInfos.size());
        for (int i = 0; i < count; i++) {
            TableConfig config = tableConfigs.get(i);
            HiveDataInfo dataInfo = hiveDataInfos.get(i);
            try {
                long spaceConsumed = fileSystem
                        .getContentSummary(new Path(hiveBasePath + database + ".db/" + config.getTableName()))
                        .getSpaceConsumed();
                dataInfo.setStorageOccupied(formatSize(spaceConsumed));
            } catch (Exception e) {
                // Guard against a null note, which previously produced "null hdfs ...".
                String note = dataInfo.getNote();
                dataInfo.setNote((note == null ? "" : note) + " hdfs get storage failed");
                e.printStackTrace();
            }
        }
        closeQuietly(fileSystem);
    }

    /**
     * Formats a consumed-space byte count as a two-decimal K/M/G string
     * (HALF_UP rounding), preserving the original conversion exactly.
     */
    private static String formatSize(long spaceConsumed) {
        // 1024 * 3 (= 3072) divisor: possibly bytes->KB combined with an assumed
        // replication factor of 3, since getSpaceConsumed() includes replication.
        // NOTE(review): confirm this is intentional and not a typo for 1024.
        BigDecimal kbValue = BigDecimal.valueOf(spaceConsumed)
                .divide(new BigDecimal(1024 * 3), 2, RoundingMode.HALF_UP);
        if (kbValue.compareTo(new BigDecimal(1024)) <= 0) {
            return kbValue + "K";
        }
        BigDecimal mValue = kbValue.divide(new BigDecimal(1024), 2, RoundingMode.HALF_UP);
        if (mValue.compareTo(new BigDecimal(1024)) <= 0) {
            return mValue + "M";
        }
        return mValue.divide(new BigDecimal(1024), 2, RoundingMode.HALF_UP) + "G";
    }

    /** Closes the handle, logs any failure, and clears the cache so the next call reconnects. */
    private void closeQuietly(FileSystem fileSystem) {
        try {
            fileSystem.close();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            this.fs = null;
        }
    }
}
