package com.webank.wedatasphere.linkis.metadata.service.impl;

import com.google.common.collect.Lists;
import com.webank.wedatasphere.linkis.common.utils.ByteTimeUtils;
import com.webank.wedatasphere.linkis.hadoop.common.utils.HDFSUtils;
import com.webank.wedatasphere.linkis.metadata.domain.mdq.vo.MdqTablePartitionStatisticInfoVO;
import com.webank.wedatasphere.linkis.metadata.hive.config.DSEnum;
import com.webank.wedatasphere.linkis.metadata.hive.config.DataSource;
import com.webank.wedatasphere.linkis.metadata.hive.dao.HiveMetaDao;
import com.webank.wedatasphere.linkis.metadata.service.DataSourceTypeService;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.joda.time.DateTime;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Hive-backed implementation of {@link DataSourceTypeService}.
 *
 * <p>Table, column and partition metadata are read through {@link HiveMetaDao}
 * (bound to the first configured data source), while size / file-count
 * statistics are gathered directly from the table's HDFS location.
 *
 * @author zys
 * @date 2021/3/3 13:59
 */
@Service
public class DataSourceHiveServiceImpl implements DataSourceTypeService {

    /** Marker row emitted by Hive's DESC FORMATTED output just before the partition columns. */
    private static final String PARTITION_INFO_MARKER = "# Partition Information";

    /**
     * Format Hive uses for "CreateTime:" / "LastAccessTime:" values in DESC FORMATTED
     * output (java.util.Date#toString()), e.g. "Mon Mar 01 13:59:00 CST 2021".
     */
    private static final String HIVE_TIME_PATTERN = "EEE MMM dd HH:mm:ss zzz yyyy";

    /** Lazily created, shared HDFS client; guarded by the class lock in {@link #getRootHdfs()}. */
    private static volatile FileSystem rootHdfs = null;

    @Autowired
    private HiveMetaDao hiveMetaDao;

    /**
     * Lists the tables of {@code database} visible to the user described by {@code map},
     * normalising each DAO row to a single-entry map keyed by "NAME".
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public List<Map<String, Object>> getTablesByDbNameAndUser(String database, Map<String, String> map) {
        hiveMetaDao.useDB(database);
        List<Map<String, Object>> showTables = Lists.newArrayList();
        for (Map<String, Object> row : hiveMetaDao.getTablesByDbNameAndUser(map)) {
            Map<String, Object> item = new HashMap<>();
            item.put("NAME", row.get("tab_name"));
            showTables.add(item);
        }
        return showTables;
    }

    /** Returns the databases the given user may access. */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public List<String> getDbsByUser(String userName) {
        return hiveMetaDao.getDbsByUser(userName);
    }

    /**
     * Returns the column rows of a table's DESC output, tagging partition
     * columns with {@code partitioned=true}.
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public List<Map<String, Object>> getColumns(String database, Map<String, String> param) {
        hiveMetaDao.useDB(database);
        List<Map<String, Object>> columns = new ArrayList<>();
        List<Map<String, Object>> tableDesc = hiveMetaDao.getColumns(param);
        // First pass: collect the partition column names that appear after the marker row.
        Map<String, Boolean> keys = new HashMap<>();
        for (int i = 0; i < tableDesc.size(); i++) {
            if (PARTITION_INFO_MARKER.equals(tableDesc.get(i).get("col_name").toString())) {
                // Skip the marker, the "# col_name ..." header and the blank separator row.
                i += 3;
                while (i < tableDesc.size()) {
                    keys.put(tableDesc.get(i).get("col_name").toString(), true);
                    i += 1;
                }
            }
        }
        // Second pass: the regular column rows run up to the first blank col_name.
        for (int i = 0; i < tableDesc.size(); i++) {
            String colName = tableDesc.get(i).get("col_name").toString();
            if (StringUtils.isEmpty(colName)) {
                break;
            }
            if (keys.containsKey(colName)) {
                tableDesc.get(i).put("partitioned", true);
            }
            columns.add(tableDesc.get(i));
        }
        return columns;
    }

    /**
     * Extracts the partition-column rows from a table's DESC FORMATTED output.
     * Rows following the "# Partition Information" marker (after skipping the
     * header and blank separator rows) are collected until a blank col_name
     * row or the end of the output is reached.
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public List<Map<String, Object>> getPartitionKeys(Map<String, String> param) {
        List<Map<String, Object>> partitionKeys = new ArrayList<>();
        List<Map<String, Object>> tableDetail = hiveMetaDao.getPartitionKeys(param);
        for (int i = 0; i < tableDetail.size(); i++) {
            if (PARTITION_INFO_MARKER.equals(tableDetail.get(i).get("col_name"))) {
                // Skip the marker, the "# col_name ..." header and the blank separator row.
                i += 3;
                // Bounds check first: the original code indexed past the end of the
                // list when the partition section was the last section of the output.
                while (i < tableDetail.size()
                        && StringUtils.isNotEmpty(tableDetail.get(i).get("col_name").toString())) {
                    partitionKeys.add(tableDetail.get(i));
                    i += 1;
                }
                break;
            }
        }
        return partitionKeys;
    }

    /**
     * Returns the partition values of the table identified by {@code param}
     * ("dbName" selects the database). A table without partitions yields an
     * empty list; the DAO failure is deliberately swallowed (best effort).
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public List<String> getPartitions(Map<String, String> param) {
        hiveMetaDao.useDB(param.get("dbName"));
        List<String> partitions = new ArrayList<>();
        try {
            partitions = hiveMetaDao.getPartitions(param);
        } catch (Exception e) {
            // Best-effort: non-partitioned tables fall through to an empty result.
            // TODO(review): replace with an SLF4J logger once one is available here.
            System.out.println("没有分组" + e.getMessage());
            e.printStackTrace();
        }
        return partitions;
    }

    /**
     * Finds the "Location" row of the table's DESC FORMATTED output and
     * returns its value, or the empty string when no such row exists.
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public String getLocationByDbAndTable(Map<String, String> param) {
        hiveMetaDao.useDB(param.get("dbName"));
        for (Map<String, Object> row : hiveMetaDao.getPartitionKeys(param)) {
            if (row.get("col_name").toString().startsWith("Location")) {
                return row.get("data_type").toString();
            }
        }
        return "";
    }

    /**
     * Builds a summary map (OWNER, CREATE_TIME, LAST_ACCESS_TIME, tab_name)
     * for the table identified by {@code param}. Times that cannot be parsed
     * keep the "UNKNOWN" default.
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public Map<String, Object> getTableDesc(Map<String, String> param) {
        hiveMetaDao.useDB(param.get("dbName"));
        Map<String, Object> table = new HashMap<>();
        table.put("CREATE_TIME", "UNKNOWN");
        table.put("LAST_ACCESS_TIME", "UNKNOWN");
        table.put("OWNER", "");
        table.put("tab_name", param.get("tableName"));
        for (Map<String, Object> row : hiveMetaDao.getPartitionKeys(param)) {
            String item = row.get("col_name").toString();
            if (item.startsWith("Owner")) {
                table.put("OWNER", row.get("data_type"));
            } else if (item.startsWith("CreateTime:")) {
                String createTime = formatHiveTime(row.get("data_type").toString());
                if (createTime != null) {
                    table.put("CREATE_TIME", createTime);
                }
            } else if (item.startsWith("LastAccessTime")) {
                String lastAccessTime = formatHiveTime(row.get("data_type").toString());
                if (lastAccessTime != null) {
                    table.put("LAST_ACCESS_TIME", lastAccessTime);
                }
            }
        }
        return table;
    }

    /**
     * Parses a Hive DESC time value (see {@link #HIVE_TIME_PATTERN}) and
     * re-formats it as "yyyy-MM-dd HH:mm:ss"; returns {@code null} when the
     * value cannot be parsed. Replaces the deprecated {@code new Date(String)}
     * constructor with an explicit, Locale.US-pinned parser.
     */
    private String formatHiveTime(String raw) {
        try {
            Date parsed = new SimpleDateFormat(HIVE_TIME_PATTERN, Locale.US).parse(raw);
            return new DateTime(parsed).toString("yyyy-MM-dd HH:mm:ss");
        } catch (Exception e) {
            // Keep the original best-effort behavior: report and let the caller fall back.
            System.out.println("转换时间出错");
            return null;
        }
    }

    /**
     * Returns the human-readable total size of the data under the table's
     * "location" (e.g. "1.2 GB"), or "0B" when no location is given.
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public String getTableSize(Map<String, String> param) throws IOException {
        String location = param.get("location");
        if (StringUtils.isBlank(location)) {
            return "0B";
        }
        FileSystem fs = getRootHdfs();
        Path path = fs.getFileStatus(new Path(location)).getPath();
        return ByteTimeUtils.bytesToString(fs.getContentSummary(path).getLength());
    }

    /**
     * Returns the number of files under the table's "location", or 0 when no
     * location is given.
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public int getTableFileNum(Map<String, String> param) throws IOException {
        String location = param.get("location");
        if (StringUtils.isBlank(location)) {
            return 0;
        }
        FileSystem fs = getRootHdfs();
        Path path = fs.getFileStatus(new Path(location)).getPath();
        return (int) fs.getContentSummary(path).getFileCount();
    }

    /**
     * Returns the number of direct children of the table's "location"
     * directory (one per top-level partition), or 0 when no location is given.
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public int getPartitionsNum(Map<String, String> param) throws IOException {
        String location = param.get("location");
        if (StringUtils.isBlank(location)) {
            return 0;
        }
        FileSystem fs = getRootHdfs();
        Path path = fs.getFileStatus(new Path(location)).getPath();
        return fs.listStatus(path).length;
    }

    /**
     * Builds one partition-statistics VO per direct child of
     * {@code tableLocation}, each populated recursively from HDFS.
     *
     * <p>Note: {@code partitions} is sorted in descending order in place
     * (a side effect visible to the caller, preserved from the original code).
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public List<MdqTablePartitionStatisticInfoVO> getMdqTablePartitionStatisticInfoVO(
            Map<String, String> map, List<String> partitions, String tableLocation) throws IOException {
        partitions.sort(Comparator.reverseOrder());
        List<MdqTablePartitionStatisticInfoVO> list = new ArrayList<>();
        FileSystem fs = getRootHdfs();
        Path tablePath = fs.getFileStatus(new Path(tableLocation)).getPath();
        for (FileStatus child : fs.listStatus(tablePath)) {
            list.add(create(child.getPath().toString()));
        }
        return list;
    }

    /** Drops the table identified by {@code param}; returns the DAO's affected-row count. */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public int dropTable(Map<String, String> param) {
        return hiveMetaDao.dropTable(param);
    }

    /**
     * Recursively builds a statistics VO (name, file count, size, modification
     * time, children) for the HDFS directory at {@code path}.
     */
    private MdqTablePartitionStatisticInfoVO create(String path) throws IOException {
        Map<String, String> locationParam = new HashMap<>();
        locationParam.put("location", path);
        MdqTablePartitionStatisticInfoVO vo = new MdqTablePartitionStatisticInfoVO();
        vo.setName(new Path(path).getName());
        vo.setFileNum(getTableFileNum(locationParam));
        vo.setPartitionSize(getTableSize(locationParam));
        vo.setModificationTime(getTableModificationTime(path));
        FileSystem fs = getRootHdfs();
        Path dirPath = fs.getFileStatus(new Path(path)).getPath();
        // Only sub-directories represent nested partitions; plain files are ignored.
        List<FileStatus> subDirs = Arrays.stream(fs.listStatus(dirPath))
                .filter(FileStatus::isDirectory)
                .collect(Collectors.toList());
        for (FileStatus subDir : subDirs) {
            vo.getChildrens().add(create(subDir.getPath().toString()));
        }
        return vo;
    }

    /**
     * Returns the HDFS modification time of {@code tableLocation}, or
     * {@code null} when the location is blank.
     */
    private Date getTableModificationTime(String tableLocation) throws IOException {
        if (StringUtils.isNotBlank(tableLocation)) {
            FileStatus tableFile = getRootHdfs().getFileStatus(new Path(tableLocation));
            return new Date(tableFile.getModificationTime());
        }
        return null;
    }

    /**
     * Lazily initialises the shared HDFS client with double-checked locking.
     *
     * <p>Fix: the field is static, so the lock must be the class monitor —
     * the original synchronized on {@code this}, which does not exclude a
     * second service instance racing the initialisation.
     */
    private FileSystem getRootHdfs() {
        FileSystem fs = rootHdfs;
        if (fs == null) {
            synchronized (DataSourceHiveServiceImpl.class) {
                fs = rootHdfs;
                if (fs == null) {
                    fs = HDFSUtils.getHDFSFileSystem();
                    rootHdfs = fs;
                }
            }
        }
        return fs;
    }
}
