package cn.getech.data.development.service.impl;

import cn.getech.data.development.config.HiveMetaDataConfig;
import cn.getech.data.development.config.properties.BdpJobConfig;
import cn.getech.data.development.config.properties.HiveConfig;
import cn.getech.data.development.constant.Constant;
import cn.getech.data.development.constant.DBTypeEnum;
import cn.getech.data.development.entity.TableFieldInfo;
import cn.getech.data.development.entity.TableInfo;
import cn.getech.data.development.entity.TableStatistics;
import cn.getech.data.development.entity.jobEntity.ScheduleJobLogEntity;
import cn.getech.data.development.mapper.ScheduleJobLogMapper;
import cn.getech.data.development.mapper.TableInfoMapper;
import cn.getech.data.development.model.dto.CountDbAndTableSizeDto;
import cn.getech.data.development.service.CountTableRowsNumService;
import cn.getech.data.development.service.TableFieldInfoService;
import cn.getech.data.development.service.TableStatisticsService;
import cn.getech.data.development.utils.HdfsUtil;
import cn.getech.data.development.utils.MysqlTableUtil;
import cn.getech.data.intelligence.common.utils.ByteConverFormat;
import cn.hutool.core.collection.CollectionUtil;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;


@Slf4j
@Service
public class CountTableRowsNumServiceImpl extends ServiceImpl<TableInfoMapper, TableInfo> implements CountTableRowsNumService {

    @Autowired
    private BdpJobConfig bdpJobConfig;

    @Resource
    private TableInfoMapper tableInfoMapper;

    // NOTE(review): not referenced anywhere in this class — kept in case it is
    // needed for future Hive connections, but a candidate for removal.
    @Autowired
    private HiveConfig hiveConfig;

    @Resource
    private ScheduleJobLogMapper scheduleJobLogMapper;

    @Autowired
    private TableFieldInfoService tableFieldInfoService;

    @Autowired
    private HiveMetaDataConfig hiveMetaDataConfig;

    @Autowired
    private TableStatisticsService tableStatisticsService;

    /**
     * Aggregates, per database of the given project, the total data size, the
     * largest table (name and size) and the last count time, plus the overall
     * total size across all databases.
     *
     * @param procId project id used to filter the per-database statistics
     * @return JSON structure {"itemlist": {dbName: {max_table, db_total_size,
     *         max_table_size, last_count_time}}, "total_size": "<human readable>"}
     */
    @Override
    public Map<String, Object> countTableRowsNumsAndSize(Integer procId) {
        List<CountDbAndTableSizeDto> dbStats = tableInfoMapper.countTableRowsAndSizeGroupByDb(procId);
        double totalSize = 0.00d;
        JSONObject itemList = new JSONObject();
        JSONObject result = new JSONObject();
        if (CollectionUtil.isNotEmpty(dbStats)) {
            for (CountDbAndTableSizeDto dto : dbStats) {
                if (dto.getDbTotalSize() != null) {
                    totalSize += dto.getDbTotalSize();
                }
                JSONObject dbResult = new JSONObject();
                dbResult.put("max_table", dto.getMaxTable());
                // Null sizes (e.g. never-counted databases) render as "0B".
                dbResult.put("db_total_size",
                        dto.getDbTotalSize() != null ? ByteConverFormat.convertFormat(dto.getDbTotalSize()) : "0B");
                dbResult.put("max_table_size",
                        dto.getMaxTableSize() != null ? ByteConverFormat.convertFormat(dto.getMaxTableSize()) : "0B");
                dbResult.put("last_count_time", dto.getLastCountTime());
                itemList.put(dto.getDbName(), dbResult);
            }
        }
        result.put("itemlist", itemList);
        result.put("total_size", ByteConverFormat.convertFormat(totalSize));
        return result;
    }

    /**
     * Scheduled entry point: recomputes row count and size for every warehouse
     * table and records a schedule-job log row on success. Per-table failures
     * are logged (and persisted) inside {@link #countTableSize}, which flips
     * the shared log entity's status to PAUSE so no duplicate success row is
     * inserted here.
     */
    @Override
    public void postCountTableRowsNum() {
        log.info("开始统计数据仓库表的条数以及大小");
        ScheduleJobLogEntity jobLog = new ScheduleJobLogEntity();
        jobLog.setBeanName("countTableRowsNumTask");
        jobLog.setJobId(1L);
        jobLog.setParams("1");
        jobLog.setCreateTime(new Date());
        jobLog.setStatus(Constant.ScheduleStatus.NORMAL.getValue());
        long startTime = System.currentTimeMillis();
        try {
            List<JSONObject> tableSizeList = getDbTableSizeList();
            countTableSize(tableSizeList, jobLog, startTime);
            // Only record a success row when no per-table failure downgraded the status.
            if (jobLog.getStatus().equals(Constant.ScheduleStatus.NORMAL.getValue())) {
                jobLog.setTimes((int) (System.currentTimeMillis() - startTime));
                scheduleJobLogMapper.insert(jobLog);
            }
            log.info("数据仓库表的条数以及大小统计完成（postCountTableRowsNum finish）");
        } catch (Exception e) {
            // Pass the throwable as the last argument so the stack trace is kept.
            log.error("统计数据仓库数据失败！", e);
        }
    }

    /**
     * Updates row count and total size for each table listed in
     * {@code tableSizeList} and appends a snapshot row to the statistics
     * history table. A failure on one table is recorded in the shared job-log
     * entity (status set to PAUSE, row inserted) without stopping the
     * remaining tables.
     *
     * @param tableSizeList entries carrying "dbName", "tableName" and "size" keys
     * @param scheduleJobLogEntity job log mutated and persisted on per-table failure
     * @param startTime job start time in epoch millis, used for elapsed-time reporting
     */
    public void countTableSize(List<JSONObject> tableSizeList, ScheduleJobLogEntity scheduleJobLogEntity, Long startTime) {
        for (JSONObject entry : tableSizeList) {
            String dbName = entry.getString("dbName");
            String tableName = entry.getString("tableName");
            try {
                List<TableInfo> tableInfos = tableInfoMapper.selectList(new QueryWrapper<TableInfo>()
                        .eq("db_name", dbName)
                        .eq("table_name", tableName));
                // selectList returns an empty list (not null) when the table is
                // unknown; the former null check let get(0) throw on empty results.
                if (CollectionUtil.isEmpty(tableInfos)) {
                    continue;
                }
                TableInfo tableInfo = tableInfos.get(0);
                log.info("开始更新数据仓库{}.{}的统计信息", tableInfo.getDbName(), tableInfo.getTableName());
                List<TableFieldInfo> partitionFields = tableFieldInfoService.list(new QueryWrapper<TableFieldInfo>()
                        .eq("table_id", tableInfo.getId())
                        .eq("is_partition", 1));
                Long rowsNum;
                if (CollectionUtil.isEmpty(partitionFields)) {
                    rowsNum = newMetastoreUtil().dataPreviewRowNum(tableInfo);
                    // A zero plain count may only mean the metastore lacks table-level
                    // stats; fall back to summing the per-partition counts.
                    if (rowsNum != null && rowsNum == 0) {
                        rowsNum = newMetastoreUtil().dataPreviewRowNumByPartition(tableInfo);
                    }
                } else {
                    rowsNum = newMetastoreUtil().dataPreviewRowNumByPartition(tableInfo);
                }
                tableInfo.setLastCountTime(new Date());
                tableInfo.setTotalSize(entry.getDouble("size"));
                tableInfo.setRowsNum(rowsNum);
                tableInfoMapper.updateById(tableInfo);

                // Append a snapshot row to the warehouse statistics history table.
                TableStatistics tableStatistics = new TableStatistics();
                tableStatistics.setTableId(tableInfo.getId());
                tableStatistics.setRowsNum(rowsNum == null ? 0 : rowsNum.intValue());
                tableStatistics.setTotalSize(tableInfo.getTotalSize());
                tableStatistics.setCountTime(new Date());
                // Partition count, when the metastore exposes the partition list.
                List<String> partitions = newMetastoreUtil().showPartitionAndValue(tableInfo, null);
                if (partitions != null) {
                    tableStatistics.setPartitionCount(partitions.size());
                }
                tableStatisticsService.save(tableStatistics);
                log.info("更新数据仓库的统计信息完毕{}.{}, rowNum:{}, size:{}",
                        tableInfo.getDbName(), tableInfo.getTableName(), rowsNum, tableInfo.getTotalSize());
            } catch (Exception e) {
                // Keep the stack trace, mark the job paused and persist the failure.
                log.error("更新数据仓库统计数据失败！", e);
                scheduleJobLogEntity.setError(dbName + "." + tableName + ":" + e.getMessage());
                scheduleJobLogEntity.setTimes((int) (System.currentTimeMillis() - startTime));
                scheduleJobLogEntity.setStatus(Constant.ScheduleStatus.PAUSE.getValue());
                scheduleJobLogMapper.insert(scheduleJobLogEntity);
            }
        }
    }

    /**
     * Builds a fresh Hive-metastore JDBC helper from the configured connection
     * settings. A new instance is created per query, matching the original
     * usage pattern (the helper appears to be single-use).
     */
    private MysqlTableUtil newMetastoreUtil() {
        return new MysqlTableUtil(hiveMetaDataConfig.getHiveMetastoreUrl(),
                hiveMetaDataConfig.getHiveMetastoreDb(),
                hiveMetaDataConfig.getHiveMetastoreUsername(),
                hiveMetaDataConfig.getHiveMetastorePassword());
    }

    /**
     * Walks the warehouse HDFS root and returns one JSON entry per table with
     * keys "dbName", "tableName" and "size" (bytes as double). Tables whose
     * size lookup fails are logged and skipped.
     *
     * @return per-table size entries, possibly empty, never null
     * @throws Exception if the HDFS client cannot be created or a database
     *         listing fails
     */
    public List<JSONObject> getDbTableSizeList() throws Exception {
        String warehouseHome = "/user/hive/warehouse/";
        List<JSONObject> countResult = new ArrayList<>();
        HdfsUtil hdfsUtil = new HdfsUtil(bdpJobConfig);
        try {
            for (DBTypeEnum db : DBTypeEnum.values()) {
                String dbName = db.getName();
                String dbUrl = warehouseHome + dbName + ".db";
                List<String> tableNames = hdfsUtil.dbTableNameList(dbUrl);
                if (tableNames == null) {
                    continue;
                }
                for (String tableName : tableNames) {
                    try {
                        Long tableSize = hdfsUtil.getTableOrDbSize(dbUrl + "/" + tableName);
                        JSONObject entry = new JSONObject();
                        entry.put("dbName", dbName);
                        entry.put("tableName", tableName);
                        entry.put("size", tableSize.doubleValue());
                        countResult.add(entry);
                    } catch (Exception e) {
                        // Skip this table but keep scanning the rest.
                        log.error("获取表:{} 的大小出错:{}", dbName + "." + tableName, e);
                    }
                }
            }
        } finally {
            // Close the HDFS client even when a database listing throws
            // (the original only closed it on the success path).
            hdfsUtil.close();
        }
        return countResult;
    }

}
