package avicit.bdp.dms.das.job;

import avicit.bdp.common.datasource.BaseDataSource;
import avicit.bdp.common.datasource.DataSourceFactory;
import avicit.bdp.common.service.dto.DataSourceDTO;
import avicit.bdp.common.service.service.DataSourceService;
import avicit.bdp.common.utils.BdpLogUtil;
import avicit.bdp.common.utils.DateUtils;
import avicit.bdp.common.utils.EntityUtils;
import avicit.bdp.common.utils.database.adapter.BaseDBAdapter;
import avicit.bdp.common.utils.database.adapter.DbAdapter;
import avicit.bdp.common.utils.database.adapter.DbAdapterUtils;
import avicit.bdp.common.utils.redis.RedisLockUtils;
import avicit.bdp.dms.das.dto.BdpDasStatDTO;
import avicit.bdp.dms.das.service.BdpDasStatService;
import avicit.bdp.dms.das.service.BdpDasTableService;
import avicit.platform6.commons.utils.ComUtil;
import org.apache.commons.collections4.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.List;

@Component
public class BdpDasJob {
    private static final Logger logger = LoggerFactory.getLogger(BdpDasJob.class);

    /** Distributed-lock key guarding the data-source status refresh job. */
    private static final String DATASOURCE_STATUS_LOCK_KEY = "bdp:das:stat-datasource";
    /** Distributed-lock key guarding the daily lake statistics job. */
    private static final String LAKE_STAT_LOCK_KEY = "bdp:das:stat-lakedata";

    @Autowired
    private DataSourceService bdpDasDatasourceService;
    @Autowired
    private BdpDasTableService bdpDasTableService;
    @Autowired
    private BdpDasStatService bdpDasStatService;
    @Autowired
    private RedisLockUtils lockUtils;

    /**
     * Refreshes the connectivity status of every registered data source.
     * Runs every two hours; a Redis lock ensures only one node executes it.
     * For each source a test connection is attempted and the status is
     * written back selectively (1 = reachable, 0 = unreachable).
     *
     * @throws Exception declared for scheduler compatibility; all work is
     *                   wrapped in try/catch so nothing actually escapes
     */
    @Scheduled(cron = "0 0 0/2 * * ?")
    public void updateDataSourceStatus() throws Exception {
        // Guard clause: skip silently when another node holds the lock.
        if (!lockUtils.lock(DATASOURCE_STATUS_LOCK_KEY)) {
            return;
        }
        logger.info("开始更新数据源状态");
        try {
            List<DataSourceDTO> datasourceDTOList = bdpDasDatasourceService.selectListAll();
            if (CollectionUtils.isNotEmpty(datasourceDTOList)) {
                for (DataSourceDTO dto : datasourceDTOList) {
                    Integer status = isReachable(dto) ? 1 : 0;
                    // Selective update: only id + status are touched so the
                    // rest of the row is left as-is.
                    DataSourceDTO update = new DataSourceDTO();
                    update.setId(dto.getId());
                    update.setDatasourceStatus(status);
                    bdpDasDatasourceService.updateByPrimaryKeySelective(update);
                }
            }
            logger.info("结束更新数据源状态");
        } catch (Exception e) {
            // Fixed: was logger.info("…{}", e), which logs at the wrong level
            // and feeds the throwable into a placeholder, losing the stack
            // trace. SLF4J logs the full trace when the throwable is last.
            logger.error("更新数据源状态执行错误。", e);
        } finally {
            // Always release the lock, even when the refresh failed.
            lockUtils.delete(DATASOURCE_STATUS_LOCK_KEY);
        }
    }

    /**
     * Attempts a test connection against the given data source.
     * Never throws: a failed probe simply means the source is down.
     *
     * @param dto the data source to probe
     * @return {@code true} when the adapter's connection test succeeds
     */
    private boolean isReachable(DataSourceDTO dto) {
        try {
            BaseDataSource baseDataSource = DataSourceFactory.getDatasource(dto);
            BaseDBAdapter dbAdapter = DbAdapterUtils.getDbAdapter(dto.getDatasourceType());
            return dbAdapter.dataSourceConnTest(baseDataSource);
        } catch (Exception e) {
            // Fixed: was e.printStackTrace(); keep the diagnostic but route it
            // through the logger at WARN (an unreachable source is expected).
            logger.warn("数据源连接测试失败, id={}", dto.getId(), e);
            return false;
        }
    }

    /**
     * Snapshots daily lake statistics (data-source count, model count, table
     * count, file count and total data size) into BDP_DAS_STAT. Runs at 00:30
     * and records the numbers under yesterday's date; a Redis lock ensures
     * only one node executes it.
     *
     * @throws Exception declared for scheduler compatibility; all work is
     *                   wrapped in try/catch so nothing actually escapes
     */
    @Scheduled(cron = "0 30 0 * * ?")
    public void statData() throws Exception {
        // Guard clause: skip silently when another node holds the lock.
        if (!lockUtils.lock(LAKE_STAT_LOCK_KEY)) {
            return;
        }
        logger.info("开始执行数据源统计。");
        try {
            // Statistics are attributed to yesterday's date.
            String yesterday = DateUtils.getDayNumBeforeToday(1);
            Long datasourceCount = bdpDasDatasourceService.selectCountAll();
            Long modelCount = bdpDasTableService.selectCountAll();

            long lakeTableCount = 0L;
            long lakeFileCount = 0L;
            long lakeDataCount = 0L;

            // Only project-type-0 sources contribute to the lake totals.
            // NOTE(review): the meaning of projectType == 0 is not visible
            // here — confirm against DataSourceDTO's documentation.
            DataSourceDTO query = new DataSourceDTO();
            query.setProjectType(0);
            List<DataSourceDTO> dataSourceDTOList = bdpDasDatasourceService.selectList(query);
            if (CollectionUtils.isNotEmpty(dataSourceDTOList)) {
                for (DataSourceDTO dto : dataSourceDTOList) {
                    DbAdapter adapter = DbAdapterUtils.getDbAdapter(dto.getDatasourceType());
                    if (adapter == null) {
                        // Fixed: parameterized logging; the original concat
                        // also ran the type into the message with no space.
                        logger.error("{} adapter is null", dto.getDatasourceType());
                        continue;
                    }
                    lakeDataCount += sizeOrZero(adapter, dto);
                    // NOTE(review): structure == 0 appears to mean a
                    // table-based (structured) source and anything else a
                    // file-based one — confirm with the DTO's field docs.
                    if (dto.getDatasourceStructure() == 0) {
                        lakeTableCount += tableCountOrZero(adapter, dto);
                    } else {
                        lakeFileCount += tableCountOrZero(adapter, dto);
                    }
                }
            }

            BdpDasStatDTO bdpDasStatDTO = new BdpDasStatDTO();
            bdpDasStatDTO.setStatisticsDate(yesterday);
            bdpDasStatDTO.setDatasourceCount(datasourceCount);
            bdpDasStatDTO.setModelCount(modelCount);
            bdpDasStatDTO.setLakeTableCount(lakeTableCount);
            bdpDasStatDTO.setLakeFileCount(lakeFileCount);
            bdpDasStatDTO.setLakeDataCount(lakeDataCount);
            bdpDasStatDTO.setId(ComUtil.getId());
            EntityUtils.setCreateAndUpdateInfo(bdpDasStatDTO);
            bdpDasStatService.insertSelective(bdpDasStatDTO);
            // Audit log for the inserted statistics row.
            BdpLogUtil.log4Insert(bdpDasStatDTO);
        } catch (Exception e) {
            // Fixed: was logger.info("…{}", e) — wrong level, stack trace lost.
            logger.error("执行数据源统计执行错误。", e);
        } finally {
            // Always release the lock, even when the run failed.
            lockUtils.delete(LAKE_STAT_LOCK_KEY);
        }
    }

    /**
     * Returns the database size reported by the adapter, or 0 when the call
     * fails or yields null. Best-effort by design: one bad source must not
     * abort the whole statistics run (a null previously NPE'd on unboxing).
     */
    private long sizeOrZero(DbAdapter adapter, DataSourceDTO dto) {
        try {
            Long size = adapter.getDatabaseSize(dto);
            return size == null ? 0L : size;
        } catch (Exception e) {
            logger.warn("获取数据源大小失败, id={}", dto.getId(), e);
            return 0L;
        }
    }

    /**
     * Returns the table count reported by the adapter, or 0 when the call
     * fails or yields null. Used for both the table tally and the file tally,
     * mirroring the original code which called getTableCount in both branches.
     */
    private long tableCountOrZero(DbAdapter adapter, DataSourceDTO dto) {
        try {
            Long count = adapter.getTableCount(dto);
            return count == null ? 0L : count;
        } catch (Exception e) {
            logger.warn("获取数据源表数量失败, id={}", dto.getId(), e);
            return 0L;
        }
    }
}
