package com.central.dataManage.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.central.common.constant.CommonConstant;
import com.central.common.enums.DbType;
import com.central.common.feign.FileService;
import com.central.common.lock.DistributedLock;
import com.central.common.model.ColumnInfo;
import com.central.common.model.DatasourceInfo;
import com.central.common.model.IndexInfo;
import com.central.common.model.PageResult;
import com.central.common.model.Result;
import com.central.common.model.TableInfo;
import com.central.common.service.impl.SuperServiceImpl;
import com.central.dataManage.common.Constants;
import com.central.dataManage.common.datasourcePlugin.BaseDatasourcePlugin;
import com.central.dataManage.common.datasourcePlugin.DatasourcePluginFactory;
import com.central.dataManage.common.datasourcePlugin.HdfsDatasourcePlugin;
import com.central.dataManage.common.datasourcePlugin.KafkaDatasourcePlugin;
import com.central.dataManage.common.utils.CommonUtils;
import com.central.dataManage.common.utils.PropertyUtils;
import com.central.dataManage.mapper.DatasourceInfoMapper;
import com.central.dataManage.model.DatabaseInfo;
import com.central.dataManage.service.IDatabaseService;
import com.central.dataManage.service.IDatasourceService;
import com.central.dataManage.service.IDelCacheService;
import com.central.dataManage.service.ITableService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.protocol.FsPermissionExtension;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static com.central.dataManage.common.Constants.HIVE_METASTORE_WAREHOUSE_DIR;
import static com.central.dataManage.common.Constants.HIVE_METASTORE_WAREHOUSE_DIR_DEFAULT;

/**
 * Datasource management service: CRUD over {@link DatasourceInfo} records,
 * connectivity testing, metadata synchronisation into the local database/table
 * catalog, and provisioning of per-user / per-project Hive databases and HDFS
 * directories.
 *
 * @author Tindy
 * @date 2021/6/24
 * @describe
 */
@Slf4j
@Service
public class DatasourceServiceImpl extends SuperServiceImpl<DatasourceInfoMapper, DatasourceInfo> implements IDatasourceService {
    private final static String LOCK_KEY_NAME = CommonConstant.LOCK_KEY_PREFIX + "datasource:";

    /**
     * Names concatenated into Hive role/grant DDL must match this pattern;
     * DDL statements cannot use bind parameters, so this is the injection guard.
     */
    private static final Pattern SAFE_IDENTIFIER = Pattern.compile("[A-Za-z0-9_]+");

    @Autowired
    private DistributedLock lock;
    @Autowired
    private IDatabaseService databaseService;
    @Autowired
    private ITableService tableService;

    @Autowired
    private FileService fileService;
    @Autowired
    private IDelCacheService delCacheService;

    /**
     * Creates or updates a datasource record. Registers quotes for the uploaded
     * keytab / krb5.conf files, encodes the password before persisting, and on
     * update releases the file quotes held by the previous version of the record.
     *
     * @param datasourceInfo the datasource to persist (id == null means create)
     * @return success result wrapping the persisted entity
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public Result saveOrUpdateDatasource(DatasourceInfo datasourceInfo) throws Exception {
        DatasourceInfo previous = null;
        if (datasourceInfo.getId() != null) {
            previous = getById(datasourceInfo.getId());
        }
        String name = datasourceInfo.getName();
        if (StringUtils.isNotBlank(datasourceInfo.getKeytabUrl())) {
            Long keytabQuoteId = fileService.quoteFile(datasourceInfo.getKeytabUrl(), "配置数据源：" + name + " keytab文件");
            datasourceInfo.setKeytabQuoteId(keytabQuoteId);
        }
        // BUGFIX: this previously re-checked getKeytabUrl() (copy-paste), so a
        // krb5.conf supplied without a keytab was never quoted, and quoteFile
        // could be called with a blank krb5.conf URL.
        if (StringUtils.isNotBlank(datasourceInfo.getKrb5ConfUrl())) {
            Long krb5ConfQuoteId = fileService.quoteFile(datasourceInfo.getKrb5ConfUrl(), "配置数据源：" + name + " krb5conf文件");
            datasourceInfo.setKrb5ConfQuoteId(krb5ConfQuoteId);
        }
        datasourceInfo.setPassword(CommonUtils.encodePassword(datasourceInfo.getPassword()));
        saveOrUpdate(datasourceInfo);
        // Release the quotes held by the record we just replaced.
        if (previous != null) {
            fileService.delQuote(previous.getKeytabQuoteId());
            fileService.delQuote(previous.getKrb5ConfQuoteId());
        }
        return Result.succeed(datasourceInfo, "操作成功");
    }

    /**
     * Pages through datasources matching {@code params} ("page"/"limit" plus
     * mapper-defined filters), masking credentials and Kerberos file info
     * before the rows leave the service.
     */
    @Override
    public PageResult<DatasourceInfo> findDatasources(Map<String, Object> params) {
        Page<DatasourceInfo> page = new Page<>(MapUtils.getInteger(params, "page"), MapUtils.getInteger(params, "limit"));
        List<DatasourceInfo> list = baseMapper.findList(page, params);
        long total = page.getTotal();
        // Strip secrets: callers of the listing must never see Kerberos file
        // references or the (encoded) password.
        list.forEach(u -> {
            u.setKrb5ConfQuoteId(null);
            u.setKrb5ConfUrl(null);
            u.setKeytabQuoteId(null);
            u.setKeytabUrl(null);
            u.setPassword("********");
        });
        return PageResult.<DatasourceInfo>builder().data(list).code(0).count(total).build();
    }

    /**
     * Deletes a datasource together with its cached listings and all metadata
     * (databases, tables) previously synchronised from it.
     *
     * @param id datasource primary key
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public void delById(Long id) {
        DatasourceInfo datasourceInfo = getById(id);
        baseMapper.deleteById(id);
        //删除对应元数据
        //删除缓存
        delCacheService.delDatabaseList(id);
        List<DatabaseInfo> databaseInfos = databaseService.list(new QueryWrapper<DatabaseInfo>().eq("datasource_id", id));
        for (DatabaseInfo databaseInfo : databaseInfos) {
            delCacheService.delTableList(databaseInfo.getId());
        }
        databaseService.delByDatasourceId(id);
        tableService.delByDatasourceId(id);

    }

    /**
     * Tests connectivity for the given datasource config: a JDBC connect for
     * HIVE/ORACLE/MYSQL, a root listing for HDFS, and a blocking probe send for
     * KAFKA. Returns {@code false} for unsupported types or when no plugin exists.
     */
    @Override
    public boolean connTest(DatasourceInfo datasourceInfo) {
        BaseDatasourcePlugin datasourcePlugin = DatasourcePluginFactory.getDatasourcePlugin(datasourceInfo);
        if (datasourcePlugin == null) {
            return false;
        }
        boolean isConnection = false;
        switch (datasourceInfo.getDatasourceType()) {
            case HIVE: case ORACLE: case MYSQL:
                // try-with-resources closes the connection even when the test
                // itself throws (the old code leaked on close() failure paths).
                try (Connection conn = datasourcePlugin.getConn()) {
                    isConnection = conn != null;
                } catch (Exception e) {
                    log.error("jdbc connection test failed at DatasourceServiceImpl::connTest()", e);
                }
                break;
            case HDFS:
                HdfsDatasourcePlugin hdfsDatasourcePlugin = (HdfsDatasourcePlugin) datasourcePlugin;
                try {
                    hdfsDatasourcePlugin.getFs().listFiles(new Path("/"), false);
                    isConnection = true;
                } catch (Exception e) {
                    log.error("hdfs connection test failed at DatasourceServiceImpl::connTest()", e);
                }
                break;
            case KAFKA:
                KafkaDatasourcePlugin kafkaDatasourcePlugin = (KafkaDatasourcePlugin) datasourcePlugin;
                // BUGFIX: the old code flagged success before the async send
                // completed and flipped the flag from the Kafka callback thread
                // without any happens-before edge, so broker failures could be
                // reported as a successful test. Block on the send future instead.
                try (KafkaProducer producer = kafkaDatasourcePlugin.getProducer()) {
                    producer.send(new ProducerRecord<String, String>("test", "connTest")).get(30, TimeUnit.SECONDS);
                    isConnection = true;
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    log.error("kafka connection test interrupted at DatasourceServiceImpl::connTest()", e);
                } catch (Exception e) {
                    log.error("kafka connection test failed at DatasourceServiceImpl::connTest()", e);
                }
                break;
            default:
                break;
        }
        return isConnection;
    }

    /**
     * Re-synchronises the metadata catalog (databases, tables, columns, indexes)
     * for a JDBC-backed datasource. KAFKA/HDFS sources have no catalog and
     * return {@code false}. Existing metadata and related caches are dropped
     * before the refresh.
     *
     * @param id datasource primary key
     * @return true when the catalog was refreshed
     * @throws SQLException propagated so the transaction rolls back on failure
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public boolean synch(Long id) throws SQLException {
        DatasourceInfo datasource = baseMapper.selectById(id);
        if (datasource.getDatasourceType() == DbType.KAFKA || datasource.getDatasourceType() == DbType.HDFS) {
            return false;
        }
        //删除缓存
        if (datasource.getIsSys() && datasource.getDatasourceType() == DbType.HIVE) {
            delCacheService.delUserDbs();
            delCacheService.delProDbs();
        }
        List<DatabaseInfo> databaseInfos = databaseService.list(new QueryWrapper<DatabaseInfo>().eq("datasource_id", id));
        for (DatabaseInfo databaseInfo : databaseInfos) {
            delCacheService.delTableList(databaseInfo.getId());
        }
        //删除旧元数据
        databaseService.delByDatasourceId(id);
        tableService.delByDatasourceId(id);
        BaseDatasourcePlugin datasourcePlugin = DatasourcePluginFactory.getDatasourcePlugin(datasource);
        if (datasourcePlugin == null) {
            return false;
        }
        // try-with-resources replaces the manual finally { conn.close(); }.
        try (Connection conn = datasourcePlugin.getConn()) {
            if (conn == null) {
                return false;
            }
            List<DatabaseInfo> databases = datasourcePlugin.getDatabases(conn);
            databaseService.saveBatch(databases);
            for (DatabaseInfo database : databases) {
                List<TableInfo> tables = datasourcePlugin.getTables(database, conn);
                for (TableInfo table : tables) {
                    log.info("start synch " + datasource.getName() + "." + database.getDbName() + "." + table.getTableName());
                    List<ColumnInfo> columns = datasourcePlugin.getColumns(database, table, conn);
                    table.setColumns(columns);
                    List<IndexInfo> indexs = datasourcePlugin.getIndexs(database, table, conn);
                    table.setIndexs(indexs);
                }
                tableService.saveBatch(tables);
            }
            return true;
        } catch (SQLException e) {
            log.error("metadata synchronisation failed at DatasourceServiceImpl::synch()", e);
            throw e;
        }
    }

    /**
     * Returns id/name pairs of every datasource of the given type.
     */
    @Override
    public List<Map<String, Object>> datasourcesByType(DbType type) {
        return baseMapper.selectMaps(new QueryWrapper<DatasourceInfo>().eq("datasource_type", type).select("id", "name"));
    }


    /**
     * Creates the personal Hive database for a user on the system Hive
     * datasource and registers it in the local catalog.
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public Result createUserDatabase(String username) throws SQLException {
        DatasourceInfo datasource = baseMapper.selectOne(new QueryWrapper<DatasourceInfo>().eq("is_sys", true).eq("datasource_type", DbType.HIVE));
        if (datasource == null) {
            return Result.failed("操作失败，数据源不存在");
        }
        BaseDatasourcePlugin datasourcePlugin = DatasourcePluginFactory.getDatasourcePlugin(datasource);
        datasourcePlugin.createNewDatabase(CommonConstant.USER_DB_PREFIX + username);
        DatabaseInfo database = new DatabaseInfo();
        database.setDbName(CommonConstant.USER_DB_PREFIX + username);
        database.setDatasourceType(DbType.HIVE);
        database.setDatasourceId(datasource.getId());
        databaseService.saveOrUpdate(database);
        //删除库目录缓存
        delCacheService.delDatabaseList(datasource.getId());
        // BUGFIX: the success path previously returned Result.failed("操作成功"),
        // so callers saw a failure code with a success message.
        return Result.succeed("操作成功");
    }

    /**
     * Creates the project Hive database on the system Hive datasource,
     * registers it in the local catalog, and provisions the project's HDFS
     * UDF directory.
     */
    @Transactional(rollbackFor = Exception.class)
    @Override
    public Result createProDatabase(String proName) throws SQLException, IOException {
        DatasourceInfo datasource = baseMapper.selectOne(new QueryWrapper<DatasourceInfo>().eq("is_sys", true).eq("datasource_type", DbType.HIVE));
        if (datasource == null) {
            return Result.failed("操作失败，系统hive数据源不存在");
        }
        BaseDatasourcePlugin datasourcePlugin = DatasourcePluginFactory.getDatasourcePlugin(datasource);
        datasourcePlugin.createNewDatabase(CommonConstant.PRO_DB_PREFIX + proName);
        DatabaseInfo database = new DatabaseInfo();
        database.setDbName(CommonConstant.PRO_DB_PREFIX + proName);
        database.setDatasourceType(DbType.HIVE);
        database.setDatasourceId(datasource.getId());
        databaseService.saveOrUpdate(database);
        //删除库目录缓存
        delCacheService.delDatabaseList(datasource.getId());
        //创建项目udf目录，用于存放项目udf jar包文件
        return createProHdfsUdfDir(proName);
    }

    /**
     * Creates the project's UDF directory on the system HDFS datasource
     * (mode 0775, owned by the project group) for storing project UDF jars.
     */
    public Result createProHdfsUdfDir(String proName) throws IOException {
        DatasourceInfo datasource = baseMapper.selectOne(new QueryWrapper<DatasourceInfo>().eq("is_sys", true).eq("datasource_type", DbType.HDFS));
        if (datasource == null) {
            return Result.failed("操作失败，系统hdfs数据源不存在");
        }
        HdfsDatasourcePlugin datasourcePlugin = (HdfsDatasourcePlugin) DatasourcePluginFactory.getDatasourcePlugin(datasource);
        String resourceUploadPath = PropertyUtils.getString(Constants.RESOURCE_UPLOAD_PATH, Constants.RESOURCE_UPLOAD_PATH_DEFAULT);
        Path udfDir = new Path(resourceUploadPath + "/" + CommonConstant.KA_PREFIX + proName + "/udfs");
        FileSystem fs = null;
        try {
            fs = datasourcePlugin.getFs();
            fs.mkdirs(udfDir, new FsPermission((short) 00775));
            fs.setOwner(udfDir, null, CommonConstant.KA_PREFIX + proName);
        } finally {
            if (fs != null) {
                fs.close();
            }
        }

        return Result.succeed("操作成功");
    }

    /**
     * Sets the HDFS space quota (in GiB) on a user or project Hive database
     * directory under the warehouse path. Exactly one of {@code username} /
     * {@code proName} is expected to be non-blank; username wins when both are.
     */
    @Override
    public Result setDbSize(String username, String proName, Long size) throws SQLException {
        DatasourceInfo datasource = baseMapper.selectOne(new QueryWrapper<DatasourceInfo>().eq("is_sys", true).eq("datasource_type", DbType.HIVE));
        if (datasource == null) {
            return Result.failed("操作失败，数据源不存在");
        }
        String dbName = null;
        if (StringUtils.isNotBlank(username)) {
            dbName = CommonConstant.USER_DB_PREFIX + username;
        } else if (StringUtils.isNotBlank(proName)) {
            dbName = CommonConstant.PRO_DB_PREFIX + proName;
        }
        Path dbPath = new Path(PropertyUtils.getString(HIVE_METASTORE_WAREHOUSE_DIR, HIVE_METASTORE_WAREHOUSE_DIR_DEFAULT) + dbName + ".db");
        HdfsDatasourcePlugin datasourcePlugin = (HdfsDatasourcePlugin) DatasourcePluginFactory.getDatasourcePlugin(datasource);
        // NOTE(review): the FileSystem is deliberately not closed here (Hadoop
        // caches instances and closing would affect other users); createProHdfsUdfDir
        // closes its own — confirm which convention the plugin expects.
        FileSystem fs = datasourcePlugin.getFs();
        try {
            if (fs.exists(dbPath)) {
                // size is GiB; long arithmetic, no int overflow.
                datasourcePlugin.getDfsAdmin().setQuota(dbPath, size * 1024 * 1024 * 1024);
                return Result.succeed("操作成功");
            } else {
                return Result.failed("操作失败，数据库路径不存在");
            }
        } catch (IOException e) {
            return Result.failed("操作失败:" + e.getMessage());
        }
    }

    /**
     * Returns every supported datasource type.
     */
    @Override
    public List<DbType> findAllDbTypes() {
        return Arrays.stream(DbType.values()).collect(Collectors.toList());
    }

    /**
     * Returns the system Hive datasource, or {@code null} when not configured.
     */
    @Override
    public DatasourceInfo findSysDatasource() {
        return baseMapper.selectOne(new QueryWrapper<DatasourceInfo>().eq("is_sys", true).eq("datasource_type", DbType.HIVE));
    }

    /**
     * Creates the Hive role for a project member, binds it to the member's
     * group, and grants the member's personal database plus URI access (the
     * latter is needed for creating temporary UDFs).
     *
     * @throws IllegalArgumentException when proName/username contain characters
     *                                  outside [A-Za-z0-9_] (injection guard)
     */
    @Override
    public Result createUserRoleOfHive(String proName, String username) throws SQLException {
        DatasourceInfo datasource = baseMapper.selectOne(new QueryWrapper<DatasourceInfo>().eq("is_sys", true).eq("datasource_type", DbType.HIVE));
        if (datasource == null) {
            return Result.failed("操作失败，数据源不存在");
        }
        // Role/grant DDL cannot use bind parameters, so reject names that could
        // smuggle SQL into the concatenated statements below.
        requireSafeIdentifier(proName);
        requireSafeIdentifier(username);
        BaseDatasourcePlugin datasourcePlugin = DatasourcePluginFactory.getDatasourcePlugin(datasource);
        String role = CommonConstant.KA_PREFIX + proName + "_" + username + "_role";
        String group = CommonConstant.KA_PREFIX + proName + "_" + username;
        //用户角色权限授权
        // try-with-resources: the old code leaked the connection and statements
        // whenever one of the grants threw.
        try (Connection conn = datasourcePlugin.getConn();
             Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE ROLE " + role);
            stmt.execute("GRANT ROLE " + role + " TO GROUP " + group);
            stmt.execute("GRANT all ON DATABASE " + CommonConstant.USER_DB_PREFIX + username + " TO ROLE " + role);
            //用于用户创建临时udf时使用的权限
            stmt.execute("GRANT all ON URI '*' TO ROLE " + role);
        }
        return Result.succeed("操作成功");
    }

    /** Rejects values that are unsafe to splice into Hive DDL. */
    private static void requireSafeIdentifier(String value) {
        if (value == null || !SAFE_IDENTIFIER.matcher(value).matches()) {
            throw new IllegalArgumentException("illegal identifier: " + value);
        }
    }

    /**
     * Creates the HDFS home directory for a project member on the system HDFS
     * datasource and chowns it to the member's account/group.
     */
    @Override
    public Result createUserHdfsHomeDir(String proName, String username) throws IOException {
        DatasourceInfo datasource = baseMapper.selectOne(new QueryWrapper<DatasourceInfo>().eq("is_sys", true).eq("datasource_type", DbType.HDFS));
        if (datasource == null) {
            return Result.failed("操作失败，数据源不存在");
        }
        HdfsDatasourcePlugin datasourcePlugin = (HdfsDatasourcePlugin) DatasourcePluginFactory.getDatasourcePlugin(datasource);
        String owner = CommonConstant.KA_PREFIX + proName + "_" + username;
        Path homeDir = new Path("/user/" + owner);
        datasourcePlugin.getFs().mkdirs(homeDir);
        datasourcePlugin.getFs().setOwner(homeDir, owner, owner);
        return Result.succeed("操作成功");
    }
}