package com.hub.realtime.resource.service.impl;


import cn.hutool.core.date.DateUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.hub.realtime.common.core.domain.model.ClusterInfo;
import com.hub.realtime.common.core.domain.model.LoginUser;
import com.hub.realtime.common.enums.BuildState;
import com.hub.realtime.common.exception.UtilException;
import com.hub.realtime.common.model.JarModule;
import com.hub.realtime.common.utils.DateUtils;
import com.hub.realtime.common.utils.StringUtils;
import com.hub.realtime.common.utils.hadoop.HdfsUtil;
import com.hub.realtime.common.utils.password.PasswordUtil;
import com.hub.realtime.framework.service.ServiceBase;
import com.hub.realtime.resource.domain.*;
import com.hub.realtime.resource.dto.input.FlinkProjectQueryParam;
import com.hub.realtime.resource.model.GitModel;
import com.hub.realtime.resource.model.GitProcessMoitor;
import com.hub.realtime.resource.service.*;
import com.hub.realtime.system.service.ISysConfigService;
import com.streamxhub.streamx.common.util.CommandUtils;
import com.streamxhub.streamx.common.util.ThreadUtils;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.eclipse.jgit.api.CloneCommand;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.PullCommand;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.BatchingProgressMonitor;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.messaging.simp.SimpMessageSendingOperations;
import org.springframework.stereotype.Service;
import com.hub.realtime.resource.mapper.ResFlinkProjectMapper;


import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;

import static com.hub.realtime.common.constant.HdfsConstants.HDFS_WORK_SPACE_DIR;


/**
 * git项目管理Service业务层处理
 *
 * @author wupeng
 * @date 2022-01-09
 */
@Service
@Slf4j
public class ResFlinkProjectServiceImpl extends ServiceBase<ResFlinkProjectMapper, ResFlinkProject> implements IResFlinkProjectService {
    // Per-project structured log storage keyed by project id.
    // NOTE(review): never read or written anywhere in this class — possibly dead; TODO confirm before removing.
    private final Map<Long, ConcurrentHashMap<String, List<String>>> logMap = new ConcurrentHashMap<>();
    // Build output buffer per project id; appended to during clone/build and pushed to websocket clients.
    private final Map<Long, StringBuilder> tailBuffer = new ConcurrentHashMap<>();

    // Presence of a project id means a client is currently tailing that build's log.
    // NOTE(review): volatile looks unnecessary — the field is never reassigned and the map is already concurrent.
    private volatile Map<Long, Byte> tailOutMap = new ConcurrentHashMap<>();

    // Presence of a project id means the tailing client still needs the buffered backlog replayed once.
    private final Map<Long, Byte> tailBeginning = new ConcurrentHashMap<>();

    // Module records that belong to a flink project.
    @Autowired
    IResFlinkModuleService moduleService;

    // Jar records that belong to a module.
    @Autowired
    IResModuleJarService moduleJarService;

    // Used to resolve the application a project belongs to (and, via it, the target cluster).
    @Autowired
    IResApplicationService applicationService;

    // Supplies the hadoop / kerberos configuration directories of a cluster.
    @Autowired
    IResClusterService clusterService;

    // System parameters: git store path, temp dir, etc.
    @Autowired
    private ISysConfigService configService;

    // STOMP/websocket sender used to stream build progress to the UI.
    @Autowired
    private SimpMessageSendingOperations simpMessageSendingOperations;

    /**
     * Pages through git projects, optionally filtering by fuzzy name match and
     * exact build state.
     *
     * @param param query parameters (name / buildstate / paging info)
     * @return one page of matching projects
     */
    @Override
    public Page<ResFlinkProject> list(FlinkProjectQueryParam param) {
        LambdaQueryWrapper<ResFlinkProject> wrapper = createCommonWrapper(ResFlinkProject::getUserId, ResFlinkProject::getCreateTime);
        // Apply optional filters only when the caller supplied them.
        if (StringUtils.isNotEmpty(param.getName())) {
            wrapper.like(ResFlinkProject::getName, param.getName());
        }
        if (param.getBuildstate() != null) {
            wrapper.eq(ResFlinkProject::getBuildstate, param.getBuildstate());
        }
        return getPageList(param, wrapper);
    }

    /**
     * Inserts a new project (id == null) or updates an existing one, enforcing a
     * unique project name across all rows.
     *
     * @param resFlinkProject the project to persist
     * @return affected row count from the insert/update
     * @throws UtilException if a different project already uses the same name
     */
    @Override
    public int saveInfo(ResFlinkProject resFlinkProject) {
        // FIX(review): the original called setPassword(getPassword()) inside this check — a
        // self-assignment no-op, presumably a leftover of a planned encryption step. The
        // redundant assignment is dropped; the validation call itself is kept unchanged.
        PasswordUtil.userNameAndPasswordCheck(resFlinkProject.getUsername(), resFlinkProject.getPassword());
        resFlinkProject.setBuildstate(0L);
        LoginUser loginUser = getLoginUser();
        // Look up any existing project with the same name to enforce uniqueness.
        LambdaQueryWrapper<ResFlinkProject> queryWrapper = new LambdaQueryWrapper<>();
        queryWrapper.eq(ResFlinkProject::getName, resFlinkProject.getName());
        ResFlinkProject currentData = baseMapper.selectOne(queryWrapper);
        if (currentData != null
                && (resFlinkProject.getId() == null
                || !Objects.equals(resFlinkProject.getId(), currentData.getId()))) {
            // Either a new project reusing an existing name, or an update colliding
            // with a different row — both are rejected.
            throw new UtilException("存在相同名称的git项目!");
        }
        if (resFlinkProject.getId() != null) {
            // Update path: stamp audit fields and persist.
            resFlinkProject.setUpdateTime(DateUtils.getNowDate());
            resFlinkProject.setUpdateBy(loginUser.getUsername());
            return baseMapper.updateById(resFlinkProject);
        }
        // Insert path: stamp creator info and a fresh uid.
        resFlinkProject.setCreateBy(loginUser.getUsername());
        resFlinkProject.setCreateTime(DateUtils.getNowDate());
        resFlinkProject.setUserId(loginUser.getUserId());
        resFlinkProject.setUid(UUID.randomUUID().toString());
        return baseMapper.insert(resFlinkProject);
    }

    /**
     * Deletes the given projects together with their cascade data: module rows,
     * module-jar rows and any compiled output previously uploaded to HDFS
     * (layout: workspace/appUid/projUid/).
     *
     * @param ids project ids to delete
     * @return number of project rows deleted
     */
    @Override
    public int deleteInfos(Long[] ids) {
        List<Long> idList = Arrays.asList(ids);
        // 1) Remove each project's build output from HDFS.
        LambdaQueryWrapper<ResFlinkProject> projectWrapper = new LambdaQueryWrapper<>();
        projectWrapper.in(ResFlinkProject::getId, idList);
        List<ResFlinkProject> projects = baseMapper.selectList(projectWrapper);
        if (projects != null) {
            for (ResFlinkProject project : projects) {
                if (StringUtils.isEmpty(project.getPath())) {
                    continue; // never deployed — nothing on HDFS to clean up
                }
                // Resolve the cluster the project was deployed to so we can reach its HDFS.
                ResApplication application = applicationService.getBaseMapper().selectById(project.getApplicationId());
                ResCluster cluster = clusterService.getBaseMapper().selectById(application.getClusterId());
                ClusterInfo clusterInfo = new ClusterInfo();
                clusterInfo.setHadoopConfDir(cluster.getHadoopDir());
                clusterInfo.setKerberosConfDir(cluster.getKererosDir());
                if (HdfsUtil.exists(clusterInfo, project.getPath())) {
                    HdfsUtil.delete(clusterInfo, project.getPath());
                }
            }
        }
        // 2) Collect module rows belonging to the deleted projects.
        LambdaQueryWrapper<ResFlinkModule> moduleWrapper = new LambdaQueryWrapper<>();
        moduleWrapper.in(ResFlinkModule::getFlinkProjectId, idList);
        List<ResFlinkModule> modules = moduleService.getBaseMapper().selectList(moduleWrapper);
        List<Long> moduleIds = (modules == null) ? Collections.emptyList()
                : modules.stream().map(ResFlinkModule::getId).collect(Collectors.toList());
        // 3) Delete jar rows of those modules, then the module rows themselves.
        if (!moduleIds.isEmpty()) {
            LambdaQueryWrapper<ResModuleJar> jarWrapper = new LambdaQueryWrapper<>();
            jarWrapper.in(ResModuleJar::getModuleId, moduleIds);
            List<ResModuleJar> jars = moduleJarService.getBaseMapper().selectList(jarWrapper);
            if (jars != null && !jars.isEmpty()) {
                moduleJarService.getBaseMapper().deleteBatchIds(
                        jars.stream().map(ResModuleJar::getId).collect(Collectors.toList()));
            }
            moduleService.getBaseMapper().deleteBatchIds(moduleIds);
        }
        // 4) Finally delete the project rows.
        return delByIds(ids);
    }


    /**
     * Online build: clones the project's git repository synchronously, then runs
     * the maven build and the HDFS deployment asynchronously on
     * {@link #executorService}. Progress is pushed over websocket and the build
     * state is persisted at every transition.
     *
     * @param id project primary key
     * @throws UtilException if the synchronous part (preparation/clone) fails
     */
    @Override
    public void build(Long id) {
        ResFlinkProject resFlinkProject = baseMapper.selectById(id);

        ResApplication application = applicationService.getBaseMapper().selectById(resFlinkProject.getApplicationId());

        // Purge the previous build's DB rows and HDFS output before rebuilding.
        GitModel gitModel = getHdfDist(resFlinkProject);
        clearOldDbAndHdfs(gitModel.getClusterInfo(), resFlinkProject);
        String gitDir = configService.selectConfigByKey("git:store:path");
        if (StringUtils.isEmpty(gitDir)) {
            throw new UtilException("请在参数设置中设置git存放目录,Key为：" + "git:store:path");
        }
        resFlinkProject.setAppSource(gitDir);
        // Remove any stale local checkout so the clone starts from a clean directory.
        File appSource = resFlinkProject.getAppSource();
        if (appSource.exists()) {
            try {
                FileUtils.deleteDirectory(appSource);
            } catch (IOException e) {
                throw new UtilException(e);
            }
        }
        try {
            // Mark the project as building and give this run a fresh session id.
            resFlinkProject.setBuildstate(BuildState.BUILDING.getType());
            resFlinkProject.setSessionId(UUID.randomUUID().toString());
            baseMapper.updateById(resFlinkProject);
            StringBuilder builder = new StringBuilder();
            tailBuffer.put(id, builder.append(resFlinkProject.getLog4BuildStart()));
            boolean cloneSuccess = cloneSourceCode(resFlinkProject);
            if (!cloneSuccess) {
                // FIX(review): the original persisted this failure with two identical
                // back-to-back updateById calls; one is sufficient.
                log.error("[flinkserver] clone or pull error.");
                markBuildFailed(resFlinkProject);
                return;
            }
            // Maven build + HDFS upload run off-thread; websocket carries progress.
            executorService.execute(() -> {
                boolean built = projectBuild(resFlinkProject);
                if (!built) {
                    resFlinkProject.setBuildstate(BuildState.FAILED.getType());
                    baseMapper.updateById(resFlinkProject);
                    return;
                }
                try {
                    simpMessageSendingOperations.convertAndSend(getBuilderHeader(resFlinkProject), "正在往hdfs上边上传");
                    if (deploy(resFlinkProject)) {
                        resFlinkProject.setLastbuild(new Date());
                        resFlinkProject.setClusterId(application.getClusterId());
                        resFlinkProject.setBuildstate(BuildState.SUCCESS.getType());
                        baseMapper.updateById(resFlinkProject);
                        simpMessageSendingOperations.convertAndSend(getBuilderHeader(resFlinkProject), "hdfs上传成功");
                    } else {
                        markBuildFailed(resFlinkProject);
                        simpMessageSendingOperations.convertAndSend(getBuilderHeader(resFlinkProject), "hdfs上传失败");
                    }
                } catch (Exception ex) {
                    markBuildFailed(resFlinkProject);
                    // FIX(review): log the stack trace through the logger instead of printStackTrace().
                    log.error("构建失败：" + ex.getMessage(), ex);
                    simpMessageSendingOperations.convertAndSend(getBuilderHeader(resFlinkProject), "构建失败：" + ex.getMessage());
                    throw new UtilException("构建失败：" + ex.getMessage());
                }
            });
        } catch (Exception ex) {
            markBuildFailed(resFlinkProject);
            log.error("编译出错：" + ex.getMessage(), ex);
            simpMessageSendingOperations.convertAndSend(getBuilderHeader(resFlinkProject), "构建失败：" + ex.getMessage());
            throw new UtilException("编译出错：" + ex.getMessage());
        }
    }

    /** Marks a project's build as FAILED (clearing cluster and last-build time) and persists it. */
    private void markBuildFailed(ResFlinkProject project) {
        project.setClusterId(-1L);
        project.setLastbuild(null);
        project.setBuildstate(BuildState.FAILED.getType());
        baseMapper.updateById(project);
    }


    /**
     * Purges the leftovers of a project's previous build: the module and
     * module-jar database rows, plus the compiled output directory on HDFS.
     *
     * @param clusterInfo connection info for the target cluster
     * @param project     the project whose previous build output is removed
     */
    private void clearOldDbAndHdfs(ClusterInfo clusterInfo, ResFlinkProject project) {
        ResApplication app = applicationService.getBaseMapper().selectById(project.getApplicationId());

        // Delete the jar rows of every module of this project, then the module rows themselves.
        LambdaQueryWrapper<ResFlinkModule> moduleWrapper = new LambdaQueryWrapper<>();
        moduleWrapper.eq(ResFlinkModule::getFlinkProjectId, project.getId());
        List<ResFlinkModule> modules = moduleService.getBaseMapper().selectList(moduleWrapper);
        if (modules != null && !modules.isEmpty()) {
            for (ResFlinkModule module : modules) {
                LambdaQueryWrapper<ResModuleJar> jarWrapper = new LambdaQueryWrapper<>();
                jarWrapper.eq(ResModuleJar::getModuleId, module.getId());
                moduleJarService.getBaseMapper().delete(jarWrapper);
            }
        }
        moduleService.getBaseMapper().delete(moduleWrapper);

        // Remove the previously uploaded build output from HDFS.
        String hdfsPath = HDFS_WORK_SPACE_DIR.concat("/").concat(app.getUid()).concat("/")
                .concat(project.getUid());
        if (HdfsUtil.exists(clusterInfo, hdfsPath)) {
            HdfsUtil.delete(clusterInfo, hdfsPath);
        }
    }


    /** Stops tailing the build log of the given project by removing its markers. */
    @Override
    public void closeBuildLog(Long id) {
        this.tailOutMap.remove(id);
        this.tailBeginning.remove(id);
    }

    /**
     * Starts tailing the build log of the given project: registers the id and
     * flags that the buffered backlog must be replayed to this client first.
     */
    @Override
    public void tailBuildLog(Long id) {
        final Byte marker = (byte) 0;
        tailOutMap.put(id, marker);
        tailBeginning.put(id, marker);
    }

    /**
     * View a build log — intentionally a no-op in this implementation.
     * NOTE(review): interface method left unimplemented; presumably superseded by
     * {@code tailBuildLog} — TODO confirm whether it should be removed from the interface.
     */
    @Override
    public void seeBuildLog(Long id) {

    }

    /** Returns every git project (no filtering). */
    @Override
    public List<ResFlinkProject> getGitProjectList() {
        return baseMapper.selectList(new LambdaQueryWrapper<>());
    }

    /** Returns every flink module row (no filtering). */
    @Override
    public List<ResFlinkModule> getModulList() {
        return moduleService.getBaseMapper().selectList(new LambdaQueryWrapper<>());
    }

    /** Returns every module-jar row (no filtering). */
    @Override
    public List<ResModuleJar> getModuleJarList() {
        return moduleJarService.getBaseMapper().selectList(new LambdaQueryWrapper<>());
    }


    /**
     * STOMP destination that build-log messages are published to.
     * Currently a single fixed destination shared by all projects: the per-project
     * suffix (id / sessionId) was deliberately disabled, so the parameter is unused.
     *
     * @param project the project being built (currently unused)
     * @return the websocket destination for build log messages
     */
    private String getBuilderHeader(ResFlinkProject project) {
        return "/resp/build/";
    }

    /**
     * Runs the project's maven build command and streams its output.
     * Each output line is appended to the project's tail buffer; when a client is
     * tailing (id present in {@code tailOutMap}), lines are also pushed over
     * websocket, replaying the buffered backlog first if {@code tailBeginning}
     * is still set for the id.
     *
     * @param project the project to build
     * @return true if the maven output contained "BUILD SUCCESS"
     */
    private boolean projectBuild(ResFlinkProject project) {

        String buildHeader = getBuilderHeader(project);

        StringBuilder builder = tailBuffer.get(project.getId());
        AtomicBoolean success = new AtomicBoolean(false);
        // The callback fires per output line while the command executes; success is
        // detected purely by scanning for maven's "BUILD SUCCESS" marker.
        CommandUtils.execute(project.getMavenBuildCmd(), (line) -> {
            builder.append(line).append("\n");
            if (line.contains("BUILD SUCCESS")) {
                success.set(true);
            }
            if (tailOutMap.containsKey(project.getId())) {
                // First delivery to a tailing client replays the whole backlog once.
                if (tailBeginning.containsKey(project.getId())) {
                    tailBeginning.remove(project.getId());
                    Arrays.stream(builder.toString().split("\n"))
                            .forEach(out -> simpMessageSendingOperations.convertAndSend(buildHeader, out));
                }
                simpMessageSendingOperations.convertAndSend(buildHeader, line);
            }
        });
        // Build finished: stop tailing, dump the full log, and release the buffer.
        closeBuildLog(project.getId());
        log.info(builder.toString());
        tailBuffer.remove(project.getId());
        return success.get();
    }

    // Build worker pool: core size = 2 * CPUs, max 200 threads, 60s keep-alive,
    // bounded queue of 1024 tasks; rejects with AbortPolicy when saturated.
    private ExecutorService executorService = new ThreadPoolExecutor(
            Runtime.getRuntime().availableProcessors() * 2,
            200,
            60L,
            TimeUnit.SECONDS,
            new LinkedBlockingQueue<>(1024),
            ThreadUtils.threadFactory("flinkserver-build-executor"),
            new ThreadPoolExecutor.AbortPolicy()
    );

    /**
     * Clones the project's git repository into its local appSource directory and
     * appends the checked-out file tree to the build log buffer.
     * NOTE(review): this method never actually returns false — every failure path
     * throws — so the caller's "clone failed" branch is effectively dead code.
     *
     * @param project the project whose repository is cloned
     * @return true when the clone completed
     * @throws UtilException wrapping any clone failure (build state is persisted as FAILED first)
     */
    private boolean cloneSourceCode(ResFlinkProject project) {
        try {
            GitModel gitModel = getHdfDist(project);
            project.cleanCloned(gitModel.getClusterInfo(), gitModel.getHdfsDist());
            log.info("clone {}, {} starting...", project.getName(), project.getUrl());
            tailBuffer.get(project.getId()).append(project.getLog4CloneStart());
            CloneCommand cloneCommand = Git.cloneRepository()
                    .setURI(project.getUrl())
                    .setDirectory(project.getAppSource())
                    .setBranch(project.getBranches());
            // Only attach credentials when the username/password pair passes the check.
            if (PasswordUtil.userNameAndPasswordCheck(project.getUsername(), project.getPassword())) {
                cloneCommand.setCredentialsProvider(project.getCredentialsProvider());
            }

            Git git = cloneCommand.call();
            // Disable SSL verification in the cloned repo's config.
            // NOTE(review): written AFTER the clone, so it cannot affect the clone
            // itself, only later fetch/pull operations — confirm this is intended.
            StoredConfig config = git.getRepository().getConfig();
            config.setBoolean("http", project.getUrl(), "sslVerify", false);
            config.setBoolean("https", project.getUrl(), "sslVerify", false);
            config.save();
            // Record the checked-out file tree in the build log.
            File workTree = git.getRepository().getWorkTree();
            gitWorkTree(project.getId(), workTree, "");
            tailBuffer.get(project.getId()).append(
                    String.format(
                            "[flink 任务管理平台] project [%s] git clone successful!\n",
                            project.getName()
                    )
            );
            return true;
        } catch (Exception e) {
            String errorLog = String.format(
                    "[flink 任务管理平台] project [%s] branch [%s] git clone failure, err: %s",
                    project.getName(),
                    project.getBranches(),
                    e
            );
            // Persist the failed state before surfacing the error to the caller.
            project.setBuildstate(BuildState.FAILED.getType());
            baseMapper.updateById(project);
            tailBuffer.get(project.getId()).append(errorLog);
            log.error(String.format("project %s clone error ", project.getName()), e);
            throw new UtilException("编译失败：" + e);
        }


    }

    /**
     * Recursively appends the git work tree's file listing to the project's build
     * log buffer, skipping anything whose name starts with ".git".
     *
     * @param id       project id (key into the tail buffer)
     * @param workTree directory to list
     * @param space    path prefix accumulated so far
     */
    private void gitWorkTree(Long id, File workTree, String space) {
        for (File entry : Objects.requireNonNull(workTree.listFiles())) {
            String name = entry.getName();
            if (name.startsWith(".git")) {
                continue;
            }
            boolean isDir = entry.isDirectory();
            if (entry.isFile() || isDir) {
                tailBuffer.get(id).append(space).append("/").append(name).append("\n");
            }
            if (isDir) {
                gitWorkTree(id, entry, space.concat("/").concat(name));
            }
        }
    }


    /**
     * Resolves the deployment target for a project: the cluster connection info
     * (via application -> cluster) and the HDFS destination directory
     * (workspace/appUid/projectUid).
     *
     * @param project the flink project
     * @return a GitModel carrying the cluster info and the HDFS destination path
     */
    private GitModel getHdfDist(ResFlinkProject project) {
        ResApplication app = applicationService.getBaseMapper().selectById(project.getApplicationId());
        ResCluster targetCluster = clusterService.getBaseMapper().selectById(app.getClusterId());

        ClusterInfo info = new ClusterInfo();
        info.setKerberosConfDir(targetCluster.getKererosDir());
        info.setHadoopConfDir(targetCluster.getHadoopDir());

        GitModel model = new GitModel();
        model.setHdfsDist(String.join("/", HDFS_WORK_SPACE_DIR, app.getUid(), project.getUid()));
        model.setClusterInfo(info);
        return model;
    }


    /**
     * Uploads the project's build artifacts to HDFS and inserts a module row plus
     * a module-jar row for every discovered jar.
     *
     * @param project the freshly built project
     * @return always true (failures surface as exceptions)
     * @throws Exception on HDFS or local filesystem errors
     */
    private boolean deploy(ResFlinkProject project) throws Exception {
        GitModel gitModel = getHdfDist(project);
        File path = project.getAppSource();
        // Recreate the project's HDFS workspace from scratch.
        if (HdfsUtil.exists(gitModel.getClusterInfo(), gitModel.getHdfsDist())) {
            HdfsUtil.delete(gitModel.getClusterInfo(), gitModel.getHdfsDist());
        }
        HdfsUtil.mkdirs(gitModel.getClusterInfo(), gitModel.getHdfsDist());
        // Local staging directory.
        // NOTE(review): tmpFile is created below but never referenced again in this
        // method — possibly a leftover; confirm nothing relies on it implicitly.
        String tmpPath = configService.selectConfigByKey("sys:temp:dir")
                .concat("/workspace/")
                .concat(project.getUid());
        File tmpFile = new File(tmpPath);
        if (tmpFile.exists()) {
            FileUtils.deleteDirectory(tmpFile);
        }
        tmpFile.mkdirs();
        // Collect every deployable jar under the source tree.
        List<JarModule> list = new ArrayList<>();
        findTarOrJar(project, list, path);
        if (list.size() > 0) {
            list.forEach(l -> {
                // One HDFS subdirectory per module, keyed by a fresh uuid.
                String uuid = UUID.randomUUID().toString();
                String hdfsModulePath = gitModel.getHdfsDist().concat("/")
                        .concat(uuid);
                HdfsUtil.mkdirs(gitModel.getClusterInfo(), hdfsModulePath);
                String hdfsJarPath = hdfsModulePath.concat("/")
                        .concat(l.getJarName());

                File localJar = new File(l.getJarPath());
                if (localJar.exists()) {
                    HdfsUtil.upload(gitModel.getClusterInfo(), localJar.getAbsolutePath(), hdfsJarPath, true, true);
                }

                // Record the module row pointing at its HDFS directory.
                ResFlinkModule module = new ResFlinkModule();
                module.setFlinkProjectId(project.getId());
                module.setUid(uuid);
                module.setName(l.getModuleName());
                module.setPath(HdfsUtil.getDefaultFS(gitModel.getClusterInfo()).concat("/").concat(hdfsModulePath));
                //module.setCreateBy(loginUser.getUsername());
                module.setCreateTime(DateUtils.getNowDate());
                moduleService.getBaseMapper().insert(module);


                // Record the jar row pointing at the uploaded file.
                ResModuleJar resModuleJar = new ResModuleJar();
                resModuleJar.setModuleId(module.getId());
                resModuleJar.setName(l.getJarName());
                resModuleJar.setPath(HdfsUtil.getDefaultFS(gitModel.getClusterInfo()).concat("/").concat(hdfsJarPath));
               // resModuleJar.setCreateBy(loginUser.getUsername());
                resModuleJar.setCreateTime(DateUtils.getNowDate());
                moduleJarService.getBaseMapper().insert(resModuleJar);

            });
        }
        return true;
    }


    /**
     * Recursively scans the build output tree: for every {@code target} directory
     * found, picks the project jar (ignoring {@code original-*} shaded leftovers
     * and {@code *-sources.jar}) and records it as a {@link JarModule}. When
     * several candidate jars exist, the largest file wins.
     *
     * @param project the project being deployed (used to normalize the root module name)
     * @param list    accumulator for discovered jar modules
     * @param path    directory to scan recursively
     * @throws RuntimeException if a target directory contains no usable jar
     */
    private void findTarOrJar(ResFlinkProject project, List<JarModule> list, File path) {
        for (File file : Objects.requireNonNull(path.listFiles())) {
            // Locate each maven "target" output directory.
            if (file.isDirectory() && "target".equals(file.getName())) {
                JarModule jarModule = new JarModule();
                // The module name is the parent directory; the root module's checkout
                // dir is "<rootName>-<branch>", which we map back to the plain root name.
                String moduleName = file.getParentFile().getName();
                if (moduleName.equals(project.getRootName().concat("-")
                        .concat(project.getBranches()))) {
                    moduleName = project.getRootName();
                }
                jarModule.setModuleName(moduleName);
                jarModule.setUid(UUID.randomUUID().toString());
                // Pick the deployable jar inside target/ — several may be present.
                File jar = null;
                for (File targetFile : Objects.requireNonNull(file.listFiles())) {
                    String name = targetFile.getName();
                    if (!name.startsWith("original-")
                            && !name.endsWith("-sources.jar")
                            && name.endsWith(".jar")) {
                        // FIX(review): the original compared File.getTotalSpace(), which is
                        // the size of the whole filesystem partition (identical for every
                        // file), so the "largest jar" tie-break never worked. File.length()
                        // is the actual file size.
                        if (jar == null || targetFile.length() > jar.length()) {
                            jar = targetFile;
                        }
                    }
                }
                if (jar == null) {
                    throw new RuntimeException("[flinkserver] can't find  jar in " + file.getAbsolutePath());
                }
                jarModule.setJarPath(jar.getAbsolutePath());
                jarModule.setJarName(jar.getName());
                list.add(jarModule);
            }
            // Recurse into subdirectories (including modules' own directories).
            if (file.isDirectory()) {
                findTarOrJar(project, list, file);
            }
        }
    }

}
