package com.hwn.datax.service.impl;

import cn.hutool.core.util.IdUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.hwn.datax.domain.DataxJobGroup;
import com.hwn.datax.service.ConfigService;
import com.hwn.datax.service.DataxJobGroupService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * @author Administrator
 * @description 针对表【datax_job_group(datax任务组)】的数据库操作Service实现
 * @createDate 2023-06-05 21:06:28
 */
@Service
public class DataxJobGroupServiceImpl implements DataxJobGroupService {

    /** Config key under which the whole group map is persisted through {@link ConfigService}. */
    private static final String CONFIG_KEY = "DataxGroupTable";

    private final ConfigService configService;

    /**
     * In-memory cache of all job groups, keyed by group id.
     *
     * <p>{@code ConcurrentHashMap} because this singleton bean's {@link #save} and
     * {@link #list} can be invoked concurrently by multiple request threads; a plain
     * static {@code HashMap} is not safe under concurrent mutation. An instance field
     * suffices since Spring creates a single bean instance.
     * NOTE(review): {@code ConcurrentHashMap} rejects null keys — assumes every
     * persisted group has a non-null id (true for groups written by {@link #save}).
     */
    private final Map<Long, DataxJobGroup> dataxJobGroupMap = new ConcurrentHashMap<>();

    /**
     * Creates the service and warms the cache from the persisted group table.
     *
     * @param configService backing store holding the serialized group map under {@link #CONFIG_KEY}
     */
    @Autowired
    public DataxJobGroupServiceImpl(ConfigService configService) {
        this.configService = configService;
        JSONObject config = configService.getConfig(CONFIG_KEY);
        if (config != null) {
            // Persisted form is { "<id>": {group fields...}, ... }; rebuild each bean.
            config.forEach((key, value) -> {
                DataxJobGroup bean = JSONUtil.toBean((JSONObject) value, DataxJobGroup.class);
                dataxJobGroupMap.put(bean.getId(), bean);
            });
        }
    }

    /**
     * 获取全部的分组内容 — returns all cached job groups.
     *
     * @return unmodifiable snapshot list of every known group (possibly empty, never null)
     */
    @Override
    public List<DataxJobGroup> list() {
        return dataxJobGroupMap.values().stream().toList();
    }

    /**
     * 保存分组名称 — creates or updates a job group, then persists the whole map.
     * A snowflake id is assigned when the incoming group has none.
     *
     * @param dataxJobGroup group to store; its id may be filled in as a side effect
     * @return the stored group (same instance, with a generated id if it had none)
     */
    @Override
    public DataxJobGroup save(DataxJobGroup dataxJobGroup) {
        if (dataxJobGroup.getId() == null) {
            dataxJobGroup.setId(IdUtil.getSnowflakeNextId());
        }
        dataxJobGroupMap.put(dataxJobGroup.getId(), dataxJobGroup);
        // Persist the entire map on every save; cheap while the group count stays small.
        configService.saveConfig(CONFIG_KEY, JSONUtil.toJsonStr(dataxJobGroupMap));
        return dataxJobGroup;
    }
}




