/*
 * Copyright (C) 2010-2101 Alibaba Group Holding Limited.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.alibaba.otter.node.etl.load.loader.mq.producer;

import com.alibaba.otter.node.etl.load.exception.LoadException;
import com.alibaba.otter.shared.common.model.config.data.mq.MqDataMedia;
import com.alibaba.otter.shared.common.model.config.data.mq.MqMediaSource;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.MigrateMap;
import com.google.common.collect.OtterMigrateMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Factory for MQ message-queue producers, cached per pipeline.
 *
 * @author luchenghua
 * @since 2021-06-23
 */
public class MqProducerFactory implements DisposableBean {

    private static final Logger logger = LoggerFactory.getLogger(MqProducerFactory.class);


    // 第一层pipelineId , 第二层DbMediaSource id
    private final LoadingCache<Long, Map<MqDataMedia, MQProducer>> producers;

    public MqProducerFactory() {
        producers = OtterMigrateMap.makeSoftValueComputingMapWithRemoveListener(
                input -> MigrateMap.makeComputingMap(mqMediaSource -> {
                    MqMediaSource source = mqMediaSource.getSource();
                    if (source.getType().isKafka()) {

                        MQProperties mqProperties = new MQProperties();
                        KafkaProducer kafkaProducer = new KafkaProducer();
                        mqProperties.setServers(source.getUrl());
                        kafkaProducer.init(mqProperties);
                        return kafkaProducer;
                    }
                    throw new LoadException("not support media type [" + source.getType().name() + "]");
                }),

                (key, value) -> {
                    for (MQProducer mqProducer : value.values()) {
                        mqProducer.stop();
                    }
                });
    }

    public MQProducer getMQProducer(Long pipelineId, MqDataMedia source) {
        return producers.getUnchecked(pipelineId).get(source);
    }

    public void destroy(Long pipelineId) {

        Map<MqDataMedia, MQProducer> dialect = producers.getIfPresent(pipelineId);
        if (dialect != null) {
            for (MQProducer dbDialect : dialect.values()) {
                dbDialect.stop();
            }
            producers.invalidate(pipelineId);
        }
    }

    public void destroy() throws Exception {
        Set<Long> pipelineIds = new HashSet<Long>(producers.asMap().keySet());
        for (Long pipelineId : pipelineIds) {
            destroy(pipelineId);
        }
    }

}
