package org.jetlinks.pro.messaging.kafka.rule;

import com.alibaba.fastjson.JSONObject;
import lombok.AllArgsConstructor;
import lombok.Generated;
import lombok.Getter;
import lombok.Setter;
import org.hswebframework.web.bean.FastBeanCopier;
import org.hswebframework.web.validator.ValidatorUtils;
import org.jetlinks.pro.messaging.kafka.KafkaProducer;
import org.jetlinks.pro.messaging.kafka.Message;
import org.jetlinks.pro.messaging.kafka.SimpleMessage;
import org.jetlinks.pro.messaging.kafka.impl.KafkaDataSourceProvider;
import org.jetlinks.pro.rule.engine.editor.annotation.EditorResource;
import org.jetlinks.pro.rule.engine.model.nodes.NodeConverter;
import org.jetlinks.rule.engine.api.RuleData;
import org.jetlinks.rule.engine.api.model.RuleNodeModel;
import org.jetlinks.rule.engine.api.task.ExecutionContext;
import org.jetlinks.rule.engine.api.task.TaskExecutor;
import org.jetlinks.rule.engine.api.task.TaskExecutorProvider;
import org.jetlinks.rule.engine.defaults.FunctionTaskExecutor;
import org.reactivestreams.Publisher;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import reactor.core.publisher.Mono;

import javax.validation.constraints.NotBlank;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

@Component
@AllArgsConstructor
@EditorResource(
    id = "kafka-producer",
    name = "写入Kafka",
    editor = "rule-engine/editor/network/kafka-producer.html",
    helper = "rule-engine/i18n/{language}/network/kafka-producer.html",
    order = 200
)
public class KafkaProducerTaskExecutorProvider implements TaskExecutorProvider, NodeConverter {

    @Generated
    @Override
    public String getNodeType() {
        // Node type and executor id are intentionally the same identifier.
        return getExecutor();
    }

    @Override
    @Generated
    public RuleNodeModel convert(JSONObject nodeJson) {
        // The raw editor node JSON is used verbatim as the executor configuration.
        RuleNodeModel model = new RuleNodeModel();
        model.setExecutor(getExecutor());
        model.setConfiguration(nodeJson);
        return model;
    }

    @Override
    @Generated
    public String getExecutor() {
        return "kafka-producer";
    }

    @Override
    public Mono<TaskExecutor> createTask(ExecutionContext context) {
        // init() creates the Kafka producer asynchronously before the task is handed back.
        return new KafkaProducerTaskExecutor(context).init();
    }

    /**
     * Task executor that converts incoming {@link RuleData} into Kafka messages
     * and publishes them via a {@link KafkaProducer} created from the node configuration.
     */
    static class KafkaProducerTaskExecutor extends FunctionTaskExecutor {

        // Created by init(); may still be null if init() has not completed (or failed).
        private KafkaProducer producer;

        // Fixed topic from the node configuration; when blank, the topic is
        // resolved per message from the input data (see convert).
        private String topic;

        public KafkaProducerTaskExecutor(ExecutionContext context) {
            super("Kafka Producer", context);
        }

        /**
         * Creates the Kafka producer from the node configuration and returns
         * this executor once the producer is ready.
         */
        Mono<TaskExecutor> init() {
            return createProducer()
                .doOnNext(producer -> this.producer = producer)
                .thenReturn(this);
        }

        @Override
        protected Publisher<RuleData> apply(RuleData input) {
            // Convert each input map to a Kafka message, silently skipping entries
            // that cannot be converted (convert logs those at debug level), then
            // send the batch. No rule data is emitted downstream.
            return input
                .dataToMap()
                .<Message>handle((data, sink) -> {
                    Message msg = convert(data);
                    if (null != msg) {
                        sink.next(msg);
                    }
                })
                .as(producer::send)
                .then(Mono.empty());

        }

        /**
         * Converts one input map to a Kafka message.
         * <p>
         * If a fixed topic is configured on the node, the whole map becomes the
         * payload. Otherwise the map itself must supply {@code "topic"} and
         * {@code "payload"} entries; when it does not, the entry is dropped and
         * logged at debug level.
         *
         * @param data one decoded rule-data entry
         * @return the message to send, or {@code null} if the entry is not convertible
         */
        protected Message convert(Map<String, Object> data) {
            if (StringUtils.hasText(topic)) {
                return SimpleMessage.of(topic, data);
            }
            // Renamed from "topic" to avoid shadowing the instance field checked above.
            String dynamicTopic = (String) data.get("topic");
            Object payload = data.get("payload");
            if (StringUtils.hasText(dynamicTopic) && payload != null) {
                return SimpleMessage.of(dynamicTopic, payload);
            }
            context.logger().debug("The input data does not contain topic:{}", data);
            return null;
        }

        /**
         * Builds and validates the producer configuration from the job configuration.
         */
        public KafkaProducerConfig createConfig() {
            return ValidatorUtils.tryValidate(
                FastBeanCopier
                    .copy(context.getJob().getConfiguration(), new KafkaProducerConfig())
            );
        }

        /**
         * Creates a Kafka producer whose client id encodes the rule instance and
         * node ids ("rule-engine:{instanceId}:{nodeId}") for traceability.
         */
        public Mono<KafkaProducer> createProducer() {
            KafkaProducerConfig config = createConfig();
            this.topic = config.topic;
            return KafkaDataSourceProvider
                .create("rule", config.createProperties(String.join(":", "rule-engine", context
                    .getJob()
                    .getInstanceId(), context.getJob().getNodeId())))
                .createProducer();
        }

        @Override
        public void reload() {
            if (disposable != null) {
                disposable.dispose();
            }
            // Fix: guard against reload() firing before init() assigned the producer
            // (previously an unconditional producer.shutdown() could throw NPE);
            // this also matches the null guard already present in shutdown().
            if (null != producer) {
                producer.shutdown();
            }

            // Recreate the producer, then restart the task once it is ready.
            init().subscribe(ignore -> this.disposable = doStart());
        }

        @Override
        public synchronized void shutdown() {
            super.shutdown();
            if (null != producer) {
                producer.shutdown();
            }
        }
    }

    /**
     * Node configuration: Kafka bootstrap servers plus an optional fixed topic.
     */
    @Getter
    @Setter
    public static class KafkaProducerConfig {
        // Comma-separated broker list, e.g. "host1:9092,host2:9092". Required.
        @NotBlank
        private String bootstrapServers;

        // Optional fixed topic; when blank, each message must carry its own topic.
        private String topic;

        /**
         * Translates this config into Spring Boot {@link KafkaProperties},
         * splitting the comma-separated server list and stamping the given
         * client id on the producer.
         */
        KafkaProperties createProperties(String clientId) {
            KafkaProperties source = new KafkaProperties();
            source.setBootstrapServers(Arrays.asList(bootstrapServers.split(",")));
            source.getProducer().setClientId(clientId);
            return source;
        }
    }
}
