package io.github.junxworks.junx.stream.pipeline;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import io.github.junxworks.junx.core.util.Asserts;
import io.github.junxworks.junx.core.util.StringUtils;
import io.github.junxworks.junx.stream.constants.DefaultTopics;
import io.github.junxworks.junx.stream.extractor.DataExtractor;
import io.github.junxworks.junx.stream.extractor.ExtractContext;
import io.github.junxworks.junx.stream.extractor.convert.Convertor;
import io.github.junxworks.junx.stream.extractor.db.DBDataExtractor;
import io.github.junxworks.junx.stream.extractor.file.FileExctractor;
import io.github.junxworks.junx.stream.handler.AbstractBatchDataDataHandler;
import io.github.junxworks.junx.stream.handler.AbstractDataHandler;
import io.github.junxworks.junx.stream.handler.AbstractErrorHandler;
import io.github.junxworks.junx.stream.handler.AbstractSingleDataDataHandler;

import javax.sql.DataSource;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Supplier;

import static io.github.junxworks.junx.stream.constants.DefaultTopics.TOPIC_DATAEXTRACT;


/**
 * Fluent builder that assembles a {@link Pipeline}: one {@link DataExtractor}
 * feeding a tree of processing components wired together by topic names.
 *
 * <p>Typical usage: configure the extraction source (SQL + {@link DataSource},
 * or a file), register root steps (which consume the data-extraction topic),
 * chain further steps by referencing a parent's output topic, register an
 * exception handler, then call {@link #build()}.
 *
 * <p>Not thread-safe; build from a single thread.
 *
 * @author 王兴
 * @date 2024/09/10
 */
public class PipeBuilder {
    /** Context handed to the extractor (SQL, file path, convertor, batch size). */
    private final ExtractContext extractContext = new ExtractContext();
    /** Extraction strategy; defaults to database extraction. */
    private DataExtractor dataExtractor = new DBDataExtractor();
    private Pipeline pipeline;
    /** Components keyed by their output topic; used to wire children to parents. */
    private final Map<String, BaseComponent> componentMap = Maps.newHashMap();
    /** Root components that consume the data-extraction topic directly. */
    private final List<BaseComponent> roots = Lists.newArrayList();
    private String flowName;
    private DataSource dataSource;
    /** Parameters shared by every handler in the pipeline. */
    private final Map<String, Object> globalParams = Maps.newConcurrentMap();
    /** Component bound to the exception topic; mandatory before {@link #build()}. */
    private BaseComponent errorHandleComponent;
    /** Wait time in seconds (presumably the pipeline drain/shutdown wait — confirm against Pipeline). */
    private int waiteSeconds = 5;
    /**
     * Per-component ring-buffer size, default 64K entries. Must be a positive
     * power of two and must be set before any component is added.
     */
    private int bufferSize = 64 * 1024;

    /**
     * Sets the wait time in seconds passed through to the pipeline.
     *
     * <p>Note: the name carries a historical typo; prefer {@link #setWaitSeconds(int)}.
     *
     * @param waiteSeconds wait time in seconds
     * @return this builder
     */
    public PipeBuilder setWaiteSeconds(int waiteSeconds) {
        this.waiteSeconds = waiteSeconds;
        return this;
    }

    /**
     * Correctly spelled alias for {@link #setWaiteSeconds(int)}.
     *
     * @param waitSeconds wait time in seconds
     * @return this builder
     */
    public PipeBuilder setWaitSeconds(int waitSeconds) {
        return setWaiteSeconds(waitSeconds);
    }

    /**
     * Replaces the default {@link DBDataExtractor} with a custom extractor.
     *
     * @param dataExtractor the extractor to use
     * @return this builder
     */
    public PipeBuilder setDataExtractor(DataExtractor dataExtractor) {
        this.dataExtractor = dataExtractor;
        return this;
    }

    /**
     * Sets the per-component ring-buffer size. Must be called before any
     * component is added.
     *
     * @param bufferSize buffer size; must be a positive power of two
     * @return this builder
     * @throws IllegalArgumentException if {@code bufferSize} is not a positive power of two
     */
    public PipeBuilder setBufferSize(int bufferSize) {
        // Fail fast: downstream ring buffers require a power-of-two capacity.
        if (bufferSize <= 0 || (bufferSize & (bufferSize - 1)) != 0) {
            throw new IllegalArgumentException("bufferSize must be a positive power of two: " + bufferSize);
        }
        this.bufferSize = bufferSize;
        return this;
    }

    /**
     * Merges the given entries into the global parameter map shared by all handlers.
     *
     * @param globalParams entries to add
     * @return this builder
     */
    public PipeBuilder setGlobalParams(Map<String, Object> globalParams) {
        this.globalParams.putAll(globalParams);
        return this;
    }

    /**
     * Registers the mandatory exception handler, bound to the exception topic.
     *
     * @param errorHandler handler invoked for events routed to the exception topic
     * @return this builder
     */
    public PipeBuilder setExceptionEventHandler(AbstractErrorHandler errorHandler) {
        return addStep(DisruptorComponent::new, DefaultTopics.TOPIC_EXCEPTION, "异常处理", errorHandler, null);
    }

    /**
     * Sets the {@link DataSource}; required when the extractor is a {@link DBDataExtractor}.
     *
     * @param dataSource JDBC data source
     * @return this builder
     */
    public PipeBuilder setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
        return this;
    }

    /**
     * Sets the mandatory flow name identifying the built pipeline.
     *
     * @param flowName flow name
     * @return this builder
     */
    public PipeBuilder setFlowName(String flowName) {
        this.flowName = flowName;
        return this;
    }

    /**
     * Sets the SQL statement used by database extraction.
     *
     * @param dataExtractSql extraction SQL
     * @return this builder
     */
    public PipeBuilder setDataExtractSql(String dataExtractSql) {
        this.extractContext.setSql(dataExtractSql);
        return this;
    }

    /**
     * Sets the file path used by file extraction.
     *
     * @param filePath path of the file to extract from
     * @return this builder
     */
    public PipeBuilder setDataExtractFile(String filePath) {
        this.extractContext.setFilePath(filePath);
        return this;
    }

    /**
     * Sets the convertor applied to raw extracted records.
     *
     * @param convertor record convertor
     * @return this builder
     */
    public PipeBuilder setDataExtractConvertor(Convertor<?> convertor) {
        this.extractContext.setConvertor(convertor);
        return this;
    }

    /**
     * Sets the extraction batch size.
     *
     * @param batchSize records per extraction batch
     * @return this builder
     */
    public PipeBuilder setDataExtractBatchSize(int batchSize) {
        this.extractContext.setBatchSize(batchSize);
        return this;
    }

    /**
     * Adds a root step that consumes extracted data one record at a time.
     *
     * @param stepName    display name of the step
     * @param handler     single-record handler
     * @param outputTopic topic this step publishes to; referenced by child steps
     * @return this builder
     */
    public PipeBuilder addRootSingle(String stepName, AbstractSingleDataDataHandler handler, String outputTopic) {
        return addRoot(DisruptorComponent::new, stepName, handler, outputTopic);
    }

    /**
     * Adds a concurrent root step that consumes extracted data one record at a time.
     *
     * @param stepName    display name of the step
     * @param handler     single-record handler
     * @param outputTopic topic this step publishes to; referenced by child steps
     * @return this builder
     */
    public PipeBuilder addRootConcurrent(String stepName, AbstractSingleDataDataHandler handler, String outputTopic) {
        return addRoot(ConcurrentComponent::new, stepName, handler, outputTopic);
    }

    /**
     * Adds a root step that consumes extracted data in batches.
     *
     * @param stepName    display name of the step
     * @param handler     batch handler
     * @param outputTopic topic this step publishes to; referenced by child steps
     * @return this builder
     */
    public PipeBuilder addRootBatch(String stepName, AbstractBatchDataDataHandler handler, String outputTopic) {
        // Root steps consume the data-extraction topic directly.
        return addRoot(DisruptorComponent::new, stepName, handler, outputTopic);
    }

    /**
     * Creates, wires and registers a root component listening on the
     * data-extraction topic.
     *
     * @throws IllegalArgumentException if {@code outputTopic} is already in use
     */
    private PipeBuilder addRoot(Supplier<BaseComponent> componentSupplier, String stepName, AbstractDataHandler handler, String outputTopic) {
        // Mirror addStep(): only topic-named components participate in the
        // duplicate check and the lookup map (a null topic is a terminal step).
        if (StringUtils.notNull(outputTopic) && componentMap.containsKey(outputTopic)) {
            throw new IllegalArgumentException("Duplicate outputTopic:" + outputTopic);
        }
        handler.setOutputTopic(outputTopic);
        BaseComponent root = componentSupplier.get();
        root.setEventChannelHandler(handler);
        root.setInputTopic(TOPIC_DATAEXTRACT);
        root.setOutputTopic(outputTopic);
        root.setStepName(stepName);
        root.setBufferSize(bufferSize);
        roots.add(root);
        Optional.ofNullable(outputTopic).ifPresent(t -> componentMap.put(t, root));
        return this;
    }

    /**
     * Adds a step that consumes its parent's output one record at a time.
     *
     * @param stepName    display name of the step
     * @param inputTopic  output topic of the parent step
     * @param handler     single-record handler
     * @param outputTopic topic this step publishes to, or {@code null} for a terminal step
     * @return this builder
     */
    public PipeBuilder addStepSingle(String stepName, String inputTopic, AbstractSingleDataDataHandler handler, String outputTopic) {
        return addStep(DisruptorComponent::new, inputTopic, stepName, handler, outputTopic);
    }

    /**
     * Adds a concurrent step that consumes its parent's output one record at a time.
     *
     * @param stepName    display name of the step
     * @param inputTopic  output topic of the parent step
     * @param handler     single-record handler
     * @param outputTopic topic this step publishes to, or {@code null} for a terminal step
     * @return this builder
     */
    public PipeBuilder addStepConcurrent(String stepName, String inputTopic, AbstractSingleDataDataHandler handler, String outputTopic) {
        return addStep(ConcurrentComponent::new, inputTopic, stepName, handler, outputTopic);
    }

    /**
     * Adds a step that consumes its parent's output in batches.
     *
     * @param stepName    display name of the step
     * @param inputTopic  output topic of the parent step
     * @param handler     batch handler
     * @param outputTopic topic this step publishes to, or {@code null} for a terminal step
     * @return this builder
     */
    public PipeBuilder addStepBatch(String stepName, String inputTopic, AbstractBatchDataDataHandler handler, String outputTopic) {
        return addStep(DisruptorComponent::new, inputTopic, stepName, handler, outputTopic);
    }

    /**
     * Creates a component, attaches it to the parent identified by
     * {@code inputTopic}, and registers its output topic. Components listening
     * on the exception topic become the error-handling component instead of
     * joining the tree.
     *
     * @throws IllegalArgumentException if {@code outputTopic} is already in use
     * @throws IllegalStateException    if no component produces {@code inputTopic}
     */
    private PipeBuilder addStep(Supplier<BaseComponent> componentSupplier, String inputTopic, String stepName, AbstractDataHandler handler, String outputTopic) {
        if (StringUtils.notNull(outputTopic) && componentMap.containsKey(outputTopic)) {
            throw new IllegalArgumentException("Duplicate outputTopic:" + outputTopic);
        }
        handler.setOutputTopic(outputTopic);
        final BaseComponent component = componentSupplier.get();
        component.setStepName(stepName);
        component.setEventChannelHandler(handler);
        component.setInputTopic(inputTopic);
        component.setOutputTopic(outputTopic);
        component.setBufferSize(bufferSize);
        if (!DefaultTopics.TOPIC_EXCEPTION.equals(inputTopic)) {
            BaseComponent parent = componentMap.get(inputTopic);
            if (parent == null) {
                // The parent must have been registered before any of its children.
                throw new IllegalStateException("No component for inputTopic:" + inputTopic);
            }
            parent.addChild(component);
            component.setParent(parent);
            Optional.ofNullable(outputTopic).ifPresent(t -> componentMap.put(t, component));
        } else {
            // Exception-topic steps are kept aside; they are not part of the data tree.
            errorHandleComponent = component;
        }
        return this;
    }

    /**
     * Validates the configuration and assembles the {@link Pipeline}.
     *
     * @return the configured pipeline
     * @throws RuntimeException (via {@code Asserts.check}) if a mandatory piece
     *         of configuration — data source, error handler, flow name, or at
     *         least one root step — is missing
     */
    public Pipeline build() {
        Asserts.check(extractContext != null, "ExtractContext is null");
        if (dataExtractor instanceof DBDataExtractor) {
            Asserts.check(dataSource != null, "DataSource is null");
            ((DBDataExtractor) dataExtractor).setDataSource(dataSource);
        } else if (dataExtractor instanceof FileExctractor) {
            Asserts.check(StringUtils.notNull(extractContext.getFilePath()), "File path is null");
        }
        Asserts.check(errorHandleComponent != null, "ExceptionEventHandler is null");
        Asserts.check(StringUtils.notNull(flowName), "FlowName is null");
        // A pipeline without any root step would never receive extracted data.
        Asserts.check(!roots.isEmpty(), "No root step registered");
        pipeline = new Pipeline();
        pipeline.setFlowName(flowName);
        pipeline.setDataExtractor(dataExtractor);
        pipeline.setExtractContext(extractContext);
        pipeline.setRoots(roots);
        pipeline.setGlobalParams(globalParams);
        pipeline.setErrorHandleComponent(errorHandleComponent);
        pipeline.setWaiteSeconds(waiteSeconds);
        return pipeline;
    }
}
