package com.ruyuan.ingestion.kafka;
import com.ruyuan.ingestion.IngestionExecutor;
import com.ruyuan.ingestion.config.Configuration;
import com.ruyuan.ingestion.utils.KafkaUtils;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KStreamBuilder;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * Base streamer encapsulating the Kafka Streams pipeline shared by the
 * userFriend and Attendees ingestion jobs.
 *
 * <p>Subclasses supply the topology parameters (application id, source and
 * target topics) and the per-record parsing logic via
 * {@link #transform(String[])}, {@link #isHeader(String[])} and
 * {@link #isValid(String[])}.
 */
public abstract class BaseStreamer implements IngestionExecutor {

    /** Zookeeper connection string, read from the settings file. */
    private String zookeeperUrl = null;
    /** Kafka broker bootstrap-servers string, read from the settings file. */
    private String kafkaBrokerUrl = null;
    /** Local directory for Kafka Streams state stores. */
    private String stateDir = null;

    /** @return the Kafka Streams application id for this job. */
    protected abstract String getApplicationId();

    /** @return the topic raw records are consumed from. */
    protected abstract String getSourceTopic();

    /** @return the topic transformed records are published to. */
    protected abstract String getTargetTopic();

    /**
     * Reads the connection settings this streamer needs from the loaded
     * configuration properties.
     *
     * @param loadSettings properties loaded from the configuration file
     */
    public void initialize(Properties loadSettings) {
        this.zookeeperUrl = loadSettings.getProperty(Configuration.ZOOKEEPER_URL);
        this.kafkaBrokerUrl = loadSettings.getProperty(Configuration.BROKERS);
        this.stateDir = loadSettings.getProperty(Configuration.STATE);
    }

    /**
     * Entry point: expects the configuration file path as the first argument,
     * then initializes the settings and starts the streaming topology.
     *
     * @param args command-line arguments; {@code args[0]} is the settings file path
     * @throws Exception if no argument is given, settings fail to load, or the
     *                   pipeline fails to start
     */
    @Override
    public void execute(String[] args) throws Exception {
        if (args.length < 1) {
            throw new Exception("参数不匹配");
        }
        // Let failures propagate instead of swallowing them with
        // printStackTrace(): this method already declares `throws Exception`,
        // and a silently dead pipeline is worse than a visible startup error.
        this.initialize(Configuration.loadSettings(args[0]));
        this.stream();
    }

    /**
     * Builds and starts the Kafka Streams topology:
     * source topic -> flatMap(transform) -> filter(non-empty) -> target topic.
     *
     * @throws Exception if the streams configuration is invalid
     */
    protected void stream() throws Exception {
        Properties properties = KafkaUtils.getCommonProperties();
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, getApplicationId());
        properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBrokerUrl);
        properties.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, zookeeperUrl);
        properties.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        properties.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        properties.put(StreamsConfig.STATE_DIR_CONFIG, stateDir);

        KStreamBuilder kStreamBuilder = new KStreamBuilder();
        KStream<String, String> stream =
                kStreamBuilder.stream(Serdes.String(), Serdes.String(), this.getSourceTopic());

        // Per-record transformation: invalid/header rows become empty values
        // and are dropped by the filter below.
        KStream<String, String> result = stream
                .flatMap((k, v) -> transform(k, v))
                .filter((k, v) -> v != null && v.length() > 0);
        // Publish the transformed records.
        result.to(Serdes.String(), Serdes.String(), this.getTargetTopic());

        // Keep a reference so the client can be closed: the original code
        // discarded the KafkaStreams instance, making a clean shutdown
        // impossible (state stores never flushed, slow group rebalance).
        KafkaStreams streams = new KafkaStreams(kStreamBuilder, new StreamsConfig(properties));
        streams.start();
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }

    /**
     * Splits one CSV record into zero or more output records. Header rows and
     * invalid rows are mapped to a single empty-valued pair so the downstream
     * filter removes them.
     *
     * @param key   the record key (passed through unchanged)
     * @param value the raw CSV line
     * @return the transformed key/value pairs
     */
    private Iterable<KeyValue<String, String>> transform(String key, String value) {
        List<KeyValue<String, String>> items = new ArrayList<KeyValue<String, String>>();
        // limit -1 keeps trailing empty fields so column counts stay stable
        String[] fields = value.split(",", -1);

        if (isHeader(fields) || !isValid(fields)) {
            items.add(new KeyValue<>(key, ""));
        } else {
            // Subclass-specific parsing: each returned row becomes one record.
            for (String[] keyValue : transform(fields)) {
                items.add(new KeyValue<>(key, String.join(",", keyValue)));
            }
        }
        return items;
    }

    /** Parses validated fields into zero or more output rows (subclass logic). */
    protected abstract List<String[]> transform(String[] fields);

    /** @return true if the fields represent a CSV header row. */
    protected abstract Boolean isHeader(String[] fields);

    /** @return true if the fields form a structurally valid record. */
    protected abstract Boolean isValid(String[] fields);

}
