package org.iscas.tcse;

import org.apache.commons.cli.*;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.ml.linalg.DenseVector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkFixedPartitioner;
import org.iscas.tcse.datasample.CoresetSampler;
import org.iscas.tcse.datasample.RandomSampler;
import org.iscas.tcse.datasample.SamplerConf;
import org.iscas.tcse.preprocessing.MinMaxScaler;
import org.iscas.tcse.util.DataGenSource;
import org.iscas.tcse.util.DenseVectorSerialization;
import org.iscas.tcse.util.KafkaUtil;

/**
 * Data-preparation job for Torlink logistic regression: generates synthetic training
 * vectors, optionally down-samples them (coreset or random sampler), min-max scales
 * the features, and publishes the resulting {@link DenseVector}s to a Kafka topic.
 */
public class TorlinkLR {

    /** Dimensionality of each generated training sample. */
    private static final int NUM_FEATURES = 18;

    /**
     * Builds a required, single-argument CLI option.
     *
     * @param shortOpt short option name (e.g. {@code "b"})
     * @param longOpt long option name (e.g. {@code "broker-address"})
     * @param desc human-readable description shown in usage output
     * @return the constructed {@link Option}
     */
    private static Option requiredOption(String shortOpt, String longOpt, String desc) {
        return Option.builder(shortOpt).longOpt(longOpt).hasArg().required().desc(desc).build();
    }

    /**
     * Parses the command-line arguments for this job. All options are required and
     * take exactly one argument.
     *
     * @param args raw program arguments
     * @return the parsed command line
     * @throws ParseException if a required option is missing or malformed
     */
    static CommandLine parseArgs(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption(requiredOption("b", "broker-address", "broker address"));
        options.addOption(requiredOption("t", "topic-name", "topic name"));
        options.addOption(requiredOption("n", "parallelism", "parallelism"));
        options.addOption(requiredOption("a", "all-samples", "all samples"));
        options.addOption(requiredOption("r", "redis-address", "redis address"));
        options.addOption(requiredOption("p", "redis-port", "redis port"));
        options.addOption(requiredOption("d", "second-speed", "second speed"));
        options.addOption(requiredOption("i", "sample-ratio", "sample ratio"));
        options.addOption(requiredOption("m", "sampler-type", "sampler type"));
        return new DefaultParser().parse(options, args);
    }

    /**
     * Entry point: wires up the Flink pipeline
     * (generate &rarr; [sample] &rarr; scale &rarr; Kafka sink) and executes it.
     *
     * @param args CLI arguments; see {@link #parseArgs(String[])} for the option set
     * @throws Exception on argument-parsing failure or Flink job failure
     */
    public static void main(String[] args) throws Exception {

        CommandLine cmd = parseArgs(args);
        String brokerAddr = cmd.getOptionValue("b");
        String topicName = cmd.getOptionValue("t");
        // Primitives instead of boxed wrappers: these are only used as values, never null.
        int parallelism = Integer.parseInt(cmd.getOptionValue("n"));
        int allSamples = Integer.parseInt(cmd.getOptionValue("a"));
        String redisAddr = cmd.getOptionValue("r");
        int redisPort = Integer.parseInt(cmd.getOptionValue("p"));
        int secondSpeed = Integer.parseInt(cmd.getOptionValue("d"));
        double initSampleRatio = Double.parseDouble(cmd.getOptionValue("i"));
        String samplerType = cmd.getOptionValue("m");

        // Ensure the output topic exists with one partition per parallel sink task.
        KafkaUtil.prepareTopic(brokerAddr, topicName, parallelism);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // NOTE(review): allSamples / parallelism truncates, so up to (parallelism - 1)
        // samples may be dropped when allSamples is not evenly divisible — confirm intended.
        DataGenSource source =
                new DataGenSource(
                        NUM_FEATURES,
                        allSamples / parallelism,
                        2,
                        false,
                        secondSpeed,
                        false,
                        null,
                        null,
                        -1);
        DataStream<DenseVector> trainData = env.addSource(source).name("gen_data_source");

        // A non-positive ratio disables sampling entirely and forwards the raw stream.
        if (initSampleRatio > 0) {
            // NOTE(review): 0.1 / 0.98 / 1000 are sampler tuning constants whose semantics
            // live in SamplerConf — verify against that class before changing.
            SamplerConf conf =
                    new SamplerConf(
                            redisAddr, redisPort, 0.1, 0.98, 1000, "source_sp", "ml_sp", "sample_ratio");
            // Any sampler type other than "coreset" falls back to random sampling.
            if (samplerType.equals("coreset")) {
                CoresetSampler sampler =
                        new CoresetSampler(NUM_FEATURES, 80, initSampleRatio, 2, conf, true);
                trainData = trainData.process(sampler);
            } else {
                RandomSampler sampler = new RandomSampler(initSampleRatio, conf, true);
                trainData = trainData.process(sampler);
            }
        }

        // Scale every feature column; indices 0..NUM_FEATURES-1 cover the whole vector.
        int[] featureIndices = new int[NUM_FEATURES];
        for (int i = 0; i < NUM_FEATURES; i++) {
            featureIndices[i] = i;
        }
        MinMaxScaler scaler =
                new MinMaxScaler(4000, featureIndices, redisAddr, redisPort, "torlink_minmax");
        trainData = trainData.process(scaler);

        // Fixed partitioner keeps each Flink subtask pinned to one Kafka partition,
        // preserving per-subtask ordering in the output topic.
        KafkaSink<DenseVector> sink =
                KafkaSink.<DenseVector>builder()
                        .setBootstrapServers(brokerAddr)
                        .setRecordSerializer(
                                KafkaRecordSerializationSchema.builder()
                                        .setTopic(topicName)
                                        .setPartitioner(new FlinkFixedPartitioner<>())
                                        .setValueSerializationSchema(new DenseVectorSerialization())
                                        .build())
                        .build();

        trainData.sinkTo(sink).name("kafka_sink");

        env.execute("torlink lr");
    }
}
