package com.cetc.sdp.kmga.cs.stream.nv;

import com.cetc.sdp.kmga.cs.common.RabbitMQConf;
import com.cetc.sdp.kmga.cs.stream.EventKryoRegistrator;
import com.cetc.sdp.kmga.cs.stream.StreamUtils;
import com.cetc.sdp.kmga.cs.util.*;
import org.apache.spark.sql.sources.In;

import java.util.HashMap;
import java.util.Map;

/**
 * Entry point for the event-log stream processing application.
 *
 * @author DengQiang
 * @since 2018/3/8 16:45
 */
public final class EventStreamDriver {

    /** Spark streaming checkpoint directory, shared by both input-source modes. */
    private static final String CHECKPOINT_DIR = "/EventStreamingCheckPoint";

    /** Command-line usage banner, printed on missing or malformed arguments. */
    private static final String USAGE = "Usage: <application name> <the seconds of duration> \n" +
            "[kafka|rabbitmq] (if use rabbitmq must set <maxReceiveTime> <maxMessagesPerPartition> <levelParallelism>)";

    /** Utility/entry-point class: no instances. */
    private EventStreamDriver() {
    }

    /**
     * Parses the command line, builds the Spark work configuration, and
     * dispatches to either the Kafka-backed or RabbitMQ-backed pipeline.
     *
     * <p>Arguments: application name, batch duration in seconds, then an
     * optional input-source selector ({@code kafka} is the default; any
     * value other than {@code rabbitmq} falls back to Kafka). In RabbitMQ
     * mode three more optional numeric tuning arguments may follow.
     *
     * @param args see {@link #USAGE}
     * @throws InterruptedException if the streaming work is interrupted
     */
    public static void main(String[] args) throws InterruptedException {
        if (args.length < 2) {
            System.out.println(USAGE);
            return;
        }

        // Any third argument other than "rabbitmq" selects the Kafka path,
        // and its trailing tuning arguments (if present) are ignored.
        boolean useRabbitMq = args.length >= 3 && "rabbitmq".equals(args[2]);

        int duration;
        // Defaults for the optional RabbitMQ receiver tuning parameters.
        int maxReceiveTime = 100;
        int maxMessagesPerPartition = 5000;
        int levelParallelism = 4;
        try {
            duration = Integer.parseInt(args[1]);
            if (useRabbitMq) {
                if (args.length >= 4) {
                    maxReceiveTime = Integer.parseInt(args[3]);
                }
                if (args.length >= 5) {
                    maxMessagesPerPartition = Integer.parseInt(args[4]);
                }
                if (args.length >= 6) {
                    levelParallelism = Integer.parseInt(args[5]);
                }
            }
        } catch (NumberFormatException e) {
            // Print usage instead of dying with a raw stack trace on bad numeric input.
            System.out.println("Invalid numeric argument: " + e.getMessage());
            System.out.println(USAGE);
            return;
        }

        WorkConf workConf = new WorkConfImpl();
        workConf.setDuration(duration);
        workConf.setApplicationName(args[0]);
        workConf.addConf("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        workConf.addConf("spark.kryo.registrator", EventKryoRegistrator.class.getName());

        if (useRabbitMq) {
            runWithRabbitMq(workConf, maxReceiveTime, maxMessagesPerPartition, levelParallelism);
        } else {
            runWithKafka(workConf);
        }
    }

    /**
     * Wires all five log streams to Kafka topics and starts the streaming work.
     *
     * @param workConf shared Spark work configuration
     * @throws InterruptedException if the streaming work is interrupted
     */
    private static void runWithKafka(WorkConf workConf) throws InterruptedException {
        StreamWork sw = new StreamWork(workConf);
        // Desensitization task log
        StreamJobConf taskConf = new StreamJobConfImpl("desens-task-log-config.xml");
        sw.addStream(new DesensTaskLogStreaming(taskConf,
                new KafkaDStream(taskConf.getSubscribeTopics(), taskConf.getKafkaConsumerConf())));
        // Desensitization operation log
        OpStreamJobConfImpl opConf = new OpStreamJobConfImpl("desens-op-log-config.xml");
        sw.addStream(new DesensOpLogStreaming(opConf,
                new KafkaDStream(opConf.getSubscribeTopics(), opConf.getKafkaConsumerConf())));
        // Audit operation log
        opConf = new OpStreamJobConfImpl("audit-op-log-config.xml");
        sw.addStream(new OpLogStreaming(opConf,
                new KafkaDStream(opConf.getSubscribeTopics(), opConf.getKafkaConsumerConf())));
        // Operations & maintenance operation log
        opConf = new OpStreamJobConfImpl("ops-op-log-config.xml");
        sw.addStream(new OpsLogStreaming(opConf,
                new KafkaDStream(opConf.getSubscribeTopics(), opConf.getKafkaConsumerConf())));
        // Platform (supervise) operation log
        opConf = new OpStreamJobConfImpl("supervise-op-log-config.xml");
        sw.addStream(new OpLogStreaming(opConf,
                new KafkaDStream(opConf.getSubscribeTopics(), opConf.getKafkaConsumerConf())));

        // Device info is received over RabbitMQ even in Kafka mode,
        // using the task-log job's RabbitMQ settings.
        sw.startDeviceInfoReceiver(StreamUtils.createRabbitMQConf(taskConf.getRabbitMqConf()));
        configureAndStart(sw, taskConf);
    }

    /**
     * Wires all five log streams to RabbitMQ queues and starts the streaming work.
     *
     * @param workConf                shared Spark work configuration
     * @param maxReceiveTime          receiver max receive time (passed through as a string param)
     * @param maxMessagesPerPartition receiver max messages per partition
     * @param levelParallelism        receiver parallelism level
     * @throws InterruptedException if the streaming work is interrupted
     */
    private static void runWithRabbitMq(WorkConf workConf, int maxReceiveTime,
            int maxMessagesPerPartition, int levelParallelism) throws InterruptedException {
        StreamWork sw = new StreamWork(workConf);
        // Desensitization task log
        StreamJobConf taskConf = new StreamJobConfImpl("desens-task-log-config.xml");
        RabbitMQConf mqConf = StreamUtils.createRabbitMQConf(taskConf.getRabbitMqConf());
        // Receiver parameter map shared (same instance) by every RabbitMQ stream below;
        // credentials come from the task-log job's RabbitMQ configuration.
        Map<String, String> rabbitmqParams = new HashMap<>(32);
        rabbitmqParams.put("hosts", taskConf.getRabbitMqConf().get("rabbitmq.node.list"));
        rabbitmqParams.put("vHost", "/");
        rabbitmqParams.put("userName", mqConf.getMqUsername());
        rabbitmqParams.put("password", mqConf.getMqPassword());
        rabbitmqParams.put("ackType", "auto");
        rabbitmqParams.put("maxReceiveTime", String.valueOf(maxReceiveTime));
        rabbitmqParams.put("maxMessagesPerPartition", String.valueOf(maxMessagesPerPartition));
        rabbitmqParams.put("levelParallelism", String.valueOf(levelParallelism));
        sw.addStream(new DesensTaskLogStreaming(taskConf,
                new RabbitMqDStream(mqConf.getMqLogQueue(), rabbitmqParams)));

        // Desensitization operation log
        OpStreamJobConfImpl opConf = new OpStreamJobConfImpl("desens-op-log-config.xml");
        mqConf = StreamUtils.createRabbitMQConf(opConf.getRabbitMqConf());
        sw.addStream(new DesensOpLogStreaming(opConf,
                new RabbitMqDStream(mqConf.getMqLogQueue(), rabbitmqParams)));
        // Audit operation log
        opConf = new OpStreamJobConfImpl("audit-op-log-config.xml");
        mqConf = StreamUtils.createRabbitMQConf(opConf.getRabbitMqConf());
        sw.addStream(new OpLogStreaming(opConf,
                new RabbitMqDStream(mqConf.getMqLogQueue(), rabbitmqParams)));
        // Operations & maintenance operation log
        opConf = new OpStreamJobConfImpl("ops-op-log-config.xml");
        mqConf = StreamUtils.createRabbitMQConf(opConf.getRabbitMqConf());
        sw.addStream(new OpsLogStreaming(opConf,
                new RabbitMqDStream(mqConf.getMqLogQueue(), rabbitmqParams)));
        // Platform (supervise) operation log
        opConf = new OpStreamJobConfImpl("supervise-op-log-config.xml");
        mqConf = StreamUtils.createRabbitMQConf(opConf.getRabbitMqConf());
        sw.addStream(new OpLogStreaming(opConf,
                new RabbitMqDStream(mqConf.getMqLogQueue(), rabbitmqParams)));

        // NOTE(review): this intentionally reproduces the original behavior — the device-info
        // receiver gets the LAST mqConf (supervise-op-log's), whereas the Kafka path uses the
        // task-log conf. Confirm whether the supervise queue is really the intended source here.
        sw.startDeviceInfoReceiver(mqConf);
        configureAndStart(sw, taskConf);
    }

    /**
     * Applies the broadcast/checkpoint settings common to both modes and
     * blocks in the streaming work.
     *
     * @param sw       the assembled stream work
     * @param taskConf the task-log job conf supplying re-broadcast settings
     * @throws InterruptedException if the streaming work is interrupted
     */
    private static void configureAndStart(StreamWork sw, StreamJobConf taskConf)
            throws InterruptedException {
        sw.getContext().getDeviceSetBroadcastWrapper().setInterval(taskConf.getReBroadcastInterval());
        sw.getContext().getDeviceSetBroadcastWrapper().setUpdateByFixRate(taskConf.autoReBroadcast());
        sw.getContext().getSparkContext().setCheckpointDir(CHECKPOINT_DIR);
        sw.startWork();
    }

}
