package com.cetc.sdp.kmga.cs.stream.nv;

import com.cetc.sdp.kmga.cs.common.RabbitMQConf;
import com.cetc.sdp.kmga.cs.stream.EventKryoRegistrator;
import com.cetc.sdp.kmga.cs.stream.StreamUtils;
import com.cetc.sdp.kmga.cs.util.*;

import java.util.HashMap;
import java.util.Map;

/**
 * Entry point for the real-time database audit-log stream processor.
 *
 * <p>Command line:
 * {@code <application name> <duration seconds> <window seconds> [kafka|rabbitmq]
 * [maxReceiveTime] [maxMessagesPerPartition] [levelParallelism]}.
 * The last three arguments only apply when the RabbitMQ source is selected;
 * they default to 100 / 5000 / 16 respectively.
 *
 * @author DengQiang
 * @date 2018/3/8 16:45
 */
public final class AuditStreamDriver {

    /** Usage banner printed when fewer than three arguments are supplied. */
    private static final String USAGE =
            "Usage: <application name> <the seconds of duration> <the seconds of window>\n" +
            "[kafka|rabbitmq] (if use rabbitmq must set <maxReceiveTime> <maxMessagesPerPartition> <levelParallelism>)";

    /** Directory used for Spark Streaming checkpoints. */
    private static final String CHECKPOINT_DIR = "/AuditStreamingCheckPoint";

    private AuditStreamDriver() {
        // Entry-point holder; never instantiated.
    }

    public static void main(String[] args) throws InterruptedException {
        if (args.length < 3) {
            System.out.println(USAGE);
            return;
        }

        // RabbitMQ is the input source only when explicitly requested; Kafka otherwise.
        boolean useRabbitMq = args.length >= 4 && "rabbitmq".equals(args[3]);

        WorkConf workConf = new WorkConfImpl();
        workConf.setDuration(Integer.parseInt(args[1]));
        workConf.setApplicationName(args[0]);
        workConf.addConf("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        workConf.addConf("spark.kryo.registrator", EventKryoRegistrator.class.getName());

        StreamWork sw = new StreamWork(workConf);
        StreamJobConf conf = new StreamJobConfImpl("audit-log-config.xml");
        RabbitMQConf mqConf = StreamUtils.createRabbitMQConf(conf.getRabbitMqConf());

        AuditLogStreaming auditLogStreaming;
        if (useRabbitMq) {
            // RabbitMQ as input source.
            auditLogStreaming = new AuditLogStreaming(conf,
                    new RabbitMqDStream(mqConf.getMqLogQueue(), buildRabbitMqParams(conf, mqConf, args)));
        } else {
            // Kafka as input source.
            auditLogStreaming = new AuditLogStreaming(conf,
                    new KafkaDStream(conf.getSubscribeTopics(), conf.getKafkaConsumerConf()));
        }
        auditLogStreaming.setWindow(Long.parseLong(args[2]));

        sw.addStream(auditLogStreaming);
        sw.startDeviceInfoReceiver(mqConf);
        sw.getContext().getDeviceSetBroadcastWrapper().setInterval(conf.getReBroadcastInterval());
        sw.getContext().getDeviceSetBroadcastWrapper().setUpdateByFixRate(conf.autoReBroadcast());
        sw.getContext().getSparkContext().setCheckpointDir(CHECKPOINT_DIR);
        sw.startWork();
    }

    /**
     * Builds the connection and tuning parameter map for the RabbitMQ DStream source.
     * Optional CLI arguments 4-6 override the receive-time, per-partition message
     * cap, and parallelism defaults.
     */
    private static Map<String, String> buildRabbitMqParams(StreamJobConf conf, RabbitMQConf mqConf, String[] args) {
        Map<String, String> params = new HashMap<>(32);
        params.put("hosts", conf.getRabbitMqConf().get("rabbitmq.node.list"));
        params.put("vHost", "/");
        params.put("userName", mqConf.getMqUsername());
        params.put("password", mqConf.getMqPassword());
        params.put("ackType", "auto");
        params.put("maxReceiveTime", String.valueOf(intArgOrDefault(args, 4, 100)));
        params.put("maxMessagesPerPartition", String.valueOf(intArgOrDefault(args, 5, 5000)));
        params.put("levelParallelism", String.valueOf(intArgOrDefault(args, 6, 16)));
        return params;
    }

    /**
     * Returns {@code args[index]} parsed as an int, or {@code fallback} when the
     * argument is absent.
     *
     * @throws NumberFormatException if the argument is present but not numeric
     */
    static int intArgOrDefault(String[] args, int index, int fallback) {
        return args.length > index ? Integer.parseInt(args[index]) : fallback;
    }

}
