package com.cetc.sdp.kmga.cs.stream.nv;

import com.cetc.sdp.kmga.cs.common.TableMeta;
import com.cetc.sdp.kmga.cs.jdbc.AuditDAO;
import com.cetc.sdp.kmga.cs.stream.StreamUtils;
import com.cetc.sdp.kmga.cs.util.OpStreamJobConf;
import com.cetc.sdp.kmga.cs.util.Tool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.spark.util.LongAccumulator;
import scala.Tuple2;

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.*;

/**
 * 脱敏操作日志处理流 — desensitization operation-log processing stream.
 *
 * @author DengQiang
 * @date 2018/3/28 15:57
 */
public class DesensOpLogStreaming implements StreamWork.StreamJob {

    private final AbstractDStream dStream;
    private final OpStreamJobConf jobConf;

    // HBase column qualifiers of the main table.
    private static final byte[] IP_ADDR = Bytes.toBytes("IP_ADDR");
    private static final byte[] OPER_TM = Bytes.toBytes("OPER_TM");
    private static final byte[] USER_NAME = Bytes.toBytes("USER_NAME");
    private static final byte[] OPER_MODL = Bytes.toBytes("OPER_MODL");
    private static final byte[] OPER_TYPE = Bytes.toBytes("OPER_TYPE");
    private static final byte[] LOG_TYPE = Bytes.toBytes("LOG_TYPE");
    private static final byte[] LOG_LEVEL = Bytes.toBytes("LOG_LEVEL");
    private static final byte[] OPER_STAT = Bytes.toBytes("OPER_STAT");
    private static final byte[] REPT_DEVICE_NUM = Bytes.toBytes("REPT_DEVICE_NUM");
    private static final byte[] CREATE_TM = Bytes.toBytes("CREATE_TM");

    // Column family shared by all tables; ROWKEY is the single qualifier
    // written into the two secondary-index tables (points back to the main row).
    private static final byte[] COL_FAMILY = Bytes.toBytes("C1");
    private static final byte[] ROWKEY = Bytes.toBytes("ROWKEY");

    // Positions inside the per-record String[] produced by the map stage.
    private static final int IP_ADDR_IDX = 0;
    private static final int OPER_TM_IDX = 1;
    private static final int USER_NAME_IDX = 2;
    private static final int OPER_MODL_IDX = 3;
    private static final int OPER_TYPE_IDX = 4;
    private static final int LOG_TYPE_IDX = 5;
    private static final int LOG_LEVEL_IDX = 6;
    private static final int OPER_STAT_IDX = 7;
    private static final int REPT_DEVICE_NUM_IDX = 8;
    private static final int CREATE_TM_IDX = 9;

    private static final int NUM_OF_COL = 10;

    public DesensOpLogStreaming(OpStreamJobConf jobConf, AbstractDStream dStream) {
        this.jobConf = jobConf;
        this.dStream = dStream;
    }

    /**
     * Builds and runs the streaming pipeline: parses each raw "k=v&amp;k=v" log
     * line into a fixed-size column array, then for every record writes one row
     * to the main HBase table and one row to each of the two secondary-index
     * tables (keyed by user name and by client IP).
     *
     * @param context framework context supplying the Spark/streaming contexts
     *                and the HBase output-format job configuration
     */
    @Override
    public void processStreaming(StreamWork.Context context) {
        Job hbaseJob = context.configJob(jobConf.getHBaseJobConf());

        // Starting sequence for today's row-id suffix (resumes from persisted meta).
        TableMeta tableMeta = AuditDAO.findTableMetaByName(jobConf.getHTableName());
        long initialSeq = StreamUtils.getInitialSeqForTable(tableMeta, LocalDate.now(Tool.zoneId), false);

        byte[] mainTable = Bytes.toBytes(jobConf.getHTableName());
        byte[] usrIndexTableBytes = Bytes.toBytes(jobConf.getUsrTableName());
        byte[] clientIpIndexTableBytes = Bytes.toBytes(jobConf.getClientIpTableName());
        char fillChar = jobConf.getFillChar();

        this.dStream.getDStreamAsPlaintext(context.getStreamingContext()).transform(trans ->
                trans.map(log -> {
                    // Strip the surrounding quotes some producers wrap the message in.
                    // FIX: only strip when a matching closing quote is actually present;
                    // the old code dropped the last character of any line that merely
                    // STARTED with '"', and threw StringIndexOutOfBoundsException on a
                    // single-character "\"" input.
                    if (log.length() >= 2 && log.startsWith("\"") && log.endsWith("\"")) {
                        log = log.substring(1, log.length() - 1);
                    }
                    StringTokenizer tokenizer = new StringTokenizer(log, "&");
                    Map<String, String> pair = new HashMap<>(16);
                    while (tokenizer.hasMoreTokens()) {
                        // FIX: split on the FIRST '=' only (limit 2) so a value that
                        // itself contains '=' is no longer silently discarded.
                        String[] tmp = tokenizer.nextToken().split("=", 2);
                        if (tmp.length == 2 && !tmp[1].isEmpty()) {
                            pair.put(tmp[0], tmp[1]);
                        }
                    }
                    String[] data = new String[NUM_OF_COL];
                    data[OPER_TM_IDX] = pair.get("date");
                    data[IP_ADDR_IDX] = pair.get("ip");
                    data[USER_NAME_IDX] = pair.get("user");
                    data[OPER_MODL_IDX] = pair.get("module");
                    data[OPER_TYPE_IDX] = pair.get("act");
                    data[LOG_TYPE_IDX] = pair.get("log_type");
                    data[LOG_LEVEL_IDX] = pair.get("log_level");
                    // Normalize the Chinese status literal to the stored English value.
                    String operStat = pair.get("status");
                    if ("成功".equals(operStat)) {
                        data[OPER_STAT_IDX] = "success";
                    } else if ("失败".equals(operStat)) {
                        data[OPER_STAT_IDX] = "failed";
                    } else {
                        data[OPER_STAT_IDX] = "";
                    }
                    // Reporting device is intentionally blank for this stream.
                    data[REPT_DEVICE_NUM_IDX] = "";
                    data[CREATE_TM_IDX] = Tool.DEFAULT_TIME_FORMATTER.format(LocalDateTime.now(Tool.zoneId));
                    return data;
                })).foreachRDD(rdd -> {
            // Per-table id generator backed by a Spark accumulator so the sequence
            // keeps advancing across micro-batches.
            LongAccumulator accumulator = StreamUtils.JavaIdGenerator.getInstance(context.getSparkContext(), jobConf.getHTableName(), initialSeq);
            // Primitive long: avoids unboxing on every record in the loop below.
            long seqStart = accumulator.value();
            rdd.zipWithIndex().mapPartitionsToPair(iterator -> {
                List<Tuple2<ImmutableBytesWritable, Put>> res = new ArrayList<>(2048);
                int size = 0;
                StringBuilder sb = new StringBuilder();
                while (iterator.hasNext()) {
                    Tuple2<String[], Long> dataTuple = iterator.next();
                    String[] data = dataTuple._1;
                    // NOTE(review): getIdTimestamp is assumed to handle a missing/null
                    // "date" field upstream — confirm, pair.get("date") may be null.
                    String time = String.valueOf(StreamWork.getIdTimestamp(data[OPER_TM_IDX]));
                    // Main-table rowkey: reversed-time prefix + per-batch sequence.
                    String id = time + (dataTuple._2() + seqStart);
                    byte[] idBytes = Bytes.toBytes(id);
                    Put put = new Put(idBytes);
                    addCol(data[IP_ADDR_IDX], IP_ADDR, put);
                    addCol(data[OPER_TM_IDX], OPER_TM, put);
                    addCol(data[USER_NAME_IDX], USER_NAME, put);
                    addCol(data[OPER_MODL_IDX], OPER_MODL, put);
                    addCol(data[OPER_TYPE_IDX], OPER_TYPE, put);
                    addCol(data[LOG_TYPE_IDX], LOG_TYPE, put);
                    addCol(data[LOG_LEVEL_IDX], LOG_LEVEL, put);
                    addCol(data[OPER_STAT_IDX], OPER_STAT, put);
                    addCol(data[REPT_DEVICE_NUM_IDX], REPT_DEVICE_NUM, put);
                    addCol(data[CREATE_TM_IDX], CREATE_TM, put);
                    res.add(new Tuple2<>(new ImmutableBytesWritable(mainTable), put));

                    // USER_NAME index row.
                    String userName = data[USER_NAME_IDX];
                    userName = userName == null ? "" : userName;
                    // Rowkey layout: time(19) + user name padded with fillChar to a
                    // combined width of 36, then the last 3 chars of the main-table id
                    // as a disambiguating suffix — total width 39.
                    // e.g. 9223370531636475807user01|||||||||||123
                    sb.setLength(0);
                    sb.append(time).append(userName);
                    while (sb.length() < 36) {
                        sb.append(fillChar);
                    }
                    sb.append(id.substring(id.length() - 3));
                    Put usrPut = new Put(Bytes.toBytes(sb.toString()));
                    usrPut.addColumn(COL_FAMILY, ROWKEY, idBytes);
                    res.add(new Tuple2<>(new ImmutableBytesWritable(usrIndexTableBytes), usrPut));

                    // IP_ADDR index row.
                    String clientIp = data[IP_ADDR_IDX];
                    clientIp = clientIp == null ? "" : clientIp;
                    // Rowkey layout: time(19) + IP padded with fillChar to a combined
                    // width of 34, then the same 3-char suffix — total width 37.
                    // e.g. 9223370531636475807127.0.0.1||||||321
                    sb.setLength(0);
                    sb.append(time).append(clientIp);
                    while (sb.length() < 34) {
                        sb.append(fillChar);
                    }
                    sb.append(id.substring(id.length() - 3));
                    Put ipPut = new Put(Bytes.toBytes(sb.toString()));
                    ipPut.addColumn(COL_FAMILY, ROWKEY, idBytes);
                    res.add(new Tuple2<>(new ImmutableBytesWritable(clientIpIndexTableBytes), ipPut));
                    size++;
                }
                // Advance the shared sequence by the number of records this partition
                // consumed so the next batch starts past them.
                accumulator.add(size);
                return res.iterator();
            }).saveAsNewAPIHadoopDataset(hbaseJob.getConfiguration());
            this.dStream.afterBatchProcessed();
        });
    }

    @Override
    public void afterProcess(StreamWork.Context context) {
        // Intentionally empty: batch bookkeeping is done per-RDD inside
        // processStreaming (see afterBatchProcessed call above).
    }

    /**
     * Adds {@code val} to {@code put} under the shared column family, skipping
     * null/empty values so absent fields produce no cell at all.
     *
     * @param val cell value; ignored when null or empty
     * @param col column qualifier
     * @param put mutation being assembled
     */
    private static void addCol(String val, byte[] col, Put put) {
        if (val != null && !val.isEmpty()) {
            put.addColumn(COL_FAMILY, col, Bytes.toBytes(val));
        }
    }

    @Override
    public String toString() {
        return "DesensOpLogStreaming{}";
    }
}
