package com.cetc.sdp.kmga.cs.stream;

import com.cetc.sdp.kmga.cs.util.PooledMultiTableOutputFormat;
import com.cetc.sdp.kmga.cs.util.Tool;
import com.cetc.sdp.kmga.cs.util.WorkConf;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

import java.io.IOException;
import java.time.Instant;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Spark Streaming driver: builds the Spark / streaming contexts from a
 * {@link WorkConf}, collects {@link StreamJob}s and runs them until the
 * streaming context terminates.
 *
 * @author DengQiang
 * @date 2018/3/8 16:47
 */
public class StreamWork {

    private final WorkConf workConf;

    private final JavaSparkContext jsc;

    private final JavaStreamingContext jssc;

    /** Shared per-work context handed to every registered job. */
    private final Context context;

    /** Jobs to wire into the streaming context before it is started. */
    private final List<StreamJob> jobList;

    /** Named broadcast variables registered through {@link #addBroadcastVar}. */
    private final Map<String, Broadcast<?>> broadcastVars = new ConcurrentHashMap<>();

    public StreamWork(WorkConf workConf) {
        this.workConf = workConf;
        jobList = new ArrayList<>();
        SparkConf conf = new SparkConf();
        // Copy every user-supplied Spark property onto the SparkConf.
        workConf.getSparkConf().forEach(conf::set);
        jsc = new JavaSparkContext(conf);
        jssc = new JavaStreamingContext(jsc, Durations.seconds(workConf.getDuration()));
        // FIX: context was never assigned before, so jobs received null in
        // processStreaming(); create the shared context up front.
        context = new Context();
    }

    /**
     * Extension hook invoked once before any job is wired up; subclasses may
     * override it to perform extra setup on the Spark context.
     *
     * @param jsc the Spark context backing this work
     */
    protected void init(JavaSparkContext jsc) {}

    /**
     * Wires every registered job into the streaming context, starts the
     * streaming context and blocks until it terminates.
     *
     * @throws InterruptedException if the awaiting thread is interrupted
     */
    public void startWork() throws InterruptedException {
        init(jsc);
        for (StreamJob job : jobList) {
            job.processStreaming(context);
        }
        jssc.start();
        jssc.awaitTermination();
        jssc.close();
    }

    /**
     * Broadcasts {@code var} through the Spark context and registers the
     * resulting broadcast variable under {@code name}.
     * (Previously an unimplemented stub.)
     *
     * @param name registry key for later lookup via {@link #getBroadcastVar}
     * @param var  value to broadcast to the executors
     */
    public <T> void addBroadcastVar(String name, T var) {
        broadcastVars.put(name, jsc.broadcast(var));
    }

    /**
     * Returns the broadcast variable registered under {@code name}, or
     * {@code null} if none was registered.
     * (Previously always returned null.)
     *
     * @param name registry key used in {@link #addBroadcastVar}
     * @return the broadcast variable, or null when absent
     */
    @SuppressWarnings("unchecked")
    public <T> Broadcast<T> getBroadcastVar(String name) {
        return (Broadcast<T>) broadcastVars.get(name);
    }

    /** Registers a job to be wired in when {@link #startWork()} runs. */
    public synchronized void addStream(StreamJob streamJob) {
        jobList.add(streamJob);
    }


    /**
     * Returns a row-key timestamp that sorts newest-first
     * (Long.MAX_VALUE minus the epoch-millis of {@code logTime}).
     *
     * @param logTime log time; when null the current time is used
     * @return reversed timestamp for id construction
     */
    public static long getIdTimestamp(LocalDateTime logTime) {
        if (logTime != null) {
            return Long.MAX_VALUE - Tool.dateToTimestamp(logTime);
        } else {
            // Instant.now().atZone(zone).toInstant() was an identity
            // round-trip: epoch millis are zone-independent.
            return Long.MAX_VALUE - System.currentTimeMillis();
        }
    }

    /**
     * Returns a row-key timestamp that sorts newest-first.
     *
     * @param logTime yyyy-MM-dd HH:mm:ss
     * @return reversed timestamp for id construction
     */
    public static long getIdTimestamp(String logTime) {
        return getIdTimestamp(Tool.getTimeFromOrDefault(logTime));
    }


    /** A unit of streaming work wired into the streaming context. */
    protected interface StreamJob {

        /**
         * Wires this job's processing into the streaming context.
         *
         * @param context shared work context (Spark handles, configuration)
         */
        void processStreaming(Context context);
    }

    /** Shared handles (Spark contexts, configuration) exposed to jobs. */
    public class Context {

        // NOTE(review): never assigned in this file — presumably populated
        // elsewhere or by a subclass; confirm before relying on it.
        private DeviceSetBroadcastWrapper deviceSetBroadcastWrapper;

        public DeviceSetBroadcastWrapper getDeviceSetBroadcastWrapper() {
            return deviceSetBroadcastWrapper;
        }

        public JavaSparkContext getSparkContext() {
            return jsc;
        }

        public JavaStreamingContext getStreamingContext() {
            return jssc;
        }

        public WorkConf getWorkConf() {
            return workConf;
        }

        /**
         * Builds a Hadoop {@link Job} configured for pooled multi-table HBase
         * output, with {@code prop} copied onto its configuration.
         *
         * @param prop extra configuration properties for the job
         * @return the configured job, or null when job creation fails
         */
        public Job configJob(Map<String, String> prop) {
            Job hbaseJob;
            try {
                hbaseJob = Job.getInstance(jsc.hadoopConfiguration());
            } catch (IOException e) {
                // Preserve the original best-effort contract: report and
                // return null; callers must handle the null.
                e.printStackTrace();
                return null;
            }
            Configuration conf = hbaseJob.getConfiguration();
            // Keys and values are already Strings; toString() was redundant.
            prop.forEach(conf::set);
            hbaseJob.setOutputKeyClass(ImmutableBytesWritable.class);
            hbaseJob.setOutputValueClass(Result.class);
            hbaseJob.setOutputFormatClass(PooledMultiTableOutputFormat.class);
            return hbaseJob;
        }
    }
}
