package com.cetc.sdp.kmga.cs.stream.nv;

import com.cetc.sdp.kmga.cs.util.Tool;
import com.cetc.sdp.kmga.cs.util.WorkConf;
import com.cetc.sdp.kmga.cs.util.WorkConfImpl;
import org.apache.spark.api.java.Optional;
import org.apache.spark.api.java.function.Function3;
import org.apache.spark.streaming.State;
import org.apache.spark.streaming.StateSpec;
import org.apache.spark.streaming.api.java.JavaMapWithStateDStream;
import scala.Tuple2;

import java.time.LocalDateTime;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Test job: counts events per tumbling time window using Spark Streaming's
 * {@code mapWithState}, keyed by a window index derived from each record's timestamp.
 *
 * @author DengQiang
 * @since 2018/4/8 14:40
 */
public class TestWindowCount implements StreamWork.StreamJob {

    /** Default tumbling-window length, in the same time unit as the record timestamps. */
    private static final long DEFAULT_WINDOW_SIZE = 10L;

    /** Source stream this job consumes; never reassigned after construction. */
    private final AbstractDStream dStream;

    /** Tumbling-window length passed to {@link #getWindow(long, long, long)}. */
    private final long windowSize;

    /**
     * Creates the job with the default window length of {@value #DEFAULT_WINDOW_SIZE}.
     *
     * @param dStream source stream providing "timestamp,count" plaintext records
     */
    public TestWindowCount(AbstractDStream dStream) {
        this(dStream, DEFAULT_WINDOW_SIZE);
    }

    /**
     * Creates the job with an explicit window length.
     *
     * @param dStream    source stream providing "timestamp,count" plaintext records
     * @param windowSize tumbling-window length; must be positive
     * @throws IllegalArgumentException if {@code windowSize} is not positive
     */
    public TestWindowCount(AbstractDStream dStream, long windowSize) {
        if (windowSize <= 0) {
            throw new IllegalArgumentException("windowSize must be positive: " + windowSize);
        }
        this.dStream = dStream;
        this.windowSize = windowSize;
    }

    /**
     * Builds the streaming pipeline: parses "timestamp,count" records, maps each
     * timestamp to a window index relative to the job start time, and accumulates
     * per-window sums with {@code mapWithState}. State snapshots and per-batch
     * updates are printed for inspection.
     */
    @Override
    public void processStreaming(StreamWork.Context context) {
        // Job start time in epoch seconds; window indices are computed relative to it.
        long start = Tool.dateToTimestamp(LocalDateTime.now(Tool.zoneId)) / 1000;
        // Copy to a local so the Spark closure captures a plain long, not `this`
        // (capturing the enclosing instance would drag non-serializable fields
        // into the task closure).
        long window = this.windowSize;

        Function3<Long, Optional<Integer>, State<Integer>, Tuple2<Long, Integer>> mappingFunc =
                (key, one, state) -> {
                    // Negative keys act as a tombstone: drop the state instead of
                    // updating it. NOTE(review): currently unreachable because the
                    // upstream filter keeps only keys > 0, but kept for safety.
                    // (Fixed: the original updated state *before* removing it.)
                    if (key < 0) {
                        if (state.exists()) {
                            state.remove();
                        }
                        return null;
                    }
                    int sum = one.orElse(0) + (state.exists() ? state.get() : 0);
                    state.update(sum);
                    return new Tuple2<>(key, sum);
                };

        JavaMapWithStateDStream<Long, Integer, Integer, Tuple2<Long, Integer>> stateDStream =
                dStream.getDStreamAsPlaintext(context.getStreamingContext())
                        .mapToPair(t -> {
                            // Expected record format: "timestamp,count".
                            String[] arr = t.split(",");
                            long time = Long.parseLong(arr[0]);
                            int num = Integer.parseInt(arr[1]);
                            return new Tuple2<>(getWindow(time, window, start), num);
                        })
                        // Events at or before the job start map to window <= 0; discard them.
                        .filter(t -> t._1 > 0)
                        .mapWithState(StateSpec.function(mappingFunc));

        // Dump the full state snapshot each batch (collect() is fine for a test job).
        stateDStream.stateSnapshots().foreachRDD(rdd ->
                rdd.collect().forEach(System.out::println)
        );
        stateDStream.print();
    }

    /**
     * Maps a timestamp to its tumbling-window index relative to {@code start}.
     *
     * @param timestamp event time (same unit as {@code start} and {@code window})
     * @param window    window length; must be non-zero
     * @param start     reference start time
     * @return the window index; non-positive for events at or before {@code start}
     */
    private static long getWindow(long timestamp, long window, long start) {
        return (timestamp - start) / window;
    }

    @Override
    public void afterProcess(StreamWork.Context context) {
        this.dStream.afterBatchProcessed();
    }

    /**
     * Entry point. Expects {@code args[0]} = application name and
     * {@code args[1]} = batch duration; wires a Kafka source and starts the job.
     */
    public static void main(String[] args) throws InterruptedException {
        // Fail fast with a usage hint instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("Usage: TestWindowCount <applicationName> <batchDuration>");
            System.exit(1);
        }
        WorkConf workConf = new WorkConfImpl();
        workConf.setDuration(Integer.parseInt(args[1]));
        workConf.setApplicationName(args[0]);
        StreamWork sw = new StreamWork(workConf);
        Map<String, Object> para = new HashMap<>();
        para.put("bootstrap.servers", "192.168.112.23:6667,192.168.112.25:6667");
        para.put("group.id", "test_window_g1");
        para.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        para.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        sw.addStream(new TestWindowCount(new KafkaDStream(Collections.singletonList("TestEventWindow"), para)));
        // mapWithState requires a checkpoint directory to persist state across batches.
        sw.getContext().getSparkContext().setCheckpointDir("/TestEventWindow");
        sw.startWork();
    }
}
