package org.weibo.analysis;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.io.OutputFormat;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.jdbc.JDBCInputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.library.LabelPropagation;
import org.apache.flink.runtime.client.JobExecutionException;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.Types;
import org.apache.flink.table.api.java.BatchTableEnvironment;
import org.apache.flink.table.api.java.Slide;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
import org.weibo.analysis.database.Config;
import org.weibo.analysis.database.LinkData;
import org.weibo.analysis.database.SqliteSink;
import org.weibo.analysis.entity.ControlMessage;
import org.weibo.analysis.entity.RelationLabel;
import org.weibo.analysis.entity.Vertex;
import org.weibo.analysis.graph.Edge;
import org.weibo.analysis.graph.GraphContainer;
import org.weibo.analysis.graph.TupleEdge;
import org.weibo.analysis.hash.HashPartition;
import org.weibo.analysis.network.Server;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.*;

public class StreamingJob {
    // Shared per-JVM graph snapshot that window functions / sinks accumulate into
    // and the web-socket Server serializes out. NOTE(review): static mutable
    // state is only safe while the writing operators run with parallelism 1.
    // ("transient" removed below: it has no effect on static fields.)
    private static final GraphContainer graphContainer = new GraphContainer();
    // Descriptor for the broadcast state slot holding the latest UI control
    // message; also used by MyBroadcastProcessFunction to read that state.
    private static final MapStateDescriptor<String, ControlMessage> controlMessageDescriptor = new MapStateDescriptor<>(
            RelationLabel.Control.getLabel(),
            BasicTypeInfo.STRING_TYPE_INFO,
            TypeInformation.of(new TypeHint<ControlMessage>() {
            }));
    // Drives the restart loop in run(); set back to true whenever the job must
    // be resubmitted with a new control message or after a transient failure.
    private static boolean loop = true;
    // Fallback window/slide sizes (Flink table-expression syntax, e.g.
    // "30.seconds") applied when a control message omits them. Never
    // reassigned, hence final.
    private static final String defaultWindowSize = "30.seconds";
    private static final String defaultSlideSize = "10.seconds";
    // Single worker thread that executes the streaming job asynchronously so
    // run() can concurrently drive the batch Gelly job and await the future.
    private static final ExecutorService executorService = Executors.newSingleThreadExecutor();
    // Timestamp (ms) of the newest SQLite row already consumed by the batch job.
    private static Long lastUpdatedAt = 0L;

    /**
     * Entry point of the analysis pipeline: wires two Kafka sources (the
     * "weibo" edge topic and the "control" UI topic, broadcast to all tasks)
     * into a Flink streaming job, and keeps resubmitting that job whenever a
     * {@link ControlMessageTriggeredException} unwinds out of it with a new
     * configuration. Other failures are treated as transient and retried.
     *
     * @throws Exception non-recoverable setup/submission failures
     */
    public static void run() throws Exception {

        // Kafka consumer settings shared by both topics.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "192.168.31.111:9092");
        properties.setProperty("zookeeper.connect", "localhost:2181");
        properties.setProperty("group.id", "weibo");
        properties.setProperty("enable.auto.commit", "true");
        properties.setProperty("auto.commit.interval.ms", "1000");


        // Start with the defaults; this instance is mutated in place each time
        // the UI publishes a new control message (see the catch block below).
        ControlMessage controlMessage = ControlMessage.buildDefault(defaultWindowSize, defaultSlideSize);

        graphContainer.setVersion(String.format("%s-%s-%s-%s-%s",
                controlMessage.getSlideSize(),
                controlMessage.getWindowSize(),
                controlMessage.getVertexLabel(),
                controlMessage.getEdgeLabel(),
                controlMessage.isWithGrouping() ? "Grouping" : "NoGrouping")
        );

        // NOTE(review): this local shadows the identically-configured static
        // field of the same name; the broadcast() call below uses this local.
        final MapStateDescriptor<String, ControlMessage> controlMessageDescriptor = new MapStateDescriptor<>(
                RelationLabel.Control.getLabel(),
                BasicTypeInfo.STRING_TYPE_INFO,
                TypeInformation.of(new TypeHint<ControlMessage>() {
                }));

        // Batch environments used by the companion Gelly label-propagation job.
        ExecutionEnvironment batchEnv = ExecutionEnvironment.getExecutionEnvironment();
        BatchTableEnvironment databaseEnv = BatchTableEnvironment.create(batchEnv);

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
//        env.enableCheckpointing(5000);


        // register kafka as consumer to consume topic: weibo
        final FlinkKafkaConsumer<String> kafkaWeiboDataConsumer = new FlinkKafkaConsumer<>("weibo", new SimpleStringSchema(), properties);
        final DataStreamSource<String> kafkaWeiDataStringStreamSource = env.addSource(kafkaWeiboDataConsumer);


//        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // register kafka as consumer to consume topic: control as broadcast stream
        final FlinkKafkaConsumer<String> kafkaControlSignalConsumer = new FlinkKafkaConsumer<>("control", new SimpleStringSchema(), properties);
        final DataStream<String> broadcastStream = env.addSource(kafkaControlSignalConsumer).setParallelism(1);
        final BroadcastStream<ControlMessage> broadcastControlSignalStream = broadcastStream
                .map(new MapFunction<String, ControlMessage>() {
                    @Override
                    public ControlMessage map(String value) {
                        // Control messages arrive as serialized strings from the UI.
                        return ControlMessage.buildFromString(value);
                    }
                })
                .broadcast(controlMessageDescriptor);

        // Restart loop: a ControlMessageTriggeredException thrown inside the
        // running job unwinds to the catch block, the configuration is updated,
        // and the whole pipeline is resubmitted.
        while (loop) {
            try {
                loop = false;

                DataStream<Edge<Vertex, Vertex>> kafkaWeiboDataStreamDataSet = kafkaWeiDataStringStreamSource
                        // Consistent Hashing
                        .map(new MapFunction<String, Edge<Vertex, Vertex>>() {
                            @Override
                            public Edge<Vertex, Vertex> map(String value) throws Exception {
                                // Deserialize the raw JSON edge, then promote its
                                // String endpoints to Vertex instances.
                                Edge<String, String> edge = JSON.parseObject(value, new TypeReference<Edge<String, String>>(Edge.class) {
                                });
                                return Edge.build(edge);
                            }
                        })
                        // Event time is taken from the edge payload itself;
                        // assumes timestamps arrive in ascending order — TODO
                        // confirm the producer guarantees this.
                        .assignTimestampsAndWatermarks(new AscendingTimestampExtractor<Edge<Vertex, Vertex>>() {
                            @Override
                            public long extractAscendingTimestamp(Edge<Vertex, Vertex> element) {
                                return element.getTimestamp();
                            }

                        }).setParallelism(4).partitionCustom(new HashPartition(), "id");

                kafkaWeiboDataStreamDataSet.print();


//                if (controlMessage.isWithGrouping()) {
//                    processWithGrouping(kafkaWeiboDataStreamDataSet, tableEnv, broadcastControlSignalStream, controlMessage);
//                } else {
//                    processWithoutGrouping(kafkaWeiboDataStreamDataSet, broadcastControlSignalStream, controlMessage);
//                }
                processStreaming(kafkaWeiboDataStreamDataSet, broadcastControlSignalStream, controlMessage);

                // Submit asynchronously so this thread can also drive the batch job.
                Future<JobExecutionResult> streamingJobFuture = streamingRun(env, "Flink Weibo Streaming Job");

                if (!controlMessage.isWithGrouping()) {
                    // No batch companion job without grouping: just block until
                    // the streaming job ends (or throws, triggering a restart).
                    streamingJobFuture.get();
                    return;
                }

//                env.execute("Weibo Data Streaming To Social Graph");

                processGellyGraph(controlMessage, batchEnv, databaseEnv, streamingJobFuture);


            } catch (JobExecutionException | ExecutionException e) {
                System.out.println("catch exception");
                // The restart signal may be wrapped one or two causes deep,
                // depending on where Flink caught it.
                if ((e.getCause() != null && e.getCause() instanceof ControlMessageTriggeredException) ||
                        (e.getCause() != null && e.getCause().getCause() != null && e.getCause().getCause() instanceof ControlMessageTriggeredException)) {
                    ControlMessage newControlMessage;
                    if (e.getCause() instanceof ControlMessageTriggeredException) {
                        newControlMessage = ControlMessage.buildFromString(e.getCause().getMessage());
                    } else {
                        newControlMessage = ControlMessage.buildFromString(e.getCause().getCause().getMessage());
                    }

                    // Keep window size and slide size to default value  when current control message's value is null
                    if (Objects.isNull(newControlMessage.getWindowSize()))
                        controlMessage.setWindowSize(defaultWindowSize);
                    else
                        controlMessage.setWindowSize(newControlMessage.getWindowSize());

                    if (Objects.isNull(newControlMessage.getSlideSize())) {
                        controlMessage.setSlideSize(defaultSlideSize);
                    } else {
                        controlMessage.setSlideSize(newControlMessage.getSlideSize());
                    }


                    controlMessage.setWithGrouping(newControlMessage.isWithGrouping());
                    controlMessage.setVertexLabel(newControlMessage.getVertexLabel());
                    controlMessage.setEdgeLabel(newControlMessage.getEdgeLabel());

                    System.out.format("Restart with Control Message: %s", controlMessage.toString());
                    // Drop the stale graph before restarting with new settings.
                    graphContainer.clear();
                    loop = true;
                    Thread.sleep(10 * 1000L);
                } else {
                    // Any other failure is treated as transient infrastructure
                    // trouble (broker down, etc.): back off and retry.
                    loop = true;
                    e.printStackTrace();
                    System.out.println("================= Connection Problem, Sleep then retry!=========================");
                    Thread.sleep(60 * 1000L);
                }
            } catch (Exception e) { //other exceptions, try connection again
                loop = true;
                e.printStackTrace();
                System.out.println("================= Connection Problem, Sleep then retry!=========================");
                Thread.sleep(60 * 1000L);
            }
        }


    }

    /**
     * Periodically rebuilds a Gelly graph from the link rows the streaming job
     * persisted to SQLite, runs label propagation to assign community ids, and
     * pushes the shared {@code graphContainer} to all web-socket clients.
     * Loops while grouping stays enabled; each iteration waits one slide
     * interval on the streaming job's future, so a restart exception from the
     * streaming side propagates out of {@code streamingJobFuture.get(...)} and
     * ends this loop too.
     *
     * @param controlMessage     current window/slide/label configuration
     * @param batchEnv           batch environment the Gelly job runs in
     * @param databaseEnv        batch table environment used to map JDBC rows
     * @param streamingJobFuture future of the concurrently running streaming job
     * @throws Exception batch-job failures, or the wrapped restart signal
     *                   rethrown from the streaming job's future
     */
    private static void processGellyGraph(ControlMessage controlMessage,
                                          ExecutionEnvironment batchEnv,
                                          BatchTableEnvironment databaseEnv,
                                          Future<JobExecutionResult> streamingJobFuture)
            throws Exception {
        String[] w = controlMessage.getWindowSize().split("\\.");
        long wSize = Long.parseLong(w[0]); // windowSize parsed in long from incoming frontend message
        String wUnit = w[1].toUpperCase(); // eg: "MINUTES" or "SECONDS" from incoming frontend message
        // Only consider rows newer than one window length before the last row
        // already processed.
        Long statTimeAt = lastUpdatedAt - Time.of(wSize, TimeUnit.valueOf(wUnit)).toMilliseconds();
        while (controlMessage.isWithGrouping()) {
            // Read the recent link rows back out of SQLite as a batch source.
            DataSource<Row> dataSource = batchEnv.createInput(JDBCInputFormat.buildJDBCInputFormat()
                    .setDrivername(Config.SQLITE_DRIVER_NAME)
                    .setDBUrl(Config.SQLITE_JDBC_URL)
                    .setQuery(String.format("select * from weibo_link_data where timestamp >= %d", statTimeAt))
                    .setRowTypeInfo(
                            // 5 string columns, then a long (f5, timestamp) and an int (f6).
                            new RowTypeInfo(
                                    BasicTypeInfo.STRING_TYPE_INFO,
                                    BasicTypeInfo.STRING_TYPE_INFO,
                                    BasicTypeInfo.STRING_TYPE_INFO,
                                    BasicTypeInfo.STRING_TYPE_INFO,
                                    BasicTypeInfo.STRING_TYPE_INFO,
                                    BasicTypeInfo.LONG_TYPE_INFO,
                                    BasicTypeInfo.INT_TYPE_INFO
                            )
                    )
                    .finish());
            Table table1 = databaseEnv.fromDataSet(dataSource);
            DataSet<LinkData> rowDataSet = databaseEnv.toDataSet(table1, TypeInformation.of(LinkData.class));
            // NOTE(review): the max is taken on field 6 (the int column) but the
            // watermark read below is f5 (the long timestamp) — confirm that
            // field 6 correlates with recency, otherwise this should be max(5).
            List<LinkData> dataList = rowDataSet.max(6).collect();
            if (dataList.size() > 0) {
                lastUpdatedAt = dataList.get(0).f5;
            }

            DataSet<Edge<Vertex, Vertex>> edgeDataSet = rowDataSet.flatMap(new FlatMapFunction<LinkData, Edge<Vertex, Vertex>>() {
                @Override
                public void flatMap(LinkData value, Collector<Edge<Vertex, Vertex>> out) throws Exception {
                    out.collect(Edge.of(value));
                }
            });
            // Emit both endpoints of every edge as Gelly vertices (duplicates
            // are tolerated by Graph.fromDataSet).
            DataSet<org.apache.flink.graph.Vertex<String, Vertex>> vertices = edgeDataSet.flatMap(new FlatMapFunction<Edge<Vertex, Vertex>, org.apache.flink.graph.Vertex<String, Vertex>>() {
                @Override
                public void flatMap(Edge<Vertex, Vertex> value, Collector<org.apache.flink.graph.Vertex<String, Vertex>> out) throws Exception {
                    out.collect(new org.apache.flink.graph.Vertex<>(value.getSource().id, value.getSource()));
                    out.collect(new org.apache.flink.graph.Vertex<>(value.getTarget().id, value.getTarget()));
                }
            });

            DataSet<org.apache.flink.graph.Edge<String, Edge<Vertex, Vertex>>> edges = edgeDataSet.flatMap(new FlatMapFunction<Edge<Vertex, Vertex>, org.apache.flink.graph.Edge<String, Edge<Vertex, Vertex>>>() {
                @Override
                public void flatMap(Edge<Vertex, Vertex> value, Collector<org.apache.flink.graph.Edge<String, Edge<Vertex, Vertex>>> out) throws Exception {
                    out.collect(new org.apache.flink.graph.Edge<>(value.getSource().getId(), value.getTarget().getId(), value));
                    // Side effect: also register the edge in the shared static
                    // container so writeRecord() below can look vertices up.
                    graphContainer.addEdge(value);
                }
            });

            // Three supersteps of label propagation assign each vertex a group.
            Graph<String, Vertex, Edge<Vertex, Vertex>> graph = Graph.fromDataSet(vertices, edges, batchEnv);
            LabelPropagation<String, Vertex, Edge<Vertex, Vertex>> labelPropagation = new LabelPropagation<>(3);
            DataSet<org.apache.flink.graph.Vertex<String, Vertex>> groupedVertices = labelPropagation.run(graph);
            groupedVertices.output(new OutputFormat<org.apache.flink.graph.Vertex<String, Vertex>>() {
                @Override
                public void configure(Configuration parameters) {
                }

                @Override
                public void open(int taskNumber, int numTasks) throws IOException {

                }

                // Copy the propagated label back onto the matching vertex in the
                // shared container; silently skips vertices it cannot resolve.
                @Override
                public void writeRecord(org.apache.flink.graph.Vertex<String, Vertex> record) throws IOException {
                    if (graphContainer.getVertices().keySet().size() == 0)
                        return;

                    Vertex vertex = graphContainer.getVertices()
                            .get(record.getId());
                    if (vertex == null)
                        return;

                    if (record.getValue() == null)
                        return;

                    // Presumably the propagated value's id is the community id
                    // after label propagation — TODO confirm.
                    vertex.setGroupId(record.getValue().getId());
                }


                @Override
                public void close() throws IOException {

                }
            });
            // NOTE(review): the batch plan above is lazy — this sends the
            // container BEFORE execute() runs it; confirm the intended ordering.
            Server.sendToAll(graphContainer.toString());

            batchEnv.execute("Gelly Graph Job");
            if (lastUpdatedAt > 0L) {
                System.out.println("lastUpdatedAt: " + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date(lastUpdatedAt)));
            } else {
                System.out.println("lastUpdatedAt: " + lastUpdatedAt);
            }
            try {
                String[] s = controlMessage.getSlideSize().split("\\.");
                long sSize = Long.parseLong(s[0]); // slideSize parsed in long from incoming frontend message
                String sUnit = s[1].toUpperCase(); // eg: "MINUTES" or "SECONDS" from incoming frontend message
                // Poll the streaming job for one slide interval; a restart
                // exception from it propagates out of get() and ends this loop.
                streamingJobFuture.get(sSize, TimeUnit.valueOf(sUnit));
            } catch (TimeoutException exception) {
                // Timeout is the normal case: the streaming job is still
                // running, so just start the next batch iteration.
            }
        }
    }


    /**
     * Attaches the sliding-window aggregation and the sink to the edge stream.
     * Each window rebuilds the shared {@code graphContainer} from the edges that
     * pass the label filter; the result is connected to the broadcast control
     * stream (which can abort the job via MyBroadcastProcessFunction) and then
     * either persisted to SQLite (grouping mode, consumed later by the Gelly
     * job) or pushed directly to the web-socket clients.
     *
     * @param kafkaStream            Consumes the kafkaStream and emits the output based on the windows/slider/vertex/edge to the web socket connected clients
     * @param broadcastControlStream broadcast stream from frontend ui by kafka control topic stream
     * @param controlMessage         current control message
     */
    private static void processStreaming(DataStream<Edge<Vertex, Vertex>> kafkaStream,
                                         BroadcastStream<ControlMessage> broadcastControlStream,
                                         ControlMessage controlMessage
    ) {
        String[] w = controlMessage.getWindowSize().split("\\.");
        long wSize = Long.parseLong(w[0]); // windowSize parsed in long from incoming frontend message
        String[] s = controlMessage.getSlideSize().split("\\.");
        long sSize = Long.parseLong(s[0]); // slideSize parsed in long from incoming frontend message
        String wUnit = w[1].toUpperCase(); // eg: "MINUTES" or "SECONDS" from incoming frontend message
        String sUnit = s[1].toUpperCase(); // eg: "MINUTES" or "SECONDS" from incoming frontend message
        AllWindowedStream<Edge<Vertex, Vertex>, TimeWindow> edgeTimeWindowAllWindowedStream = null;
        if (controlMessage.isWithGrouping()) {
            // NOTE(review): in grouping mode the slide size is used for BOTH
            // window size and slide (i.e. tumbling slide-sized windows) and
            // wSize is unused — confirm this is intentional.
            edgeTimeWindowAllWindowedStream = kafkaStream
                    .windowAll(SlidingEventTimeWindows.of(Time.of(sSize, TimeUnit.valueOf(sUnit)), Time.of(sSize, TimeUnit.valueOf(sUnit))));
        } else {
            edgeTimeWindowAllWindowedStream = kafkaStream
                    .windowAll(SlidingEventTimeWindows.of(Time.of(wSize, TimeUnit.valueOf(wUnit)), Time.of(sSize, TimeUnit.valueOf(sUnit))));
        }
        SingleOutputStreamOperator<GraphContainer> connectedStream = edgeTimeWindowAllWindowedStream
                .process(
                        new ProcessAllWindowFunction<Edge<Vertex, Vertex>, GraphContainer, TimeWindow>() {
                            // Rebuilds the shared static container from the
                            // edges of this window that match the label filter.
                            @Override
                            public void process(Context context, Iterable<Edge<Vertex, Vertex>> iterable,
                                                Collector<GraphContainer> collector) throws Exception {
                                graphContainer.clear(); //clear graph points with each window
                                for (Edge<Vertex, Vertex> value : iterable) {
                                    if (!isInclude(value, controlMessage)) {
                                        continue;
                                    }
                                    graphContainer.addEdge(value);
                                }
                                collector.collect(graphContainer);
                            }

                        })
                // Merging in the control stream lets a new control message abort
                // the job (see MyBroadcastProcessFunction).
                .connect(broadcastControlStream)
                .process(new MyBroadcastProcessFunction<GraphContainer>());


        if (controlMessage.isWithGrouping()) {
            // Grouping mode: persist windows to SQLite for the batch Gelly job.
            connectedStream
                    .addSink(new SqliteSink())
                    .setParallelism(1);
        } else {
            // Plain mode: push each window's graph straight to the clients.
            connectedStream
                    .addSink(new SinkFunction<GraphContainer>() {
                        @Override
                        public void invoke(GraphContainer value, Context context) {
                            Server.sendToAll(value.toString());
                        }
                    })
                    .setParallelism(1);

        }
    }

    /**
     * Decides whether an edge passes the vertex/edge label filter requested by
     * the current control message.
     *
     * <p>A {@code null} or empty label in the control message means "no filter"
     * for that dimension. When a vertex label is set, at least one endpoint must
     * carry it; when an edge label is set, the edge itself must carry it; when
     * both are set, both conditions must hold.
     *
     * <p>Fix: the control-message label (known non-null inside each branch) is
     * now the receiver of {@code equals}, so an edge whose own labels are
     * {@code null} is simply filtered out instead of throwing an NPE.
     *
     * @param value          edge to test
     * @param controlMessage current filter configuration
     * @return true if the edge should be included in the window's graph
     */
    private static boolean isInclude(Edge<Vertex, Vertex> value, ControlMessage controlMessage) {
        String vertexLabelFilter = controlMessage.getVertexLabel();
        String edgeLabelFilter = controlMessage.getEdgeLabel();

        boolean vertexLabelIsNull = Objects.isNull(vertexLabelFilter) || "".equals(vertexLabelFilter);
        boolean edgeLabelIsNull = Objects.isNull(edgeLabelFilter) || "".equals(edgeLabelFilter);

        // No filter configured: every edge passes.
        if (vertexLabelIsNull && edgeLabelIsNull) {
            return true;
        }

        boolean hasSameVertexLabel = true;
        boolean hasSameEdgeLabel = true;

        if (!vertexLabelIsNull) {
            // Null-safe: filter label is the receiver, endpoint labels may be null.
            hasSameVertexLabel = vertexLabelFilter.equals(value.getSource().getLabel())
                    || vertexLabelFilter.equals(value.getTarget().getLabel());
        }

        if (!edgeLabelIsNull) {
            // when vertex label and edge label are both not null
            hasSameEdgeLabel = edgeLabelFilter.equals(value.getLabel());
        }

        return hasSameEdgeLabel && hasSameVertexLabel;
    }

    /**
     * Table-API variant of the pipeline: aggregates edge and vertex counts per
     * label over a sliding window and streams the grouped counts to the clients.
     * NOTE(review): currently unused — its call site in run() is commented out.
     *
     * @param kafkaStream            Consumes the kafkaStream and emits the output based on the windows/slider/vertex/edge to the web socket connected clients
     * @param tableEnv               table environment for grouping
     * @param broadcastControlStream broadcast stream from frontend ui by kafka control topic stream
     * @param controlMessage         current control message
     */
    private static void processWithGrouping(DataStream<Edge<Vertex, Vertex>> kafkaStream,
                                            StreamTableEnvironment tableEnv,
                                            BroadcastStream<ControlMessage> broadcastControlStream,
                                            ControlMessage controlMessage
    ) {
        // The Table expression DSL spells the millisecond unit "millis", so trim
        // the trailing "econds" from "...milliseconds" (6 chars) to convert.
        String windowSize = controlMessage.getWindowSize();
        if (windowSize.contains("milliseconds")) {
            windowSize = windowSize.substring(0, windowSize.length() - 6);
        }
        String slideSize = controlMessage.getSlideSize();
        if (slideSize.contains("milliseconds")) {
            slideSize = slideSize.substring(0, slideSize.length() - 6);
        }
        // Flatten each edge into a tuple row so it can become a Table.
        DataStream<TupleEdge> tupleEdgeDataStream = kafkaStream
                .map(new MapFunction<Edge<Vertex, Vertex>, TupleEdge>() {
                    @Override
                    public TupleEdge map(Edge<Vertex, Vertex> value) throws Exception {
                        return new TupleEdge(value);
                    }
                });
        // f6 carries event time (.rowtime); f1/f3/f5 are the edge/vertex labels.
        Table table = tableEnv.fromDataStream(tupleEdgeDataStream, "f0, f1, f2, f3, f4, f5, f6.rowtime");


        // Edge counts per (source label, target label, edge label) per window.
        Table edgeTable = table.window(Slide.over(windowSize).every(slideSize).on("f6")
                .as("statWindow"))
                .groupBy("statWindow, f1, f3, f5")
                .select("f3 as sourceLabel, f5 as targetLabel, f1 as edgeLabel , f0.count as edgeCount");
        if (controlMessage.getEdgeLabel() != null && !"".equals(controlMessage.getEdgeLabel())) {
            edgeTable = edgeTable.filter(String.format("edgeLabel===\"%s\"", controlMessage.getEdgeLabel()));
        }

        // Project source and target endpoints into the same (id, label, time)
        // shape and union them so vertices can be counted per label.
        Table sourceTable = table.select("f0,f2 as f1,f3 as f2,f6 as f3");

        Table targetTable = table.select("f0,f4 as f1,f5 as f2,f6 as f3");


        Table vertexTable = sourceTable.unionAll(targetTable);
        DataStream<Row> result = tableEnv.toAppendStream(vertexTable, Types.ROW(Types.STRING(), Types.STRING(), Types.STRING(), Types.SQL_TIMESTAMP()));
        Table groupedVertexTable = tableEnv.fromDataStream(result, "f0, f1, f2, f3.rowtime")
                .window(Slide.over(windowSize).
                        every(slideSize).
                        on("f3").
                        as("statWindow"))
                .groupBy("statWindow, f2")
                .select("f2 as vertexLabel , f0.count as vertexCount");

        if (controlMessage.getVertexLabel() != null && !"".equals(controlMessage.getVertexLabel())) {
            groupedVertexTable = groupedVertexTable.filter(String.format("vertexLabel===\"%s\"", controlMessage.getVertexLabel()));
        }

        // Merge edge-count and vertex-count rows into one stream; arity tells
        // the sink which kind each row is (4 fields = edge, 2 = vertex).
        tableEnv.toAppendStream(edgeTable, Row.class)
                .union(tableEnv.toAppendStream(groupedVertexTable, Row.class))
                .connect(broadcastControlStream)
                .process(new MyBroadcastProcessFunction<Row>())
                .addSink(new SinkFunction<Row>() {
                    @Override
                    public void invoke(Row value, Context context) throws Exception {
                        if (value.getArity() > 2) { //grouped edge
                            graphContainer.addEdge(value.getField(0).toString(), value.getField(1).toString(), value.getField(2), Integer.parseInt(value.getField(3).toString()));
                        } else {// grouped vertex
                            graphContainer.addVertex(value.getField(0), value.getField(0), Integer.parseInt(value.getField(1).toString()));
                        }
                        Server.sendToAll(graphContainer.toString());
                        graphContainer.clear();
                    }
                })
                .setParallelism(1);
    }

    /**
     * Submits the streaming job to the single-threaded executor so it runs
     * asynchronously; the caller polls/awaits the returned future (and receives
     * any job exception wrapped in an ExecutionException).
     *
     * <p>Fix: removed the pointless catch-and-rethrow around execute() — the
     * Callable already propagates the exception through the Future.
     *
     * @param environment stream environment whose topology has been built
     * @param jobName     name shown in the Flink dashboard
     * @return future completing with the job's execution result
     * @throws Exception never thrown here; kept for caller compatibility
     */
    private static Future<JobExecutionResult> streamingRun(StreamExecutionEnvironment environment, String jobName) throws Exception {
        return executorService.submit(() -> environment.execute(jobName));
    }

    /**
     * Signal exception used as control flow: thrown inside the running Flink job
     * when a new control message arrives, it unwinds to the restart loop in
     * run(), carrying the serialized new control message as its message.
     */
    protected static class ControlMessageTriggeredException extends Exception {
        private static final long serialVersionUID = 1L;

        // Serialized control message; recovered via ControlMessage.buildFromString.
        private final String controlMessage;

        ControlMessageTriggeredException(String controlMessage) {
            this.controlMessage = controlMessage;
        }

        @Override
        public String getMessage() {
            return controlMessage;
        }

    }

    /**
     * Pass-through operator for data elements that additionally watches the
     * broadcast control stream: whenever the incoming control message differs
     * from the one stored in broadcast state (or none is stored yet), it records
     * the new message and aborts the job with a ControlMessageTriggeredException
     * so run() resubmits the pipeline with the new configuration.
     */
    protected static class MyBroadcastProcessFunction<T extends Object> extends BroadcastProcessFunction<T, ControlMessage, T> {

        // Data elements are forwarded unchanged.
        @Override
        public void processElement(T value, ReadOnlyContext ctx, Collector<T> out) throws Exception {
            out.collect(value);
        }


        @Override
        public void processBroadcastElement(ControlMessage value, Context ctx, Collector<T> out) throws Exception {
            BroadcastState<String, ControlMessage> state = ctx.getBroadcastState(controlMessageDescriptor);
            ControlMessage previous = state.get("control");

            // First message, or a message different from the stored one: persist
            // it and trigger a job restart via the signal exception.
            boolean isNewConfiguration = previous == null
                    || !value.toString().equals(previous.toString());
            if (isNewConfiguration) {
                state.put("control", value);
                throw new ControlMessageTriggeredException(value.toString());
            }
        }
    }
}