package com.jython.flink;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.util.Collector;

import java.util.Properties;

/**
 * Minimal Flink streaming job: reads string records from the Kafka topic
 * {@code "test"} and prints each raw record to stdout.
 *
 * <p>Checkpointing is enabled so the Kafka consumer commits its offsets on
 * checkpoint completion, giving at-least-once offset tracking in Kafka itself.
 */
public class JythonDemo {

    /**
     * Builds and executes the streaming topology.
     *
     * @param args unused
     * @throws Exception if the job graph cannot be built or the job fails at
     *         runtime; propagated so a failed job terminates with a non-zero
     *         exit status instead of being silently swallowed
     */
    public static void main(String[] args) throws Exception {
        // Kafka consumer configuration. NOTE(review): broker address and group
        // id are hard-coded for a local/demo setup — externalize for real use.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "192.168.0.102:9092");
        properties.setProperty("group.id", "test");

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Checkpoint every 5 seconds; required for setCommitOffsetsOnCheckpoints
        // below to actually commit offsets back to Kafka.
        env.enableCheckpointing(5000);

        FlinkKafkaConsumer010<String> myConsumer =
                new FlinkKafkaConsumer010<>("test", new SimpleStringSchema(), properties);
        myConsumer.setStartFromGroupOffsets(); // the default behaviour
        myConsumer.setCommitOffsetsOnCheckpoints(true);

        DataStreamSource<String> stream = env.addSource(myConsumer);

        // Wrap each raw record in an Nginx POJO. The resulting operator is part
        // of the job graph (the map runs for every record) even though nothing
        // downstream consumes it yet — kept to preserve the existing topology.
        SingleOutputStreamOperator<Nginx> map = stream.map(new MapFunction<String, Nginx>() {
            @Override
            public Nginx map(String s) throws Exception {
                Nginx nginx = new Nginx();
                nginx.message = s;
                return nginx;
            }
        });

        // Print the raw input records (not the mapped Nginx objects).
        stream.print().setParallelism(1);

        env.execute("ok");
    }
}
