package cn.doitedu.flink.demos;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SocketTextStreamFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.internals.FlinkKafkaInternalProducer;
import pro.fraud_detect.config.Parameters;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Properties;


/***
 * @author hunter.d
 * @qq 657270652
 * @wx haitao-duan
 * @date 2021/2/23
 **/
public class SourcesTest {

    /**
     * Demo pipeline: reads comma-separated lines ("keyA,keyB,count") from a socket
     * (or, when swapped in, a Kafka topic), parses each line into a
     * {@code Tuple3<String, String, Integer>}, keys the stream by the two string
     * fields, and sums the integer field over per-key count windows of 3 elements.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        SourceFunction<String> source = createSocketSource("doitedu01", 9999);
        //SourceFunction<String> source = createKafkaSource();

        DataStreamSource<String> stringDataStreamSource = env.addSource(source);

        // Parse "a,b,3" into Tuple3. NOTE(review): malformed lines will fail the job
        // with ArrayIndexOutOfBoundsException / NumberFormatException — acceptable
        // for a demo, but production code should filter or side-output bad records.
        SingleOutputStreamOperator<Tuple3<String, String, Integer>> tp3 = stringDataStreamSource.map(
                line -> {
                    String[] arr = line.split(",");
                    return Tuple3.of(arr[0], arr[1], Integer.parseInt(arr[2]));
                },
                // Explicit TypeInformation is required here: lambda generic types are
                // erased at compile time, so Flink cannot infer the Tuple3 field types.
                TypeInformation.of(new TypeHint<Tuple3<String, String, Integer>>() {})
        );

        // Key by the (f0, f1) pair so aggregation happens independently per key.
        KeyedStream<Tuple3<String, String, Integer>, Tuple2<String, String>> keyed =
                tp3.keyBy(new KeySelector<Tuple3<String, String, Integer>, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> getKey(Tuple3<String, String, Integer> value) throws Exception {
                        return Tuple2.of(value.f0, value.f1);
                    }
                });

        // Per-key count window of 3 elements, summing field f2.
        // (The previous countWindowAll(3) silently discarded the keyBy above and
        // forced the window to run with parallelism 1.)
        SingleOutputStreamOperator<Tuple3<String, String, Integer>> res = keyed.countWindow(3).sum("f2");

        res.print();

        env.execute();
    }

    /**
     * Creates a line-delimited socket text source.
     *
     * @param host hostname to connect to
     * @param port TCP port to connect to
     * @return a source emitting one String per received line
     */
    public static SourceFunction<String> createSocketSource(String host, int port) {
        // delimiter "\n" splits the byte stream into lines; maxNumRetries -1 means
        // the source keeps retrying the connection indefinitely.
        return new SocketTextStreamFunction(host, port, "\n", -1);
    }

    /**
     * Creates a Kafka source reading the {@code drools_rule} topic as UTF-8 strings.
     *
     * @return a FlinkKafkaConsumer starting from the earliest offsets when no
     *         committed offsets exist
     */
    public static SourceFunction<String> createKafkaSource() {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "doitedu01:9092,doitedu02:9092,doitedu03:9092");
        props.setProperty("auto.offset.reset", "earliest");
        // group.id is mandatory for a subscribing Kafka consumer; without it the
        // client rejects the subscription at runtime.
        props.setProperty("group.id", "sources-test");
        return new FlinkKafkaConsumer<>("drools_rule", new SimpleStringSchema(StandardCharsets.UTF_8), props);
    }

}
