package com.galeno.yewu;

import com.galeno.pojo.UserBeanAndProvince;
import com.galeno.utils.AsyncHttpQueryFunction;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * Counts active "old" (returning) users per province from a Kafka access-log stream.
 *
 * @author galeno
 * @date 2021/11/11 17:48
 */
public class CountNewAndOldUser {

    /**
     * Entry point: reads raw access-log lines from Kafka, enriches each record with
     * province information via an async HTTP lookup, filters down to returning
     * ("old") users, and prints a running per-province count.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka consumer configuration for the source topic.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "galeno01:9092,galeno02:9092,galeno03:9092");
        properties.setProperty("group.id", "first");
        properties.setProperty("auto.offset.reset", "latest");

        FlinkKafkaConsumer<String> flinkKafkaConsumer = new FlinkKafkaConsumer<>(
                "user-access-log",
                new SimpleStringSchema(),
                properties
        );
        // Offsets are managed by Flink checkpoints only; do not commit back to Kafka.
        flinkKafkaConsumer.setCommitOffsetsOnCheckpoints(false);
        DataStreamSource<String> lines = env.addSource(flinkKafkaConsumer);

        // Async enrichment: look up the province for each record over HTTP.
        // BUGFIX: the timeout unit was TimeUnit.SECONDS, giving a ~33-minute timeout
        // per request; 2000 ms is the intended per-request timeout. Capacity of 10
        // bounds the number of in-flight async requests.
        SingleOutputStreamOperator<UserBeanAndProvince> res = AsyncDataStream.unorderedWait(
                lines,
                new AsyncHttpQueryFunction(),
                2000,
                TimeUnit.MILLISECONDS,
                10
        );

        // Keep only returning users (isNew != 1 means "old" user).
        SingleOutputStreamOperator<UserBeanAndProvince> old = res.filter(new FilterFunction<UserBeanAndProvince>() {
            @Override
            public boolean filter(UserBeanAndProvince value) throws Exception {
                return value.getIsNew() != 1;
            }
        });

        // Map each record to (province, 1) for counting.
        // Anonymous class (not a lambda) so Flink can infer the Tuple2 type without
        // an explicit returns(...) hint.
        SingleOutputStreamOperator<Tuple2<String, Integer>> provinceAnd1 = old.map(
                new MapFunction<UserBeanAndProvince, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(UserBeanAndProvince value) throws Exception {
                        return Tuple2.of(value.getProvince(), 1);
                    }
                });

        // Running count of old users keyed by province.
        SingleOutputStreamOperator<Tuple2<String, Integer>> sumed = provinceAnd1.keyBy(x -> x.f0).sum(1);
        sumed.print();

        env.execute("CountNewAndOldUser");
    }
}
