import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousProcessingTimeTrigger;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Objects;
import java.util.Properties;

/**
 * Flink streaming job: real-time sales statistics from Kafka order streams.
 *
 * <p>Reads two Kafka topics ("orderdetail" and "order"), pre-aggregates per key inside a
 * one-day tumbling window that fires every second, then re-windows per timestamp to produce
 * rolling totals by province, book, and press, and writes each result stream to MySQL.
 */
public class Main {

    /**
     * Builds a Kafka string-deserializing consumer for the given broker list, consumer group,
     * and topic. Centralizes the Properties boilerplate shared by both sources.
     *
     * @param bootstrapServers Kafka bootstrap.servers value, e.g. "host:9092"
     * @param groupId          Kafka consumer group id
     * @param topic            topic to subscribe to
     * @return a configured {@link FlinkKafkaConsumer} producing raw message strings
     */
    private static FlinkKafkaConsumer<String> kafkaConsumer(
            String bootstrapServers, String groupId, String topic) {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", bootstrapServers);
        props.setProperty("group.id", groupId);
        return new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), props);
    }

    public static void main(String[] args) throws Exception {

        // Create the Flink execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Sources: two Kafka topics on separate brokers, same consumer group.
        FlinkKafkaConsumer<String> detailConsumer =
                kafkaConsumer("192.168.68.141:9092", "flink-group2", "orderdetail");
        FlinkKafkaConsumer<String> orderConsumer =
                kafkaConsumer("192.168.68.142:9092", "flink-group2", "order");

        // Parse raw messages into tuples. The "order" mapper may return null for
        // unparseable records, so nulls are filtered out before keying.
        SingleOutputStreamOperator<Tuple4<String, String, Double, Double>> detailStream =
                env.addSource(detailConsumer).map(new SourceMap.ProSourceMap());
        SingleOutputStreamOperator<Tuple3<String, Long, String>> orderStream =
                env.addSource(orderConsumer)
                        .map(new SourceMap.PressABookSourceMap())
                        .filter(Objects::nonNull);

        // Stage 1: pre-aggregate per key inside a one-day tumbling window.
        // Time.hours(-8) shifts the window boundary so the "day" rolls over at
        // midnight UTC+8 (local time) instead of midnight UTC.
        // The continuous trigger fires every second so downstream sees rolling totals.

        // Per-province aggregation (keyed by tuple field 1 = province).
        SingleOutputStreamOperator<ClassObject.ProvinceMessage> provinceAgg = detailStream
                .keyBy(1)
                .window(TumblingProcessingTimeWindows.of(Time.days(1), Time.hours(-8)))
                .trigger(ContinuousProcessingTimeTrigger.of(Time.seconds(1)))
                .aggregate(new AggregateFun.ProvinceMAggFun(), new WindowFun.Pro_WindowRes());

        // Per-book aggregation (keyed by tuple field 0 = book id/name).
        SingleOutputStreamOperator<ClassObject.Book> bookAgg = orderStream
                .keyBy(0)
                .window(TumblingProcessingTimeWindows.of(Time.days(1), Time.hours(-8)))
                .trigger(ContinuousProcessingTimeTrigger.of(Time.seconds(1)))
                .aggregate(new AggregateFun.PriceAggregate(), new WindowFun.Book_WindowRes());

        // Per-press aggregation (keyed by tuple field 2 = press/publisher).
        SingleOutputStreamOperator<ClassObject.Press> pressAgg = orderStream
                .keyBy(2)
                .window(TumblingProcessingTimeWindows.of(Time.days(1), Time.hours(-8)))
                .trigger(ContinuousProcessingTimeTrigger.of(Time.seconds(1)))
                .aggregate(new AggregateFun.PriceAggregate(), new WindowFun.Press_WindowRes());

        // Stage 2: key by "dateTime" so every second we recompute the cumulative
        // totals up to the current time; the 1s tumbling window batches the trigger
        // output, and the ProcessWindowFunction handles the more complex ranking logic.
        SingleOutputStreamOperator<ClassObject.ProvinceResult> provinceResult = provinceAgg
                .keyBy("dateTime")
                .window(TumblingProcessingTimeWindows.of(Time.seconds(1)))
                .process(new WindowFun.Pro_WindowPro());
        SingleOutputStreamOperator<ClassObject.BookRes> bookResult = bookAgg
                .keyBy("dateTime")
                .window(TumblingProcessingTimeWindows.of(Time.seconds(1)))
                .process(new WindowFun.Book_WindowPro());
        SingleOutputStreamOperator<ClassObject.PressRes> pressResult = pressAgg
                .keyBy("dateTime")
                .window(TumblingProcessingTimeWindows.of(Time.seconds(1)))
                .process(new WindowFun.Press_WindowPro());

        // Sinks: persist each result stream to its MySQL table.
        provinceResult.addSink(new sink.SinkToMysql_Province());
        bookResult.addSink(new sink.SinkToMysql_Book());
        pressResult.addSink(new sink.SinkToMysql_Press());

        // Launch the job. (Previous name "kafka streaming word count" was
        // copy-paste residue — this job computes sales statistics.)
        env.execute("kafka order sales statistics");
    }
}
