package net.techfuser.channelorderreport;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.List;
import java.util.Properties;

/**
 * Flink streaming job for the store-channel daily report: consumes order-oplog
 * messages from Kafka, analyses them into channel-order records, and sinks the
 * results to MySQL.
 *
 * @Author 左龙龙
 * @Date 21-4-22
 * @Version 1.0
 **/
public class ChannelOrderTask {

    /** Kafka bootstrap servers of the order-oplog cluster. */
    private static final String BOOTSTRAP_SERVERS = "172.16.10.23:9092";
    /** Consumer group id used by this report job. */
    private static final String GROUP_ID = "channel_order_report";
    /** Topic carrying the aggregated order oplog records. */
    private static final String TOPIC = "aggregate_order_data";

    public static void main(String[] args) throws Exception {

        // 0. Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 1. Source: consume order-oplog messages from Kafka.
        FlinkKafkaConsumer<String> flinkKafkaConsumer = getFlinkKafkaConsumer();
        // Resume from the group's committed offsets; when none exist the consumer
        // falls back to the Kafka `auto.offset.reset` policy (default: latest).
        flinkKafkaConsumer.setStartFromGroupOffsets();
        DataStreamSource<String> kafkaDS = env.addSource(flinkKafkaConsumer);

        // 2. Transform: each message is a JSON array of oplog rows — parse it and
        //    run the channel-order analysis, dropping batches that produce no orders.
        SingleOutputStreamOperator<List<ChannelOrder>> orderDS = kafkaDS.map((o) -> {
                    List<JSONObject> rows = JSONArray.parseArray(o, JSONObject.class);
                    return ChannelOrderOplogAnalysis.analysis(rows);
                })
                // Lambda return types are erased; declare the element type for Flink.
                .returns(Types.LIST(Types.POJO(ChannelOrder.class)))
                .filter(o -> !o.isEmpty())
                .name("订单oplog数据分析处理");

        // 3. Sink: persist the analysed orders to MySQL.
        orderDS.addSink(new ChannelOrderMySqlSink()).name("数据汇聚到MySQL");

        // 4. Execute the job.
        env.execute("【门店渠道日报】订单oplog消费");
    }

    /**
     * Builds the Kafka consumer for the order-oplog topic with string deserialisation.
     *
     * @return a {@link FlinkKafkaConsumer} bound to {@code TOPIC}
     */
    private static FlinkKafkaConsumer<String> getFlinkKafkaConsumer() {
        // Kafka connection properties.
        Properties properties = new Properties();
        // Cluster address.
        properties.setProperty("bootstrap.servers", BOOTSTRAP_SERVERS);
        // Consumer group.
        properties.setProperty("group.id", GROUP_ID);
        // Commit offsets automatically every 2 seconds.
        // NOTE(review): no Flink checkpointing is enabled here, so offsets are
        // committed by the Kafka client itself — restarts may reprocess up to the
        // last auto-commit interval (at-least-once at best).
        properties.setProperty("enable.auto.commit", "true");
        properties.setProperty("auto.commit.interval.ms", "2000");

        return new FlinkKafkaConsumer<>(TOPIC, new SimpleStringSchema(), properties);
    }
}
