package net.techfuser.storesalereport;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.List;
import java.util.Objects;
import java.util.Properties;

/**
 * @Description: Flink streaming job that consumes order oplog messages from Kafka,
 *               converts them into Order records, and sinks them to MySQL for the
 *               store sale report.
 * @Author 左龙龙
 * @Date 21-4-22
 * @Version 1.0
 **/
public class OrderTask {

    // Defaults preserved from the original hard-coded configuration; can be
    // overridden via program arguments (see main).
    private static final String DEFAULT_BOOTSTRAP_SERVERS = "172.16.10.23:9092";
    private static final String DEFAULT_GROUP_ID = "base-report-test";
    private static final String DEFAULT_TOPIC = "aggregate_order_data";

    /**
     * Job entry point: Kafka source -> parse/analyse oplog rows into Order -> MySQL sink.
     *
     * @param args optional overrides: args[0] = Kafka bootstrap servers,
     *             args[1] = consumer group id, args[2] = source topic.
     *             Omitted arguments fall back to the defaults above, so existing
     *             invocations with no arguments behave exactly as before.
     * @throws Exception if job construction or execution fails (propagated by Flink)
     */
    public static void main(String[] args) throws Exception {

        String bootstrapServers = args.length > 0 ? args[0] : DEFAULT_BOOTSTRAP_SERVERS;
        String groupId = args.length > 1 ? args[1] : DEFAULT_GROUP_ID;
        String topic = args.length > 2 ? args[2] : DEFAULT_TOPIC;

        // 0. Streaming environment and Kafka source.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        FlinkKafkaConsumer<String> flinkKafkaConsumer =
                getFlinkKafkaConsumer(bootstrapServers, groupId, topic);
        // Resume from the group's committed offsets; Kafka's auto.offset.reset
        // policy applies only when no committed offset exists.
        flinkKafkaConsumer.setStartFromGroupOffsets();
        DataStreamSource<String> kafkaDS = env.addSource(flinkKafkaConsumer);

        // 1. Each message is a JSON array of oplog rows; analysis() condenses the
        //    rows into one Order, returning null for records that are filtered out.
        SingleOutputStreamOperator<Order> orderDS = kafkaDS.map((o) -> {
                    List<JSONObject> rows = JSONArray.parseArray(o, JSONObject.class);
                    return OrderOplogAnalysis.analysis(rows);
                })
                .filter(Objects::nonNull)
                .name("订单过滤转换");

        // 2. Sink the converted orders to MySQL.
        orderDS.addSink(new OrderMySqlSink()).name("数据汇聚到MySQL");

        // 3. Execute the job.
        env.execute("【门店销售日报】订单oplog消费");
    }

    /**
     * Builds a Kafka consumer for the given topic that deserializes messages as
     * plain strings.
     *
     * @param bootstrapServers Kafka cluster address list ("host:port[,host:port...]")
     * @param groupId          consumer group id used for offset tracking
     * @param topic            topic to subscribe to
     * @return a configured {@link FlinkKafkaConsumer} source
     */
    private static FlinkKafkaConsumer<String> getFlinkKafkaConsumer(
            String bootstrapServers, String groupId, String topic) {
        // Kafka connection properties.
        Properties properties = new Properties();
        // Cluster address.
        properties.setProperty("bootstrap.servers", bootstrapServers);
        // Consumer group.
        properties.setProperty("group.id", groupId);
        // Commit offsets back to Kafka automatically; Flink uses these client-side
        // settings when its own checkpointing is not driving offset commits.
        properties.setProperty("enable.auto.commit", "true");
        // Interval between automatic offset commits.
        properties.setProperty("auto.commit.interval.ms", "2000");

        return new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), properties);
    }
}
