package net.techfuser.sync;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Flink streaming job that consumes order-detail MongoDB oplog messages from
 * Kafka and synchronizes them into MySQL.
 *
 * @author 左龙龙
 * @since 2021-06-02
 * @version 1.0
 **/
public class OrderDetailSyncMySqlTask {

    /**
     * Maps each oplog field name to the Java type the raw value should be
     * converted to before it is written to MySQL. Consumed by {@code DMLData}
     * when parsing each Kafka message.
     */
    final static Map<String, Class<?>> fields = new HashMap<>();

    static {
        // MongoDB primary key
        fields.put("_id", String.class);

        // Basic item attributes
        fields.put("ent_id", Long.class);
        fields.put("region_code", String.class);
        fields.put("region_name", String.class);
        fields.put("channel_keyword", String.class);
        fields.put("channel_sheetno", String.class);
        fields.put("item_code", String.class);
        fields.put("barcode", String.class);
        fields.put("item_name", String.class);
        fields.put("online_sup_code", String.class);
        fields.put("online_sup_name", String.class);
        fields.put("online_cat_code", String.class);
        fields.put("online_cat_name", String.class);
        fields.put("order_status", String.class);

        // Item amounts
        fields.put("sale_price", Double.class);
        fields.put("sale_qty", Double.class);
        fields.put("sale_value", Double.class);
        fields.put("original_price", Double.class);

        // Item discounts
        fields.put("disc_bt", Double.class);
        fields.put("disc_bt_sj", Double.class);
        fields.put("disc_value", Double.class);
        fields.put("disc_value_pt", Double.class);

        // Timestamps (kept as java.util.Date because DMLData / the MySQL sink
        // consume Date-typed values — do not migrate to java.time here alone)
        fields.put("order_time", Date.class);
        fields.put("confirm_time", Date.class);
        fields.put("complete_time", Date.class);
        fields.put("last_modify_time", Date.class);
    }

    /**
     * Job entry point: consumes order-detail oplog JSON messages from Kafka,
     * parses each one into a {@code DMLData} record, drops invalid records,
     * and sinks the rest into MySQL via {@code OrderDetailMySqlSink}.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // 0. environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // 1. source — resume from the consumer group's committed offsets
        FlinkKafkaConsumer<String> flinkKafkaConsumer = getFlinkKafkaConsumer();
//        flinkKafkaConsumer.setStartFromLatest();
//        flinkKafkaConsumer.setStartFromEarliest();
        flinkKafkaConsumer.setStartFromGroupOffsets();
        DataStreamSource<String> kafkaDS = env.addSource(flinkKafkaConsumer);

//        kafkaDS.writeAsText("./data/input/order_detail_oplog.txt", FileSystem.WriteMode.OVERWRITE).setParallelism(1);
//        kafkaDS.print("kafka");

        // 2. transform — JSON text -> DMLData, keeping only valid records.
        // NOTE: "vaild" is DMLData's (misspelled) API; renaming it is out of
        // scope for this file.
        SingleOutputStreamOperator<DMLData> dmlDS = kafkaDS.map(JSON::parseObject)
                .map(oplog -> new DMLData(oplog, fields))
                .filter(DMLData::vaild);

        // 3. sink to MySQL
        dmlDS.addSink(new OrderDetailMySqlSink());

        // 4. execute
        env.execute("【订单明细oplog消费】同步订单明细数据到MySQL");

    }

    /**
     * Builds the Kafka consumer for the {@code order_detail_data} topic with
     * auto-commit enabled, deserializing each record as a UTF-8 string.
     *
     * @return a configured {@link FlinkKafkaConsumer}
     */
    private static FlinkKafkaConsumer<String> getFlinkKafkaConsumer() {
        // Kafka connection parameters
        Properties properties = new Properties();
        // broker address
        properties.setProperty("bootstrap.servers", "172.16.10.23:9092");
//        properties.setProperty("bootstrap.servers", "127.0.0.1:9092");
        // consumer group
        properties.setProperty("group.id", "sync-mysql-test");
        // with committed offsets: resume from them; without: start from latest
//        properties.setProperty("auto.offset.reset", "latest");
        // background thread re-checks Kafka partitions every 5s (dynamic partition discovery)
//        properties.setProperty("flink.partition-discovery.interval-millis", "5000");
        // commit offsets automatically
        properties.setProperty("enable.auto.commit", "true");
        // auto-commit interval
        properties.setProperty("auto.commit.interval.ms", "2000");

        return new FlinkKafkaConsumer<>("order_detail_data", new SimpleStringSchema(), properties);
    }
}
