package com.bw.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.util.MyKafkaUtil;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

public class Test5_0 {

    /**
     * Flink streaming job: reads bound-order CDC records from Kafka topic
     * {@code tms_ods_yk6} (consumer group {@code test669}), keeps only sorting
     * events — records from table {@code order_org_bound} whose status is
     * {@code 64004} — and writes their {@code data} payload to Kafka topic
     * {@code tms_dwd_bound_sort_yk6}.
     *
     * <p>Planned follow-ups (from the original notes, not yet implemented):
     * windowed sort counts per org, and async-I/O enrichment of province and
     * warehouse dimension data.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        DataStream<String> stream =
                env.addSource(MyKafkaUtil.getKafkaConsumer("tms_ods_yk6", "test669"));
        stream.print();

        // Read order_org_bound records, keep sorting events, write to Kafka tms_dwd_bound_sort.
        //
        // Parse each record exactly once (the original parsed twice: once in the
        // filter, once in the map). Emit the "data" payload when the record is a
        // sorting event, otherwise null; nulls are dropped by the trailing filter.
        SingleOutputStreamOperator<String> ttDS = stream
                .map(value -> {
                    try {
                        JSONObject json = JSON.parseObject(value);
                        if ("order_org_bound".equals(json.getString("table"))
                                && "64004".equals(json.getJSONObject("data").getString("status"))) {
                            return json.getString("data");
                        }
                    } catch (Exception ignored) {
                        // Malformed or unexpected records are dropped by design
                        // (best-effort filter, matching the original behavior).
                    }
                    return null;
                })
                .filter(data -> data != null);

        ttDS.addSink(MyKafkaUtil.getKafkaProducer("tms_dwd_bound_sort_yk6"));

        env.execute();
    }
}
