package yuekao9.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import yuekao9.entity.TransportTask;
import yuekao9.util.KafkaUtil;

import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;

/**
 * DWD-layer Flink job reading the ODS topic {@code tms_ods} and performing two tasks.
 *
 * <p>Task 1: the business table {@code order_info} records several business
 * processes in one table; its change records are split by order status code
 * into per-status side outputs (one fact stream per process).
 *
 * <p>Task 2: change records of {@code transport_task} with status 67004
 * (transport finished) are enriched (transport duration, {@code create_ts})
 * and PII-masked, then written to the Kafka topic {@code DwdTransFinishBean}.
 */
public class ReadOrderInfo {

    /** Status code on transport_task that marks a finished transport. */
    private static final String TRANSPORT_FINISHED = "67004";

    /** Order status codes that each get their own side-output stream. */
    private static final String[] ORDER_STATUSES = {
            "60010", "60020", "60030", "60040", "60050",
            "60060", "60070", "60080", "60099"
    };

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.kafkaSources("tms_ods"));
//        streamSource.print();

        // 1. Split order_info records into side outputs keyed by status.
        // Anonymous OutputTag subclasses are required so Flink can capture the
        // generic type argument. A lookup map replaces the original nine
        // copy-pasted if-blocks; that duplication hid a typo ("600 80") which
        // silently dropped every status-60080 record — fixed here.
        Map<String, OutputTag<String>> tagsByStatus = new HashMap<>();
        for (String status : ORDER_STATUSES) {
            tagsByStatus.put(status, new OutputTag<String>(status) {
            });
        }

        SingleOutputStreamOperator<String> process = streamSource
                // Constant-first equals: null-safe if a record has no "table" field.
                .filter(x -> "order_info".equals(JSON.parseObject(x).getString("table")))
                .process(new ProcessFunction<String, String>() {
                    @Override
                    public void processElement(String s,
                                               ProcessFunction<String, String>.Context context,
                                               Collector<String> collector) throws Exception {
                        JSONObject data = JSON.parseObject(s).getJSONObject("data");
                        if (data == null) {
                            return; // malformed / payload-less record — nothing to route
                        }
                        OutputTag<String> tag = tagsByStatus.get(data.getString("status"));
                        if (tag != null) {
                            context.output(tag, s); // unknown statuses are intentionally dropped
                        }
                    }
                });

        // Sinks for the split streams (disabled, as in the original):
//        tagsByStatus.forEach((status, tag) ->
//                process.getSideOutput(tag).addSink(KafkaUtil.kafkaSink("tag" + status)));

        // 2. Extract finished transport tasks from transport_task, enrich and
        //    mask them, then write to the Kafka topic "DwdTransFinishBean".
        SingleOutputStreamOperator<String> transporttask = streamSource.filter(x -> {
            // Parse once per record (the original parsed the same string twice).
            JSONObject record = JSON.parseObject(x);
            if (!"transport_task".equals(record.getString("table"))) {
                return false;
            }
            JSONObject data = record.getJSONObject("data");
            return data != null && TRANSPORT_FINISHED.equals(data.getString("status"));
        });
//        transporttask.print();

        SingleOutputStreamOperator<String> transporttaskdata =
                transporttask.map(new MapFunction<String, String>() {
                    @Override
                    public String map(String s) throws Exception {
                        TransportTask data = JSON.parseObject(
                                JSON.parseObject(s).getString("data"), TransportTask.class);

                        // Enrich: transport duration = actual end - actual start.
                        data.setTransport_time(
                                data.getActual_end_time() - data.getActual_start_time());

                        // Enrich: create_ts (epoch millis) from create_time.
                        // NOTE(review): assumes create_time is formatted as
                        // "yyyy-MM-dd'T'HH:mm:ss'Z'" and denotes GMT+8 wall time —
                        // confirm against the ODS producer. SimpleDateFormat is
                        // per-call here because it is not thread-safe.
                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
                        sdf.setTimeZone(TimeZone.getTimeZone("GMT+8"));
                        data.setCreate_ts(sdf.parse(data.getCreate_time()).getTime());

                        // Mask PII: keep the first character, replace the rest with "**".
                        data.setDriver1_name(mask(data.getDriver1_name()));
                        data.setDriver2_name(mask(data.getDriver2_name()));
                        data.setTruck_no(mask(data.getTruck_no()));
                        return JSON.toJSONString(data);
                    }
                });

//        transporttaskdata.print();
        transporttaskdata.addSink(KafkaUtil.kafkaSink("DwdTransFinishBean"));

        env.execute();
    }

    /**
     * Masks a sensitive value as its first character followed by {@code "**"}.
     * Null or empty values pass through unchanged (the original called
     * {@code substring(0, 1)} unguarded, which throws on empty strings).
     */
    private static String mask(String value) {
        return (value == null || value.isEmpty()) ? value : value.substring(0, 1) + "**";
    }
}
