package com.bw.yk10.app;

import com.alibaba.fastjson.JSONObject;
import com.bw.yk10.util.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class Test03 {

    /**
     * DWD-layer streaming job.
     *
     * <p>Consumes the ODS medical-business change stream from the Kafka topic
     * {@code ods-medical-topic} via a Flink SQL source table, keeps only rows of the
     * {@code consultation} business table, projects/casts the payload fields, and writes
     * the result as JSON to the Kafka topic {@code dwd-trade-consultation}.
     */
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Blink planner in streaming mode (pre-1.14 Table API style).
        final EnvironmentSettings envSettings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, envSettings);

        // Source table over the ODS topic. `data` carries the row payload as a string map;
        // `table` identifies which business table the change record belongs to.
        final String sourceDdl =
                "CREATE TABLE ods_medical_topic (\n" +
                "  `op` STRING,\n" +
                "  `data` Map<STRING,STRING>,\n" +
                "  `db` STRING,\n" +
                "  `table` STRING,\n" +
                "  `ts` TIMESTAMP(3) METADATA FROM 'timestamp'" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'ods-medical-topic',\n" +
                "  'properties.bootstrap.servers' = '192.168.18.105:9092',\n" +
                "  'properties.group.id' = 'test03_group',\n" +
                "  'scan.startup.mode' = 'earliest-offset',\n" +
                "  'format' = 'json'\n" +
                ")";
        tableEnv.executeSql(sourceDdl);

        // ETL: keep only consultation rows with a non-null id and cast payload fields.
        final String consultationQuery =
                "select " +
                " CAST(data['id'] AS STRING) id," +
                " CAST(data['create_time'] AS STRING)  create_time," +
                " CAST(data['consultation_fee'] AS DECIMAL(19,2))  consultation_fee," +
                " CAST(data['patient_id'] AS STRING)  patient_id," +
                " CAST(data['doctor_id'] AS STRING)  doctor_id" +
                " from ods_medical_topic " +
                " where `table`='consultation' and data['id'] is not null ";
        final Table consultations = tableEnv.sqlQuery(consultationQuery);

        // Append-only stream mapped onto the POJO; field names must match the query aliases.
        final DataStream<ConsultationsBean> beanStream =
                tableEnv.toAppendStream(consultations, ConsultationsBean.class);

        // Debug sink: echo each record to stdout.
        beanStream.print();

        // Serialize each bean to JSON and publish to the DWD trade-consultation topic.
        beanStream
                .map(JSONObject::toJSONString)
                .addSink(new MyKafkaUtil().getKafkaProducer("dwd-trade-consultation"));

        env.execute();
    }
}
