import com.group2.edu.realtime.common.base.BaseSQLApp;
import com.group2.edu.realtime.common.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;


/*
      Prerequisites — components that must be running before this job:
      zk (ZooKeeper), kf (Kafka), f1 (presumably Flink — TODO confirm), mxw (Maxwell CDC),
      the Base-log ingestion job, and then this program.
 */
// TODO 1: create the execution environment and configure the state backend (handled by BaseSQLApp.start)
/**
 * DWD-layer job: builds the user-registration fact stream.
 *
 * <p>Reads CDC rows from {@code topic_db} (Maxwell), keeps {@code user_info} inserts
 * (i.e. new registrations), left-joins them with page-log dimension attributes from
 * {@code dwd_traffic_page} on user id, and writes the enriched rows to the
 * {@code dwd_user_user_register} upsert-kafka topic keyed by {@code user_id}.
 */
public class DwdUserUserRegister extends BaseSQLApp {

    /**
     * Job name, reused as the Kafka consumer-group id for every source this job reads.
     * Extracted to a constant so the three usages cannot drift apart.
     */
    private static final String APP_NAME = "DwdUserUserRegister";

    public static void main(String[] args) {
        // 10016 = local REST/web-UI port, 4 = default parallelism (per BaseSQLApp.start contract)
        new DwdUserUserRegister().start(10016, 4, APP_NAME);
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment streamTableEnvironment) {
        // TODO 2: set idle-state TTL so join state for keys not seen for 10s is evicted.
        // The registration insert and its page-log record arrive close together, so a
        // short retention bounds state size without losing matches.
        streamTableEnvironment.getConfig().setIdleStateRetention(Duration.ofSeconds(10L));

        // TODO 3: map the Maxwell CDC stream (topic_db) to a table via the Kafka connector
        readTopicDb(streamTableEnvironment, APP_NAME);

        // TODO 4: map the page-log topic dwd_traffic_page to a table
        streamTableEnvironment.executeSql("CREATE TABLE page_log (\n" +
                "  `common` map<string,string>,\n" +
                "  `page` string,\n" +
                "  `ts` BIGINT\n" +
                ")" + SQLUtil.getKafkaProperty("dwd_traffic_page", APP_NAME));

        // TODO 5: keep only user_info INSERTs — each one is a new registration
        Table userRegister = streamTableEnvironment.sqlQuery("select \n" +
                "    data['id'] id,\n" +
                "    data['create_time'] create_time,\n" +
                "    date_format(data['create_time'],'yyyy-MM-dd') create_date,\n" +
                "    ts\n" +
                "from topic_db\n" +
                "where `table`='user_info'\n" +
                "and `type`='insert'" +
                "");
        streamTableEnvironment.createTemporaryView("user_register", userRegister);

        // TODO 6: extract dimension attributes from page logs of logged-in users.
        // NOTE(review): the stricter filter page['page_id'] = 'register' was disabled in the
        // original — confirm whether any logged-in page view (not just the register page)
        // should contribute dimensions before re-enabling it.
        Table dimLog = streamTableEnvironment.sqlQuery("select \n" +
                "    common['uid'] user_id,\n" +
                "    common['ch'] channel, \n" +
                "    common['ar'] province_id, \n" +
                "    common['vc'] version_code, \n" +
                "    common['sc'] source_id, \n" +
                "    common['mid'] mid_id, \n" +
                "    common['ba'] brand, \n" +
                "    common['md'] model, \n" +
                "    common['os'] operate_system \n" +
                "from page_log\n" +
                "where common['uid'] is not null \n"
        );
        streamTableEnvironment.createTemporaryView("dim_log", dimLog);

        // TODO 7: left join so a registration is emitted even when no page log matched
        Table resultTable = streamTableEnvironment.sqlQuery("select \n" +
                "    ur.id user_id,\n" +
                "    create_time register_time,\n" +
                "    create_date register_date,\n" +
                "    channel,\n" +
                "    province_id,\n" +
                "    version_code,\n" +
                "    source_id,\n" +
                "    mid_id,\n" +
                "    brand,\n" +
                "    model,\n" +
                "    operate_system,\n" +
                "    ts, \n" +
                "    current_row_timestamp() row_op_ts \n" +
                "from user_register ur \n" +
                "left join dim_log pl \n" +
                "on ur.id=pl.user_id");
        streamTableEnvironment.createTemporaryView("result_table", resultTable);

        // TODO 8: sink to Kafka. upsert-kafka (keyed by user_id) is required because the
        // left join can retract and re-emit a row when the dimension side arrives late.
        streamTableEnvironment.executeSql(" create table dwd_user_user_register(\n" +
                "    user_id string,\n" +
                "    register_time string,\n" +
                "    register_date string,\n" +
                "    channel string,\n" +
                "    province_id string,\n" +
                "    version_code string,\n" +
                "    source_id string,\n" +
                "    mid_id string,\n" +
                "    brand string,\n" +
                "    model string,\n" +
                "    operate_system string,\n" +
                "    ts BIGINT,\n" +
                "    row_op_ts TIMESTAMP_LTZ(3) ,\n" +
                "    PRIMARY KEY (user_id) NOT ENFORCED\n" +
                ")" + SQLUtil.getUpsertKafkaProperty("dwd_user_user_register"));

        // Explicit column list (same columns, same order as the view) instead of `select *`,
        // so the insert cannot silently mis-map columns if either side is reordered later.
        streamTableEnvironment.executeSql("insert into dwd_user_user_register " +
                "select user_id, register_time, register_date, channel, province_id, " +
                "version_code, source_id, mid_id, brand, model, operate_system, ts, row_op_ts " +
                "from result_table");
    }
}


//{
// "common":{"ar":"1","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_239",
// "os":"iOS 13.2.3","sc":"2","sid":"79dd651d-9d2a-4f5a-8253-6edfef9debbf","uid":"2043","vc":"v2.1.134"},
// "displays":[{"display_type":"promotion","item":"4","item_type":"course_id","order":1,"pos_id":2},
// {"display_type":"promotion","item":"10","item_type":"course_id","order":2,"pos_id":4},
// {"display_type":"query","item":"4","item_type":"course_id","order":3,"pos_id":3},
// {"display_type":"query","item":"9","item_type":"course_id","order":4,"pos_id":5}],
// "page":{"during_time":12274,"item":"54468","item_type":"order_id","last_page_id":"order","page_id":"payment"},
// "ts":1734314213971
// }
