package com.bw.yk10.app;

import com.alibaba.fastjson.JSONObject;
import com.bw.yk10.func.JsonParserFunction;
import com.bw.yk10.util.MyKafkaUtil;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import java.sql.PreparedStatement;
import java.sql.SQLException;

public class Test04 {

    /**
     * Streaming job: consumes consultation events from Kafka, enriches them with
     * HBase dimension (lookup) joins — patient gender via the dict table, age group
     * from the patient's birthday, hospital name via doctor -&gt; hospital — then
     * aggregates consultation fee and distinct-patient count per 10-second tumbling
     * window grouped by (hospital, age group, gender), and writes the result to
     * ClickHouse. A secondary branch forwards rows to a Kafka "late data" topic.
     *
     * @param args unused
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        configureCheckpointing(env);

        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings);

        // Kafka source table. PROCTIME() is required as the probe-side time attribute
        // for the HBase lookup joins below; rowtime + watermark drive the tumbling
        // windows (10s allowed out-of-orderness).
        tEnv.executeSql("CREATE TABLE consultation (\n" +
                "  `id` STRING,\n" +
                "  `create_time` STRING,\n" +
                "  `consultation_fee` DECIMAL(19,2),\n" +
                "  `patient_id` STRING,\n" +
                "  `doctor_id` STRING,\n" +
                "  `proctime` as PROCTIME()," +
                "   rowtime AS TO_TIMESTAMP(create_time)," +
                "   WATERMARK FOR rowtime AS rowtime - INTERVAL '10' SECOND" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'dwd-trade-consultation',\n" +
                "  'properties.bootstrap.servers' = '192.168.18.105:9092',\n" +
                "  'properties.group.id' = 'test07_group',\n" +
                "  'scan.startup.mode' = 'earliest-offset',\n" +
                "  'format' = 'json'\n" +
                ")");

        // HBase lookup-table notes (hard-won, keep in mind when touching the DDL):
        // 1. The HBase tables must be created with column_encoded_bytes=0.
        // 2. Columns must be declared inside a column family (here: INFO).
        // 3. Table, column-family and column names must be UPPER-CASE in the Flink DDL.
        // 4. 'lookup.async' = 'true' can fail on some connector versions;
        //    lookup.cache.max-rows / lookup.cache.ttl must be set.
        // 5. The Kafka (probe-side) table must expose a PROCTIME() column.
        // Rowkey layout of DIM_DICT: user_id:sex:age.
        tEnv.executeSql(hbaseDimDdl("dim_dict", "DIM_DICT"));
        tEnv.executeSql(hbaseDimDdl("dim_doctor", "DIM2_DOCTOR"));
        tEnv.executeSql(hbaseDimDdl("dim_patient", "DIM2_PATIENT"));
        tEnv.executeSql(hbaseDimDdl("dim_hospital", "DIM2_HOSPITAL"));

        // Debug only — .execute().print() launches a separate job and blocks the
        // client until it finishes; it must not stay in the production path.
        // tEnv.sqlQuery("select * from dim_dict ").execute().print();

        // UDF extracting a field from the JSON stored in the dims' INFO.LINE column.
        tEnv.createTemporaryFunction("jsonParser", new JsonParserFunction());

        // Enrichment: processing-time temporal (lookup) joins against the four dims.
        // Age group is derived from the patient's birthday via nested CASEs.
        Table result_table = tEnv.sqlQuery("  SELECT cn.id id,\n" +
                "  cn.consultation_fee consultation_fee,\n" +
                "  cn.patient_id patient_id,\n" +
                "  cn.doctor_id doctor_id,\n" +
                "  cn.rowtime rowtime,\n" +
                "  jsonParser(dict.INFO.LINE, 'value') gender,\n" +
                "  CASE\n" +
                "    WHEN jsonParser(pt.INFO.LINE, 'birthday') <= '2006-01-01' THEN 'Adult'\n" +
                "    WHEN jsonParser(pt.INFO.LINE, 'birthday') > '2006-01-01' THEN\n" +
                "      CASE\n" +
                "        WHEN jsonParser(pt.INFO.LINE, 'birthday') <= '2011-01-01' THEN 'Child'\n" +
                "        WHEN jsonParser(pt.INFO.LINE, 'birthday') > '2011-01-01' THEN 'Baby'\n" +
                "      END\n" +
                "  END AS age_group,\n" +
                "  jsonParser(hol.INFO.LINE, 'name') hol_name\n" +
                "  FROM consultation AS cn\n" +
                "  LEFT JOIN dim_patient FOR SYSTEM_TIME AS OF cn.proctime AS pt\n" +
                "  ON cn.patient_id=pt.ID\n" +
                "  LEFT JOIN dim_dict FOR SYSTEM_TIME AS OF cn.proctime AS dict" +
                "  ON  jsonParser(pt.INFO.LINE, 'gender')=dict.ID" +
                "  LEFT JOIN dim_doctor FOR SYSTEM_TIME AS OF cn.proctime AS dc\n" +
                "  ON cn.doctor_id=dc.ID\n" +
                "  LEFT JOIN dim_hospital FOR SYSTEM_TIME AS OF cn.proctime AS hol\n" +
                "  ON jsonParser(dc.INFO.LINE, 'hospital_id')=hol.ID");

        tEnv.createTemporaryView("table_view", result_table);

        // 10-second tumbling window per (hospital, age group, gender):
        // total consultation fee + distinct-patient count, stamped with wall-clock ms.
        String useTableSQL = "select " +
                " DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND ),'yyyy-MM-dd HH:mm:ss') stt," +
                " DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND ),'yyyy-MM-dd HH:mm:ss') edt, " +
                " hol_name," +
                " age_group," +
                " gender," +
                " sum(consultation_fee) total_consultation_fee," +
                " count(distinct patient_id)  uv_ct, " +
                " UNIX_TIMESTAMP()*1000 ts" +
                " from  table_view " +
                " group by TUMBLE(rowtime, INTERVAL '10' SECOND )," +
                " age_group,gender,hol_name";

        Table resultTable = tEnv.sqlQuery(useTableSQL);
        DataStream<ResultBean> resultTableDS = tEnv.toAppendStream(resultTable, ResultBean.class);

        // ClickHouse sink. The select produces 8 columns; the original insert only
        // bound 7 and silently dropped hol_name, so the hospital dimension never
        // reached the dws table. Columns are now listed explicitly so the statement
        // does not depend on the ClickHouse table's column order.
        // NOTE(review): assumes the ClickHouse columns carry these exact names and
        // that ResultBean exposes getHol_name() (required anyway for the Flink POJO
        // mapping of the hol_name column above) — verify against the table DDL.
        resultTableDS.addSink(
                JdbcSink.sink(
                        "insert into dws_trade_hospital_gender_age_consul_win " +
                                "(stt, edt, hol_name, age_group, gender, total_consultation_fee, uv_ct, ts) " +
                                "values (?,?,?,?,?,?,?,?)",
                        new JdbcStatementBuilder<ResultBean>() {
                            @Override
                            public void accept(PreparedStatement ps, ResultBean resultBean) throws SQLException {
                                ps.setObject(1, resultBean.getStt());
                                ps.setObject(2, resultBean.getEdt());
                                ps.setObject(3, resultBean.getHol_name());
                                ps.setObject(4, resultBean.getAge_group());
                                ps.setObject(5, resultBean.getGender());
                                ps.setObject(6, resultBean.getTotal_consultation_fee());
                                ps.setObject(7, resultBean.getUv_ct());
                                ps.setObject(8, resultBean.getTs());
                            }
                        },
                        // Flush every 5 rows OR every second, so sparse windows don't
                        // linger in the buffer waiting for a full batch.
                        new JdbcExecutionOptions.Builder()
                                .withBatchSize(5)
                                .withBatchIntervalMs(1000)
                                .build(),
                        new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                                .withUrl("jdbc:clickhouse://hadoop-single:8123/default")
                                .withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
                                .build()
                )
        );

        // Late-data branch.
        // NOTE(review): "rowtime > rowtime - INTERVAL '10' SECOND" is always true for
        // any non-null rowtime, so this forwards EVERY row, not just late ones. On
        // Flink >= 1.15 the intended filter is
        //   WHERE rowtime <= CURRENT_WATERMARK(rowtime)
        // Kept as-is because the running Flink version is not visible here — confirm
        // and fix before relying on the dws-outtime-consultation topic.
        Table outTimeDataTable = tEnv.sqlQuery(
                "select * from consultation" +
                        " where rowtime > rowtime - INTERVAL '10' SECOND ");

        DataStream<Row> outTimeresultDS = tEnv.toAppendStream(outTimeDataTable, Row.class);

        outTimeresultDS.print();

        outTimeresultDS.map(new MapFunction<Row, String>() {
            @Override
            public String map(Row row) throws Exception {
                return row.toString();
            }
        }).addSink(new MyKafkaUtil().getKafkaProducer("dws-outtime-consultation"));

        env.execute("test04");
    }

    /** Enables exactly-once checkpointing every 5s with an HDFS-backed state backend. */
    private static void configureCheckpointing(StreamExecutionEnvironment env) {
        env.enableCheckpointing(5000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        StateBackend fsStateBackend = new FsStateBackend(
                "hdfs://hadoop202:8020/gmall/flink/checkpoint/ProductStatsApp");
        env.setStateBackend(fsStateBackend);
    }

    /**
     * Builds the DDL for an HBase dimension lookup table. All four dims share the
     * same shape: upper-case rowkey column ID plus a single JSON payload column
     * INFO.LINE, queried synchronously with a bounded lookup cache.
     *
     * @param flinkTable name to register in the Flink catalog
     * @param hbaseTable physical HBase table name (upper-case)
     * @return the CREATE TABLE statement
     */
    private static String hbaseDimDdl(String flinkTable, String hbaseTable) {
        return "CREATE TABLE " + flinkTable + " (\n" +
                " ID STRING,\n" +                   // rowkey — must be upper-case
                " INFO ROW<LINE STRING> , " +       // column family INFO, column LINE
                " PRIMARY KEY (ID) NOT ENFORCED" +  // marks ID as the rowkey
                ") WITH (\n" +
                " 'connector' = 'hbase-2.2',\n" +
                " 'table-name' = '" + hbaseTable + "',\n" +
                " 'zookeeper.quorum' = 'hadoop-single:2181'," +
                " 'lookup.async' = 'false' ," +     // async lookups unreliable on this version
                " 'lookup.cache.max-rows' = '5000' ," +
                " 'lookup.cache.ttl' = '10min'\n" +
                ")";
    }
}
