package com.atguigu.flink.sql.query;

import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Created by 黄凯 on 2023/6/27 0027 20:22
 *
 * @author 黄凯
 * 永远相信美好的事情总会发生.
 *
 * 常规联结 (regular joins):
 *    1. 内联结 (inner join)
 *    2. 外联结 (outer join)：左外、右外、全外 (left / right / full)
 *
 * 状态的TTL:
 *    常规联结时，会将左右表的数据全部存入状态，可能会导致过大的状态，建议配置状态的TTL
 *    (Regular joins keep every row of both inputs in state; configure a state TTL
 *    to prevent unbounded state growth.)
 */
public class Flink09_RegularJoin {

    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Left input: "id,ts,vc" CSV lines from port 8888.
        SingleOutputStreamOperator<WaterSensor> ds1 = env.socketTextStream("127.0.0.1", 8888)
                .map(Flink09_RegularJoin::parseWaterSensor);
        // Right input: same format from port 9999.
        SingleOutputStreamOperator<WaterSensor> ds2 = env.socketTextStream("127.0.0.1", 9999)
                .map(Flink09_RegularJoin::parseWaterSensor);

        // Stream -> Table conversion.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // State TTL: a regular join keeps ALL rows of both sides in state, which can
        // grow without bound — configuring a TTL is strongly recommended in production.
//        tableEnv.getConfig().getConfiguration().setLong("table.exec.state.ttl",10 * 1000);

        // Schema columns are matched against the WaterSensor POJO by NAME, not position.
        Schema schema = Schema.newBuilder()
                .column("id", "string")
                .column("vc" , "int")
                .column("ts" , "bigint")
                .columnByExpression("pt" , "proctime()")               // processing-time attribute
                .columnByExpression("et" , "to_timestamp_ltz(ts, 3)") // event time from epoch millis
                .watermark("et" , "et - interval '0' second")
                .build();

        Table left = tableEnv.fromDataStream(ds1, schema);
        Table right = tableEnv.fromDataStream(ds2, schema);
        tableEnv.createTemporaryView("t1" , left);
        tableEnv.createTemporaryView("t2" , right) ;

        // Inner join
        String innerSql =
                "select l.id,\n" +
                        "       l.vc,\n" +
                        "       r.id,\n" +
                        "       r.vc\n" +
                        "from t1 l\n" +
                        "         inner join t2 r\n" +
                        "                    on l.id = r.id";

//        tableEnv.sqlQuery(innerSql).execute().print();

        // Outer join: left outer (right outer is analogous)
        String outerSql =
                "select l.id,\n" +
                        "       l.vc,\n" +
                        "       r.id,\n" +
                        "       r.vc\n" +
                        "from t1 l\n" +
                        "         left outer join t2 r\n" +
                        "                         on l.id = r.id";

//        tableEnv.sqlQuery(outerSql).execute().print();

        // Full outer join
        String fullouterSql =
                " select l.id, l.vc , r.id , r.vc " +
                        " from t1 l full outer join t2  r" +
                        " on l.id = r.id";

        // TableResult.print() submits and runs the job itself and blocks on this
        // unbounded (socket) source, so no env.execute() is needed afterwards —
        // calling it would fail with "No operators defined in streaming topology"
        // because the Table pipeline does not register operators on `env`.
        tableEnv.sqlQuery(fullouterSql).execute().print();
    }

    /**
     * Parses one "id,ts,vc" CSV line into a {@link WaterSensor}.
     * Whitespace around each field is trimmed.
     *
     * @param line raw socket line in the form {@code id,ts,vc}
     * @return the populated WaterSensor
     * @throws NumberFormatException if ts/vc are not valid numbers
     * @throws ArrayIndexOutOfBoundsException if the line has fewer than 3 fields
     */
    private static WaterSensor parseWaterSensor(String line) {
        String[] fields = line.split(",");
        return new WaterSensor(fields[0].trim(), Long.valueOf(fields[1].trim()), Integer.valueOf(fields[2].trim()));
    }

}
