package com.atguigu.Flink.sql.query;

import com.atguigu.Flink.POJO.WindSpeedSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class Flink01_SimpleQuery {

    /**
     * Demonstrates basic Flink SQL queries against a socket stream of
     * {@link WindSpeedSensor} records: select/where, WITH (CTE), DISTINCT,
     * group aggregation, ORDER BY, SQL hints, and set operations
     * (UNION / INTERSECT / EXCEPT).
     *
     * <p>Input lines from the socket are expected as CSV: {@code id,vc,ts}.
     * Most example queries are intentionally commented out; uncomment one at a
     * time to run it.
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(env);

        // Parse each "id,vc,ts" socket line into a WindSpeedSensor POJO.
        SingleOutputStreamOperator<WindSpeedSensor> ds = env.socketTextStream("hadoop102", 8888)
                .map(
                        line -> {
                            String[] fields = line.split(",");
                            // parseInt/parseLong avoid the needless boxing of
                            // Integer.valueOf/Long.valueOf; parse failures still
                            // surface as NumberFormatException.
                            return new WindSpeedSensor(
                                    fields[0].trim(),
                                    Integer.parseInt(fields[1].trim()),
                                    Long.parseLong(fields[2].trim()));
                        }
                );

        // Convert the stream to a table. "pt" is a processing-time column,
        // "et" is an event-time column derived from the epoch-millis "ts",
        // with a 2-second out-of-orderness watermark.
        Schema schema = Schema.newBuilder()
                .column("id", "STRING")
                .column("vc", "INT")
                .column("ts", "BIGINT")
                .columnByExpression("pt", "PROCTIME()")
                .columnByExpression("et", "TO_TIMESTAMP_LTZ(ts, 3)")
                .watermark("et", "et - INTERVAL '2' SECOND")
                .build();
        Table table = streamTableEnv.fromDataStream(ds, schema);
        table.printSchema();
        streamTableEnv.createTemporaryView("t1", table);

        // 1. select / where
        //streamTableEnv.sqlQuery("select id, vc, ts from t1 where vc >= 100").execute().print();

        // 2. WITH (common table expression)
        //streamTableEnv.sqlQuery("with t2 AS(select id, vc, ts from t1 where vc >= 100) select * from t2").execute().print();

        // 3. distinct
        //streamTableEnv.sqlQuery("select distinct id from t1 ").execute().print();

        // 4. group aggregation
        //streamTableEnv.sqlQuery("select count(*) from t1").execute().print();
        //streamTableEnv.sqlQuery("select id , sum(vc) svc from t1 group by id").execute().print();

        // 5. order by (in streaming mode, only on an ascending time attribute)
        //streamTableEnv.sqlQuery("select id , vc , et  from t1 order by et ").execute().print();

        // 6. LIMIT is only supported in batch mode.

        // 7. SQL hints — override connector options per query via /*+ OPTIONS(...) */.
        String createSourceTable =
                "CREATE TABLE t2 (\n" +
                        "  `id` STRING,\n" +
                        "  `vc` INT,\n" +
                        "  `ts` BIGINT\n" +
                        ") WITH (\n" +
                        "  'connector' = 'kafka',\n" +
                        "  'topic' = 'topicA',\n" +
                        "  'properties.bootstrap.servers' = 'hadoop102:9092,hadoop103:9092,hadoop104:9092',\n" +
                        "  'properties.group.id' = 'flink12',\n" +
                        "  'scan.startup.mode' = 'group-offsets',\n" +
                        "  'properties.auto.offset.reset' = 'latest' ," +
                        "  'format' = 'csv'\n" +
                        ")";
        //streamTableEnv.executeSql(createSourceTable);

        //streamTableEnv.sqlQuery("select id, vc ,ts from t2 /*+ OPTIONS('topic'='topicB') */ ").execute().print();

        // 8. Set operations — results are upserted into a JDBC sink keyed on s.
        String createSinkTable =
                " create table t5(" +
                        " s STRING , " +
                        " PRIMARY KEY (s) NOT ENFORCED " +
                        " ) with (" +
                        "   'connector' = 'jdbc', " +
                        "   'url' = 'jdbc:mysql://hadoop102:3306/test' ," +
                        "   'table-name' = 'collection' ," +
                        "   'username' = 'root' ," +
                        "   'password' = '000000'  " +
                        " )";
        streamTableEnv.executeSql(createSinkTable);

        // Two small inline views used as operands for the set operations below.
        streamTableEnv.executeSql("create view t3(s) as values ('c'), ('a'), ('b'), ('b'), ('c')");
        streamTableEnv.executeSql("create view t4(s) as values ('d'), ('e'), ('a'), ('b'), ('b')");

        // Union:
        // UNION (deduplicates)   |   UNION ALL (keeps duplicates)
        //streamTableEnv.sqlQuery( "select s from t3 UNION select s from t4" ).execute().print();
        //streamTableEnv.sqlQuery( "select s from t3 UNION ALL select s from t4" ).execute().print();

        // Intersection:
        // INTERSECT (deduplicates)   |   INTERSECT ALL (keeps duplicates)
        //streamTableEnv.sqlQuery( "(SELECT s FROM t3) INTERSECT (SELECT s FROM t4)").execute().print();
        //streamTableEnv.sqlQuery( " (SELECT s FROM t3) INTERSECT ALL (SELECT s FROM t4) ").execute().print();

        // Difference:
        // EXCEPT (deduplicates)   |   EXCEPT ALL (keeps duplicates)
        //streamTableEnv.sqlQuery("(SELECT s FROM t3) EXCEPT (SELECT s FROM t4)").execute().print();
        //streamTableEnv.sqlQuery("(SELECT s FROM t3) EXCEPT ALL (SELECT s FROM t4)").execute().print();

        // NOTE(review): with every DataStream sink commented out, env.execute()
        // may fail with "No operators defined in streaming topology" — confirm
        // before relying on this call when running Table-API-only examples.
        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

    }
}
