package a.query;

import org.apache.flink.connector.datagen.table.DataGenConnectorOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.*;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static org.apache.flink.table.api.Expressions.$;
// Sample input (sid, call_out, call_in, call_type, call_time, duration):
//001,181,182,success,1000,40
// 002,182,183,fail,3000,20
//001,183,184,success,2000,30
// 002,184,185,success,6000,50
// 003,181,183,fail,5000,50
// 001,183,184,success,3000,10
// 002,184,185,success,4000,40
// 001,181,183,success,7000,50
// The expected changelog output is:
//+----+--------------------------------+----------------------+
//        | op |                            sid |       total_duration |
//        +----+--------------------------------+----------------------+
//        | +I |                            001 |                   40 |
//        | -U |                            001 |                   40 |
//        | +U |                            001 |                   70 |
//        | +I |                            002 |                   50 |
//        | -U |                            001 |                   70 |
//        | +U |                            001 |                   80 |
//        | -U |                            002 |                   50 |
//        | +U |                            002 |                   90 |
//        | -U |                            001 |                   80 |
//        | +U |                            001 |                  130 |

/**
 * Continuous aggregation example over a Kafka call-log topic.
 * <p>
 * Table identifier = catalog name + database name + table name; the catalog
 * may point at external systems (e.g. HBase) and is required for persisted tables.
 * <p>
 * Tables are either permanent or temporary. Temporary tables come in two forms:
 * virtual tables (derived from regular tables) and regular tables. This example
 * registers a temporary table backed by the Kafka connector, then keeps only
 * successful calls of duration >= 10 and sums the duration per subscriber id.
 */
public class MyQuery {

    public static void main(String[] args) {
        // Streaming-mode planner; parallelism 1 keeps the printed changelog ordered.
        EnvironmentSettings envSettings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        TableEnvironment tEnv = TableEnvironment.create(envSettings);
        tEnv.getConfig().getConfiguration().setString("parallelism.default", "1");

        // CSV-encoded record layout of the "calllog" topic.
        Schema callLogSchema = Schema.newBuilder()
                .column("sid", DataTypes.STRING())
                .column("call_out", DataTypes.STRING())
                .column("call_in", DataTypes.STRING())
                .column("call_type", DataTypes.STRING())
                .column("call_time", DataTypes.BIGINT())
                .column("duration", DataTypes.BIGINT())
                .build();

        // Kafka source read from the earliest offset so historical records are replayed.
        TableDescriptor kafkaSource = TableDescriptor.forConnector("kafka")
                .schema(callLogSchema)
                .option("topic", "calllog")
                .option("properties.bootstrap.servers", "127.0.0.1:9091,127.0.0.1:9092,127.0.0.1:9093")
                .option("scan.startup.mode", "earliest-offset")
                .option("format", "csv")
                .option("properties.group.id", "testgroup")
                .build();
        tEnv.createTemporaryTable("KafkaSourceTable", kafkaSource);

        // Keep successful calls lasting at least 10 units, then sum duration per sid;
        // print emits a retract (+I/-U/+U) changelog as aggregates update.
        Table callLog = tEnv.from("KafkaSourceTable");
        callLog
                .filter($("call_type").isEqual("success").and($("duration").isGreaterOrEqual(10)))
                .groupBy($("sid"))
                .select($("sid"), $("duration").sum().as("total_duration"))
                .execute()
                .print();
    }
}
