package cn.flink.exercise;

import org.apache.flink.table.api.*;

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.lit;

/**
 * 恶意登录统计
 * 
 * @author mayong
 * @date 2023/05/07
 **/
public class spiteLoginFlinkStat {

    public static void main(String[] args) {

        EnvironmentSettings environmentSettings = EnvironmentSettings.newInstance().build();

        TableEnvironment tableEnvironment = TableEnvironment.create(environmentSettings);

        // Source schema for the Kafka access-log topic; every field arrives as a raw string.
        Schema schema = Schema.newBuilder()
                .column("ip", DataTypes.STRING())
                .column("userId", DataTypes.STRING())
                .column("eventTime", DataTypes.STRING())
                .column("method", DataTypes.STRING())
                .column("url", DataTypes.STRING())
                .build();

        tableEnvironment.createTemporaryTable("sourceTable", TableDescriptor.forConnector("kafka")
                .schema(schema)
                .option("topic", "access_log")
                .option("properties.bootstrap.servers", "bigdata01:9092,bigdata02:9092,bigdata03:9092")
                .option("properties.group.id", "log")
                .option("scan.startup.mode", "earliest-offset")
                .option("format", "json")
                .build()
        );

        // Project only the columns the OVER window needs.
        Table table = tableEnvironment.from("sourceTable").select($("userId"), $("url"), $("eventTime"));

        // Build the sliding OVER-window count: accesses per (userId, url) within
        // the last 2 seconds relative to each row.
        //
        // Fixes vs. the previous string assembly:
        //  - missing separators between appended fragments produced "accCountfrom";
        //  - "\\n" injected a literal backslash-n into the SQL instead of a newline;
        //  - the "WINDOW w AS (" clause was never closed with ")".
        //
        // NOTE(review): eventTime is declared STRING in the schema; a RANGE OVER
        // window ordered by it requires a proper time attribute (e.g. a TIMESTAMP
        // column with a watermark) — confirm against the running job.
        String sql =
                "SELECT userId, url,\n"
              + "  COUNT(*) OVER w AS accCount\n"
              + "FROM " + table + "\n"          // Table.toString() registers the table under a generated name
              + "WINDOW w AS (\n"
              + "  PARTITION BY userId, url\n"
              + "  ORDER BY eventTime\n"
              + "  RANGE BETWEEN INTERVAL '2' SECOND PRECEDING AND CURRENT ROW\n"
              + ")";

        Table tableResult = tableEnvironment.sqlQuery(sql);

        // Trigger execution and print results to stdout (blocks on the streaming job).
        tableResult.execute().print();
    }
}
