package com.zdb.demo.flink.streaming;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.sinks.CsvTableSink;
import org.apache.flink.table.sinks.TableSink;
import org.apache.flink.table.sources.CsvTableSource;
import org.apache.flink.table.sources.ProjectableTableSource;
import org.apache.flink.table.sources.StreamTableSource;
import org.apache.flink.table.sources.TableSource;
import org.apache.flink.types.Row;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

/**
 * Flink batch job (exercise):
 * <ul>
 *   <li>reads a CSV file with schema (id INT, name STRING, age INT) into table {@code student}</li>
 *   <li>runs the SQL {@code select name, max(age) from student group by name}</li>
 *   <li>writes the aggregated result to a CSV sink</li>
 * </ul>
 *
 * <p>NOTE(review): the original exercise text asks for a custom TableSource reading
 * stdin and a custom TableSink writing stdout; this batch variant uses the built-in
 * CSV source/sink instead — confirm which is intended.
 */
public class H8_batch {

    /**
     * Entry point.
     *
     * @param args optional: args[0] = input CSV path, args[1] = output CSV path.
     *             Defaults are used when absent (the original hard-coded empty
     *             paths could never be read from or written to).
     * @throws Exception if the Flink job fails
     */
    public static void main(String[] args) throws Exception {
        final String inputPath = args.length > 0 ? args[0] : "student.csv";
        final String outputPath = args.length > 1 ? args[1] : "result_student.csv";

        // Plain setters instead of double-brace initialization (the anonymous
        // subclass idiom creates a hidden enclosing-instance reference).
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 9191);
        conf.setBoolean("local.start-webserver", true);

        final ExecutionEnvironment batchEnv = ExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);
        TableEnvironment batchTableEnv = TableEnvironment.getTableEnvironment(batchEnv);

        // Source table: one student record per line, comma-separated fields.
        TableSource studentSource = CsvTableSource.builder()
                .path(inputPath)
                .fieldDelimiter(",")
                // Fixed: was "\\n" (a literal backslash followed by 'n'),
                // which never matches an actual newline.
                .lineDelimiter("\n")
                .field("id", Types.INT)
                .field("name", Types.STRING)
                .field("age", Types.INT)
                .build();
        batchTableEnv.registerTableSource("student", studentSource);

        // Sink schema matches the aggregation result: (name STRING, max_age INT).
        String[] fieldNames = new String[] {"name", "max_age"};
        TypeInformation[] fieldTypes = {Types.STRING, Types.INT};
        TableSink studentSink = new CsvTableSink(outputPath, ",");
        // Fixed: the sink must be registered before "insert into" can target it;
        // the original created the sink but never registered it, so sqlUpdate
        // would fail at runtime with "no table registered under result_student".
        batchTableEnv.registerTableSink("result_student", fieldNames, fieldTypes, studentSink);

        // The query required by the exercise: maximum age per student name.
        batchTableEnv.sqlUpdate(
                "insert into result_student select name, max(age) from student group by name");
        batchEnv.execute();
    }
}