package com.zdb.demo.flink.streaming;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.sinks.*;
import org.apache.flink.table.sources.ProjectableTableSource;
import org.apache.flink.table.sources.StreamTableSource;
import org.apache.flink.table.sources.TableSource;
import org.apache.flink.types.Row;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

/**
 * Flink job exercise:
 * <ul>
 *   <li>Custom TableSource that reads lines from standard input, splitting each line
 *       on commas. Table schema: (id INT, name STRING, age INT).</li>
 *   <li>Custom TableSink that writes Table data to standard output.</li>
 *   <li>Runs {@code select name, max(age) from student group by name} via SQL.</li>
 * </ul>
 */
public class H8 {

    /**
     * Wires the console-backed source and sink into a Table environment and runs the
     * aggregation query {@code select name, max(age) from student group by name}.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Plain setter calls instead of double-brace initialization, which creates a
        // needless anonymous Configuration subclass holding an enclosing-scope reference.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 9191);
        conf.setBoolean("local.start-webserver", true);

        final StreamExecutionEnvironment streamEnv =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);
        TableEnvironment tableEnv = TableEnvironment.getTableEnvironment(streamEnv);

        // Source table: reads "id,name,age" lines from stdin (schema: id INT, name STRING, age INT).
        tableEnv.registerTableSource("student", new ConsoleTableSource());

        // Sink table: prints the retract stream (name STRING, age INT) to stdout.
        String[] fieldNames = {"name", "age"};
        TypeInformation<?>[] fieldTypes = {Types.STRING, Types.INT};
        TableSink<?> studentSink = new ConsoleTableSink();
        tableEnv.registerTableSink("result_student", fieldNames, fieldTypes, studentSink);

        tableEnv.sqlUpdate(
                "insert into result_student select  name, max(age) age from student group by name");
        streamEnv.execute();
    }
}

/**
 * Table source that reads comma-separated "id,name,age" rows from standard input.
 * Schema: (id INT, name STRING, age INT).
 */
class ConsoleTableSource
        implements StreamTableSource<Row>
        , ProjectableTableSource<Row> {

    private final String[] fieldNames = new String[]{"id", "name", "age"};
    private final TypeInformation<?>[] fieldTypes =
            new TypeInformation<?>[]{Types.INT, Types.STRING, Types.INT};

    @Override
    public TableSource<Row> projectFields(int[] fields) {
        // NOTE(review): the projection is ignored and the full schema is always produced.
        // The planner tolerates this (it just reads more fields than needed), but a real
        // implementation should narrow fieldNames/fieldTypes to the requested indices.
        return new ConsoleTableSource();
    }

    @Override
    public DataStream<Row> getDataStream(StreamExecutionEnvironment execEnv) {
        return execEnv.addSource(new RichSourceFunction<Row>() {
            // Cooperative-cancellation flag. The original cancel() was a no-op, so the
            // framework could never stop this source.
            private volatile boolean running = true;

            @Override
            public void run(SourceContext<Row> ctx) throws Exception {
                System.out.println("ReadTest, Please Enter Data:");
                // Explicit charset: before Java 18, the no-arg InputStreamReader constructor
                // silently uses the platform-default encoding.
                BufferedReader br = new BufferedReader(
                        new InputStreamReader(System.in, StandardCharsets.UTF_8));
                try {
                    String line;
                    while (running && (line = br.readLine()) != null) {
                        System.out.println("ReadTest Output:" + line);
                        String[] strs = line.split(",");
                        if (strs.length != 3) {
                            // Malformed row: skip instead of throwing and killing the job.
                            System.out.println("Skipping malformed line: " + line);
                            continue;
                        }
                        try {
                            Row record = new Row(3);
                            record.setField(0, Integer.parseInt(strs[0].trim()));
                            record.setField(1, strs[1]);
                            record.setField(2, Integer.parseInt(strs[2].trim()));
                            ctx.collect(record);
                        } catch (NumberFormatException e) {
                            // Non-numeric id/age: skip the row, keep the source alive.
                            System.out.println("Skipping line with non-numeric field: " + line);
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
                // System.in is intentionally not closed: closing the reader would close
                // stdin for the entire process.
            }

            @Override
            public void cancel() {
                running = false;
            }
        }).returns(new RowTypeInfo(fieldTypes, fieldNames));
    }

    @Override
    public TypeInformation<Row> getReturnType() {
        return new RowTypeInfo(fieldTypes, fieldNames);
    }

    @Override
    public TableSchema getTableSchema() {
        return new TableSchema(fieldNames, fieldTypes);
    }
}

/**
 * Retract-stream sink that prints each (retract-flag, row) record to stdout as CSV.
 * The boolean flag (true = add, false = retract) is prepended to the output row so
 * retractions from the running aggregation are visible.
 */
class ConsoleTableSink implements RetractStreamTableSink<Row> {

    String[] fieldNames = new String[] {"name", "age"};
    TypeInformation<?>[] fieldTypes = {Types.STRING, Types.INT};

    @Override
    public TypeInformation<Row> getRecordType() {
        return new RowTypeInfo(fieldTypes, fieldNames);
    }

    @Override
    public void emitDataStream(DataStream<Tuple2<Boolean, Row>> dataStream) {
        dataStream.map(line -> {
            // line.f0 is the retract flag; line.f1 carries (name, age).
            Row row = line.f1;
            Row result = Row.of(line.f0, row.getField(0), row.getField(1));
            return new CsvFormatter(",").map(result);
        }).print();
    }

    @Override
    public TupleTypeInfo<Tuple2<Boolean, Row>> getOutputType() {
        return new TupleTypeInfo<>(Types.BOOLEAN, new RowTypeInfo(fieldTypes, fieldNames));
    }

    @Override
    public String[] getFieldNames() {
        return fieldNames;
    }

    @Override
    public TypeInformation<?>[] getFieldTypes() {
        return fieldTypes;
    }

    @Override
    public TableSink<Tuple2<Boolean, Row>> configure(String[] fieldNames, TypeInformation<?>[] fieldTypes) {
        // Fix: the original ignored the supplied schema and returned a sink with the
        // hard-coded defaults. The TableSink.configure contract requires returning a
        // copy configured with exactly the fields the planner passes in.
        ConsoleTableSink configured = new ConsoleTableSink();
        configured.fieldNames = fieldNames;
        configured.fieldTypes = fieldTypes;
        return configured;
    }
}
