package com.study.flinktable;

import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Paths;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.BatchTableEnvironment;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.OldCsv;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.flink.table.sinks.CsvTableSink;
import org.apache.flink.types.Row;



/**
 * Flink Table API batch demo: reads a comma-separated classpath file
 * ({@code words.txt}) with columns (word: STRING, age: INT, gender: STRING),
 * registers it as a batch table source, selects all three columns, and
 * prints the result as a {@link Row} data set.
 *
 * <p>Uses the legacy {@code BatchTableEnvironment}/{@code OldCsv} descriptor
 * API available in Flink 1.9-era releases.
 *
 * @author sjw
 * @date 2020/12/1 10:19
 */
public class TableBatchWordCount {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        BatchTableEnvironment tableEnvironment = BatchTableEnvironment.create(env);

        // Resolve the input file from the classpath; fails fast with a clear
        // message instead of a bare NPE when the resource is missing.
        String sourcePath = resourcePath("words.txt");

        // Register a CSV-backed table source. With the OldCsv descriptor the
        // format fields and the schema fields must match one-to-one.
        tableEnvironment.connect(new FileSystem().path(sourcePath))
                .withFormat(new OldCsv()
                        .field("word", Types.STRING)
                        .field("age", Types.INT)
                        .field("gender", Types.STRING)
                        .fieldDelimiter(",")
                        .lineDelimiter("\n"))
                .withSchema(new Schema()
                        .field("word", Types.STRING)
                        .field("age", Types.INT)
                        .field("gender", Types.STRING))
                .registerTableSource("sourceTable");

        // Select all three columns (withColumns uses 1-based column indices).
        Table result = tableEnvironment.scan("sourceTable")
                .select("withColumns(1 to 3)");

        // print() triggers execution of the batch job.
        tableEnvironment.toDataSet(result, Row.class).print();
    }

    /**
     * Resolves a classpath resource name to an absolute filesystem path.
     *
     * @param name resource name relative to the classpath root
     * @return absolute local-filesystem path of the resource
     * @throws IllegalStateException if the resource cannot be found on the classpath
     * @throws URISyntaxException if the resource URL cannot be converted to a URI
     */
    private static String resourcePath(String name) throws URISyntaxException {
        URL url = TableBatchWordCount.class.getClassLoader().getResource(name);
        if (url == null) {
            throw new IllegalStateException("Classpath resource not found: " + name);
        }
        // Paths.get(URI) decodes URL-escaped characters (e.g. spaces in the
        // classpath), unlike URL#getPath() which returns the encoded form.
        return Paths.get(url.toURI()).toString();
    }
}
