package com.shujia.flink.source;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.core.fs.Path;
import org.apache.flink.formats.csv.CsvReaderFormat;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.time.Duration;

/**
 * Demonstrates Flink file-based sources: the legacy bounded {@code readTextFile}
 * API, and the newer {@code FileSource} connector which becomes an unbounded
 * stream by continuously monitoring a directory for newly arriving files.
 */
public class Demo2FIleSource {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        /*
         * File-based source (legacy API) ---> bounded stream.
         */
        DataStreamSource<String> legacyStudentStream = env.readTextFile("data/students/csv");
        //legacyStudentStream.print();

        /*
         * File-based source (new connector) ---> unbounded stream:
         * the directory is rescanned periodically and new files are read.
         */
        DataStream<Lines> linesStream = env.fromSource(
                newMonitoringFileSource(),
                WatermarkStrategy.noWatermarks(),
                "fileSource");

        // Map each record to a (line, 1) pair. The explicit TypeInformation is
        // required because lambda type erasure hides the Tuple2 element types.
        DataStream<Tuple2<String, Integer>> pairStream = linesStream.map(
                record -> Tuple2.of(record.getLine(), 1),
                Types.TUPLE(Types.STRING, Types.INT));

        // Word-count style aggregation: key by the line text, sum the counts.
        pairStream
                .keyBy(pair -> pair.f0)
                .sum(1)
                .print();

        env.execute();
    }

    /**
     * Builds a CSV {@link FileSource} over {@code data/stream} that keeps
     * monitoring the directory. Removing the {@code monitorContinuously(...)}
     * call would turn it back into a bounded source.
     */
    private static FileSource<Lines> newMonitoringFileSource() {
        // Schema describing how each CSV row maps onto the Lines POJO.
        CsvSchema schema = CsvSchema
                .builder()
                .addColumn(new CsvSchema.Column(0, "line", CsvSchema.ColumnType.STRING))
                .build();

        // Parser turning CSV rows into Lines instances.
        CsvReaderFormat<Lines> readerFormat = CsvReaderFormat
                .forSchema(new CsvMapper(), schema, TypeInformation.of(Lines.class));

        return FileSource
                // Directory to read from plus the record format.
                .forRecordStreamFormat(readerFormat, new Path("data/stream"))
                // Scan interval; supplying this makes the source unbounded.
                .monitorContinuously(Duration.ofSeconds(1))
                .build();
    }

    /** POJO holding a single line of input text (Flink requires the no-arg ctor + setters). */
    public static class Lines {
        private String line;

        public Lines() {
        }

        public Lines(String line) {
            this.line = line;
        }

        public String getLine() {
            return line;
        }

        public void setLine(String line) {
            this.line = line;
        }

        @Override
        public String toString() {
            return "Lines{line='" + line + "'}";
        }
    }
}
