package ex.datastream.connectors;

import java.io.File;
import java.time.Duration;

import ex.datastream.functions.richFunction.RichMapFunc;
import ex.vo.ComplexPojo;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.core.fs.Path;
import org.apache.flink.formats.csv.CsvReaderFormat;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Example: reading records from CSV files as a streaming source (example 1).
 * <p>
 * Expected CSV line format (a numeric id, then a '#'-separated array):
 * <pre>
 * 0,1#2#3
 * 1,5#6#7
 * </pre>
 * References:
 * https://nightlies.apache.org/flink/flink-docs-release-1.18/docs/dev/datastream/sources/
 * https://nightlies.apache.org/flink/flink-docs-release-1.18/docs/connectors/datastream/formats/csv/
 */
public class CsvFileTest01 {
    public static void main(String[] args) throws Exception {
        // Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // CSV schema mapping each line to a ComplexPojo: column "id" is numeric,
        // column "array" is split on '#'. The column indices (0 and 4) follow the
        // ComplexPojo example in the Flink CSV format documentation.
        CsvReaderFormat<ComplexPojo> csvFormat =
                CsvReaderFormat.forSchema(
                        CsvSchema.builder()
                                .addColumn(
                                        new CsvSchema.Column(0, "id", CsvSchema.ColumnType.NUMBER))
                                .addColumn(
                                        new CsvSchema.Column(4, "array", CsvSchema.ColumnType.ARRAY)
                                                .withArrayElementSeparator("#"))
                                .build(),
                        TypeInformation.of(ComplexPojo.class));

        // File source over the local "data/csv" directory; monitorContinuously makes
        // this an unbounded source that picks up new files once per minute.
        FileSource<ComplexPojo> source =
                FileSource.forRecordStreamFormat(csvFormat, Path.fromLocalFile(new File("data/csv")))
                        .monitorContinuously(Duration.ofMinutes(1))
                        .build();

        DataStream<ComplexPojo> dataStream =
                env.fromSource(source, WatermarkStrategy.noWatermarks(), "csv");

        // FIX: shuffle() returns a NEW DataStream and does not modify the receiver;
        // the original code discarded its result, making the call a no-op. Chain it
        // into the map so records are actually redistributed randomly across subtasks.
        SingleOutputStreamOperator<Integer> outputStreamOperator =
                dataStream.shuffle().map(new RichMapFunc());

        outputStreamOperator.print();

        // Trigger the job; required for lazily-built DataStream pipelines.
        env.execute();
    }
}
