package com.hzh.test01;

import com.hzh.test01.pojo.DicCodeImport;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.core.fs.Path;
import org.apache.flink.formats.csv.CsvReaderFormat;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvMapper;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.io.File;
import java.util.function.Function;

/**
 * Minimal Flink streaming job: reads {@code DicCodeImport} rows from a local
 * comma-separated CSV file ({@code input/data.csv}) via {@code FileSource} +
 * {@code CsvReaderFormat}, prints each record to stdout, then executes the job.
 */
public class FlinkTest01 {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallel task keeps the printed records in file order.
        env.setParallelism(1);

        // Deserialize each CSV line into a DicCodeImport POJO; the schema is
        // derived from the POJO's fields, split on ','.
        CsvReaderFormat<DicCodeImport> csvFormat =
                CsvReaderFormat.<DicCodeImport>forSchema(
                        CsvMapper::new,
                        mapper -> mapper.schemaFor(DicCodeImport.class).withColumnSeparator(','),
                        TypeInformation.of(DicCodeImport.class)
                );

        FileSource<DicCodeImport> source =
                FileSource.forRecordStreamFormat(csvFormat, Path.fromLocalFile(new File("input/data.csv")))
                        .build();

        // Bounded file source: no event-time semantics needed, so no watermarks.
        DataStreamSource<DicCodeImport> csvSource =
                env.fromSource(source, WatermarkStrategy.noWatermarks(), "CsvSource");
        csvSource.print();

        try {
            env.execute();
        } catch (Exception e) {
            // main() has no meaningful recovery; rethrow unchecked with context,
            // preserving the original cause.
            throw new RuntimeException("Flink CSV import job failed", e);
        }
    }
}
