package com.desheng.bigdata.flink.batch.source

import com.offcn.bigdata.flink.domain.Student
import org.apache.flink.api.java.io.TextInputFormat
import org.apache.flink.api.scala._
import org.apache.flink.core.fs.Path

/**
 * Demonstrates three ways of loading batch input with Flink's DataSet API:
 * a raw text file, and a CSV file decoded as a POJO, a case class, and a tuple.
 */
object _01SourceFromFile {
    def main(args: Array[String]): Unit = {
        val env = ExecutionEnvironment.getExecutionEnvironment

        // step 1: read a plain text file through an explicit TextInputFormat.
        // NOTE(review): the resulting DataSet is never referenced; since the
        // batch API is lazy and no sink is attached, this read never runs.
        val textPath = "file:/E:/data/flink/hello.txt"
        env.readFile(
            new TextInputFormat(new Path(textPath)),
            textPath
        )

        // step 2a: '|'-delimited CSV mapped onto a POJO by field name.
        val pojoStudents = env.readCsvFile[Student](
            filePath = "file:/E:/data/flink/student.csv",
            fieldDelimiter = "|",
            ignoreFirstLine = true,
            pojoFields = Array("id", "name", "age", "gender", "course", "score")
        )

        // step 2b: same file decoded into a case class
        // (constructor parameter order must match the CSV column order).
        val caseClassStudents = env.readCsvFile[Student2](
            filePath = "file:/E:/data/flink/student.csv",
            fieldDelimiter = "|",
            ignoreFirstLine = true
        )

        // step 2c: same file decoded into a plain tuple.
        val tupleStudents = env.readCsvFile[(Int, String, Int, String, String, Double)](
            filePath = "file:/E:/data/flink/student.csv",
            fieldDelimiter = "|",
            ignoreFirstLine = true
        )

        // print() is the only sink, so only the tuple read actually executes.
        tupleStudents.print()
    }
}

/**
 * One row of the student CSV, used as the target of `readCsvFile[Student2]`.
 * Declared `final`: extending a case class breaks the generated
 * `equals`/`hashCode`/`copy` semantics and is deprecated practice.
 *
 * @param id     student identifier
 * @param name   student name
 * @param age    student age
 * @param gender student gender
 * @param course course name
 * @param score  course score
 */
final case class Student2(id: Int, name: String, age: Int, gender: String, course: String, score: Double)