package com.hymanting;

import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import scala.Int;

/**
 * First Flink streaming application: reads lines from a text file, parses each
 * line as an integer, writes the results to a text sink, and prints them.
 *
 * @author hxchen
 * @since 2021/7/5
 */
public class FirstFlinkApp {
    /**
     * Entry point. Builds and runs a streaming pipeline: file source → parse
     * integers → text sink + stdout.
     *
     * @param args optional overrides: {@code args[0]} = input file path,
     *             {@code args[1]} = output directory; the original hard-coded
     *             paths are used as defaults when arguments are absent.
     * @throws Exception if the Flink job fails to build or execute; propagating
     *                   it (instead of the old {@code printStackTrace()} swallow)
     *                   makes a failed job exit with a non-zero status.
     */
    public static void main(String[] args) throws Exception {
        // Hard-coded paths kept as defaults for backward compatibility.
        final String inputPath = args.length > 0
                ? args[0]
                : "C:\\Users\\Hyman\\Desktop\\buildframeworks\\flink\\doit\\readme.txt";
        final String outputPath = args.length > 1
                ? args[1]
                : "C:\\Users\\Hyman\\Desktop\\buildframeworks\\flink\\doit\\";

        // Set up the streaming execution environment.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Read the source file as a stream of lines.
        DataStream<String> input = env.readTextFile(inputPath);

        // Parse each line into an Integer. An anonymous MapFunction (rather than
        // a lambda) is used deliberately: it preserves full generic type
        // information for Flink's type extraction, which erasure would hide.
        DataStream<Integer> parsedStream = input.map(new MapFunction<String, Integer>() {
            @Override
            public Integer map(String value) throws Exception {
                return Integer.parseInt(value);
            }
        });

        // Write the parsed stream to the output directory and echo it to stdout.
        parsedStream.writeAsText(outputPath);
        parsedStream.print();

        // Execute the job; the result was previously assigned and ignored —
        // report the runtime so the variable is actually used.
        JobExecutionResult result = env.execute("first Flink App");
        System.out.println("Job finished in " + result.getNetRuntime() + " ms");
    }
}
