package com.itcast.flink.conncetors.hdfs;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * <p>Description: Flink streaming job that reads a text file from HDFS and prints its contents to stdout.</p>
 *
 * @author
 * @version 1.0
 * <p>Copyright:Copyright(c)2020</p>
 * @date
 */
public class HdfsSourceApplication {

    /**
     * Entry point: builds and runs a Flink pipeline that reads a text file
     * from HDFS and prints each line to stdout.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // Bootstrap the streaming execution environment (local or cluster,
        // depending on how the job is launched).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Source: read the HDFS file line-by-line as a stream of strings.
        DataStreamSource<String> hdfsLines = env.readTextFile("hdfs://192.168.23.128:9090/hadoop-env.sh");

        // Sink: print every line; parallelism 1 keeps all output in a single task.
        hdfsLines.print().setParallelism(1);

        // The pipeline above is only a lazy plan — execute() actually runs it.
        env.execute("job");
    }
}
