import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.core.fs.FileSystem;

/**
 * Minimal Flink batch (DataSet API) example: builds a small in-memory data set
 * of (age, name, height) tuples and writes it to HDFS as plain text.
 *
 * <p>Missing directories on the target path are created automatically.
 * The sink runs with parallelism 1 so the output is a single file rather
 * than a directory of part files.
 */
public class HDFS_WriteTxt {

    public static void main(String[] args) throws Exception {
        // 1. Obtain the batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 2. Define the data: stu(age, name, height).
        //    env.fromElements returns a DataSource, so declare it as such
        //    instead of casting later when operator settings are needed.
        DataSource<Tuple3<Integer, String, String>> stu = env.fromElements(
                new Tuple3<>(19, "zhangsan", "aaaa"),
                new Tuple3<>(1449, "zhangsan", "aaaa"),
                new Tuple3<>(33, "zhangsan", "aaaa"),
                new Tuple3<>(22, "zhangsan", "aaaa")
        );

        // 3. Write to HDFS as text, overwriting any existing output.
        //    NOTE: parallelism must be set on the *sink* (the result of
        //    writeAsText) to get a single output file; setting it on the
        //    source does not constrain the sink's parallelism.
        stu.writeAsText("hdfs://Desktop:9000/test001.txt", FileSystem.WriteMode.OVERWRITE)
                .setParallelism(1);

        // DataSet sinks are lazy; execute() triggers the job.
        env.execute();
    }
}