import org.apache.calcite.sql.advise.SqlSimpleParser;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.FileSystem;

public class HDFS_WriteCsv {

    /**
     * Flink batch word-count job: reads a text file, splits each line into
     * {@code (word, 1)} pairs via {@link Tokenizers}, sums the counts per word,
     * and writes the results to HDFS as CSV (one {@code word,count} row per line).
     *
     * @param args optional overrides: {@code args[0]} = input path,
     *             {@code args[1]} = output path; the original hard-coded paths
     *             are kept as defaults for backward compatibility
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // Allow paths to be supplied on the command line; fall back to the
        // original hard-coded locations when absent.
        final String inputPath = args.length > 0 ? args[0] : "/home/appleyuchi/桌面/input.txt";
        final String outputPath = args.length > 1 ? args[1] : "hdfs://Desktop:9000/result";

        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<String> text = env.readTextFile(inputPath);

        // Split the lines into (word, 1) tuples, then group by the word
        // (tuple field 0) and sum the per-word counts (tuple field 1).
        DataSet<Tuple2<String, Integer>> counts =
                text.flatMap(new Tokenizers())
                        .groupBy(0)
                        .sum(1);

        // Row delimiter "\n", field delimiter ","; OVERWRITE lets the job be
        // rerun without failing on an already-existing output path.
        counts.writeAsCsv(outputPath, "\n", ",", FileSystem.WriteMode.OVERWRITE);

        // Named execution so the job is identifiable in the Flink dashboard.
        env.execute("HDFS_WriteCsv word count");
    }
}
