package com.shujia.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * Uploads local text files into HDFS.
 *
 * <p>NOTE(review): the class name carries a typo ({@code FIleToHdfs}); it is kept
 * because renaming a public class changes the file name and breaks existing callers.
 */
public class FIleToHdfs {
    public static void main(String[] args) throws Exception {
        upload("java/data/words.txt", "/words.txt");
    }

    /**
     * Copies a local text file into HDFS line by line, overwriting the target if it exists.
     *
     * <p>Reads and writes explicitly as UTF-8 — the original relied on the platform
     * default charset, which corrupts non-ASCII content depending on the client OS.
     *
     * @param path    local filesystem path of the source file
     * @param newPath absolute HDFS path of the destination file
     * @throws RuntimeException if the read, the HDFS connection, or the write fails;
     *                          the original cause is preserved
     */
    public static void upload(String path, String newPath) {
        // NOTE(review): NameNode address is hard-coded; presumably fine for this
        // teaching project, but consider externalizing it to configuration.
        URI uri = URI.create("hdfs://node1:9000");
        Configuration configuration = new Configuration();

        // try-with-resources closes writer -> filesystem -> reader in reverse order,
        // even when an exception is thrown mid-copy (the original leaked all three
        // on any failure before the explicit close() calls).
        try (BufferedReader reader = new BufferedReader(
                     new InputStreamReader(new FileInputStream(path), StandardCharsets.UTF_8));
             FileSystem fileSystem = FileSystem.get(uri, configuration);
             BufferedWriter writer = new BufferedWriter(
                     new OutputStreamWriter(
                             fileSystem.create(new Path(newPath), true),  // true = overwrite
                             StandardCharsets.UTF_8))) {

            String line;
            while ((line = reader.readLine()) != null) {
                writer.write(line);
                // Write '\n' explicitly: BufferedWriter.newLine() emits the client
                // platform's separator (\r\n on Windows), which breaks line-oriented
                // consumers of the HDFS file.
                writer.write('\n');
            }

        } catch (Exception e) {
            // Fail loudly with the cause attached — the original printed the stack
            // trace and returned as if the upload had succeeded.
            throw new RuntimeException("failed to upload " + path + " to " + newPath, e);
        }
    }
}
