package com.example.demohadoop;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

/**
 * Minimal HDFS demo: reads an HDFS file to stdout, then copies a local file into HDFS.
 *
 * <p>Connection settings come from the default {@link Configuration} (i.e. whatever
 * core-site.xml/hdfs-site.xml is on the classpath), except where the path itself
 * carries an explicit {@code hdfs://} authority.
 */
public class DemoHadoopApplication {

	public static void main(String[] args) {
		// Dump an HDFS file to stdout.
		String filePath = "/tmp/idtest.ambari-qa.1567673649.68.pig";
		readHDFSFile(filePath);

		// Upload a local file to an explicit HDFS URI.
		String localPath = "C:\\Users\\98655\\Desktop\\20190905.txt";
		String hdfsPath = "hdfs://dev-hdp-master:8020/tmp/copy.pig";
		writeHDFS(localPath, hdfsPath);

		System.out.println();
	}

	/**
	 * Returns the default {@link FileSystem} for the classpath configuration.
	 *
	 * <p>NOTE: {@code FileSystem.get} returns a JVM-cached, shared instance, so callers
	 * must NOT close it here-and-there — closing it would break every other user of the
	 * same cached instance (e.g. the write that follows the read in {@link #main}).
	 *
	 * <p>(Name keeps the historical typo — "Filed" — for backward compatibility with
	 * existing callers.)
	 *
	 * @return the configured default file system
	 * @throws IOException if the file system cannot be created
	 */
	public static FileSystem getFiledSystem() throws IOException {
		Configuration configuration = new Configuration();
		return FileSystem.get(configuration);
	}

	/**
	 * Streams the contents of an HDFS file to {@code System.out}.
	 *
	 * <p>Errors are printed to stderr rather than propagated (demo-style handling,
	 * preserved from the original).
	 *
	 * @param filePath HDFS path of the file to read
	 */
	public static void readHDFSFile(String filePath) {
		Path path = new Path(filePath);
		// try-with-resources replaces the manual null-check/finally close;
		// copyBytes is told not to close the streams (false) — the try block does it.
		// System.out is intentionally left open.
		try (FSDataInputStream in = getFiledSystem().open(path)) {
			IOUtils.copyBytes(in, System.out, 4096, false);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Copies a local file into HDFS, overwriting the destination if it exists
	 * (the default behavior of {@code FileSystem.create}).
	 *
	 * @param localPath path of the local source file
	 * @param hdfsPath  HDFS destination path (may carry an {@code hdfs://} authority)
	 */
	public static void writeHDFS(String localPath, String hdfsPath) {
		Path path = new Path(hdfsPath);
		// Input stream, output stream, buffer size, close-streams flag: false means the
		// try-with-resources block owns closing, so both streams are released even if
		// the copy fails partway.
		try (FSDataOutputStream out = getFiledSystem().create(path);
				FileInputStream in = new FileInputStream(localPath)) {
			IOUtils.copyBytes(in, out, 4096, false);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
