package com.neu;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.*;
import java.net.URI;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.Properties;

/**
 * @author lychallengers@gmail.com
 * @date 2020/12/8 21:54
 */
public class hdfsTest {
    public static final String HDFS_PATH = "hdfs://219.216.65.81:9000"; // HDFS namenode URI (not used by setUp; kept for the commented alternative)
    FileSystem fileSystem = null;       // core handle for remote file-system operations
    Configuration configuration = null; // Hadoop configuration
    String mfsUrl = "mfs://172.19.0.201:7888"; // MFS endpoint actually connected in setUp

    /**
     * Maps a logical file name onto a two-level hashed bucket layout:
     * {@code /<hash%7>/<hash%23>/<name>}.
     *
     * <p>Bug fix: {@code t1}/{@code t2} were computed but never used — every
     * file was placed under the hard-coded {@code /0/1} directory, defeating
     * the bucketing. NOTE(review): files previously written under /0/1 will
     * not be found via the corrected paths.
     *
     * @param originPath logical file name
     * @return bucketed absolute path on the remote file system
     */
    public String pathTransform(String originPath) {
        int t1 = Math.abs(originPath.hashCode() % 7);
        int t2 = Math.abs(originPath.hashCode() % 23);
        return "/" + t1 + "/" + t2 + "/" + originPath;
    }

    /**
     * Reads an entire local file into a byte array using NIO.
     *
     * <p>Bug fix: the old {@code finally} block called {@code channel.close()}
     * unconditionally and would NPE if the {@code FileInputStream} constructor
     * threw (channel still null). try-with-resources closes both resources
     * safely in every path.
     *
     * @param filename path of the local file to read
     * @return the file's complete contents
     * @throws FileNotFoundException if the file does not exist
     * @throws IOException on any read failure
     */
    public byte[] toByteArrayNIO(String filename) throws IOException {
        File file = new File(filename);
        if (!file.exists()) {
            throw new FileNotFoundException(filename);
        }
        try (FileInputStream fis = new FileInputStream(file);
             FileChannel channel = fis.getChannel()) {
            // NOTE(review): (int) channel.size() truncates files > 2 GiB — acceptable for tests.
            ByteBuffer byteBuffer = ByteBuffer.allocate((int) channel.size());
            while (channel.read(byteBuffer) > 0) {
                // keep reading until the buffer holds the whole file
            }
            return byteBuffer.array();
        }
    }

    /** Connects to the remote file system before each test. */
    @Before
    public void setUp() throws Exception {
        System.out.println("set up");
        configuration = new Configuration();
        configuration.set("bufferSize", "20480");
        configuration.set("zkServers", "172.19.0.201:7181");
        configuration.set("kafkaServers", "172.19.0.201:7092");
        fileSystem = FileSystem.get(new URI(mfsUrl), configuration);
        System.out.println("init success");
        // If the user lacks permission, pass an explicit user name:
        // fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "omnisky");
    }

    /** Releases the file-system handle after each test. */
    @After
    public void tearDown() throws Exception {
        configuration = null;
        fileSystem.close();
        fileSystem = null;
        System.out.print("tearDown"); // typo fix: was "tearDowm"
    }

    /** Creates a test directory on the remote file system. */
    @Test
    public void mkdir() throws Exception {
        fileSystem.mkdirs(new Path("/hdfsapi/test"));
    }

    /** Deletes a remote file (recursively) addressed by its logical name. */
    @Test
    public void delFile() throws Exception {
        String delPath = "3.txt";
        Path path = new Path(pathTransform(delPath));
        fileSystem.delete(path, true);
    }

    /**
     * Copies a local file to the remote file system.
     *
     * <p>Bug fix: the old code passed the cumulative byte count as the BUFFER
     * OFFSET — {@code write(bytes, bytesWritten, byteCount)} — which corrupts
     * the output and throws {@link IndexOutOfBoundsException} for any file
     * larger than one 10 KiB buffer. The offset must be 0. Both streams are
     * now closed via try-with-resources (the input stream was leaked before).
     */
    @Test // write a local file to the remote file system
    public void copyFile() throws Exception {
        String filePath = "kafkas.txt";
        Path path = new Path(pathTransform(filePath));
        try (OutputStream out = fileSystem.create(path);
             InputStream in = new FileInputStream(filePath)) {
            byte[] buffer = new byte[10240];
            int byteCount;
            while ((byteCount = in.read(buffer)) != -1) {
                out.write(buffer, 0, byteCount);
            }
        }
    }

    /**
     * Reads a remote file and writes it to local file "1.txt".
     *
     * <p>Bug fix: same wrong-offset {@code write} defect as {@link #copyFile},
     * and neither stream was ever closed — the local output could be left
     * unflushed on disk.
     */
    @Test // read a remote file into the local file system
    public void readFile() throws Exception {
        String filePath = "kafkas.txt";
        Path path = new Path(pathTransform(filePath));
        try (InputStream in = fileSystem.open(path);
             OutputStream out = new FileOutputStream(new File("1.txt"))) {
            byte[] buffer = new byte[10240];
            int byteCount;
            while ((byteCount = in.read(buffer)) != -1) {
                out.write(buffer, 0, byteCount);
            }
        }
    }

    /**
     * Loads "setting.properties" and prints two entries.
     *
     * <p>Bug fix: the properties input stream was never closed.
     */
    @Test
    public void readConfigFile() throws Exception {
        Properties props = new Properties();
        try (InputStream in = new FileInputStream("setting.properties")) {
            props.load(in);
        }
        System.out.println(props.getProperty("last_open_file"));
        System.out.println(props.getProperty("auto_save_interval", "120"));
    }

    /** Standalone entry point: connect, upload, then download the sample file. */
    public static void main(String[] args) throws Exception {
        hdfsTest hdf = new hdfsTest();
        hdf.setUp();
        hdf.copyFile();
        hdf.readFile();
    }
}
