import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Random;

/**
 * Exercises basic HDFS client operations — create, read (cat), delete and
 * mkdir — against an HA-configured Hadoop cluster, as JUnit 4 tests.
 *
 * <p>Each test opens its own {@link FileSystem} handle in {@link #setUp()}
 * and releases it in {@link #tearDown()}.
 */
public class HDFSApp {
    /** RPC address of the active namenode. */
    public static final String HDFS_PATH = "hdfs://hadoop-master:9000";

    FileSystem fileSystem = null;
    Configuration configuration = null;

    /**
     * Builds the client configuration (HA nameservice addressing plus
     * hostname-based datanode resolution) and opens the FileSystem handle.
     *
     * @throws Exception if the FileSystem cannot be obtained
     */
    @Before
    public void setUp() throws Exception {
        configuration = new Configuration();
        // Resolve datanodes by hostname so clients outside the cluster
        // network (e.g. behind NAT/port-forwarding) can reach them.
        configuration.set("dfs.client.use.datanode.hostname", "true");
        configuration.set("fs.defaultFS", HDFS_PATH); // access via hostname
        configuration.set("dfs.nameservices", "mycluster");
        configuration.set("dfs.ha.namenodes.mycluster", "nn1");
        configuration.set("dfs.namenode.rpc-address.mycluster.nn1", "hadoop-master:9000");
        configuration.set("dfs.client.failover.proxy.provider.mycluster",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        fileSystem = FileSystem.get(configuration);
        System.out.println("HDFS APP SETUP");
    }

    /**
     * Writes a small child/parent sample dataset to
     * {@code /hdfsapi/test/childs.log}.
     *
     * @throws IOException if the file cannot be created or written
     */
    @Test
    public void create() throws IOException {
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("child parent\n");
        stringBuilder.append("Tom Lucy\n");
        stringBuilder.append("Tom Jack\n");
        stringBuilder.append("Jone Lucy\n");
        stringBuilder.append("Jone Jack\n");
        stringBuilder.append("Lucy Mary\n");
        stringBuilder.append("Lucy Ben\n");
        stringBuilder.append("Jack Alice\n");
        stringBuilder.append("Jack Jesse\n");
        stringBuilder.append("Terry Alice\n");
        stringBuilder.append("Terry Jesse\n");
        stringBuilder.append("Philip Terry\n");
        stringBuilder.append("Philip Alma\n");
        stringBuilder.append("Mark Terry\n");
        stringBuilder.append("Mark Alma\n");

        // try-with-resources guarantees the stream is closed even if the
        // write fails (the original leaked it on any exception before close).
        try (FSDataOutputStream outputStream =
                     fileSystem.create(new Path("/hdfsapi/test/childs.log"))) {
            // Explicit UTF-8: the no-arg getBytes() uses the platform
            // default charset and is not portable.
            outputStream.write(stringBuilder.toString().getBytes(StandardCharsets.UTF_8));
            outputStream.flush();
        }
    }

    /** Prints the UTF-8 byte length of a sample string. */
    @Test
    public void count() {
        String s1 = "hello hadoop";
        // Explicit charset — byte length depends on the encoding used.
        System.out.print(s1.getBytes(StandardCharsets.UTF_8).length);
    }

    /**
     * Streams the contents of {@code /hdfsapi/test/a.log} to stdout.
     *
     * @throws IOException if the file cannot be opened or read
     */
    @Test
    public void cat() throws IOException {
        // try-with-resources closes the stream even if copyBytes throws.
        try (FSDataInputStream inputStream = fileSystem.open(new Path("/hdfsapi/test/a.log"))) {
            IOUtils.copyBytes(inputStream, System.out, 1024);
        }
    }

    /**
     * Recursively deletes the HBase hbck lock file.
     *
     * @throws IOException if the delete RPC fails
     */
    @Test
    public void delete() throws IOException {
        fileSystem.delete(new Path("/hbase/.tmp/hbase-hbck.lock"), true);
    }

    /**
     * Creates the {@code /hbase} directory (and any missing parents).
     *
     * @throws IOException if the mkdirs RPC fails
     */
    @Test
    public void mkdir() throws IOException {
        fileSystem.mkdirs(new Path("/hbase"));
    }

    /**
     * Releases the HDFS client connection after each test.
     *
     * <p>Fix: the original nulled {@code configuration} twice (copy-paste
     * bug) and never closed {@code fileSystem}, leaking the client
     * connection between tests.
     *
     * @throws Exception if closing the FileSystem fails
     */
    @After
    public void tearDown() throws Exception {
        if (fileSystem != null) {
            fileSystem.close();
            fileSystem = null;
        }
        configuration = null;
        System.out.println("HDFS APP SHUTDOWN");
    }
}