package hadoop.ch03.v17024410134;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

/**
 * Demo: connect to an HDFS NameNode and print the metadata attributes
 * (access time, block size, group, length, modification time, owner,
 * replication factor, path) of a single file.
 *
 * <p>Requires a reachable HDFS cluster at the hard-coded address below and
 * the file {@code /17024410134/test3.txt} to exist.
 */
public class ReadHDFSFileAttr {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://192.168.30.131:8020");
        // try-with-resources: FileSystem is Closeable and must be released,
        // otherwise the client connection leaks.
        try (FileSystem fs = FileSystem.get(uri, conf, "hadoop")) {
            Path dfs = new Path("/17024410134/test3.txt");
            FileStatus fstu = fs.getFileStatus(dfs);
            long blockSize = fstu.getBlockSize();
            System.out.println("getAccessTime=" + fstu.getAccessTime());
            // label typo fixed: was "blockszie"
            System.out.println("blocksize=" + blockSize);
            System.out.println("getGroup=" + fstu.getGroup());
            System.out.println("getLen=" + fstu.getLen());
            System.out.println("getModificationTime=" + fstu.getModificationTime());
            System.out.println("getOwner=" + fstu.getOwner());
            System.out.println("getReplication=" + fstu.getReplication());
            System.out.println("getPath=" + fstu.getPath());
        }
    }
}
