package com.arch.hdfs.tfile;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.file.tfile.TFile;

/**
 * @author pizhihui
 * @date 2024-06-05 16:34
 */
public class TFileDemo {

    /**
     * Demo: create a Hadoop {@link TFile} at {@code /tmp/tfile} and write a single
     * key/value pair into it.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        String file = "/tmp/tfile";
        Configuration conf = new Configuration();

        try {
            Path path = new Path(file);
            FileSystem fs = path.getFileSystem(conf);

//            long length = fs.getFileStatus(path).getLen();
//            FSDataInputStream fsdis = fs.open(path);

            int minBlockSize = 512;
            // Valid compression names are "none", "gz" and "lzo" (see
            // TFile.COMPRESSION_NONE etc.). The previous value "no" is rejected by
            // the Writer constructor with an IllegalArgumentException.
            String compressName = "none";
            // Empty comparator => unsorted TFile; keys may be appended in any order.
            String comparator = "";

            // try-with-resources guarantees writer and stream are closed even if
            // append() throws; close order is reverse of declaration (writer first).
            try (FSDataOutputStream fsdos = fs.create(path);
                 TFile.Writer writer =
                         new TFile.Writer(fsdos, minBlockSize, compressName, comparator, conf)) {

                // append(key, value): "key1" is the key, "key2" is the value.
                writer.append("key1".getBytes(), "key2".getBytes());
            }

        } catch (Exception e) {
            // Demo code: just report the failure. Real code should use a logger
            // and/or propagate a meaningful exception.
            e.printStackTrace();
        }
    }

}
