package com.briup.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.compress.BZip2Codec;

import java.io.IOException;

/**
 * Demonstrates writing and reading Hadoop SequenceFile data on HDFS,
 * covering the three compression modes: NONE, RECORD, and BLOCK.
 *
 * @author 最美如初
 * @version 1.0
 * @date 2021/7/19
 * @description SequenceFile write/read examples against an HDFS cluster
 */
public class SequenceFileHDFS {

    /** Number of demo (key, value) records written by each write method. */
    private static final int RECORD_COUNT = 100;

    /**
     * Writes {@value #RECORD_COUNT} (IntWritable, Text) records to HDFS
     * without any compression.
     *
     * @throws IOException if the writer cannot be created or a write fails
     */
    public void Seq_Write_none() throws IOException {
        Configuration conf = new Configuration();
        // BUGFIX: the correct property key is "fs.defaultFS";
        // "dfs.defaultFS" is not a recognized Hadoop configuration key
        // and was silently ignored.
        conf.set("fs.defaultFS", "hdfs://192.168.43.100:9000");
        Path path = new Path("hdfs://192.168.43.100:9000/user/hdfs/seq2.txt");
        SequenceFile.Writer.Option op = SequenceFile.Writer.file(path);
        SequenceFile.Writer.Option op1 = SequenceFile.Writer.keyClass(IntWritable.class);
        SequenceFile.Writer.Option op2 = SequenceFile.Writer.valueClass(Text.class);

        // NONE is the default compression type, so this option could be omitted.
        SequenceFile.Writer.Option op3 = SequenceFile.Writer.compression(SequenceFile.CompressionType.NONE);
        // try-with-resources guarantees the writer is closed even if append/hflush
        // throws; the original code's null check after the first dereference was dead.
        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf, op, op1, op2, op3)) {
            writeDemoRecords(writer);
            writer.hflush();
        }
    }

    /**
     * Writes {@value #RECORD_COUNT} records using RECORD compression.
     * Record compression compresses only the value of each key/value pair.
     *
     * @throws IOException if the writer cannot be created or a write fails
     */
    public void Seq_Write_record() throws IOException {
        Configuration conf = new Configuration();
        // BUGFIX: use "fs.defaultFS" (see Seq_Write_none).
        conf.set("fs.defaultFS", "hdfs://172.16.0.4:9000");
        Path path = new Path("/user/zhang/seq.txt");
        SequenceFile.Writer.Option op = SequenceFile.Writer.file(path);
        SequenceFile.Writer.Option op1 = SequenceFile.Writer.keyClass(IntWritable.class);
        SequenceFile.Writer.Option op2 = SequenceFile.Writer.valueClass(Text.class);
        // GzipCodec output stored on HDFS is NOT splittable across blocks;
        // BZip2Codec output IS splittable, which is why it is chosen here.
        SequenceFile.Writer.Option op3 = SequenceFile.Writer.compression(SequenceFile.CompressionType.RECORD, new BZip2Codec());
        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf, op, op1, op2, op3)) {
            writeDemoRecords(writer);
            writer.hflush();
        }
    }

    /**
     * Writes {@value #RECORD_COUNT} records using BLOCK compression.
     * Block compression groups many key/value pairs together: all keys are
     * compressed as one unit and all values as another.
     *
     * @throws IOException if the writer cannot be created or a write fails
     */
    public void Seq_Write_block() throws IOException {
        Configuration conf = new Configuration();
        // BUGFIX: use "fs.defaultFS" (see Seq_Write_none).
        conf.set("fs.defaultFS", "hdfs://172.16.0.4:9000");
        Path path = new Path("/user/zhang/seq.txt");
        SequenceFile.Writer.Option op = SequenceFile.Writer.file(path);
        SequenceFile.Writer.Option op1 = SequenceFile.Writer.keyClass(IntWritable.class);
        SequenceFile.Writer.Option op2 = SequenceFile.Writer.valueClass(Text.class);
        // Block-level compression with the default codec.
        SequenceFile.Writer.Option op3 = SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK);
        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf, op, op1, op2, op3)) {
            writeDemoRecords(writer);
            writer.hflush();
        }
    }

    /**
     * Reads a SequenceFile from HDFS and prints each key/value pair,
     * tab-separated, to stdout.
     *
     * @throws IOException            if the file cannot be opened or read
     * @throws IllegalAccessException if the key/value class is not accessible
     * @throws InstantiationException if the key/value class cannot be instantiated
     */
    public void seq_read_hdfs() throws IOException, IllegalAccessException, InstantiationException {
        Configuration conf = new Configuration();
        // BUGFIX: use "fs.defaultFS" (see Seq_Write_none).
        conf.set("fs.defaultFS", "hdfs://172.16.0.4:9000");
        Path path = new Path("/user/zhang/seq.txt");
        SequenceFile.Reader.Option op = SequenceFile.Reader.file(path);
        // BUGFIX: the reader was never closed; try-with-resources fixes the leak.
        try (SequenceFile.Reader sr = new SequenceFile.Reader(conf, op)) {
            // Instantiate reusable key/value holders of the types recorded
            // in the file's header.
            Writable k = (Writable) sr.getKeyClass().newInstance();
            Writable v = (Writable) sr.getValueClass().newInstance();
            while (sr.next(k, v)) {
                System.out.println(k + "\t" + v);
            }
        }
    }

    /**
     * Appends {@value #RECORD_COUNT} demo records of the form
     * (i, "zhang" + i) — shared by all three write methods.
     *
     * @param writer open SequenceFile writer to append to
     * @throws IOException if an append fails
     */
    private static void writeDemoRecords(SequenceFile.Writer writer) throws IOException {
        for (int i = 0; i < RECORD_COUNT; i++) {
            writer.append(new IntWritable(i), new Text("zhang" + i));
        }
    }

    public static void main(String[] args) throws Exception {
        new SequenceFileHDFS().Seq_Write_none();
    }
}
