package cn.humaohua.study.hadoop.cmdrunner.c5;

import cn.humaohua.study.hadoop.cmdrunner.Test;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.springframework.stereotype.Component;

import java.net.URI;

@Component
/**
 * Writes 100 records to an HDFS SequenceFile: descending {@link IntWritable}
 * keys (100 down to 1) paired with {@link Text} values cycled from {@link #DATA}.
 * Each record's byte offset, key, and value are printed before appending.
 */
public class SequenceFileWriteTest implements Test {
    private static final String[] DATA = {"One, two, buckle my shoe",
            "Three, four, shut the door", "Five, six, pick up sticks", "Seven, eight, lay them straight", "Nine, ten, a big fat hen"
    };

    /**
     * Creates (or overwrites) {@code numbers.seq} on the local HDFS instance and
     * appends 100 key/value pairs.
     *
     * @throws Exception if the writer cannot be created or an append fails
     */
    @Override
    public void test() throws Exception {
//        String uri = "/opt/tmp/hadoop/numbers.seq";
        // NOTE(review, translated from Chinese): during testing, data was silently
        // not written with no error reported; re-extracting Hadoop and reusing the
        // old config files made it work — environment issue to confirm.
        String uri = "hdfs://localhost/user/hmh/numbers.seq";
        Configuration conf = new Configuration();
        Path path = new Path(uri);
        IntWritable key = new IntWritable();
        Text value = new Text();
        // Use the non-deprecated createWriter(Configuration, Writer.Option...)
        // overload — it resolves the FileSystem from the (fully-qualified) path,
        // so the manual FileSystem.get(...) call is no longer needed. Writer is
        // Closeable, so try-with-resources replaces the null-check/closeStream
        // idiom and no longer swallows close() failures.
        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(path),
                SequenceFile.Writer.keyClass(key.getClass()),
                SequenceFile.Writer.valueClass(value.getClass()))) {
            for (int i = 0; i < 100; i++) {
                key.set(100 - i);  // descending keys: 100, 99, ..., 1
                value.set(DATA[i % DATA.length]);
                // getLength() reports the current byte position in the file,
                // i.e. the offset at which this record will start.
                System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key, value);
                writer.append(key, value);
            }
        }
    }
}
