package com.six.compress.old;


import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;

@SuppressWarnings("all")
/**
 * Small benchmark/demo that writes {@value #ROW_COUNT} identical string rows
 * into a compressed ORC file via Hive's {@link OrcSerde} and the old
 * {@code mapred} {@link OrcOutputFormat} API, then prints the elapsed write time.
 *
 * <p>Usage: {@code ORCSnappyStore [outputDir]} — the optional first argument
 * overrides the default output directory {@code /home/hdfs/data}.
 */
@SuppressWarnings("all")
public class ORCSnappyStore {

    /** Number of rows written in the benchmark loop. */
    private static final int ROW_COUNT = 120000;

    /** Default directory for the generated ORC file when no argument is given. */
    private static final String DEFAULT_OUTPUT_DIR = "/home/hdfs/data";

    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf();

        // A reflection inspector for String.class is a *primitive* (Java string)
        // inspector — casting it to StructObjectInspector would throw
        // ClassCastException at runtime. OrcSerde.serialize only needs the
        // ObjectInspector interface, so declare it as such.
        ObjectInspector inspector = ObjectInspectorFactory.getReflectionObjectInspector(
                String.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);

        OrcSerde serde = new OrcSerde();
        OrcOutputFormat outFormat = new OrcOutputFormat();
        FileSystem fs = FileSystem.get(conf);
        try {
            // NOTE(review): setCompressMapOutput affects intermediate map output,
            // not the ORC file itself; kept for parity with the original setup.
            conf.setCompressMapOutput(true);
            outFormat.setCompressOutput(conf, true);

            String outputDir = args.length > 0 ? args[0] : DEFAULT_OUTPUT_DIR;
            String path = outputDir + "/fly-" + Thread.currentThread().getId() + ".orc";

            RecordWriter<NullWritable, Object> writer =
                    outFormat.getRecordWriter(fs, conf, path, Reporter.NULL);
            try {
                long start = System.currentTimeMillis();
                for (int row = 0; row < ROW_COUNT; row++) {
                    writer.write(NullWritable.get(), serde.serialize("892.33338", inspector));
                }
                long elapsed = System.currentTimeMillis() - start;
                System.out.println("write success, " + ROW_COUNT + " rows in " + elapsed + " ms");
            } finally {
                // Close even on failure so the ORC footer/stream is released.
                writer.close(Reporter.NULL);
            }
        } finally {
            fs.close();
        }
    }
}