package com.sixoo.ignite;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.orc.CompressionKind;
import org.apache.orc.OrcFile;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;
import org.apache.orc.storage.ql.exec.vector.DoubleColumnVector;
import org.apache.orc.storage.ql.exec.vector.LongColumnVector;
import org.apache.orc.storage.ql.exec.vector.VectorizedRowBatch;

import java.util.Random;

/**
 * Benchmark/test: writes 280,000 synthetic rows (1 long timestamp, 120 floats,
 * 679 booleans) as a ZLIB-compressed ORC file onto an Apache Ignite (IGFS)
 * Hadoop filesystem, logging throughput every 10,000 rows.
 */
public class ORCWriterFor611Test {

    public static void main(String[] args) throws Exception {

        // Define the ORC schema (i.e. the table structure):
        // "time" (long), p1..p120 (float), p121..p799 (boolean).
        TypeDescription schema = TypeDescription.createStruct();
        schema.addField("time", TypeDescription.createLong());
        for (int i = 1; i < 121; i++) {
            schema.addField("p" + i, TypeDescription.createFloat());
        }
        for (int i = 121; i < 800; i++) {
            schema.addField("p" + i, TypeDescription.createBoolean());
        }

        // Absolute path of the output ORC file on the Ignite file system.
        String lxw_orc1_file = "/tmp/flyp/test_zlib_alluxio.orc";
        Configuration conf = new Configuration();
        conf.set("fs.igfs.impl", "org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem");
        conf.set("fs.defaultFS", "igfs://igfs@192.168.10.61");
        Path hdfsPath = new Path(lxw_orc1_file);

        long start = System.currentTimeMillis();
        Random random = new Random(100);
        Random boolRandom = new Random();

        int max_row = 280000;
        int flushEvery = 10000; // flush a batch to the writer every 10,000 rows

        // FIX: try-with-resources — the original never closed the writer on the
        // error path, leaking the stream and losing buffered data.
        try (Writer writer = OrcFile.createWriter(
                hdfsPath,
                OrcFile.writerOptions(conf)
                        .setSchema(schema)
                        .encodingStrategy(OrcFile.EncodingStrategy.COMPRESSION)
                        .compress(CompressionKind.ZLIB)
                        // FIX: Version.FUTURE is the read-side "unknown version"
                        // placeholder and is rejected when writing; use CURRENT.
                        .version(OrcFile.Version.CURRENT)
        )) {
            // FIX: size the batch to the flush interval, not the whole data set —
            // the original pre-allocated vectors for all 280,000 rows across 800
            // columns (~1.8 GB) even though it flushed every 10,000 rows.
            VectorizedRowBatch batch = schema.createRowBatch(flushEvery);

            for (int i = 0; i < max_row; i++) {
                // FIX: write at the batch-relative row index, not the absolute
                // counter i. The original kept indexing with i after
                // batch.reset(), so every batch after the first was flushed
                // with stale zeros in positions 0..size-1 instead of the
                // freshly generated values.
                int row = batch.size++;
                ((LongColumnVector) batch.cols[0]).vector[row] = start + i;
                for (int j = 1; j < 121; j++) {
                    ((DoubleColumnVector) batch.cols[j]).vector[row] = random.nextFloat();
                }
                for (int k = 121; k < 800; k++) {
                    ((LongColumnVector) batch.cols[k]).vector[row] = boolRandom.nextInt(2);
                }
                if (batch.size == batch.getMaxSize()) {
                    writer.addRowBatch(batch);
                    long end = System.currentTimeMillis();
                    System.out.println("当前:第=>" + (i + 1) + "条" + " cost=>" + (end - start) + "  avg=>" + (end - start * 1.0) / (i + 1) + "ms/条");
                    batch.reset();
                }
            }
            // Flush the final, possibly partial, batch.
            if (batch.size > 0) {
                writer.addRowBatch(batch);
            }
            long e = System.currentTimeMillis();
            System.out.println("cost=>" + (e - start) / 1000);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

}
