package com.six.compress.old;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.orc.CompressionKind;
import org.apache.orc.OrcFile;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;
import org.apache.orc.storage.ql.exec.vector.DoubleColumnVector;
import org.apache.orc.storage.ql.exec.vector.ListColumnVector;
import org.apache.orc.storage.ql.exec.vector.LongColumnVector;
import org.apache.orc.storage.ql.exec.vector.VectorizedRowBatch;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

@SuppressWarnings("all")
@SuppressWarnings("all")
public class ORCWriterListTest2 {

    /**
     * Writes a small ORC file whose schema is {@code struct<time:bigint, params:array<float>>},
     * filling each row's list with 10 random floats, then prints the elapsed time.
     *
     * <p>Fixes over the previous version:
     * <ul>
     *   <li>{@code ListColumnVector.offsets[row]} is now recorded before advancing
     *       {@code childCount}; previously it was never assigned, so every row's
     *       10 floats were written on top of each other at child index 0 while
     *       {@code childCount} still advanced, leaving uninitialized child values.</li>
     *   <li>Child values are written at {@code offsets[row] + j} instead of a fixed slot.</li>
     *   <li>The child vector is grown with {@code ensureSize()} before writing.</li>
     *   <li>The flush threshold uses {@code batch.getMaxSize()} instead of the
     *       unreachable hard-coded 100 (the batch capacity is only 10).</li>
     *   <li>The time column stores {@code start + i} instead of the constant {@code time + 1}.</li>
     *   <li>The writer is closed in a {@code finally} block so a failed
     *       {@code addRowBatch} no longer leaks the file handle.</li>
     * </ul>
     */
    public static void main(String[] args) throws Exception {
        // Define the ORC schema, i.e. the table structure.
        TypeDescription schema = TypeDescription.createStruct();
        schema.addField("time", TypeDescription.createLong());
        schema.addField("params", TypeDescription.createList(TypeDescription.createFloat()));

        // Absolute local path of the output ORC file.
        String orcFile = "/home/hdfs/data/fly_param_float_arr_" + System.currentTimeMillis() + ".orc";
        Configuration conf = new Configuration();
        FileSystem.getLocal(conf);

        long start = System.currentTimeMillis();
        Writer writer = OrcFile.createWriter(
                new Path(orcFile),
                OrcFile.writerOptions(conf)
                        .setSchema(schema)
                        .compress(CompressionKind.ZLIB)
                        .version(OrcFile.Version.V_0_12));
        try {
            VectorizedRowBatch batch = schema.createRowBatch(10);
            LongColumnVector timeVec = (LongColumnVector) batch.cols[0];
            ListColumnVector paramVec = (ListColumnVector) batch.cols[1];
            DoubleColumnVector paramChild = (DoubleColumnVector) paramVec.child;
            Random random = new Random(10000);

            final int listSize = 10;
            for (int i = 0; i < 10; i++) {
                int row = batch.size++;

                timeVec.vector[row] = start + i;

                // An ORC list row is an (offset, length) pair into the shared child
                // vector: the offset MUST be captured before childCount advances.
                paramVec.offsets[row] = paramVec.childCount;
                paramVec.lengths[row] = listSize;
                paramVec.childCount += listSize;
                // Grow the child vector if this row's values would overflow it.
                paramChild.ensureSize(paramVec.childCount, true);
                for (int j = 0; j < listSize; j++) {
                    paramChild.vector[(int) paramVec.offsets[row] + j] = random.nextFloat();
                }

                // Flush when the batch is full (its capacity is 10, not 100).
                if (batch.size == batch.getMaxSize()) {
                    writer.addRowBatch(batch);
                    batch.reset();
                }
            }
            // Flush any remaining partially filled batch.
            if (batch.size > 0) {
                writer.addRowBatch(batch);
            }
        } finally {
            writer.close();
        }
        System.out.println("cost=>" + (System.currentTimeMillis() - start));
    }

}
