package com.asiainfo.dacp.common.datasink;

import org.apache.flink.orc.vector.Vectorizer;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.nio.charset.StandardCharsets;

/**
 * Flink ORC {@link Vectorizer} that writes an arbitrary POJO into a
 * {@link VectorizedRowBatch} by reflecting over the POJO's declared fields.
 *
 * <p>Every column is written as a UTF-8 string via {@link BytesColumnVector},
 * so the ORC schema passed to the constructor is expected to declare string
 * columns in the same order as the POJO's instance fields.
 * NOTE(review): field order relies on {@code Class#getDeclaredFields()}, which
 * matches source declaration order on common JVMs but is not guaranteed by the
 * spec — confirm the schema/POJO pairing in callers.
 *
 * @param <T> POJO type whose instance fields supply the column values
 */
public class PoJoGeneralOrcVector<T> extends Vectorizer<T> {

    private static final Logger LOGGER = LoggerFactory.getLogger(PoJoGeneralOrcVector.class);

    /**
     * @param schema ORC type-description string (e.g. {@code struct<a:string,b:string>})
     */
    public PoJoGeneralOrcVector(String schema) {
        super(schema);
    }

    /**
     * Appends one row to {@code batch}, one column per instance field of {@code data}.
     *
     * <p>Null field values are recorded using the ORC null convention
     * ({@code isNull[row] = true}, {@code noNulls = false}); non-null values are
     * stored as their {@code toString()} bytes in UTF-8. Static and
     * compiler-synthetic fields (e.g. {@code this$0}, instrumentation fields)
     * are skipped because they are not data columns.
     *
     * @param data  POJO providing the row's values; must not be {@code null}
     * @param batch target batch; its size is advanced by one on success
     * @throws IOException if reflection or column writing fails; the partially
     *                     written row is rolled back so the batch stays consistent
     */
    @Override
    public void vectorize(T data, VectorizedRowBatch batch) throws IOException {
        // Claim the next row slot up front; rolled back in the catch block so a
        // failed record never leaves a half-populated row in the batch.
        int row = batch.size++;
        try {
            int col = 0;
            for (Field field : data.getClass().getDeclaredFields()) {
                // Skip non-data fields so they cannot shift the column mapping.
                if (field.isSynthetic() || Modifier.isStatic(field.getModifiers())) {
                    continue;
                }
                field.setAccessible(true); // allow reading private POJO fields
                Object value = field.get(data);
                BytesColumnVector columnVector = (BytesColumnVector) batch.cols[col];
                if (value == null) {
                    // ORC null convention: flag the slot and clear the no-nulls hint.
                    columnVector.isNull[row] = true;
                    columnVector.noNulls = false;
                } else {
                    columnVector.setVal(row, value.toString().getBytes(StandardCharsets.UTF_8));
                }
                col++;
            }
        } catch (Exception e) {
            batch.size--; // undo the row reservation made above
            LOGGER.error("hive向量化程序执行错误", e);
            // Propagate instead of swallowing: silently dropping the record
            // would corrupt the output without any signal to the Flink job.
            throw new IOException("Failed to vectorize record into ORC batch", e);
        }
    }
}
