package cn.doitedu.rtdw.udf;

import org.apache.flink.table.functions.ScalarFunction;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.nio.ByteBuffer;

public class ArraySer extends ScalarFunction {

    public byte[] eval(Double[] arr) throws IOException {
        byte[] resultBytes = new byte[8 * arr.length];
        int k = 0;
        for (double v : arr) {
            byte[] bytes = Bytes.toBytes(v);
            for (int i = 0; i < bytes.length; i++) {
                resultBytes[i + 8 * k] = bytes[i];
            }
            k++;
        }
        return resultBytes;
    }
}
