package com.dec.kks.etl.loader;

import com.google.gson.Gson;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;


/**
 * Demo loader: serializes a {@code Double[]} two ways (JSON via Gson, and
 * Java native serialization), writes both forms plus a timestamp into an
 * HBase row, then reads each form back and prints it.
 *
 * <p>Requires a reachable HBase cluster configured via the default
 * {@link HBaseConfiguration}.
 */
public class HBaseLoader implements ILoader {

    @Override
    public void loader() throws Exception {
        Configuration conf = HBaseConfiguration.create();

        // Connection and Table are AutoCloseable; try-with-resources closes both
        // in reverse order even on failure. (The original leaked the Connection,
        // and its `finally { table.close(); }` threw NullPointerException when
        // the connection failed before `table` was assigned, masking the cause.)
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("test_wud_table_fqcl"))) {

            Double[] fft = new Double[]{1.2, 1.4, 1.6};

            // JSON round-trip: the human-readable representation.
            Gson gson = new Gson();
            String fftstr = gson.toJson(fft);
            System.out.println(fftstr);
            fft = gson.fromJson(fftstr, Double[].class);
            System.out.println(Arrays.toString(fft));

            // Java-serialization round-trip: the binary representation.
            // Close the object streams so all buffered data is flushed.
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
                oos.writeObject(fft);
            }
            byte[] byteArray = bos.toByteArray();
            try (ObjectInputStream obs =
                     new ObjectInputStream(new ByteArrayInputStream(byteArray))) {
                String str = Arrays.toString((Double[]) obs.readObject());
                System.out.println(str);
            }

            // Bytes.toBytes encodes as UTF-8 explicitly; the original row key
            // used "kkk123".getBytes(), which depends on the platform charset.
            byte[] cf = Bytes.toBytes("cf");
            byte[] rowKey = Bytes.toBytes("kkk123");

            // One row carrying: current timestamp, binary form, JSON form.
            Put put = new Put(rowKey);
            put.addColumn(cf, Bytes.toBytes("t1"), Bytes.toBytes(System.currentTimeMillis()));
            put.addColumn(cf, Bytes.toBytes("fft"), byteArray);
            put.addColumn(cf, Bytes.toBytes("ffts"), Bytes.toBytes(fftstr));
            table.put(put);

            // Read back the JSON form.
            Get get = new Get(rowKey);
            get.addColumn(cf, Bytes.toBytes("ffts"));
            Result res = table.get(get);
            byte[] v = res.getValue(cf, Bytes.toBytes("ffts"));
            // Bytes.toString decodes UTF-8 explicitly; new String(v) used the
            // platform default charset and could garble non-ASCII content.
            System.out.println("fft_str" + Bytes.toString(v));

            // Read back the binary form and deserialize it.
            // NOTE(review): Java-native deserialization is acceptable here only
            // because this process wrote the bytes itself; never deserialize
            // untrusted data this way.
            Get get1 = new Get(rowKey);
            get1.addColumn(cf, Bytes.toBytes("fft"));
            Result res1 = table.get(get1);
            byte[] v1 = res1.getValue(cf, Bytes.toBytes("fft"));
            try (ObjectInputStream obs1 =
                     new ObjectInputStream(new ByteArrayInputStream(v1))) {
                String str1 = Arrays.toString((Double[]) obs1.readObject());
                System.out.println("double 数组：" + str1);
            }

        } catch (Exception e) {
            // Wrap with context but preserve the cause for diagnosis.
            throw new Exception("批量存储数据失败！", e);
        }
    }

    /** Entry point: points Hadoop at the local installation and runs the demo. */
    public static void main(String[] args) throws Exception {
        System.setProperty("hadoop.home.dir", "/home/hdfs/bigdata/hadoop-2.7.4");
        ILoader loader = new HBaseLoader();
        loader.loader();
    }
}
