package com.ruyuan.ingestion.parser.hbase;

import com.ruyuan.ingestion.parser.EventParser;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Converts one parsed event record (a pre-split field array) into an HBase {@link Put}.
 *
 * <p>Row key: event_id. Column layout written by {@link #parse(String[])}:
 * <ul>
 *   <li>family {@code schedule}: start_time</li>
 *   <li>family {@code location}: city, state, zip, country, lat, lng</li>
 *   <li>family {@code creator}:  user_id</li>
 *   <li>family {@code others}:   commons — any trailing fields joined with '|'</li>
 * </ul>
 *
 * <p>NOTE(review): the original DDL comment was
 * {@code create 'events','schedule','location','creator'} — it does not include the
 * {@code others} family this parser also writes to. The write will fail at runtime
 * unless the table actually has that family; confirm the table schema.
 */
public class HBaseEventParser extends EventParser<Put> {

    // Family names are hoisted so each parse() call does not re-encode the
    // same string literals over and over.
    private static final byte[] CF_SCHEDULE = Bytes.toBytes("schedule");
    private static final byte[] CF_LOCATION = Bytes.toBytes("location");
    private static final byte[] CF_CREATOR  = Bytes.toBytes("creator");
    private static final byte[] CF_OTHERS   = Bytes.toBytes("others");

    /**
     * Builds the Put for a single event record.
     *
     * <p>Expected field layout:
     * [0]=event_id, [1]=user_id, [2]=start_time, [3]=city, [4]=state,
     * [5]=zip, [6]=country, [7]=lat, [8]=lng, [9..]=extra fields.
     *
     * @param fields the split CSV record; must contain at least 9 entries
     * @return a Put keyed by event_id carrying the columns described on the class
     * @throws IllegalArgumentException if fields is null or shorter than 9 entries
     */
    @Override
    public Put parse(String[] fields) {
        if (fields == null || fields.length < 9) {
            throw new IllegalArgumentException(
                    "expected at least 9 fields (event_id..lng), got "
                            + (fields == null ? "null" : fields.length));
        }
        Put put = new Put(Bytes.toBytes(fields[0]));
        // schedule
        put.addColumn(CF_SCHEDULE, Bytes.toBytes("start_time"), Bytes.toBytes(fields[2]));
        // location
        put.addColumn(CF_LOCATION, Bytes.toBytes("city"), Bytes.toBytes(fields[3]));
        put.addColumn(CF_LOCATION, Bytes.toBytes("state"), Bytes.toBytes(fields[4]));
        put.addColumn(CF_LOCATION, Bytes.toBytes("zip"), Bytes.toBytes(fields[5]));
        put.addColumn(CF_LOCATION, Bytes.toBytes("country"), Bytes.toBytes(fields[6]));
        put.addColumn(CF_LOCATION, Bytes.toBytes("lat"), Bytes.toBytes(fields[7]));
        put.addColumn(CF_LOCATION, Bytes.toBytes("lng"), Bytes.toBytes(fields[8]));
        // creator
        put.addColumn(CF_CREATOR, Bytes.toBytes("user_id"), Bytes.toBytes(fields[1]));

        // Any trailing fields beyond index 8 are '|'-joined into others:commons.
        // StringBuilder (not StringBuffer): no synchronization needed for a local.
        // The loop condition alone covers the "no extras" case, so the original
        // redundant length guard is dropped; an empty value is still written,
        // preserving the original behavior.
        StringBuilder extras = new StringBuilder();
        for (int i = 9; i < fields.length; i++) {
            if (extras.length() > 0) {
                extras.append('|');
            }
            extras.append(fields[i]);
        }
        put.addColumn(CF_OTHERS, Bytes.toBytes("commons"), Bytes.toBytes(extras.toString()));
        return put;
    }
}
