package hbase.mapreduce;

import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.JsonPrimitive;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.util.Map;

/**
 * Created with hadoop-test.
 * Author: Zhou Yangang (周艳钢)
 * Date: 2016/10/24
 * Time: 5:54 PM
 * Purpose: map-only MapReduce job that reads JSON cell values from one HBase
 * table and splits the parsed fields across a "link" table and an "info" table.
 */
public class ParseJsonMulti extends Configured implements Tool{

    public enum Counters {ROW, VALID, ERR, COLS}


    @Override
    public int run(String[] args) throws Exception {

        if (args.length != 4) {
            System.err.println("Usage: ParseJsonMulti inputTable family:qualifier linkTable infoTable");
            GenericOptionsParser.printGenericCommandUsage(System.err);
            return 1;
        }

        String inputTable = args[0];

        Scan scan = new Scan();
        byte[][] column = KeyValue.parseColumn(Bytes.toBytes(args[1]));
        if (column.length > 1) {
            scan.addColumn(column[0], column[1]);
            getConf().setStrings("conf.family", Bytes.toString(column[0]));
            getConf().setStrings("conf.qualifier", Bytes.toString(column[1]));
        } else {
            scan.addFamily(column[0]);
            getConf().setStrings("conf.family", Bytes.toString(column[0]));
        }

        getConf().setStrings("conf.linktable", args[2]);
        getConf().setStrings("conf.infotable", args[3]);

        Job job = Job.getInstance(getConf(), "Parse Json to multi hbase table");
        job.setJarByClass(ParseJsonMulti.class);
        job.setJar("./target/hadoop-test-1.0-SNAPSHOT.jar");
        job.setNumReduceTasks(0);

        TableMapReduceUtil.initTableMapperJob(inputTable, scan, ParseMapper.class
                , ImmutableBytesWritable.class, Put.class, job);
        job.setOutputFormatClass(NullOutputFormat.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static class ParseMapper extends TableMapper<ImmutableBytesWritable, Mutation> {

        private JsonParser parser = new JsonParser();
        private HTable infoTable;
        private HTable linkTable;
        private byte[] family;

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            Connection conn = ConnectionFactory.createConnection(context.getConfiguration());
            String infoTableName = context.getConfiguration().get("conf.infotable");
            String linkTableName = context.getConfiguration().get("conf.linktable");
            infoTable = (HTable) conn.getTable(TableName.valueOf(infoTableName));
            infoTable.setAutoFlushTo(false);
            linkTable = (HTable) conn.getTable(TableName.valueOf(linkTableName));
            linkTable.setAutoFlushTo(false);

            family = Bytes.toBytes(context.getConfiguration().get("conf.family"));
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            infoTable.flushCommits();
            linkTable.flushCommits();
        }

        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
            context.getCounter(AnalyzeData.Counters.ROW).increment(1);

            Put infoPut = new Put(key.get());
            Put linkPut = new Put(key.get());
            String jsonString = null;
            try {
                for (Cell cell : value.listCells()) {
                    context.getCounter(AnalyzeData.Counters.COLS).increment(1);
                    jsonString = Bytes.toString(CellUtil.cloneValue(cell));
                    JsonObject jsonObject = (JsonObject) parser.parse(jsonString);
                    for (Map.Entry<String, JsonElement> entry : jsonObject.entrySet()) {
                        if (entry.getKey().equals("link")) {
                            linkPut.addColumn(family, Bytes.toBytes(entry.getKey())
                                    , Bytes.toBytes(entry.getValue().toString()));
                        } else {
                            infoPut.addColumn(family, Bytes.toBytes(entry.getKey())
                                    , Bytes.toBytes(entry.getValue().toString()));
                        }
                    }
                    infoTable.put(infoPut);
                    linkTable.put(linkPut);
                    context.getCounter(AnalyzeData.Counters.VALID).increment(1);
                }
            } catch (Exception e) {
                e.printStackTrace();
                System.out.println("ROW = " + Bytes.toStringBinary(key.get()) + "/ VALUE = " + jsonString);
                context.getCounter(AnalyzeData.Counters.ERR).increment(1);
            }
        }
    }


    public static void main(String[] args) throws Exception {
        int exitCode = ToolRunner.run(new ParseJsonMulti(), args);
        System.exit(exitCode);
    }
}
