package hbase.mapreduce;

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSyntaxException;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONObject;

import java.io.IOException;

/**
 * Created with hadoop-test.
 * Author: Zhou Yangang
 * Date: 2016/10/24
 * Time: 1:57 PM
 * Purpose: counts, per author, the JSON cell values stored in an HBase table.
 */
public class AnalyzeData extends Configured implements Tool {

    public enum Counters {ROW, VALID, ERR, COLS}

    /**
     * Configures and submits the analysis job.
     *
     * @param args {@code [table, family[:qualifier], outputPath]}
     * @return 0 on job success, 1 on bad usage or job failure
     * @throws Exception if job setup or submission fails
     */
    @Override
    public int run(String[] args) throws Exception {

        // Submit as "root" so the job can be launched from a dev machine.
        System.setProperty("HADOOP_USER_NAME", "root");

        if (args.length != 3) {
            System.err.println("Usage: AnalyzeData table family:qualifier outputfile");
            GenericOptionsParser.printGenericCommandUsage(System.err);
            return 1;
        }

        Scan scan = new Scan();
        // parseColumn splits "family:qualifier"; a single element means family only.
        byte[][] column = KeyValue.parseColumn(Bytes.toBytes(args[1]));
        if (column.length > 1) {
            scan.addColumn(column[0], column[1]);
        } else {
            scan.addFamily(column[0]);
        }

        Job job = Job.getInstance(getConf(), "Analyze Data");
        job.setJarByClass(AnalyzeData.class);
        // NOTE(review): this hard-coded path overrides setJarByClass above; kept
        // for local-run convenience — confirm it is wanted for cluster deploys.
        job.setJar("./target/hadoop-test-1.0-SNAPSHOT.jar");

        TableMapReduceUtil.initTableMapperJob(args[0]
                , scan, AnalyzeMapper.class, Text.class, IntWritable.class, job);
        // Counting is associative/commutative, so the reducer doubles as a
        // combiner to cut shuffle traffic; final output is unchanged.
        job.setCombinerClass(AnalyzeReducer.class);
        job.setReducerClass(AnalyzeReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // Remove any previous output so reruns do not fail on an existing dir.
        Path output = new Path(args[2]);
        FileSystem fs = FileSystem.get(getConf());
        if (fs.exists(output)) {
            fs.delete(output, true);
        }
        FileOutputFormat.setOutputPath(job, output);

        return job.waitForCompletion(true) ? 0 : 1;
    }


    /**
     * Table mapper: parses each cell value as JSON and emits
     * {@code (author, 1)} for every cell with a string "author" field.
     * Malformed cells are counted under {@link Counters#ERR} and skipped.
     */
    public static class AnalyzeMapper extends TableMapper<Text, IntWritable> {
        private final JsonParser parser = new JsonParser();
        // Reused output objects to avoid per-cell allocation.
        private final Text authorText = new Text();
        private final IntWritable one = new IntWritable(1);

        /**
         * @param key     row key of the current HBase row
         * @param value   all scanned cells of the row
         * @param context MapReduce context for output and counters
         */
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
            context.getCounter(Counters.ROW).increment(1);

            for (Cell cell : value.listCells()) {
                context.getCounter(Counters.COLS).increment(1);
                String jsonString = null;
                // try/catch is per cell so one bad value does not abort the
                // remaining cells of the row (the original skipped them all).
                try {
                    jsonString = Bytes.toString(CellUtil.cloneValue(cell));
                    JsonObject jsonObject = (JsonObject) parser.parse(jsonString);
                    JsonPrimitive author = (JsonPrimitive) jsonObject.get("author");
                    if (author == null) {
                        // Surface a missing field explicitly instead of an NPE.
                        throw new JsonSyntaxException("missing \"author\" field");
                    }
                    authorText.set(author.getAsString());
                    context.write(authorText, one);
                    context.getCounter(Counters.VALID).increment(1);
                } catch (Exception e) {
                    e.printStackTrace();
                    System.out.println("ROW = " + Bytes.toStringBinary(key.get()) + "/ VALUE = " + jsonString);
                    context.getCounter(Counters.ERR).increment(1);
                }
            }
        }
    }

    /**
     * Sums the per-author counts emitted by the mapper and writes
     * {@code (author, total)} pairs.
     */
    public static class AnalyzeReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        // Reused output value to avoid per-call allocation.
        private final IntWritable count = new IntWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int total = 0;
            for (IntWritable partial : values) {
                total += partial.get();
            }
            count.set(total);
            context.write(key, count);
        }
    }



    /** CLI entry point: delegates to {@code ToolRunner} and exits with the job status. */
    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new AnalyzeData(), args));
    }
}
