/**
 * 
 */
package com.rrd.dw.mr.all;

import java.io.IOException;
// import java.util.HashMap;
// import java.util.Map;
import java.io.InputStream;
import java.util.Properties;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.rrd.dw.utils.DateUtil;
import com.rrd.dw.utils.JSONUtil;
import com.rrd.dw.utils.MD5Util;

/**
 * @author xiexihao
 */
public class DHBTagInfoRecordsExtractor extends Configured implements Tool {
    // Field separator for the delimited output records ("\001", the Hive default).
    private static final String STR_FILED_SPLIT_FLAG = "\001";

    // NOTE(review): not referenced anywhere in this file — possibly used by
    // callers elsewhere; confirm before removing.
    public static final int SIZEOF_MD5 = 16;

    // HBase column families scanned by this job.
    private static final byte[] FAMILY_INF = Bytes.toBytes("inf");
    private static final byte[] FAMILY_JOB = Bytes.toBytes("job");

    // Named output: basic tag-record table.
    private static final String STR_TARGET_POSITION_TAGINFO_RECORDS_BASIC = "dhbtaginforecordsbasic";
    // Named output: tag-record JOB info table.
    private static final String STR_TARGET_POSITION_TAGINFO_RECORDS_JOB = "dhbtaginforecordsjob";
    // Named output: Dianhuabang tag-record category-name table.
    private static final String STR_TARGET_POSITION_TAGINFO_RECORDS_TAGNAMES = "dhbtaginforecordstagnames";
    // Named output: Dianhuabang tag-record financial-tag-id table.
    private static final String STR_TARGET_POSITION_TAGINFO_RECORDS_ITAGIDS = "dhbtaginforecordsitagids";

    private static class DHBTagInfoRecordsMapper
            extends TableMapper<Text, Text> {
        private Text key = null;
        private Text value = null;

        protected void setup(Context context) {
            key = new Text();
            value = new Text();
        }

        protected void map(ImmutableBytesWritable key, Result value,
                Context context) throws IOException, InterruptedException {
            String keyStr = null;

            /*
             * Object[] objs = HBaseUtil.decodeUserRowkey(key.get());
             * if (objs == null)
             * return;
             * String userKey = objs[0].toString();
             * long timestamp = Long.parseLong(objs[1].toString());
             * keysb.append(userKey);
             * keysb.append(timestamp);
             */

            byte[] data = value.getValue(FAMILY_INF, Bytes.toBytes("data"));
            if (data == null)
                return;
            JSONObject dataObject=null;
            try{
                dataObject = JSON.parseObject(Bytes
                .toString(value.getValue(FAMILY_INF, Bytes.toBytes("data"))));
            } catch(Exception e){
                e.printStackTrace();
                System.out.println(Bytes
                .toString(value.getValue(FAMILY_INF, Bytes.toBytes("data"))));
            }
            if (dataObject == null)
                return;
            JSONObject tagsObject = dataObject.getJSONObject("tags");
            if (tagsObject == null)
                return;

            StringBuffer keysb = new StringBuffer();

            keysb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("job_id")) == null ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("job_id"))));
            keysb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("system_id")) == null
                    ? ""
                    : Bytes.toString(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("system_id"))));
            keysb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("start_time")) == null
                    ? ""
                    : Bytes.toLong(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("start_time"))));
            keyStr = MD5Util.getMd5(keysb.toString());
            this.key.set(STR_TARGET_POSITION_TAGINFO_RECORDS_JOB);

            StringBuffer sb = new StringBuffer();
            sb.append(keyStr).append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("job_id")) == null ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("job_id"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("finished")) == null
                    ? ""
                    : String.valueOf(Bytes.toBoolean(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("finished")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("requested")) == null
                    ? ""
                    : String.valueOf(Bytes.toBoolean(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("requested")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("successful")) == null
                    ? ""
                    : String.valueOf(Bytes.toBoolean(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("successful")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("start_time")) == null
                    ? ""
                    : String.valueOf(Bytes.toLong(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("start_time")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("end_time")) == null
                    ? ""
                    : String.valueOf(Bytes.toLong(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("end_time")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("data_file")) == null
                    ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("data_file"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("system_id")) == null
                    ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("system_id"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("query")) == null ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("query"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("try_times")) == null
                    ? ""
                    : String.valueOf(Bytes.toInt(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("try_times")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("type")) == null ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("type"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("message")) == null
                    ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("message"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("code")) == null ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("code"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("timestamp")) == null
                    ? ""
                    : String.valueOf(Bytes.toLong(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("timestamp")))));

            this.value.set(sb.toString());

            context.write(this.key, this.value);
            generateDHBTagInfoRecordsInfo(context, value, keyStr, tagsObject);
        }

        private void generateDHBTagInfoRecordsInfo(Context context,
                Result value, String keyStr, JSONObject tagsObject)
                throws IOException, InterruptedException {
            String code = value.getValue(FAMILY_INF,
                Bytes.toBytes("code")) == null ? ""
                    : String.valueOf(Bytes.toString(
                        value.getValue(FAMILY_INF, Bytes.toBytes("code"))));
            String message = value.getValue(FAMILY_INF,
                Bytes.toBytes("message")) == null ? ""
                    : String.valueOf(Bytes.toString(
                        value.getValue(FAMILY_INF, Bytes.toBytes("message"))));
            String sid = value.getValue(FAMILY_INF,
                Bytes.toBytes("sid")) == null ? ""
                    : String.valueOf(Bytes.toString(
                        value.getValue(FAMILY_INF, Bytes.toBytes("sid"))));

            StringBuffer sb = new StringBuffer();

            Set<String> tagStringSet = tagsObject.keySet();
            JSONObject obj = null;
            String status = null;
            String name = null;
            String teldesc = null;
            String telnum = null;
            String tellloc = null;
            JSONObject flagObject = null;
            JSONArray catnames = null;
            JSONArray itagIds = null;
            for (String tagString : tagStringSet) {
                obj = tagsObject.getJSONObject(tagString);
                if (obj == null)
                    continue;

                sb.delete(0, sb.length());

                status = JSONUtil.getString(obj, "status");
                name = JSONUtil.getString(obj, "name");
                teldesc = JSONUtil.getString(obj, "teldesc");
                telnum = JSONUtil.getString(obj, "telnum");
                tellloc = JSONUtil.getString(obj, "tellloc");
                flagObject = obj.getJSONObject("flag");
                catnames = obj.getJSONArray("catnames");
                itagIds = obj.getJSONArray("itag_ids");

                sb.append(keyStr).append(STR_FILED_SPLIT_FLAG);
                sb.append(code).append(STR_FILED_SPLIT_FLAG);
                sb.append(message).append(STR_FILED_SPLIT_FLAG);
                sb.append(sid).append(STR_FILED_SPLIT_FLAG);

                sb.append(tagString).append(STR_FILED_SPLIT_FLAG);
                sb.append(status).append(STR_FILED_SPLIT_FLAG);
                sb.append(name).append(STR_FILED_SPLIT_FLAG);
                sb.append(teldesc).append(STR_FILED_SPLIT_FLAG);
                sb.append(telnum).append(STR_FILED_SPLIT_FLAG);
                sb.append(tellloc).append(STR_FILED_SPLIT_FLAG);
                sb.append(JSONUtil.getString(flagObject, "type"))
                    .append(STR_FILED_SPLIT_FLAG);
                sb.append(JSONUtil.getString(flagObject, "num"))
                    .append(STR_FILED_SPLIT_FLAG);
                sb.append(JSONUtil.getString(flagObject, "fid"));

                this.key.set(STR_TARGET_POSITION_TAGINFO_RECORDS_BASIC);
                this.value.set(sb.toString());
                context.write(this.key, this.value);

                if (catnames != null) {
                    for (int i = 0; i < catnames.size(); i++) {
                        sb.delete(0, sb.length());
                        sb.append(keyStr).append(STR_FILED_SPLIT_FLAG);
                        sb.append(sid).append(STR_FILED_SPLIT_FLAG);
                        sb.append(tagString).append(STR_FILED_SPLIT_FLAG);
                        sb.append((i + 1)).append(STR_FILED_SPLIT_FLAG);
                        sb.append(catnames.getString(i));

                        this.key
                            .set(STR_TARGET_POSITION_TAGINFO_RECORDS_TAGNAMES);
                        this.value.set(sb.toString());
                        context.write(this.key, this.value);
                    }
                }

                sb.delete(0, sb.length());
                if (itagIds != null) {
                    for (int i = 0; i < itagIds.size(); i++) {
                        sb.delete(0, sb.length());
                        sb.append(keyStr).append(STR_FILED_SPLIT_FLAG);
                        sb.append(sid).append(STR_FILED_SPLIT_FLAG);
                        sb.append(tagString).append(STR_FILED_SPLIT_FLAG);
                        sb.append((i + 1)).append(STR_FILED_SPLIT_FLAG);
                        sb.append(itagIds.getString(i));

                        this.key
                            .set(STR_TARGET_POSITION_TAGINFO_RECORDS_ITAGIDS);
                        this.value.set(sb.toString());
                        context.write(this.key, this.value);
                    }
                }
            }
        }
    }

    private static class DHBTagInfoRecordsReducer
            extends Reducer<Text, Text, Text, Text> {
        private NullWritable key2 = NullWritable.get();
        private MultipleOutputs<Text, Text> mos = null;

        protected void setup(Context context)
                throws IOException, InterruptedException {
            mos = new MultipleOutputs<Text, Text>(context);
        }

        protected void cleanup(Context context)
                throws IOException, InterruptedException {
            mos.close();
        }

        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            String keyStr = null;

            for (Text value : values) {
                keyStr = key.toString();

                if (STR_TARGET_POSITION_TAGINFO_RECORDS_BASIC.equals(keyStr)) {
                    this.mos.write(STR_TARGET_POSITION_TAGINFO_RECORDS_BASIC,
                        key2, value);
                } else if (STR_TARGET_POSITION_TAGINFO_RECORDS_JOB
                    .equals(keyStr)) {
                    this.mos.write(STR_TARGET_POSITION_TAGINFO_RECORDS_JOB,
                        key2, value);
                } else if (STR_TARGET_POSITION_TAGINFO_RECORDS_TAGNAMES
                    .equals(keyStr)) {
                    this.mos.write(STR_TARGET_POSITION_TAGINFO_RECORDS_TAGNAMES,
                        key2, value);
                } else if (STR_TARGET_POSITION_TAGINFO_RECORDS_ITAGIDS
                    .equals(keyStr)) {
                    this.mos.write(STR_TARGET_POSITION_TAGINFO_RECORDS_ITAGIDS,
                        key2, value);
                }
            }
        }
    }

    /**
     * Configures and runs the extraction job.
     *
     * @param args [0] HDFS output path, [1] start date, [2] stop date
     * @return 0 on success, 1 on job failure or bad arguments
     * @throws Exception if job setup or execution fails
     */
    @Override
    public int run(String[] args) throws Exception {
        if (args.length < 3) {
            // Original threw ArrayIndexOutOfBoundsException on missing args.
            System.err.println("Usage: DHBTagInfoRecordsExtractor"
                + " <output-path> <start-date> <stop-date>");
            return 1;
        }

        // NOTE(review): these properties are loaded but never read in this
        // method — confirm whether the load can be removed entirely.
        Properties prop = new Properties();
        try (InputStream is = this.getClass().getClassLoader()
            .getResourceAsStream("hbase.properties")) {
            if (is != null) { // original NPE'd when the resource was missing
                prop.load(is);
            }
        }

        Job job = Job.getInstance(HBaseConfiguration.create(this.getConf()));
        Configuration conf = job.getConfiguration();
        conf.set("hbase.zookeeper.quorum", "datanode1,datanode2,datanode3");
        conf.set("mapreduce.map.memory.mb", "3072");
        conf.set("mapreduce.reduce.memory.mb", "5120");
        // NOTE(review): this value is in milliseconds; 120 ms is far below
        // the usual default (60000) — confirm whether 120000 was intended.
        conf.set("hbase.client.scanner.timeout.period", "120");
        conf.set("mapreduce.output.fileoutputformat.compress", "true");
        conf.set("mapreduce.output.fileoutputformat.compress.type", "BLOCK");
        conf.set("mapreduce.output.fileoutputformat.compress.codec",
            "org.apache.hadoop.io.compress.GzipCodec");
        conf.set("io.compression.codecs",
            "org.apache.hadoop.io.compress.GzipCodec");

        conf.set("param.out.path", args[0]);
        conf.set("param.start_date", args[1]);
        conf.set("param.stop_date", args[2]);

        String tableName = "dhbtaginfo_records";
        String outputDir = conf.get("param.out.path");
        System.out.println("开始日期:" + conf.get("param.start_date"));
        System.out.println("结束日期:" + conf.get("param.stop_date"));
        System.out.println("输入HBase表名:" + tableName);
        System.out.println("输出路径:" + outputDir);
        String model = conf.get("param.run.model", "ALL");
        Long startTime = DateUtil
            .formateToTimestamp(conf.get("param.start_date"));
        Long endTime = DateUtil.formateToTimestamp(conf.get("param.stop_date"));

        // Clear any previous output so FileOutputFormat does not fail.
        Path outpath = new Path(outputDir);
        FileSystem fs = outpath.getFileSystem(conf);
        if (fs.exists(outpath)) {
            fs.delete(outpath, true);
        }

        job.setJarByClass(DHBTagInfoRecordsExtractor.class);

        Scan scan = new Scan();
        scan.addFamily(FAMILY_INF);
        scan.addFamily(FAMILY_JOB);
        scan.setMaxVersions(1);
        if (!"ALL".equals(model)) {
            // Incremental run: restrict the scan to cells written within the
            // window; the end is extended by 14400000 ms (4 hours) to catch
            // late writes.
            System.out.println("增量运行.............");
            scan.setTimeRange(startTime, endTime + 14400000);
        } else {
            System.out.println("全量运行.............");
        }

        TableMapReduceUtil.initTableMapperJob(tableName, scan,
            DHBTagInfoRecordsMapper.class, Text.class, Text.class, job);
        FileOutputFormat.setOutputPath(job, outpath);
        // Register one named output per target table; the reducer routes
        // records by matching its key against these names.
        MultipleOutputs.addNamedOutput(job,
            STR_TARGET_POSITION_TAGINFO_RECORDS_BASIC, TextOutputFormat.class,
            Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job,
            STR_TARGET_POSITION_TAGINFO_RECORDS_JOB, TextOutputFormat.class,
            Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job,
            STR_TARGET_POSITION_TAGINFO_RECORDS_TAGNAMES,
            TextOutputFormat.class, Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job,
            STR_TARGET_POSITION_TAGINFO_RECORDS_ITAGIDS, TextOutputFormat.class,
            Text.class, Text.class);

        job.setReducerClass(DHBTagInfoRecordsReducer.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Command-line entry point: delegates to {@link ToolRunner} so standard
     * Hadoop generic options are parsed, then exits with the job's status.
     *
     * @param args output path, start date, stop date
     * @throws Exception if the job cannot be configured or executed
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        Tool extractor = new DHBTagInfoRecordsExtractor();
        int exitCode = ToolRunner.run(conf, extractor, args);
        System.exit(exitCode);
    }
}
