/**
 * 
 */
package com.rrd.dw.mr.gzip;

import java.io.IOException;
// import java.util.HashMap;
// import java.util.Map;
import java.io.InputStream;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.rrd.dw.utils.DateUtil;
import com.rrd.dw.utils.JSONUtil;
import com.rrd.dw.utils.MD5Util;

/**
 * @author xiexihao
 */
public class DHBCuiShouRecordsExtractor extends Configured implements Tool {
    // Output field delimiter: the '\001' (Ctrl-A) separator — presumably chosen
    // to match Hive's default field delimiter. (Name keeps the historical
    // "FILED" typo; renaming would break any external use sites.)
    private static final String STR_FILED_SPLIT_FLAG = "\001";

    // Byte length of an MD5 digest. NOTE(review): unused in this file —
    // possibly referenced from elsewhere; confirm before removing.
    public static final int SIZEOF_MD5 = 16;

    // HBase column families scanned from the source table:
    // "inf" holds the JSON payload and response metadata, "job" the crawl-job fields.
    private static final byte[] FAMILY_INF = Bytes.toBytes("inf");
    private static final byte[] FAMILY_JOB = Bytes.toBytes("job");

    // Named-output ids for MultipleOutputs; they double as the mapper output
    // keys used by the reducer to route records. Must be alphanumeric only
    // (MultipleOutputs rejects other characters).
    private static final String STR_TARGET_POSITION_DHBCUISHOU_RECORDS_BASIC = "dhbcuishourecordsbasic";
    private static final String STR_TARGET_POSITION_DHBCUISHOU_RECORDS_JOB = "dhbcuishourecordsjob";

    private static class DHBCuishouRecordsMapper
            extends TableMapper<Text, Text> {
        private Text key = null;
        private Text value = null;

        protected void setup(Context context) {
            key = new Text();
            value = new Text();
        }

        protected void map(ImmutableBytesWritable key, Result value,
                Context context) throws IOException, InterruptedException {
            String keyStr = null;

            /*
            Object[] objs = HBaseUtil.decodeUserRowkey(key.get());
            if (objs == null)
                return;

            String userKey = objs[0].toString();
            long timestamp = Long.parseLong(objs[1].toString());
            
            keysb.append(userKey);
            keysb.append(timestamp);
            */
            
            byte[] data = value.getValue(FAMILY_INF, Bytes.toBytes("data"));
            if(data == null)
            	return;
            JSONObject dataObject = JSON.parseObject(Bytes.toString(
                    value.getValue(FAMILY_INF, Bytes.toBytes("data"))));
            if(dataObject == null)
            	return;
            
            StringBuffer keysb = new StringBuffer();
            
            keysb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("job_id")) == null ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("job_id"))));
            keysb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("system_id")) == null
                    ? ""
                    : Bytes.toString(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("system_id"))));
            keysb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("end_time")) == null
                    ? ""
                    : Bytes.toLong(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("end_time"))));
            keyStr = MD5Util.getMd5(keysb.toString());
            this.key.set(STR_TARGET_POSITION_DHBCUISHOU_RECORDS_JOB);

            StringBuffer sb = new StringBuffer();
            sb.append(keyStr).append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("job_id")) == null ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("job_id"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("finished")) == null
                    ? ""
                    : String.valueOf(Bytes.toBoolean(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("finished")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("requested")) == null
                    ? ""
                    : String.valueOf(Bytes.toBoolean(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("requested")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("successful")) == null
                    ? ""
                    : String.valueOf(Bytes.toBoolean(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("successful")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("start_time")) == null
                    ? ""
                    : String.valueOf(Bytes.toLong(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("start_time")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("end_time")) == null
                    ? ""
                    : String.valueOf(Bytes.toLong(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("end_time")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("data_file")) == null
                    ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("data_file"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("system_id")) == null
                    ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("system_id"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("query")) == null ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("query"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("try_times")) == null
                    ? ""
                    : String.valueOf(Bytes.toInt(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("try_times")))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("type")) == null ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("type"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("message")) == null
                    ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("message"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("code")) == null ? ""
                    : Bytes.toString(
                        value.getValue(FAMILY_JOB, Bytes.toBytes("code"))))
                .append(STR_FILED_SPLIT_FLAG);
            sb.append(
                value.getValue(FAMILY_JOB, Bytes.toBytes("timestamp")) == null
                    ? ""
                    : String.valueOf(Bytes.toLong(value.getValue(FAMILY_JOB,
                        Bytes.toBytes("timestamp")))));

            this.value.set(sb.toString());

            context.write(this.key, this.value);
            generateDHBCuishouRecordsInfo(context, value, keyStr, dataObject);
        }
        
        private void generateDHBCuishouRecordsInfo(Context context, Result value, String keyStr, JSONObject dataObject) 
        	throws IOException, InterruptedException {
        	
        	String code = value.getValue(FAMILY_INF, Bytes.toBytes("code")) == null
                    ? ""
                    : String.valueOf(Bytes.toString(value.getValue(FAMILY_INF,
                        Bytes.toBytes("code"))));
        	String message = value.getValue(FAMILY_INF, Bytes.toBytes("message")) == null
                    ? ""
                    : String.valueOf(Bytes.toString(value.getValue(FAMILY_INF,
                        Bytes.toBytes("message"))));
        	String sid = value.getValue(FAMILY_INF, Bytes.toBytes("sid")) == null
                    ? ""
                    : String.valueOf(Bytes.toString(value.getValue(FAMILY_INF,
                        Bytes.toBytes("sid"))));
        	
        	String uid = dataObject.getString("uid") == null ? "" : dataObject.getString("uid");
        	String tel = dataObject.getString("tel") == null ? "" : dataObject.getString("tel");
        	String time = dataObject.getString("time") == null ? "" : dataObject.getString("time");
        	String total_num = dataObject.getString("total_num") == null ? "" : dataObject.getString("total_num");
        	String effective_num = dataObject.getString("effective_num") == null ? "" : dataObject.getString("effective_num");
        	
        	StringBuffer head = new StringBuffer();
        	
        	head.append(keyStr).append(STR_FILED_SPLIT_FLAG);
        	head.append(code).append(STR_FILED_SPLIT_FLAG);
        	head.append(message).append(STR_FILED_SPLIT_FLAG);
        	head.append(sid).append(STR_FILED_SPLIT_FLAG);
        	head.append(uid).append(STR_FILED_SPLIT_FLAG);
        	head.append(tel).append(STR_FILED_SPLIT_FLAG);
        	head.append(time).append(STR_FILED_SPLIT_FLAG);
        	head.append(total_num).append(STR_FILED_SPLIT_FLAG);
        	head.append(effective_num);
        	
        	generateSpecificRow(context, dataObject, head.toString(), "overview", "dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "overview", "not_sure_dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_week", "dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_week", "not_sure_dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_two_weeks", "dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_two_weeks", "not_sure_dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_three_weeks", "dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_three_weeks", "not_sure_dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_30_days", "dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_30_days", "not_sure_dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_30_and_60_days", "dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_30_and_60_days", "not_sure_dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_60_and_90_days", "dunning");
        	generateSpecificRow(context, dataObject, head.toString(), "last_60_and_90_days", "not_sure_dunning");
        }
        
        private void generateSpecificRow(Context context, JSONObject dataObject, String head, String type, String dunningOrNotSureDunning)
        		throws IOException, InterruptedException {
        	StringBuffer sb = new StringBuffer();
        	
        	JSONObject obj = null;
        	JSONObject typeObject = null;
        	
        	if(dataObject != null && (typeObject = dataObject.getJSONObject(type)) != null) {
        		obj = typeObject.getJSONObject(dunningOrNotSureDunning);
        	}
        	
        	if(obj != null) {
        		sb.append(head).append(STR_FILED_SPLIT_FLAG);
            	sb.append(type).append(STR_FILED_SPLIT_FLAG);
            	sb.append(dunningOrNotSureDunning).append(STR_FILED_SPLIT_FLAG);
            	
            	sb.append(JSONUtil.getString(obj, "call_tel_total_nums")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "call_total_times")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "call_out_times")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "call_in_times")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "call_total_duration")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "call_avg_duration")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "call_out_duration")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "call_in_duration")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "call_duration_below15")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "call_duration_between15_and_30")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "call_duration_above60")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "first_call_time")).append(STR_FILED_SPLIT_FLAG);
            	sb.append(JSONUtil.getString(obj, "last_call_time"));
            	
            	this.key.set(STR_TARGET_POSITION_DHBCUISHOU_RECORDS_BASIC);
            	
            	this.value.set(sb.toString());
            	context.write(this.key, this.value);
        	}
        }
    }

    private static class DHBCuishouRecordsReducer
            extends Reducer<Text, Text, Text, Text> {
        private NullWritable key2 = NullWritable.get();
        private MultipleOutputs<Text, Text> mos = null;

        protected void setup(Context context)
                throws IOException, InterruptedException {
            mos = new MultipleOutputs<Text, Text>(context);
        }

        protected void cleanup(Context context)
                throws IOException, InterruptedException {
            mos.close();
        }

        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            String keyStr = null;

            for (Text value : values) {
                keyStr = key.toString();

                if (STR_TARGET_POSITION_DHBCUISHOU_RECORDS_BASIC.equals(keyStr)) {
                    this.mos.write(STR_TARGET_POSITION_DHBCUISHOU_RECORDS_BASIC,
                        key2, value);
                } else if (STR_TARGET_POSITION_DHBCUISHOU_RECORDS_JOB
                    .equals(keyStr)) {
                    this.mos.write(
                    		STR_TARGET_POSITION_DHBCUISHOU_RECORDS_JOB, key2,
                        value);
                }
            }
        }
    }

    /**
     * Configures and submits the extraction job.
     *
     * @param args {@code [0]} HDFS output path (deleted first if it exists),
     *             {@code [1]} start date, {@code [2]} stop date (both parsed
     *             to epoch millis by {@code DateUtil.formateToTimestamp})
     * @return 0 on success, 1 on job failure or bad usage
     * @throws Exception on configuration, HDFS or job-submission errors
     */
    @Override
    public int run(String[] args) throws Exception {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException on missing arguments.
        if (args.length < 3) {
            System.err.println("Usage: DHBCuiShouRecordsExtractor"
                + " <out-path> <start-date> <stop-date>");
            return 1;
        }

        // NOTE(review): 'prop' is loaded but never read afterwards — presumably
        // kept for parity with sibling extractors; confirm before removing.
        // Guarded against a missing resource (getResourceAsStream returns null,
        // which previously made prop.load throw an NPE).
        Properties prop = new Properties();
        try (InputStream is = this.getClass().getClassLoader()
            .getResourceAsStream("hbase.properties")) {
            if (is != null) {
                prop.load(is);
            }
        }

        Job job = Job.getInstance(HBaseConfiguration.create(this.getConf()));
        Configuration conf = job.getConfiguration();
        conf.set("hbase.zookeeper.quorum", "datanode1,datanode2,datanode3");
        conf.set("mapreduce.map.memory.mb", "3072");
        conf.set("mapreduce.reduce.memory.mb", "5120");
        // NOTE(review): this property is in milliseconds; 120 ms is almost
        // certainly too small for a scanner timeout — 120000 was probably
        // intended. Left unchanged pending confirmation.
        conf.set("hbase.client.scanner.timeout.period", "120");
        // Gzip-compress the text outputs.
        conf.set("mapreduce.output.fileoutputformat.compress", "true");
        conf.set("mapreduce.output.fileoutputformat.compress.type", "BLOCK");
        conf.set("mapreduce.output.fileoutputformat.compress.codec",
            "org.apache.hadoop.io.compress.GzipCodec");
        conf.set("io.compression.codecs",
            "org.apache.hadoop.io.compress.GzipCodec");

        conf.set("param.out.path", args[0]);
        conf.set("param.start_date", args[1]);
        conf.set("param.stop_date", args[2]);

        String tableName = "dhbcuishou_records";
        String outputDir = conf.get("param.out.path");
        System.out.println("开始日期:" + conf.get("param.start_date"));
        System.out.println("结束日期:" + conf.get("param.stop_date"));
        System.out.println("输入HBase表名:" + tableName);
        System.out.println("输出路径:" + outputDir);

        Long startTime = DateUtil
            .formateToTimestamp(conf.get("param.start_date"));
        Long endTime = DateUtil.formateToTimestamp(conf.get("param.stop_date"));

        // Replace any previous output so the job can be re-run idempotently.
        Path outpath = new Path(outputDir);
        FileSystem fs = outpath.getFileSystem(conf);
        if (fs.exists(outpath)) {
            fs.delete(outpath, true);
        }

        // BUG FIX: previously pointed at RongReportsExtractor.class (copy-paste
        // from a sibling job); the job jar must be located via this class.
        job.setJarByClass(DHBCuiShouRecordsExtractor.class);

        Scan scan = new Scan();
        scan.addFamily(FAMILY_INF);
        scan.addFamily(FAMILY_JOB);
        scan.setMaxVersions(1);
        // 14_400_000 ms = 4 hours of grace past the stop date's timestamp.
        scan.setTimeRange(startTime, endTime + 14400000);

        TableMapReduceUtil.initTableMapperJob(tableName, scan,
            DHBCuishouRecordsMapper.class, Text.class, Text.class, job);
        FileOutputFormat.setOutputPath(job, outpath);
        MultipleOutputs.addNamedOutput(job,
            STR_TARGET_POSITION_DHBCUISHOU_RECORDS_BASIC, TextOutputFormat.class,
            Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job,
            STR_TARGET_POSITION_DHBCUISHOU_RECORDS_JOB,
            TextOutputFormat.class, Text.class, Text.class);

        job.setReducerClass(DHBCuishouRecordsReducer.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * CLI entry point: delegates to {@link ToolRunner} so generic Hadoop
     * options are parsed, then exits with the job's status code.
     *
     * @param args output path, start date, stop date (see {@link #run})
     * @throws Exception propagated from job setup or execution
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        int exitCode =
            ToolRunner.run(conf, new DHBCuiShouRecordsExtractor(), args);
        System.exit(exitCode);
    }
}
