package com.rrd.dw.mr.gzip;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.rrd.dw.utils.DateUtil;
import com.rrd.dw.utils.HadoopUtils;

public class NifaQeryMR extends Configured implements Tool {
    public static final String FILED_SPLIT_FLAG = "\001";
    public static final int SIZEOF_MD5 = 16;
    public static final byte[] INF_FAMILY = Bytes.toBytes("inf");
    public static final byte[] JOB_FAMILY = Bytes.toBytes("job");
    public static class NifaQueryMapper extends TableMapper<Text, Text> {
        private JSONObject dataObj = null;
        private JSONObject tmpObj = null;
        private Text text = new Text();
        private Text key_text = new Text();
        private StringBuilder sb = new StringBuilder(200);
        private StringBuilder sb2 = new StringBuilder(100);
        private String data_json="";
        private String tmpstr="";
        
        @Override
        protected void map(ImmutableBytesWritable key, Result value,
                Context context) throws IOException, InterruptedException {
            sb.delete(0, sb.length());
            sb2.delete(0, sb2.length());
            // jobid
            tmpstr=Bytes.toString(
                value.getValue(JOB_FAMILY, Bytes.toBytes("job_id")));
            sb.append(tmpstr).append(FILED_SPLIT_FLAG);
            sb2.append(tmpstr).append(FILED_SPLIT_FLAG);
            //system_id
            tmpstr=Bytes.toString(
                value.getValue(JOB_FAMILY, Bytes.toBytes("system_id")));
            sb.append(tmpstr).append(FILED_SPLIT_FLAG);
            sb2.append(tmpstr).append(FILED_SPLIT_FLAG);
          //start_time
            Date date=new Date(Bytes.toLong(value.getValue(JOB_FAMILY, Bytes.toBytes("start_time"))));
            tmpstr= DateUtil.formateDateStr(date);
            sb.append(tmpstr).append(FILED_SPLIT_FLAG);
            sb2.append(tmpstr).append(FILED_SPLIT_FLAG);
            
            //名称
            tmpstr=value.getValue(INF_FAMILY, Bytes.toBytes("name")) == null?"":Bytes.toString(
                value.getValue(INF_FAMILY, Bytes.toBytes("name")));
             sb.append(tmpstr).append(FILED_SPLIT_FLAG);
             sb2.append(tmpstr).append(FILED_SPLIT_FLAG);
            //证件类型
             tmpstr=value.getValue(INF_FAMILY, Bytes.toBytes("idtype")) == null?"":Bytes.toString(
                 value.getValue(INF_FAMILY, Bytes.toBytes("idtype")));
             sb.append(tmpstr).append(FILED_SPLIT_FLAG);
             sb2.append(tmpstr).append(FILED_SPLIT_FLAG);
            //证件号码
            if (value.getValue(INF_FAMILY, Bytes.toBytes("no")) == null) {
                return;
            } else {
                tmpstr=Bytes.toString(value.getValue(INF_FAMILY, Bytes.toBytes("no")));
                sb.append(tmpstr).append(FILED_SPLIT_FLAG);
                sb2.append(tmpstr).append(FILED_SPLIT_FLAG);
            }
            //查询原因
            sb.append(
                value.getValue(INF_FAMILY, Bytes.toBytes("reason")) == null ? ""
                    : Bytes.toString(
                        value.getValue(INF_FAMILY, Bytes.toBytes("reason"))))
                .append(FILED_SPLIT_FLAG);
          //data json报文
          
            data_json=value.getValue(INF_FAMILY, Bytes.toBytes("data")) == null ? ""
                    : Bytes.toString(
                        value.getValue(INF_FAMILY, Bytes.toBytes("data")));
           
           //解析data_json
            dataObj=JSONObject.parseObject(data_json);
            if(dataObj==null){
                sb.append("").append(FILED_SPLIT_FLAG).append("").append(FILED_SPLIT_FLAG);
                sb.append("").append(FILED_SPLIT_FLAG).append("").append(FILED_SPLIT_FLAG);
                sb.append("").append(FILED_SPLIT_FLAG).append("").append(FILED_SPLIT_FLAG);
                sb.append("").append(FILED_SPLIT_FLAG).append("").append(FILED_SPLIT_FLAG);
                sb.append("").append(FILED_SPLIT_FLAG).append("").append(FILED_SPLIT_FLAG);
                sb.append("").append(FILED_SPLIT_FLAG).append("");
            }else{
              sb.append(dataObj.getString("loancount")==null?"":dataObj.getString("loancount")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("loanamt")==null?"":dataObj.getString("loanamt")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("outstandcount")==null?"":dataObj.getString("outstandcount")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("loanbal")==null?"":dataObj.getString("loanbal")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("overduecount")==null?"":dataObj.getString("overduecount")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("overdueamt")==null?"":dataObj.getString("overdueamt")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("overduemorecount")==null?"":dataObj.getString("overduemorecount")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("overduemoreamt")==null?"":dataObj.getString("overduemoreamt")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("generationcount")==null?"":dataObj.getString("generationcount")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("generationamount")==null?"":dataObj.getString("generationamount")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("totalorg")==null?"":dataObj.getString("totalorg")).append(FILED_SPLIT_FLAG);
              sb.append(dataObj.getString("queryatotalorg")==null?"":dataObj.getString("queryatotalorg"));
              //解析查詢明細
              tmpstr=sb2.toString();
              JSONArray jarr=dataObj.getJSONArray("infoquerybean");
              if(jarr!=null){
                  for (int i = 0; i < jarr.size(); i++) {
                      sb2.delete(0, sb2.length());
                      tmpObj = jarr.getJSONObject(i);
                      sb2.append(tmpstr);
                      sb2.append(tmpObj.getString("ordernum")==null?"":tmpObj.getString("ordernum")).append(FILED_SPLIT_FLAG);
                      sb2.append(tmpObj.getString("ddate")==null?"":tmpObj.getString("ddate")).append(FILED_SPLIT_FLAG);
                      sb2.append(tmpObj.getString("s_value")==null?"":tmpObj.getString("s_value"));
                      text.set(sb2.toString());
                      key_text.set("detail");
                      context.write(key_text, text);
                  }
                 
              }
            }
            key_text.set("query");
            text.set(sb.toString());
            context.write(key_text, text);
            
            
        }

    }

    public static class NifaQueryReducer
            extends Reducer<Text, Text, Text, Text> {

        /** Routes each record to the named output matching its map-side tag. */
        private MultipleOutputs<Text, Text> multipleOutputs;
        private final NullWritable emptyKey = NullWritable.get();

        @Override
        protected void setup(Context context)
                throws IOException, InterruptedException {
            multipleOutputs = new MultipleOutputs<Text, Text>(context);
        }

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // "query" records go to the summary file; anything else
            // (i.e. "detail") goes to the details file.
            String outputName =
                "query".equals(key.toString()) ? "query" : "details";
            for (Text record : values) {
                multipleOutputs.write(outputName, emptyKey, record);
            }
        }

        @Override
        protected void cleanup(Context context)
                throws IOException, InterruptedException {
            multipleOutputs.close();
        }
    }

    /**
     * Configures and runs the export job: scans the nifa_query HBase table
     * over the [start_date, stop_date] window and writes gzip-compressed
     * "query" and "details" text outputs under param.out.path.
     *
     * Required -D parameters: param.out.path, param.start_date,
     * param.stop_date. HBase connection settings come from hbase.properties
     * on the classpath.
     *
     * @return 0 when the job succeeds, 1 otherwise
     */
    @Override
    public int run(String[] args) throws Exception {
        Properties prop = new Properties();
        try (InputStream is = this.getClass().getClassLoader()
            .getResourceAsStream("hbase.properties")) {
            if (is == null) {
                // Fail fast with a clear message instead of the bare NPE that
                // Properties.load(null) would otherwise throw.
                throw new IOException(
                    "hbase.properties not found on the classpath");
            }
            prop.load(is);
        }
        Job job = Job.getInstance(HBaseConfiguration.create(this.getConf()));
        Configuration conf = job.getConfiguration();
        conf.set("hbase.zookeeper.quorum",
            prop.get("hbase.zookeeper.quorum").toString());
        conf.set("hbase.rootdir", prop.get("hbase.rootdir").toString());

        conf.set("mapreduce.map.memory.mb", "3072");
        conf.set("mapreduce.reduce.memory.mb", "5120");
        // NOTE(review): this property is expressed in milliseconds; 120 ms is
        // far below the 60000 ms default and was likely meant to be "120000".
        // Left unchanged pending confirmation.
        conf.set("hbase.client.scanner.timeout.period", "120");
        // Gzip-compress the final text output at block granularity.
        conf.set("mapreduce.output.fileoutputformat.compress", "true");
        conf.set("mapreduce.output.fileoutputformat.compress.type", "BLOCK");
        conf.set("mapreduce.output.fileoutputformat.compress.codec",
            "org.apache.hadoop.io.compress.GzipCodec");
        conf.set("io.compression.codecs",
            "org.apache.hadoop.io.compress.GzipCodec");
        conf.set("mapreduce.task.io.sort.factor", "10");
        String table = "nifa_query";
        String outDir = conf.get("param.out.path");
        System.out.println("start date: " + conf.get("param.start_date"));
        System.out.println("stop date: " + conf.get("param.stop_date"));
        System.out.println("input hbase table: " + table);
        System.out.println("output path: " + outDir);
        Long startTime = DateUtil
            .formateToTimestamp(conf.get("param.start_date"));
        Long endTime = DateUtil.formateToTimestamp(conf.get("param.stop_date"));

        Path outpath = new Path(outDir);

        // Remove any previous run's output so the job can start cleanly.
        HadoopUtils.delete(conf, outpath);
        job.setJarByClass(NifaQeryMR.class);
        List<Scan> scans = new ArrayList<Scan>();
        Scan scan = new Scan();
        // Full-scan job: don't pollute the region servers' block cache.
        scan.setCacheBlocks(false);
        // Restrict the scan to exactly the columns the mapper reads.
        scan.addColumn(JOB_FAMILY, Bytes.toBytes("system_id"));
        scan.addColumn(JOB_FAMILY, Bytes.toBytes("start_time"));
        scan.addColumn(JOB_FAMILY, Bytes.toBytes("job_id"));
        scan.addColumn(INF_FAMILY, Bytes.toBytes("name"));
        scan.addColumn(INF_FAMILY, Bytes.toBytes("idtype"));
        scan.addColumn(INF_FAMILY, Bytes.toBytes("no"));
        scan.addColumn(INF_FAMILY, Bytes.toBytes("reason"));
        scan.addColumn(INF_FAMILY, Bytes.toBytes("data"));

        scan.setCaching(200);
        scan.setMaxVersions(1);
        scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME,
            Bytes.toBytes(table));
        // Extend the window past stop_date by four hours to catch late writes.
        final long fourHoursMs = 14400000L;
        scan.setTimeRange(startTime, endTime + fourHoursMs);
        scans.add(scan);

        TableMapReduceUtil.initTableMapperJob(scans,
            NifaQueryMapper.class, Text.class, Text.class, job);
        FileOutputFormat.setOutputPath(job, outpath);
        // Named outputs matched by NifaQueryReducer's routing.
        MultipleOutputs.addNamedOutput(job, "query", TextOutputFormat.class,
            Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job, "details", TextOutputFormat.class,
            Text.class, Text.class);

        job.setReducerClass(NifaQueryReducer.class);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * CLI entry point: delegates to ToolRunner so generic Hadoop options
     * (-D, -conf, ...) are parsed before {@link #run(String[])} executes.
     */
    public static void main(String[] args) throws Exception {
        final Configuration baseConf = HBaseConfiguration.create();
        final int exitCode = ToolRunner.run(baseConf, new NifaQeryMR(), args);
        System.exit(exitCode);
    }

}
