package com.rrd.dw.mr.all;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.rrd.dw.utils.DateUtil;
import com.rrd.dw.utils.HadoopUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;

/**
 * Full (non-incremental) export of internet-finance (NIFA) query data from
 * the HBase table {@code nifa_query} into gzip-compressed text files on HDFS,
 * split into two named outputs ("query" and "details") via
 * {@link MultipleOutputs}.
 *
 * @author liubaoxin
 */
public class ExportAllNifaQeryNewMR extends Configured implements Tool {
    /** Field separator (^A) used when concatenating output columns. */
    public static final String FILED_SPLIT_FLAG = "\001";
    /** Length in bytes of an MD5 digest (presumably a row-key prefix size — TODO confirm). */
    public static final int SIZEOF_MD5 = 16;
    /** Column family holding the query payload columns (name, idtype, no, reason, data). */
    public static final byte[] INF_FAMILY = Bytes.toBytes("inf");
    /** Column family holding job bookkeeping columns (system_id, start_time, job_id). */
    public static final byte[] JOB_FAMILY = Bytes.toBytes("job");

    /**
     * Maps each {@code nifa_query} row to a ("query", record) pair.
     *
     * <p>NOTE(review): this mapper looks unfinished — it reads the
     * {@code job:job_id} cell into {@code tmpstr} but never uses it, and the
     * emitted value is always the empty string because nothing is ever
     * appended to {@code sb}. Confirm whether the record-formatting logic was
     * removed intentionally before relying on this job's output.
     */
    public static class NifaQueryMapper extends TableMapper<Text, Text> {
        // Reused Writables to avoid per-record allocation.
        private Text text = new Text();
        private Text key_text = new Text();
        // Reused record builder; currently never appended to, so the emitted
        // value is always empty (see class-level review note).
        private StringBuilder sb = new StringBuilder(200);
        private String tmpstr = "";

        @Override
        protected void map(ImmutableBytesWritable key, Result value,
                Context context) throws IOException, InterruptedException {
            sb.delete(0, sb.length());
            // job_id — read but currently unused; Bytes.toString(null) yields
            // null when the cell is absent, so no NPE here.
            tmpstr = Bytes.toString(
                value.getValue(JOB_FAMILY, Bytes.toBytes("job_id")));

            // Every row is routed to the "query" named output with an empty
            // value (see review note above).
            key_text.set("query");
            text.set(sb.toString());
            context.write(key_text, text);
        }

    }

    /**
     * Routes records to the "query" or "details" named output based on the
     * reduce key. Values are passed through unchanged; the output key is
     * {@link NullWritable} so only the value appears in the files.
     */
    public static class NifaQueryReducer
            extends Reducer<Text, Text, Text, Text> {
        private MultipleOutputs<Text, Text> mos;
        // NullWritable key: TextOutputFormat suppresses it, writing value-only lines.
        private NullWritable key2 = NullWritable.get();
        private String keystr = "";

        @Override
        protected void setup(Context context)
                throws IOException, InterruptedException {
            this.mos = new MultipleOutputs<Text, Text>(context);
        }

        @Override
        protected void cleanup(Context context)
                throws IOException, InterruptedException {
            // Must close MultipleOutputs or the named-output files are never flushed.
            this.mos.close();
        }

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            keystr = key.toString();
            // The destination depends only on the key, not on each value, so
            // select the named output once instead of re-testing per value.
            String namedOutput = keystr.equals("query") ? "query" : "details";
            for (Text val : values) {
                this.mos.write(namedOutput, key2, val);
            }

        }
    }

    /**
     * Builds and submits the export job.
     *
     * <p>Expects {@code hbase.properties} on the classpath (ZooKeeper quorum
     * and HBase root dir) and the job parameters {@code param.out.path},
     * {@code param.start_date} and {@code param.stop_date} in the supplied
     * configuration.
     *
     * @param args unused; parameters come from the configuration
     * @return 0 on success, 1 on failure
     * @throws Exception on configuration or job-submission failure
     */
    @Override
    public int run(String[] args) throws Exception {
        Properties prop = new Properties();
        try (InputStream is = this.getClass().getClassLoader()
            .getResourceAsStream("hbase.properties")) {
            if (is == null) {
                // Fail fast with a clear message instead of the bare NPE that
                // Properties.load(null) would otherwise throw.
                throw new IOException(
                    "hbase.properties not found on the classpath");
            }
            prop.load(is);
        }
        Job job = Job.getInstance(HBaseConfiguration.create(this.getConf()));
        Configuration conf = job.getConfiguration();
        conf.set("hbase.zookeeper.quorum",
            prop.get("hbase.zookeeper.quorum").toString());
        conf.set("hbase.rootdir", prop.get("hbase.rootdir").toString());

        conf.set("mapreduce.map.memory.mb", "3072");
        conf.set("mapreduce.reduce.memory.mb", "5120");
        // NOTE(review): this property is in MILLISECONDS; 120 ms is almost
        // certainly too low and will cause constant scanner timeouts — was
        // 120000 (120 s) intended? TODO confirm before changing.
        conf.set("hbase.client.scanner.timeout.period", "120");
        // Gzip-compress the (block-compressed) text output.
        conf.set("mapreduce.output.fileoutputformat.compress", "true");
        conf.set("mapreduce.output.fileoutputformat.compress.type", "BLOCK");
        conf.set("mapreduce.output.fileoutputformat.compress.codec",
            "org.apache.hadoop.io.compress.GzipCodec");
        conf.set("io.compression.codecs",
            "org.apache.hadoop.io.compress.GzipCodec");
        conf.set("mapreduce.task.io.sort.factor", "10");
        String table = "nifa_query";
        String outDir = conf.get("param.out.path");
        System.out.println("开始日期：" + conf.get("param.start_date"));
        System.out.println("结束日期：" + conf.get("param.stop_date"));
        System.out.println("输入hbase表：" + table);
        System.out.println("输出路径：" + outDir);
        // Currently unused: the corresponding scan.setTimeRange(...) call
        // below is commented out, so this export is a full-table scan.
        Long startTime = DateUtil
            .formateToTimestamp(conf.get("param.start_date"));
        Long endTime = DateUtil.formateToTimestamp(conf.get("param.stop_date"));

        // String isTest=conf.get("param.isTest","0");
        Path outpath = new Path(outDir);

        // Remove any stale output so FileOutputFormat does not reject the path.
        HadoopUtils.delete(conf, outpath);
        job.setJarByClass(ExportAllNifaQeryNewMR.class);
        List<Scan> scans = new ArrayList<Scan>();
        Scan scan = new Scan();
        // Full-table export: do not pollute the region servers' block cache.
        scan.setCacheBlocks(false);
        scan.addColumn(JOB_FAMILY, Bytes.toBytes("system_id"));
        scan.addColumn(JOB_FAMILY, Bytes.toBytes("start_time"));
        scan.addColumn(JOB_FAMILY, Bytes.toBytes("job_id"));
        scan.addColumn(INF_FAMILY, Bytes.toBytes("name"));
        scan.addColumn(INF_FAMILY, Bytes.toBytes("idtype"));
        scan.addColumn(INF_FAMILY, Bytes.toBytes("no"));
        scan.addColumn(INF_FAMILY, Bytes.toBytes("reason"));
        scan.addColumn(INF_FAMILY, Bytes.toBytes("data"));

        scan.setCaching(200);
        scan.setMaxVersions(1);
        scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME,
            Bytes.toBytes(table));
//        scan.setTimeRange(startTime, endTime + 14400000);
        // scan.setStartRow(Bytes.toBytes(""));
        // scan.getStopRow(Bytes.toBytes(""));
        scans.add(scan);

        TableMapReduceUtil.initTableMapperJob(scans,
            NifaQueryMapper.class, Text.class, Text.class, job);
        FileOutputFormat.setOutputPath(job, outpath);
        MultipleOutputs.addNamedOutput(job, "query", TextOutputFormat.class,
            Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job, "details", TextOutputFormat.class,
            Text.class, Text.class);

        job.setReducerClass(NifaQueryReducer.class);
        // job.setNumReduceTasks(1);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * CLI entry point: runs the tool via {@link ToolRunner} and exits with
     * the job's status code.
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        int res = ToolRunner.run(conf, new ExportAllNifaQeryNewMR(), args);
        System.exit(res);
    }

}
