package com.rrd.dw.mr.all;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.amazonaws.util.Md5Utils;
import com.rrd.dw.utils.DateUtil;
import com.rrd.dw.utils.HBaseUtil;
import com.rrd.dw.utils.MD5Util;

/**
 * UnionPay Smart (银联智慧) data export: scans the "unionpaysmart_personal"
 * HBase table and writes \001-delimited records to HDFS for the data
 * warehouse, either as a full export or restricted to a time range.
 * 
 * @author liubaoxin
 *
 */
public class ExportAllYLSmartDataToDW extends Configured implements Tool {
	// Field separator for the exported records: \001, the Hive default delimiter.
	public static final String CRT001 = "\001";
	// Columns read from the "inf" column family. Entries may carry a ":type"
	// suffix; none do here, so all decode as strings.
	public static final String[] PAY_COLUMNS = { "code", "status", "message", "smart_id", "data" };// inf
	// Columns read from the "job" column family; ":type" suffixes select the
	// decoder in HBaseUtil.getValue (default is string when no suffix given).
	public static final String[] JOB_COLUMNS = { "job_id", "finished:boolean", "requested:boolean",
			"successful:boolean", "start_time:date", "end_time:date", "system_id", "query", "try_times:int", "type",
			"message", "code" };// job

	/**
	 * Configures and submits the export job.
	 *
	 * <p>Expected arguments (after generic Hadoop options): output path,
	 * start time, end time, and run model ("ALL" for a full export; any
	 * other value runs incrementally, limiting the scan time range).
	 *
	 * @param args command-line arguments
	 * @return 0 on success, non-zero on usage error or job failure
	 * @throws Exception if configuration loading or job submission fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		// Load HBase connection settings from the classpath.
		Properties prop = new Properties();
		try (InputStream is = this.getClass().getClassLoader().getResourceAsStream("hbase.properties")) {
			if (is == null) {
				// Fail fast with a clear message instead of the bare NPE that
				// prop.load(null) would throw.
				throw new IOException("hbase.properties not found on classpath");
			}
			prop.load(is);
		}

		Job job = Job.getInstance(HBaseConfiguration.create(this.getConf()));
		Configuration conf = job.getConfiguration();
		conf.set("hbase.zookeeper.quorum", prop.getProperty("hbase.zookeeper.quorum"));
		conf.set("hbase.rootdir", prop.getProperty("hbase.rootdir"));

		conf.set("mapreduce.map.memory.mb", "3072");
		conf.set("mapreduce.reduce.memory.mb", "5120");
		// NOTE(review): this property is in milliseconds; "120" means 120 ms and
		// will very likely cause scanner timeouts — was "120000" intended? TODO confirm.
		conf.set("hbase.client.scanner.timeout.period", "120");
		// Gzip-compress the job output, block-compressed.
		conf.set("mapreduce.output.fileoutputformat.compress", "true");
		conf.set("mapreduce.output.fileoutputformat.compress.type", "BLOCK");
		conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.GzipCodec");
		conf.set("io.compression.codecs", "org.apache.hadoop.io.compress.GzipCodec");
		conf.set("mapreduce.task.io.sort.factor", "10");

		String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
		if (otherArgs.length != 4) {
			// Bug fix: the usage text previously listed only three of the four
			// required arguments, omitting <model>.
			System.err.println("Usage: DataExportTool <out> <start_time> <end_time> <model>");
			return -1;
		}
		long startTime = HBaseUtil.parseDate(otherArgs[1]).getTime();
		long endTime = HBaseUtil.parseDate(otherArgs[2]).getTime();
		String model = otherArgs[3];

		job.setJarByClass(ExportAllYLSmartDataToDW.class);

		Scan scan = new Scan();
		scan.setCacheBlocks(false); // full-table MR scans should not pollute the block cache
		scan.setCaching(200);
		scan.setMaxVersions(1);
		scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, Bytes.toBytes("unionpaysmart_personal"));

		if (!"ALL".equals(model)) {
			System.out.println("增量运行.............");
			// Incremental run: restrict the scan window. The 14400000 ms (4 h) pad
			// on the end presumably covers late-arriving writes — TODO confirm.
			scan.setTimeRange(startTime, endTime + 14400000L);
		} else {
			System.out.println("全量运行.............");
		}

		List<Scan> scans = new ArrayList<>();
		scans.add(scan);

		TableMapReduceUtil.initTableMapperJob(scans, ExportYLSmartDataToDWMapper.class, Text.class, Text.class, job);
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[0]));
		// Named output written by the reducer via MultipleOutputs.
		MultipleOutputs.addNamedOutput(job, "baseinfo", TextOutputFormat.class, Text.class, Text.class);

		job.setReducerClass(ExportYLSmartDataToDWReducer.class);
		job.setNumReduceTasks(10);
		return job.waitForCompletion(true) ? 0 : 1;
	}

	/**
	 * CLI entry point: runs the tool through {@link ToolRunner} with an
	 * HBase-aware configuration and exits with the job's status code.
	 *
	 * @param args command-line arguments, forwarded to {@link #run(String[])}
	 * @throws Exception propagated from job setup or execution
	 */
	public static void main(String[] args) throws Exception {
		Configuration configuration = HBaseConfiguration.create();
		ExportAllYLSmartDataToDW tool = new ExportAllYLSmartDataToDW();
		System.exit(ToolRunner.run(configuration, tool, args));
	}

	/**
	 * Reads UnionPay smart-data rows from HBase and emits one \001-delimited
	 * record per row, keyed by {@code "<timestamp>\t<idNo>"}.
	 *
	 * <p>Record layout: md5(idNo + formattedTimestamp + jobId), formatted
	 * timestamp, idNo, then the PAY_COLUMNS fields from family "inf" and the
	 * JOB_COLUMNS fields from family "job"; missing cells emit empty fields,
	 * and the trailing separator is stripped.
	 */
	public static class ExportYLSmartDataToDWMapper extends TableMapper<Text, Text> {
		// Reused output objects to avoid per-record Text allocation.
		private final Text outKey = new Text();
		private final Text outValue = new Text();

		@Override
		protected void map(ImmutableBytesWritable key, Result value, Context context)
				throws IOException, InterruptedException {
			Object[] rowkey = HBaseUtil.decodeRowkey(key.get());
			if (rowkey == null) {
				// Undecodable rowkey: skip the record (original behavior).
				return;
			}
			long timestamp = Long.parseLong(rowkey[0].toString());
			String timestampstr = DateUtil.formateDateStr(new Date(timestamp));
			String idNo = rowkey[1].toString();

			// Bug fix: job_id was decoded with an empty type string; JOB_COLUMNS
			// declares it as an (implicit) "string", so decode it the same way here.
			Cell jobIdCell = value.getColumnLatestCell(Bytes.toBytes("job"), Bytes.toBytes("job_id"));
			String jobId = jobIdCell == null ? ""
					: HBaseUtil.getValue("string", CellUtil.cloneValue(jobIdCell)).toString();
			// Synthetic record id, unique per (person, timestamp, job).
			String id = MD5Util.getMd5(idNo + timestampstr + jobId);

			// StringBuilder instead of StringBuffer: built and consumed on one thread.
			StringBuilder sb = new StringBuilder();
			sb.append(id).append(CRT001).append(timestampstr).append(CRT001).append(idNo).append(CRT001);
			appendColumns(sb, value, "inf", PAY_COLUMNS); // pay base info
			appendColumns(sb, value, "job", JOB_COLUMNS); // job info

			// Strip embedded newlines so each record stays on one output line,
			// then drop the trailing field separator.
			String outString = sb.toString().replace("\n", "");
			outKey.set(timestamp + "\t" + idNo);
			outValue.set(outString.substring(0, outString.lastIndexOf(CRT001)));
			context.write(outKey, outValue);
		}

		/**
		 * Appends the latest cell of each "name[:type]" column (type defaults to
		 * "string") followed by the field separator; missing cells contribute an
		 * empty field.
		 */
		private static void appendColumns(StringBuilder sb, Result value, String family, String[] columns) {
			byte[] cf = Bytes.toBytes(family);
			for (String column : columns) {
				String[] fieldArray = column.split(":");
				String field = fieldArray[0];
				String type = fieldArray.length > 1 ? fieldArray[1] : "string";
				Cell dataCell = value.getColumnLatestCell(cf, Bytes.toBytes(field));
				if (dataCell != null) {
					Object dataObject = HBaseUtil.getValue(type, CellUtil.cloneValue(dataCell));
					if ("data".equals(field)) {
						// NOTE(review): this replaceAll swaps '"' for '"' — a no-op.
						// The author likely meant to unescape \" to " — TODO confirm
						// before changing; kept as-is to preserve output byte-for-byte.
						sb.append(dataObject.toString().replaceAll("\\\"", "\""));
					} else {
						sb.append(dataObject.toString());
					}
				}
				sb.append(CRT001);
			}
		}
	}

	/**
	 * Pass-through reducer: writes every incoming value unchanged to the
	 * "baseinfo" named output, discarding the map-side key.
	 */
	public static class ExportYLSmartDataToDWReducer extends Reducer<Text, Text, Text, Text> {
		// Sink for the named output registered in run().
		private MultipleOutputs<Text, Text> multipleOutputs;
		// Shared null key used for every emitted record.
		private final NullWritable nullKey = NullWritable.get();

		@Override
		protected void setup(Context context) throws IOException, InterruptedException {
			multipleOutputs = new MultipleOutputs<>(context);
		}

		@Override
		protected void reduce(Text key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			// Forward each record as-is; the grouping key is only used for sorting.
			for (Text record : values) {
				multipleOutputs.write("baseinfo", nullKey, record);
			}
		}

		@Override
		protected void cleanup(Context context) throws IOException, InterruptedException {
			// Flush and close the named-output writers.
			multipleOutputs.close();
		}
	}
}
