package com.rrd.dw.mr.all;

import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.LongComparator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.rrd.dw.utils.DateUtil;
import com.rrd.dw.utils.HBaseUtil;
import com.rrd.dw.utils.MD5Util;

/**
 * Full (bulk) sync of phone-call detail-record data, used as the initial
 * seed data for the Kafka-based ETL pipeline.
 * （全量同步通话详单相关数据，作为kafka数据进行etl的初始化数据）
 *
 * @author liubaoxin
 *
 */
public class ExportAllPhoneCallRecordToDW extends Configured implements Tool {
	/** Field delimiter expected by the downstream warehouse (Ctrl-A, \001). */
	public static final String CRT001 = "\001";
	public static final Pattern CRT001_PATTERN = Pattern.compile("\001");
	/** Column family holding device info and per-call record cells. */
	public static final byte[] FAMILY = Bytes.toBytes("inf");

	// Column descriptors use the form "name[:type]"; the optional suffix
	// selects the byte[] decoder in ExportCallRecordsToDWMapper#getValue
	// (default decoder: string).
	public static final String[] DEVICE_COLUMNS = {  "app_bundle", "app_version", "jailbroken:boolean",
			"screen", "simulator:boolean", "network_type", "os", "wifi:boolean", "imei", "device_name", "language", "os_version",
			"model", "carrier" };

	public static final String[] JOB_COLUMNS = { "job_id", "finished:boolean", "requested:boolean",
			"successful:boolean", "start_time:date", "end_time:date", "data_file", "system_id", "query", "tryTimes:int",
			"type", "message", "code" };

	public static final String[] DETAILS_COLUMNS = { "type", "name", "number",
			"number_type:int", "number_label", "date", "duration:int"};

	/**
	 * Configures and runs the map-only export job over the
	 * "phone_call_records" HBase table.
	 *
	 * @param args expects four remaining args after GenericOptionsParser:
	 *             {@code <out> <start_time> <end_time> <isAll>}. When isAll
	 *             equals "ALL" the scan is unfiltered (full sync); otherwise
	 *             rows are filtered on job:end_time &gt;= start_time
	 *             (incremental sync).
	 * @return 0 on success, 1 on job failure, -1 on bad usage
	 * @throws Exception if configuration loading or job submission fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		Properties prop = new Properties();
		try (InputStream is = this.getClass().getClassLoader().getResourceAsStream("hbase.properties")) {
			// Fail fast with a clear message instead of an NPE in prop.load.
			if (is == null) {
				throw new IllegalStateException("hbase.properties not found on classpath");
			}
			prop.load(is);
		}

		Job job = Job.getInstance(HBaseConfiguration.create(this.getConf()));
		Configuration conf = job.getConfiguration();
		conf.set("hbase.zookeeper.quorum", prop.get("hbase.zookeeper.quorum").toString());
		conf.set("hbase.rootdir", prop.get("hbase.rootdir").toString());

		conf.set("mapreduce.map.memory.mb", "3072");
		conf.set("mapreduce.reduce.memory.mb", "5120");
		conf.set("mapreduce.output.fileoutputformat.compress", "true");
		conf.set("mapreduce.output.fileoutputformat.compress.type", "BLOCK");
		conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.GzipCodec");
		conf.set("io.compression.codecs", "org.apache.hadoop.io.compress.GzipCodec");
		conf.set("mapreduce.task.io.sort.factor", "10");
		conf.set("hbase.client.scanner.timeout.period", "80000");
		String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
		if (otherArgs.length != 4) {
			// Four arguments are required; the message previously omitted <isAll>.
			System.err.println("Usage: DataExportTool <out> <start_time> <end_time> <isAll>");
			return -1;
		}
		String isAll = otherArgs[3];
		System.out.println("isAll==>" + isAll);
		// startTime/endTime pad the range by +/- 2 days; they are only logged
		// because the scan.setTimeRange call below is disabled.
		Long startTime = DateUtil.addDay(HBaseUtil.parseDate(otherArgs[1]), -2).getTime();
		Long endTime = DateUtil.addDay(HBaseUtil.parseDate(otherArgs[2]), 2).getTime();
		Long startTime2 = HBaseUtil.parseDate(otherArgs[1]).getTime();
		Long endTime2 = HBaseUtil.parseDate(otherArgs[2]).getTime();
		System.out.println("time_ranage=>" + startTime + "," + endTime);
		System.out.println("end_time_filter=>" + startTime2 + "," + endTime2);

		job.setJarByClass(ExportAllPhoneCallRecordToDW.class);
		List<Scan> scans = new ArrayList<Scan>();

		Scan scan = new Scan();
		// Keep only rows whose crawl job finished at/after start_time; rows
		// missing the column are dropped (setFilterIfMissing(true)).
		SingleColumnValueFilter scvf = new SingleColumnValueFilter(Bytes.toBytes("job"), Bytes.toBytes("end_time"),
				CompareFilter.CompareOp.GREATER_OR_EQUAL, new LongComparator(startTime2));

		scvf.setFilterIfMissing(true);
		// NOTE(review): upper-bound filter is built but intentionally not added
		// below — the incremental scan is currently open-ended on the right.
		SingleColumnValueFilter scvf2 = new SingleColumnValueFilter(Bytes.toBytes("job"), Bytes.toBytes("end_time"),
				CompareFilter.CompareOp.LESS, new LongComparator(endTime2));
		scvf2.setFilterIfMissing(true);
		List<Filter> filters = new ArrayList<Filter>();
		filters.add(scvf);
		// filters.add(scvf2);
		FilterList filterList = new FilterList(Operator.MUST_PASS_ALL, filters);
		if (isAll.equals("ALL")) {
			System.out.println("进行全量同步！！！！！！！");
		} else {
			System.out.println("进行增量同步！！！！！！！");
			scan.setFilter(filterList);
			// scan.setTimeRange(startTime, endTime + 14400000);
		}
		// A full-table export reads each block once; don't pollute the block cache.
		scan.setCacheBlocks(false);

		scan.setCaching(200);
		scan.setMaxVersions(1);
		scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, Bytes.toBytes("phone_call_records"));

		scans.add(scan);

		TableMapReduceUtil.initTableMapperJob(scans, ExportCallRecordsToDWMapper.class, Text.class, Text.class, job);
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[0]));
		MultipleOutputs.addNamedOutput(job, "devices", TextOutputFormat.class, Text.class, Text.class);
		MultipleOutputs.addNamedOutput(job, "details", TextOutputFormat.class, Text.class, Text.class);
		// Map-only job: mappers write directly via MultipleOutputs.
		job.setNumReduceTasks(0);
		int stat = job.waitForCompletion(true) ? 0 : 1;
		System.out.println("成功job次數：" + job.getCounters().getGroup("counter").findCounter("job").getValue());
		// Label fixed: this line previously reused the "details" label for the
		// countCell counter.
		System.out.println("成功countCell次數：" + job.getCounters().getGroup("counter").findCounter("countCell").getValue());
		System.out.println("成功detail次數：" + job.getCounters().getGroup("counter").findCounter("details").getValue());
		return stat;
	}

	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		int res = ToolRunner.run(conf, new ExportAllPhoneCallRecordToDW(), args);
		System.exit(res);
	}

	/**
	 * Map-only task: for each HBase row emits one \001-delimited "devices"
	 * record and, when an inf:count cell is present, a newline-separated block
	 * of "details" records, both via {@link MultipleOutputs}.
	 */
	public static class ExportCallRecordsToDWMapper extends TableMapper<Text, Text> {
		private MultipleOutputs<Text, Text> mos;
		// One formatter per mapper instance; SimpleDateFormat is not
		// thread-safe, but each map task runs single-threaded.
		private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
		private NullWritable nullWritable = NullWritable.get();
		private String job_id = null;
		private String timestampstr = null;
		private String user_id = "";
		// Reused per-record builders, cleared at the start of each use.
		// Fix: the original passed "2000"/"500" to the *String* constructor
		// (initial content) where an int capacity was intended; also switched
		// to StringBuilder since no synchronization is needed here.
		private StringBuilder sb = new StringBuilder(2000);
		private StringBuilder sb2 = new StringBuilder(500);

		@Override
		protected void setup(Context context) throws IOException, InterruptedException {
			this.mos = new MultipleOutputs<Text, Text>(context);
		}

		@Override
		protected void cleanup(Context context) throws IOException, InterruptedException {
			// Must close MultipleOutputs or the named-output files are truncated.
			this.mos.close();
		}

		@Override
		protected void map(ImmutableBytesWritable key, Result value, Context context)
				throws IOException, InterruptedException {
			// Deliberate per-row throttle — presumably to ease scan pressure on
			// the region servers; TODO confirm it is still required.
			Thread.sleep(20);
			sb.delete(0, sb.length());

			Object[] rowkey = HBaseUtil.decodeUserRowkey(key.get());
			if (rowkey == null) {
				return;
			}
			// Skip rows that never had a crawl job attached.
			if (value.getValue(Bytes.toBytes("job"), Bytes.toBytes("job_id")) == null) {
				return;
			}
			long timestamp = Long.parseLong(rowkey[1].toString());
			timestampstr = DateUtil.formateDateStr(new Date(timestamp));
			context.getCounter("records", "map.records").increment(1);
			String user_key = rowkey[0].toString();
			Cell dataCell = null;
			Cell countCell = null;
			String type = "";
			String field = "";
			job_id = Bytes.toString(value.getValue(Bytes.toBytes("job"), Bytes.toBytes("job_id")));
			// Synthetic primary key, stable across reruns for the same row+job.
			user_id = MD5Util.getMd5(timestampstr + user_key + job_id);
			sb.append(user_id).append(CRT001).append(user_key).append(CRT001).append(timestampstr).append(CRT001);

			// Device base-info columns (family "inf").
			for (String str1 : DEVICE_COLUMNS) {
				String[] fieldArray = str1.split(":");
				if (fieldArray.length > 1) {
					field = fieldArray[0];
					type = fieldArray[1];
				} else {
					field = fieldArray[0];
					type = "string";
				}
				dataCell = value.getColumnLatestCell(FAMILY, Bytes.toBytes(field));
				if (dataCell != null) {
					// replace(), not replaceAll(): "\n" is a literal, no regex needed.
					sb.append(this.getValue(type, CellUtil.cloneValue(dataCell)).toString().replace("\n", ""));
				}
				sb.append(CRT001);
			}

			// Crawl-job metadata columns (family "job").
			for (String str1 : JOB_COLUMNS) {
				String[] fieldArray = str1.split(":");
				if (fieldArray.length > 1) {
					field = fieldArray[0];
					type = fieldArray[1];
				} else {
					field = fieldArray[0];
					type = "string";
				}
				dataCell = value.getColumnLatestCell(Bytes.toBytes("job"), Bytes.toBytes(field));
				if (dataCell != null) {
					sb.append(this.getValue(type, CellUtil.cloneValue(dataCell)));
				}
				sb.append(CRT001);
			}
			// Strip embedded newlines and the trailing delimiter before writing.
			String outString = sb.toString().replace("\n", "");
			this.mos.write("devices", nullWritable, new Text(outString.substring(0, outString.lastIndexOf(CRT001))));

			context.getCounter("counter", "job").increment(1);
			// Call-detail records: cells are named "<field>#<i>" for i in [0, count).
			countCell = value.getColumnLatestCell(FAMILY, Bytes.toBytes("count"));
			if (countCell != null) {
				context.getCounter("counter", "countCell").increment(1);
				int count = Bytes.toInt(CellUtil.cloneValue(countCell));
				sb2.delete(0, sb2.length());

				for (int i = 0; i < count; i++) {
					context.getCounter("counter", "details").increment(1);
					int j = 0;
					sb.delete(0, sb.length());
					sb.append(user_id).append(CRT001);
					sb.append(timestampstr).append(CRT001);
					// 1-based sequence number of the call within this row.
					sb.append(i + 1).append(CRT001);

					for (String str1 : DETAILS_COLUMNS) {
						String[] fieldArray = str1.split(":");
						if (fieldArray.length > 1) {
							field = fieldArray[0];
							type = fieldArray[1];
						} else {
							field = fieldArray[0];
							type = "string";
						}
						dataCell = value.getColumnLatestCell(FAMILY, Bytes.toBytes(field + "#" + i));
						if (dataCell != null) {
							sb.append(this.getValue(type, CellUtil.cloneValue(dataCell)));
						}
						// No trailing delimiter after the last detail field.
						if (j < DETAILS_COLUMNS.length - 1) {
							sb.append(CRT001);
						}
						j++;
					}
					// Prefix each detail line with an MD5 of its payload as a row id.
					sb2.append(MD5Util.getMd5(sb.toString())).append(CRT001).append(sb.toString().replace("\n", ""));
					if (i < count - 1) {
						sb2.append("\n");
					}
				}
				this.mos.write("details", nullWritable, new Text(sb2.toString()));
			}

		}

		/**
		 * Decodes an HBase cell value according to the ":type" suffix from the
		 * column descriptor. Null values map to a type-appropriate default
		 * ("" / epoch date / 0 / false); unknown types decode as string.
		 *
		 * @param type  decoder name (string, date, bigdecimal, boolean, double,
		 *              float, int, short, long)
		 * @param value raw cell bytes, may be null
		 * @return the decoded value, never null
		 */
		private Object getValue(String type, byte[] value) {
			switch (type) {
			case "string":
				return value == null ? "" : Bytes.toString(value);
			case "date":
				return value == null ? new Date(0) : formatter.format(new Date(Bytes.toLong(value)));
			case "bigdecimal":
				return value == null ? new BigDecimal(0) : Bytes.toBigDecimal(value);
			case "boolean":
				return value != null && Bytes.toBoolean(value);
			case "double":
				return value == null ? 0 : Bytes.toDouble(value);
			case "float":
				return value == null ? 0 : Bytes.toFloat(value);
			case "int":
				return value == null ? 0 : Bytes.toInt(value);
			case "short":
				return value == null ? 0 : Bytes.toShort(value);
			case "long":
				return value == null ? 0 : Bytes.toLong(value);
			default:
				return Bytes.toString(value);

			}
		}
	}
}
