package com.rrd.dw.mr.all;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.bson.Document;

import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.Filters;
import com.rrd.dw.utils.HBaseUtil;
import com.rrd.dw.utils.HadoopUtils;
import com.ucredit.babel.common.model.RecordType;
import com.ucredit.babel.common.model.RowkeyType;

/**
 * Created by liubaoxin on 2018/06/04.
 */
public class ExportAllJob extends Configured implements Tool {
	// Field separator for delimited output records: ASCII 0x01 (Hive's default delimiter).
	public static final String CRT001 = "\001";
	
	// Column spec for the "job" column family. Entries are "name" (string-valued)
	// or "name:type" carrying the value type for decoding.
	// NOTE(review): not referenced anywhere in this file — presumably consumed by
	// another job/utility; confirm before removing.
	public static final String[] JOB_COLUMNS = { "job_id", "finished:boolean", "requested:boolean",
			"successful:boolean", "start_time:date", "end_time:date", "data_file", "system_id", "query", "tryTimes:int",
			"type", "message", "code", "timestamp:long" };
	
	/**
	 * Configures and runs the export job.
	 *
	 * <p>Expected arguments (after generic Hadoop options are stripped):
	 * <ol>
	 *   <li>output path (HDFS, deleted first if present)</li>
	 *   <li>record type name (a {@code RecordType} enum constant)</li>
	 *   <li>start date (parsed by {@code HBaseUtil.parseDate})</li>
	 *   <li>end date</li>
	 *   <li>model: {@code ALL} for a full export, anything else for incremental</li>
	 * </ol>
	 *
	 * @return 0 on success, 1 on job failure, -1 on bad arguments/configuration
	 * @throws Exception on configuration or job-submission errors
	 */
	@Override
	public int run(String[] args) throws Exception {
		// Load HBase connection settings from the bundled hbase.properties.
		Properties prop = new Properties();
		try (InputStream is = this.getClass().getClassLoader().getResourceAsStream("hbase.properties")) {
			if (is == null) {
				// Fail fast with a clear message instead of an NPE inside Properties.load.
				System.err.println("hbase.properties not found on classpath");
				return -1;
			}
			prop.load(is);
		}

		Job job = Job.getInstance(HBaseConfiguration.create(this.getConf()));
		Configuration conf = job.getConfiguration();
		conf.set("hbase.zookeeper.quorum", prop.getProperty("hbase.zookeeper.quorum"));
		conf.set("hbase.rootdir", prop.getProperty("hbase.rootdir"));
		System.out.println("hbase.zookeeper.quorum:" + conf.get("hbase.zookeeper.quorum"));
		conf.set("hbase.client.scanner.timeout.period", "12000");
		// Gzip-compress the job output at block level.
		conf.set("mapreduce.output.fileoutputformat.compress", "true");
		conf.set("mapreduce.output.fileoutputformat.compress.type", "BLOCK");
		conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.GzipCodec");
		conf.set("io.compression.codecs", "org.apache.hadoop.io.compress.GzipCodec");
		conf.set("mapreduce.task.io.sort.factor", "10");
		String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
		// The code below reads otherArgs[0..4]; reject short argument lists up front
		// instead of dying with ArrayIndexOutOfBoundsException.
		if (otherArgs.length < 5) {
			System.err.println("Usage: ExportAllJob <output path> <record type> <start date> <end date> <model: ALL|incremental>");
			return -1;
		}
		String tableName = RecordType.valueOf(otherArgs[1]).modelClass().newInstance().getTableName();
		conf.set("rowkey.record.type", otherArgs[1]);
		long startTime = HBaseUtil.parseDate(otherArgs[2]).getTime();
		long endTime = HBaseUtil.parseDate(otherArgs[3]).getTime();
		String model = otherArgs[4];
		job.setJarByClass(ExportAllJob.class);
		List<Scan> scans = new ArrayList<>();

		Scan scan = new Scan();
		scan.setCacheBlocks(false);
		scan.setCaching(200);
		scan.setMaxVersions(1);
		scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, Bytes.toBytes(tableName));

		if (!"ALL".equals(model)) {
			System.out.println("增量导出表：" + tableName);
			// Pad the end of the window by 4 hours (14_400_000 ms) to catch
			// late-arriving cell timestamps near the boundary.
			scan.setTimeRange(startTime, endTime + 14400000L);
		} else {
			System.out.println("全量导出表：" + tableName);
		}

		scans.add(scan);
		// Clear any previous output so FileOutputFormat does not reject the path.
		HadoopUtils.delete(conf, new Path(otherArgs[0]));
		TableMapReduceUtil.initTableMapperJob(scans, ExportAliPayDataToDWMapper.class, Text.class, Text.class, job);
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[0]));

		// Map-only job: the mapper writes directly to MongoDB.
		job.setNumReduceTasks(0);

		int n = job.waitForCompletion(true) ? 0 : 1;
		System.out.println("写入mongo数：" + job.getCounters().getGroup("counts").findCounter("out.mongo.records").getValue());
		System.out.println("rowkey is null,写入失败mongo数：" + job.getCounters().getGroup("counts").findCounter("rowkey.is.null").getValue());
		System.out.println("jobID is null,写入失败mongo数：" + job.getCounters().getGroup("counts").findCounter("jobId.is.null").getValue());
		return n;
	}

	/**
	 * Entry point: runs the tool via ToolRunner so generic Hadoop options
	 * (-D, -files, ...) are parsed before {@link #run(String[])} is invoked.
	 */
	public static void main(String[] args) throws Exception {
		Configuration hbaseConf = HBaseConfiguration.create();
		System.exit(ToolRunner.run(hbaseConf, new ExportAllJob(), args));
	}

	public static class ExportAliPayDataToDWMapper extends TableMapper<Text, Text> {
		private RowkeyType rowkeyType = null;
		private MongoDatabase database = null;
		private MongoClient client = null;
		private MongoCollection<Document> collection = null;
		private Document d = null;
		private String query = null;
		private Document fab = null;
		private String jobId = null;
		private String recordType=null;
		private Date date = null;
		private Cell dataCell = null;
		private Object[] rowkey = null;
		@Override
		protected void setup(Mapper<ImmutableBytesWritable, Result, Text, Text>.Context context)
				throws IOException, InterruptedException{
			MongoClientURI muri = new MongoClientURI(
					"mongodb://babel:password@hadoop-1,hadoop-2,hadoop-3/babel?maxPoolSize=128&minPoolSize=8&readPreference=secondaryPreferred");
		   client = new MongoClient(muri);
			this.database = client.getDatabase("babel");
			this.collection = this.database.getCollection("rowkeys");
			this.recordType = context.getConfiguration().get("rowkey.record.type");
			this.rowkeyType=getType(context.getConfiguration().get("rowkey.record.type"));
			
			if(this.rowkeyType==null){
				throw new InterruptedException("解析rowkeyType为空！！！！！！！！！！！,退出");
			}
		}
		@Override
		protected void cleanup(Mapper<ImmutableBytesWritable, Result, Text, Text>.Context context)
				throws IOException, InterruptedException {
			this.client.close();
		}
		 

		@Override
		protected void map(ImmutableBytesWritable key, Result value, Context context)
				throws IOException, InterruptedException {
			Thread.sleep(20);
			d = new Document();
			if(rowkeyType==null){
				return;
			}
			d.append("type", this.recordType);
			// qyert
			dataCell = value.getColumnLatestCell(Bytes.toBytes("job"), Bytes.toBytes("query"));
			query = dataCell == null ? "" : HBaseUtil.getValue(CellUtil.cloneValue(dataCell)).toString();
			// rowkey
			d.append("rowkey", key.get());
			if (rowkeyType == RowkeyType.ID_NUMBER) {
				rowkey = HBaseUtil.decodeRowkey(key.get());
				if(rowkey==null){
					context.getCounter("counts","rowkey.is.null").increment(1);
					return;
				}
				d.append("idnumber", rowkey[1].toString());
				d.append("userKey", "");
			} else if (rowkeyType == RowkeyType.USER_KEY) {
				rowkey = HBaseUtil.decodeUserRowkey(key.get());
				if(rowkey==null){
					context.getCounter("counts","rowkey.is.null").increment(1);
					return;
				}
				d.append("idnumber", "");
				d.append("userKey", rowkey[0].toString());
			} else if (rowkeyType == RowkeyType.MOBILE) {
				rowkey = HBaseUtil.decodeMobileRowkey(key.get());
				if(rowkey==null){
					context.getCounter("counts","rowkey.is.null").increment(1);
					return;
				}
				d.append("idnumber", "");
				d.append("userKey", "");
				query=rowkey[0].toString();
				
			} else {
				throw new InterruptedException("没有匹配到对应的RowkeyType");
			}
			d.append("query", query);
			// jobid
			dataCell = value.getColumnLatestCell(Bytes.toBytes("job"), Bytes.toBytes("job_id"));
			if (dataCell == null) {
				context.getCounter("counts","jobId.is.null").increment(1);
				return;
			} else {
				jobId=HBaseUtil.getValue(CellUtil.cloneValue(dataCell)).toString();
				d.append("_id", this.recordType.concat("#").concat(HBaseUtil.getValue(CellUtil.cloneValue(dataCell)).toString()));
				d.append("jobID", jobId);
			}
			// job timestamp
			dataCell = value.getColumnLatestCell(Bytes.toBytes("job"), Bytes.toBytes("timestamp"));
			if(dataCell ==null){
				date= new Date();
			}else{
				date = new Date((Long) HBaseUtil.getValue("long", CellUtil.cloneValue(dataCell)));
			}
			d.append("timestamp", date);

			 
			context.getCounter("counts","out.mongo.records").increment(1);
			fab = this.collection.find(Filters.eq("_id", this.recordType.concat("#").concat(jobId))).first();
			if (fab == null) {
				this.collection.insertOne(d);
			}else{
				this.collection.updateMany(
                        Filters.eq("_id", this.recordType.concat("#").concat(jobId)),
                        new Document("$set", d));
			}
			 

		}

		/**
		 * ID_NUMBER:身份证，USER_KEY：用户标识,MOBILE:手机号
		 * 
		 * @return
		 */
		public static RowkeyType getType(String type) {
			try{
				return RecordType.valueOf(type).rowkeyType();
			}catch(Exception e){
				e.printStackTrace();
				return null;
			}
		}
	}

	/**
	 * Identity reducer: forwards every value unchanged under a shared null key.
	 * Currently unused — the job runs with zero reduce tasks — but kept for the
	 * reduce-enabled variant.
	 */
	public static class ExportAliPayDataToDWReducer extends Reducer<Text, Text, NullWritable, Text> {

		// NullWritable is a singleton; cache it once as the output key.
		private final NullWritable outKey = NullWritable.get();

		@Override
		protected void reduce(Text key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			for (Text record : values) {
				context.write(outKey, record);
			}
		}
	}
}
