package com.aotain.coeus.mr;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.Locale;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;




public class NGramModel  extends Configured implements Tool {

	private static final Log log = LogFactory.getLog(NGramModel.class);
	private static Configuration conf = null;
	
	
	public static class NGramModelMapper extends Mapper<LongWritable, Text, Text, Text> {	
	
	
	public void map(LongWritable key, Text value, Context context)
			throws IOException, InterruptedException {
			String line = value.toString();
			String[] tuple = line.split("\\|",-1);
			
			try{
				if(tuple[0].equals("") || tuple[1].equals("")) return;
								 
				String url = new String(Base64.decodeBase64(tuple[1])).toLowerCase();
				Configuration cf = context.getConfiguration();
				if(!cf.get("SuffixName").isEmpty()){
					String[] sfarr = cf.get("SuffixName").split("\\|",-1);
					for(String sf : sfarr){
						if(url.contains("."+sf)) return;
					}
				}
				
				if(url.contains("://")){
					url = url.split("://",-1)[1];
				}
				
				//length model				
				context.write(new Text (tuple[0] + "|0|"), new Text (String.valueOf(url.length()-3) + "|" + String.valueOf(url.length()+3)));
				
				//path model
				//String urlnew = url.substring(url.indexOf("/")+1,url.length());www.baidu.com/20151225/120.htm
				String[] urls = url.split("/",-1);
				int loop = urls.length > 3 ? 3 : urls.length-1;
				for (int i = 0; i < loop; i++) {
					if(!urls[i].isEmpty()){ 
						context.write(new Text (tuple[0] + "|1|" + urls[i]), new Text ("1"));
						if(i != 0) context.write(new Text (tuple[0] + "|1|" + urls[i-1] + "/" + urls[i]), new Text ("1"));
					}
				}

//				//para model
//				if(url.contains("?")){
//					context.write(new Text (tuple[0] + "|2|x=e"  ), new Text ("1"));
//					Map<String, String> mapRequest = CRequest.URLRequest(url);
//					for(String strRequestKey: mapRequest.keySet()) {
//						//参数的概率para_x_prob = count(/a/b/c/d?x)/count(x) ; para_y_prob = count(/a/b/c/d?y)/count(y); 
//						//计算/a/b/c/d?x=e&y=f中x和y的次数。/a/b/c/d?x和/a/b/c/d?y的次数
//						context.write(new Text (tuple[0] + "|2|" + strRequestKey), new Text ("1"));
//						context.write(new Text (tuple[0] + "|2|" + CRequest.UrlPage(url)+"?"+strRequestKey), new Text ("1"));
//						
////						//参数值的概率para_vale_e_prob = para_x_prob *(count(x=e)/count(e)) ;  para_vale_f_prob =para_y_prob *(count(y=f)/count(f)) ；
////						//计算/a/b/c/d?x=e&y=f中x=e和e的次数。y=f和f的次数						
//////						context.write(new Text (tuple[0] + "|3|" + mapRequest.get(strRequestKey)), new Text ("1"));
//////						context.write(new Text (tuple[0] + "|3|" + strRequestKey +"=" + mapRequest.get(strRequestKey)), new Text ("1"));
//					}
//				}
			}catch(Exception e){
				System.out.println("NGramModel throws an exception>>>>>>>>>"+e);
			}
		}
	}
	
	public static class NGramModelReducer extends Reducer<Text, Text, Text, Text> {		
		protected void reduce(Text key,  Iterable<Text> values, Context context)
			throws IOException, InterruptedException {
			
			if(key == null || key.toString().equals("")){
				System.out.println("key return null.");
				return;
			} 
			long v1 = 2000000000;
			long v2 = 0;
			long v3 = 0;
			String[] ksp = key.toString().split("\\|",-1);
			
			if(ksp.length != 3) {
				System.out.println("key's length error.");
				return;
			}

			if(ksp[1].trim().equals("0")){//length model_  key:a.163.com|0| value:6|12
				for (Text value : values) {	
					v1 = Math.min(v1, Long.parseLong(value.toString().split("\\|",-1)[0]));
					v2 = Math.max(v2, Long.parseLong(value.toString().split("\\|",-1)[1]));
				}
				context.write(new Text(ksp[0] + "|" + ksp[1] + "|" + String.valueOf(v1) + "|" + String.valueOf(v2)),new Text(""));
			}else{//ngram model_  key:a.163.com|1|a/b value:6
				for (Text value : values) {				
					v3 += Long.parseLong(value.toString().trim());
				}
				context.write(new Text(ksp[0] + "|" + ksp[1] + "|" + ksp[2] + "|" + String.valueOf(v3)),new Text(""));
			}
		}
	}

	
	/** 初始化JOB
	 * @param
	 * args[0]=domain  args[1]=outputpath args[2]=inputpath
	 * jarname 163.com /user/data/tianwang/out/163_com   /user/data/tianwang/in/163_com
	 */
	/**
	 * Builds (but does not submit) the n-gram model job.
	 *
	 * @param conf Hadoop configuration; the static-resource suffix blacklist is injected here
	 * @param args args[0]=domain, args[1]=output path, args[2]=comma-separated input paths,
	 *             e.g. {@code 163.com /user/data/tianwang/out/163_com /user/data/tianwang/in/163_com}
	 * @return the fully configured job
	 * @throws IOException if a path cannot be resolved or the stale output cannot be deleted
	 */
	public Job NGramModelJob(final Configuration conf, final String[] args) throws IOException {

		// java.time instead of SimpleDateFormat: immutable and thread-safe.
		String statstamp = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyyMMdd"));
		String jobname = ">>>NGramModelJob>>> " + args[0] + ">>>" + statstamp;
		String input = args[2];
		// File suffixes of static resources the mapper must ignore ('|'-separated).
		conf.set("SuffixName", "cgi|tpt|mp4|htc|bif|m4a|so|ipcc|ipd|patch|trt|cfg|torrent|crl|lcr|fcg|do|m3u8|config|cdr|mkv|tsd|crc|flv|wav|manifest|mpq|mfil|swz|dns|msi|pack|ts|ver|ctl|srf|zip|rar|bmp|ico|pic|exe|doc|docx|pdf|xls|xlsx|gz|f4v|gif|jpg|png|css|json|js|txt|eot|id|gzip|cab|7z|vbs|cer|etf|kwp|swf|dat|ini|dll|zl|msu|lua|asa|psf|jpeg|qpyd|blf|ads|bin|xdelta|webp|xml|lrc|data|e|lst|npk|crt|rp|dtd|msp|rmvb|bup|sdo|org|cit|qgi"); 

		Job job = Job.getInstance(conf);
		job.setJobName(jobname);
		job.setJarByClass(NGramModel.class);
		for (String pt : input.split(",")) {
			FileInputFormat.addInputPath(job, new Path(pt));
		}
		job.setMapperClass(NGramModelMapper.class);
		// Delete any stale output so the job does not abort on an existing directory.
		Path outputpath = new Path(args[1]);
		outputpath.getFileSystem(conf).delete(outputpath, true);
		FileOutputFormat.setOutputPath(job, outputpath);
		job.setInputFormatClass(TextInputFormat.class);
		job.setReducerClass(NGramModelReducer.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		return job;
	}
	
	
	/**
	 * @param errorMsg Error message. Can be null.
	 */
	/**
	 * Logs the command-line usage, optionally preceded by an error message.
	 *
	 * @param errorMsg error description; may be null or empty, in which case it is omitted
	 */
	private static void usage(final String errorMsg) {
		boolean hasError = errorMsg != null && !errorMsg.isEmpty();
		if (hasError) {
			log.error("ERROR: " + errorMsg);
		}
		log.info("Usage: hadoop jar XX.jar jobname output inputlist");
	}
	
	/**
	 * Entry point: delegates to {@link ToolRunner} (which handles generic Hadoop
	 * options) and exits with the tool's return code.
	 */
	public static void main(final String[] args) throws Exception {
		log.info("-------NGramModel main start----");
		//PropertyConfigurator.configure("../conf/log4j.properties");
		System.exit(ToolRunner.run(new NGramModel(), args));
	}
	
	/** 
	 * @param
	 * args[0]=jobname  args[1]=outputpath args[2]=inputpath
	 */
	/**
	 * Tool entry: parses generic options, validates the remaining arguments and
	 * runs the job synchronously.
	 *
	 * @param args args[0]=jobname/domain, args[1]=output path, args[2]=input path list
	 * @return 0 on success, 1 if the job fails, -1 on bad arguments
	 */
	public int run(String[] args) throws Exception {
		conf = new Configuration();
		String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
		for (String arg : otherArgs) {
			log.info(arg);
		}
		if (otherArgs.length != 3) {
			usage("Wrong number of arguments: " + otherArgs.length);
			// Honour the Tool contract: signal failure through the return code
			// instead of System.exit() inside run(); main() exits with it anyway.
			return -1;
		}

		Job statics = NGramModelJob(conf, otherArgs);
		return statics.waitForCompletion(true) ? 0 : 1;
	}
}
