package com.aotain.coeus.mr;

import java.io.IOException;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.hadoop.mapreduce.LzoTextInputFormat;

/**
 * MapReduce driver that aggregates '|'-delimited HTTP log records by
 * (rootDomain, date, field 2) and writes one
 * {@code <key>,<rootDomain>,<date>,<field2>,<count>} row per group, suitable
 * for loading into Hive.
 *
 * <p>Usage: {@code SdsHttpStatIntoHive <InputPath> <targetPath> <Hour>}
 */
public class SdsHttpStatIntoHive extends Configured implements Tool {

	public static void main(String[] args) throws Exception {
		int exitcode = ToolRunner.run(new SdsHttpStatIntoHive(), args);
		System.exit(exitcode);
	}

	/**
	 * Configures and submits the aggregation job.
	 *
	 * @param args {@code <InputPath> <targetPath> <Hour>}
	 * @return 0 on success (or when the input path is absent), 1 on job
	 *         failure, -1 on bad usage
	 * @throws Exception on filesystem or job-submission errors
	 */
	@Override
	public int run(String[] args) throws Exception {
		if (args.length != 3) {
			System.err.printf("Usage: %s <InputPath> <targetPath> <Hour>%n", getClass().getSimpleName());
			ToolRunner.printGenericCommandUsage(System.err);
			return -1;
		}

		// Use the configuration injected by ToolRunner so that generic options
		// (-D, -conf, -fs, ...) are honored; "new Configuration()" here would
		// silently discard them, defeating the purpose of implementing Tool.
		Configuration conf = getConf();
		if (conf == null) {
			conf = new Configuration();
		}

		String inputPostPath = args[0];
		String targetPath = args[1];
		String hour = args[2];

		// Partition timestamp handed to the mappers: yesterday's date, the
		// requested hour, and a fixed "0000" minutes/seconds suffix.
		Calendar cal = Calendar.getInstance();
		cal.setTime(new Date());
		cal.add(Calendar.DAY_OF_MONTH, -1);
		SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
		String date = String.format("%s%s0000", sdf.format(cal.getTime()), hour);
		conf.set("date", date);

		// Nothing to process when the input path does not exist: exit quietly.
		FileSystem fsSource = FileSystem.get(URI.create(inputPostPath), conf);
		if (!fsSource.exists(new Path(inputPostPath))) {
			return 0;
		}

		// A pre-existing output directory would make the job fail; remove it.
		FileSystem fsTarget = FileSystem.get(URI.create(targetPath), conf);
		Path pathTarget = new Path(targetPath);
		if (fsTarget.exists(pathTarget)) {
			fsTarget.delete(pathTarget, true);
		}

		// NOTE(review): on Hadoop 2+ prefer Job.getInstance(conf); the
		// deprecated constructor is kept for compatibility with Hadoop 1.x.
		Job job = new Job(conf);
		job.setJarByClass(getClass());
		job.setJobName("SdsHttpStatIntoHive");

		job.setMapperClass(SdsHttpStatHMapper.class);
		job.setReducerClass(SdsHttpStatHReduce.class);

		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(IntWritable.class);

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		job.setInputFormatClass(LzoTextInputFormat.class);

		FileInputFormat.addInputPath(job, new Path(inputPostPath));
		FileOutputFormat.setOutputPath(job, new Path(targetPath));

		return job.waitForCompletion(true) ? 0 : 1;
	}

	/**
	 * Emits {@code (rootDomain_date_field2, 1)} for every parseable log line.
	 * Lines are '|'-delimited; field 6 carries the host name and field 2 is
	 * copied into the key verbatim (its exact meaning is not visible here).
	 */
	static class SdsHttpStatHMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
		// Reused output instances: avoids one allocation per record.
		private final Text K2 = new Text();
		private final IntWritable V2 = new IntWritable(1);
		private String dateStr = "";

		@Override
		protected void setup(Context context) throws IOException, InterruptedException {
			// Partition timestamp published by the driver via conf.set("date", ...).
			dateStr = context.getConfiguration().get("date");
		}

		// s is never null here (String.split produces no null elements), but the
		// original null guard is preserved for safety.
		private static boolean isBlank(String s) {
			return s == null || s.isEmpty();
		}

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String line = value.toString();
			if (line == null || line.isEmpty()) {
				return;
			}
			String[] splits = line.split("\\|", -1);
			if (splits.length < 8 || isBlank(splits[6]) || isBlank(splits[7]) || isBlank(splits[2])) {
				return;
			}

			String domainname = splits[6];
			String[] strArr = domainname.split("\\.", -1);
			String rootDomain = "";
			if (IsSpecialRoot(domainname) && strArr.length >= 3) {
				// Multi-part registry suffixes (e.g. .com.cn, .com.hk): the root
				// domain keeps the last three labels.
				rootDomain = String.format("%s.%s.%s", strArr[strArr.length - 3],
						strArr[strArr.length - 2], strArr[strArr.length - 1]);
			} else if (IsSpecialRoot2Level(domainname) && strArr.length > 1) {
				// Single-part suffixes: the last two labels suffice.
				rootDomain = String.format("%s.%s",
						strArr[strArr.length - 2], strArr[strArr.length - 1]);
			}

			if (!"".equals(rootDomain)) {
				// Strip a trailing ":port" that may remain on the last host label.
				int colon = rootDomain.lastIndexOf(':');
				if (colon >= 0) {
					rootDomain = rootDomain.substring(0, colon);
				}
				K2.set(String.format("%s_%s_%s", rootDomain, dateStr, splits[2]));
				context.write(K2, V2);
			}
		}
	}

	/**
	 * Sums the per-key 1-counts and writes
	 * {@code <key>,<rootDomain>,<date>,<field2>,<sum>} as the output key with
	 * an empty value (the trailing tab separates the empty Text).
	 */
	static class SdsHttpStatHReduce extends Reducer<Text, IntWritable, Text, Text> {
		private final Text k3 = new Text();
		private final Text v3 = new Text("");

		@Override
		public void reduce(Text key, Iterable<IntWritable> values, Context context)
				throws IOException, InterruptedException {
			int sum = 0;
			for (IntWritable v : values) {
				sum += v.get();
			}
			String str = key.toString();
			// Keys were built by the mapper as rootDomain_date_field2, so at
			// least three '_'-separated parts are always present.
			String[] splits = str.split("_", -1);
			k3.set(String.format("%s,%s,%s,%s,%s", str, splits[0], splits[1], splits[2], sum));
			context.write(k3, v3);
		}
	}

	// True for hosts under multi-part public suffixes, where the root domain
	// should retain three labels (handled in the mapper above).
	private static boolean IsSpecialRoot(String domain) {
		return domain.contains(".com.")
				|| domain.contains(".co.")
				|| domain.contains("gd.cn")
				|| domain.contains(".cn.com")
				|| domain.contains(".gov.")
				|| domain.contains(".net.")
				|| domain.contains(".edu.")
				|| domain.contains(".org.")
				|| domain.contains(".ac.");
	}

	// True for hosts under single-part suffixes (two labels suffice).
	// NOTE(review): contains("cn") — no dots — matches ANY host containing the
	// substring "cn" (e.g. "cncdn.example"). Kept as-is to preserve the emitted
	// keys, but this was likely intended to be ".cn"; confirm before changing.
	private static boolean IsSpecialRoot2Level(String domain) {
		return domain.contains(".com")
				|| domain.contains("cn")
				|| domain.contains(".net")
				|| domain.contains(".org");
	}
}
