package com.bj58.ecdata.hadoop.fourthinfo;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;


public class FourthinfoMultiOutput {
	private static final String input="/dsap/resultdata/infostat/ZP_InfoJobHDFS/yyyyMMdd/";
	private static final String output="/dsap/middata/EC_Post_Detail_test_Job/";

	// Mapper
	public static class FourthInfoMapper extends Mapper<LongWritable, Text, Text, Text> {
		private static Pattern p = Pattern.compile("^cate(first|second)(\\d+)_local(first|second|third)(\\d+)_(.*?)(\\d{8})_([iu]count:\\d+)");
		private HashMap<String,LocalInfo> localInfos=null;
		
		private Text mapOutputKey = new Text();
		private Text mapOutputValue = new Text();
		private static HashSet<String> citySet=new HashSet<String>();
		static{
			String citys[]={"西安","呼和浩特","潍坊","衡水","三亚","天水","石嘴山"};
			citySet.addAll(Arrays.asList(citys));
		}
		public void setup(Context context) {
			try {
				BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream("DisLocal.part"),"UTF-8"));
				localInfos=getLocalInfo(br);
				System.out.println(localInfos.size()+" localInfo!");
			} catch (IOException e) {
				e.printStackTrace();
				return;
			}
		}
		
		public void map(LongWritable key, Text value, Context context) {
			Matcher m = p.matcher(value.toString());
			if (m.find()) {
				String localid = m.group(4);
				if(!localInfos.containsKey(localid))
					return;
				LocalInfo local=localInfos.get(localid);
				String cityName=local.getLocalname();
				if(!citySet.contains(cityName))
					return;
				String info = m.group(5) + m.group(7);
				String month=m.group(6).substring(0,6);
				mapOutputKey.set(month+"_"+localid);
				mapOutputValue.set(info);
				try {
					context.write(mapOutputKey, mapOutputValue);
				} catch (Exception e) {
					e.printStackTrace();
					return;
				}
			}
		}
	}

	// Reducer: folds the per-(month, local) counter values into one CSV row whose
	// column order is given by `fields`, and writes it via MultipleOutputs into a
	// per-month subdirectory.
	public static class FourthInfoReducer extends Reducer<Text, Text, NullWritable, Text> {
		// Output column order; rows are emitted in exactly this order.
		private String[] fields = { "stat_date", "local_id","dispcity1", "dispcity2",
			"dispcity3", "cate_id","belong_cate1", "belong_cate2", "total_info_count",
			"total_info_users", "valid_info_count", "valid_info_users",
			"new_info_count", "new_info_users", "modify_info_count",
			"modify_info_users", "refresh_info_count", "refresh_info_users",
			"new_valid_info_count", "new_valid_info_users",
			"ordinary_valid_info_count", "ordinary_valid_info_users",
			"ordinary_new_info_count", "ordinary_new_info_users",
			"ordinary_modify_info_count", "ordinary_modify_info_users",
			"ordinary_refresh_info_count", "ordinary_refresh_info_users",
			"ordinary_new_valid_info_count", "ordinary_new_valid_info_users",
			"vip_valid_info_count", "vip_valid_info_users",
			"vip_new_info_count", "vip_new_info_users",
			"vip_modify_info_count", "vip_modify_info_users",
			"vip_refresh_info_count", "vip_refresh_info_users",
			"vip_new_valid_info_count", "vip_new_valid_info_users" };
		private MultipleOutputs<NullWritable, Text> mos;
		private HashMap<String, Integer> fieldsIndex = null;
		// Raw counter key (e.g. "state1_source0_icount") -> output column name.
		private HashMap<String, String> dict = new HashMap<String, String>();
		private HashMap<String, LocalInfo> localInfos = null;
		private NullWritable nullKey = NullWritable.get();
		private Text outputValue = new Text();

		@Override
		public void setup(Context context) {
			mos = new MultipleOutputs<NullWritable, Text>(context);
			// states[i] pairs with types[i]; src[j] pairs with roles[j].
			String[] states = { "state1", "uptype0", "uptype1", "uptype2",
			"state1_uptype0" };
			String[] types = { "valid", "new", "modify", "refresh", "new_valid" };
			String[] src = { "", "source0", "source6" };
			String[] roles = { "", "ordinary", "vip" };
			String[] countType = {"_info_count", "_info_users" };
			String[] counters = { "icount", "ucount" };
			for (int i = 0; i < types.length; i++)
				for (int j = 0; j < roles.length; j++)
					for (int k = 0; k < countType.length; k++) {
						if (j == 0)
							dict.put(states[i] + "_" + counters[k], types[i] + countType[k]);
						else
							dict.put(states[i] + "_" + src[j] + "_" + counters[k],
									roles[j] + "_" + types[i] + countType[k]);
					}
			dict.put("icount", "total_info_count");
			dict.put("ucount", "total_info_users");
			fieldsIndex = new HashMap<String, Integer>();
			for (int i = 0; i < fields.length; i++) {
				fieldsIndex.put(fields[i], i);
			}
			try {
				// NOTE: the original also opened "BelongCate.part" here but the cate
				// lookup is commented out, so that reader was never used or closed —
				// a file-handle leak. The dead open has been removed.
				BufferedReader br = new BufferedReader(new InputStreamReader(
						new FileInputStream("DisLocal.part"), "UTF-8"));
				localInfos = getLocalInfo(br);
				System.out.println(localInfos.size() + " localInfo!");
			} catch (IOException e) {
				// Fail fast: reduce() cannot run without the local-id dictionary.
				throw new RuntimeException("failed to load DisLocal.part from DistributedCache", e);
			}
		}

		@Override
		public void cleanup(Context context) {
			try {
				mos.close(); // flushes the per-month named outputs
			} catch (Exception e) {
				e.printStackTrace();
			}
		}

		public void reduce(Text key, Iterable<Text> values, Context context) {
			// Key format: "yyyyMM_localid" (built by the mapper).
			String[] month_localid = key.toString().split("_");
			String month = month_localid[0];
			String localid = month_localid[1];
			LocalInfo local = localInfos.get(localid);
			if (local == null)
				return;
			String[] row = new String[fields.length];
			Arrays.fill(row, "0");
			row[fieldsIndex.get("stat_date")] = month;
			row[fieldsIndex.get("local_id")] = localid;
			// Fill dispcity1..3 with ancestor city names; blank when unknown/unset.
			String[] localIds = local.getLocalids();
			int index = fieldsIndex.get("dispcity1");
			for (int i = 0; i < 3; i++) {
				LocalInfo parent = localInfos.get(localIds[i]);
				row[index + i] = (localIds[i].equals("0") || parent == null)
						? "" : parent.getLocalname();
			}
			boolean flag = false;
			for (Text t : values) {
				// Each value looks like "<counterKey>:<number>".
				String[] info = t.toString().split(":");
				if (info.length < 2)
					continue; // malformed value: skip instead of throwing AIOOBE
				String f = dict.get(info[0].trim());
				if (f == null)
					continue; // counter key not mapped to an output column
				flag = true;
				int fieldindex = fieldsIndex.get(f);
				try {
					long number = Long.parseLong(row[fieldindex]);
					row[fieldindex] = String.valueOf(number + Long.parseLong(info[1].trim()));
				} catch (NumberFormatException e) {
					e.printStackTrace();
				}
			}
			if (!flag)
				return; // nothing recognized: emit no row for this key
			// CSV-join the row.
			StringBuilder strbuilder = new StringBuilder();
			for (int i = 0; i < row.length; i++) {
				if (i > 0)
					strbuilder.append(',');
				strbuilder.append(row[i]);
			}
			outputValue.set(strbuilder.toString());
			try {
				// One output subdirectory per month, e.g. "<out>/201301/part-r-*".
				mos.write(nullKey, outputValue, month + "/part");
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
	}
	

	 /**删HDFS文件夹
    * @param dirPath  路径
    * @param isNull	true:非空文件夹也删除   false:只删除空文件夹
    * @throws IOException
    */
   public static boolean deleteDir(String dirPath,boolean isNull) throws IOException
   {
       Configuration conf = new Configuration();
       FileSystem fs = FileSystem.get(conf);
       Path path=new Path(dirPath);
       boolean isDeleted  = fs.delete(path, isNull);
       
       fs.close();
       return isDeleted;
   }
   
   
   public static boolean checkPath(String dirPath) throws IOException
   {
       Configuration conf = new Configuration();
       FileSystem fs = FileSystem.get(conf);
       Path path=new Path(dirPath);
       boolean exists  = fs.exists(path);
       
       fs.close();
       return exists;
   }
   
	/**
	 * Serializable value object for a display-local (city/district): its display
	 * name plus a three-level chain of ancestor local ids. The sentinel "0"
	 * marks a level that has not been set.
	 */
	public static class LocalInfo implements Serializable {
		private static final long serialVersionUID = 1L;
		private String localname;
		// localids[0..2] = first/second/third level ancestor ids, "0" when unset.
		private String[] localids = {"0", "0", "0"};

		public LocalInfo(String localname) {
			this.localname = localname;
		}

		/** Sets all three ancestor ids at once (in place). */
		public void setLocalIds(String id1, String id2, String id3) {
			String[] ids = {id1, id2, id3};
			for (int i = 0; i < ids.length; i++) {
				localids[i] = ids[i];
			}
		}

		public String getLocalname() {
			return localname;
		}

		public void setLocalname(String localname) {
			this.localname = localname;
		}

		/** Returns the internal id array (not a copy). */
		public String[] getLocalids() {
			return localids;
		}

		public void setLocalids(String[] localids) {
			this.localids = localids;
		}

		/** Sets a single ancestor id at the given 0-based level. */
		public void setLocalids(int index, String id) {
			localids[index] = id;
		}
	}
	/**
	 * Serializable value object for a category: its display name plus a
	 * two-level chain of ancestor category ids ("0" = unset level).
	 */
	public static class CateInfo implements Serializable {
		private static final long serialVersionUID = 2L;
		private String catename;
		// cateids[0..1] = first/second level ancestor ids, "0" when unset.
		private String[] cateids = {"0", "0"};

		public CateInfo(String catename) {
			this.catename = catename;
		}

		/** Sets both ancestor ids at once (in place). */
		public void setCateIds(String id1, String id2) {
			String[] ids = {id1, id2};
			for (int i = 0; i < ids.length; i++) {
				cateids[i] = ids[i];
			}
		}

		public String getCatename() {
			return catename;
		}

		public void setCatename(String catename) {
			this.catename = catename;
		}

		/** Returns the internal id array (not a copy). */
		public String[] getCateids() {
			return cateids;
		}

		public void setCateids(String[] cateids) {
			this.cateids = cateids;
		}

		/** Sets a single ancestor id at the given 0-based level. */
		public void setCateids(int index, String id) {
			cateids[index] = id;
		}
	}

	/**
	 * Parses the display-local dictionary (tab-separated) into a map of
	 * local id -> LocalInfo. Expected columns: [0]=local id, [2]=local name,
	 * [4]=comma-separated ancestor ids (at most three levels are kept).
	 *
	 * @param reader dictionary source; always closed by this method
	 * @return map from local id to its LocalInfo
	 * @throws IOException if reading fails
	 */
	public static HashMap<String, LocalInfo> getLocalInfo(BufferedReader reader) throws IOException {
		HashMap<String, LocalInfo> locals = new HashMap<String, LocalInfo>();
		try {
			String line;
			while ((line = reader.readLine()) != null) {
				String[] fields = line.split("\t");
				if (fields.length < 5)
					continue; // malformed line: skip instead of ArrayIndexOutOfBounds
				LocalInfo local = new LocalInfo(fields[2]);
				String[] levels = fields[4].split(",");
				int len = Math.min(levels.length, 3);
				for (int i = 0; i < len; i++) {
					local.setLocalids(i, levels[i]);
				}
				locals.put(fields[0], local);
			}
		} finally {
			// Original closed only on success; close unconditionally so a parse
			// failure cannot leak the file handle.
			reader.close();
		}
		return locals;
	}
	
	/**
	 * Parses the category dictionary (tab-separated) into a map of
	 * cate id -> CateInfo. Expected columns: [0]=cate id, [2]=cate name,
	 * [5]=comma-separated ancestor ids (at most two levels are kept).
	 *
	 * @param reader dictionary source; always closed by this method
	 * @return map from cate id to its CateInfo
	 * @throws IOException if reading fails
	 */
	public static HashMap<String, CateInfo> getCateInfo(BufferedReader reader) throws IOException {
		HashMap<String, CateInfo> cateInfos = new HashMap<String, CateInfo>();
		try {
			String line;
			while ((line = reader.readLine()) != null) {
				String[] fields = line.split("\t");
				if (fields.length < 6)
					continue; // malformed line: skip instead of ArrayIndexOutOfBounds
				CateInfo cate = new CateInfo(fields[2]);
				String[] levels = fields[5].split(",");
				int len = Math.min(levels.length, 2);
				for (int i = 0; i < len; i++) {
					cate.setCateids(i, levels[i]);
				}
				cateInfos.put(fields[0], cate);
			}
		} finally {
			// Close unconditionally so a parse failure cannot leak the handle.
			reader.close();
		}
		return cateInfos;
	}

	
	
	/**
	 * Configures and runs the job over all daily input directories in
	 * [startDate, endDate] (dates in yyyy-MM-dd). Dictionary files for the end
	 * date are shipped to the tasks through the DistributedCache.
	 *
	 * @param startDate first day, inclusive (yyyy-MM-dd)
	 * @param endDate   last day, inclusive (yyyy-MM-dd)
	 * @return 0 when the job succeeds, 1 when it fails
	 * @throws Exception on configuration, URI, or filesystem errors
	 */
	public static int hadoopMainEntrance(String startDate, String endDate) throws Exception {
		String statDate = DateUtil.strToStr(endDate, "yyyyMMdd");
		Configuration conf = new Configuration();
		Path catePath = new Path("/dsap/rawdata/cmc_category/" + statDate
				+ "/part-m-00000");
		Path localPath = new Path("/dsap/rawdata/cmc_diplocal/" + statDate
				+ "/part-m-00000");
		// The fragment after '#' is the symlink name the tasks open; link names
		// of different cache files must be distinct.
		String outPath1UriWithLink = catePath.toUri().toString() + "#"
				+ "BelongCate.part";
		String outPath2UriWithLink = localPath.toUri().toString() + "#"
				+ "DisLocal.part";
		// This method declares "throws Exception", so URI problems propagate.
		// (The original swallowed URISyntaxException for the first cache file
		// only, which would launch the job with a missing dictionary.)
		DistributedCache.addCacheFile(new URI(outPath1UriWithLink), conf);
		DistributedCache.addCacheFile(new URI(outPath2UriWithLink), conf);

		Job job = new Job(conf, "EC_Post_Detail_Test_Job");
		// BUG FIX: was FourthInfo.class (copy-paste) — anchor the job jar to this class.
		job.setJarByClass(FourthinfoMultiOutput.class);
		job.setMapperClass(FourthInfoMapper.class);
		job.setReducerClass(FourthInfoReducer.class);
		job.setNumReduceTasks(1);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(NullWritable.class);
		job.setOutputValueClass(Text.class);
		MultipleOutputs.addNamedOutput(job, "multiple", TextOutputFormat.class,
				NullWritable.class, Text.class);

		// Hoisted out of the loop: one FileSystem lookup instead of one per day.
		FileSystem fs = FileSystem.get(conf);
		for (String date : DateUtil.getDateList(startDate, endDate)) {
			date = DateUtil.strToStr(date, "yyyyMMdd");
			// replace() — the placeholder is literal text, no regex needed.
			Path dayInput = new Path(input.replace("yyyyMMdd", date));
			if (fs.exists(dayInput)) {
				FileInputFormat.addInputPath(job, dayInput);
			}
		}

		// Recreate the output directory from scratch.
		if (checkPath(output)) {
			deleteDir(output, true);
		}
		FileOutputFormat.setOutputPath(job, new Path(output));
		return job.waitForCompletion(true) ? 0 : 1;
	}


	/**
	 * @param args
	 * @throws Exception 
	 */
	/**
	 * Entry point. Accepts zero, one, or two date arguments in yyyyMMdd or
	 * yyyy-MM-dd form; with no arguments the job runs for yesterday.
	 *
	 * @param args [startDate [endDate]]
	 * @throws Exception on job failure
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		GenericOptionsParser parser = new GenericOptionsParser(conf, args);
		String[] otherArgs = parser.getRemainingArgs();
		String startDate = "";
		String endDate = "";
		if (otherArgs.length > 0) {
			if (otherArgs.length == 1) {
				startDate = otherArgs[0];
				endDate = otherArgs[0];
				if (!checkParam(startDate, endDate)) {
					return;
				}
				// BUG FIX: the original read "if(!startDate.contains("-"));" with a
				// stray semicolon, so the conversion always ran — re-formatting an
				// already-dashed date through "yyyyMMdd" misparses it. Convert only
				// compact yyyyMMdd input.
				if (!startDate.contains("-")) {
					startDate = DateUtil.getStringDate(DateUtil.strToDate(startDate, "yyyyMMdd"), "yyyy-MM-dd");
				}
				endDate = startDate;
			} else if (otherArgs.length == 2) {
				startDate = otherArgs[0];
				endDate = otherArgs[1];
				if (!checkParam(startDate, endDate)) {
					return;
				}
				if (!startDate.contains("-")) {
					startDate = DateUtil.getStringDate(DateUtil.strToDate(startDate, "yyyyMMdd"), "yyyy-MM-dd");
				}
				if (!endDate.contains("-")) {
					endDate = DateUtil.getStringDate(DateUtil.strToDate(endDate, "yyyyMMdd"), "yyyy-MM-dd");
				}
			}
		}

		// Default: run for yesterday when no dates were supplied.
		if (startDate == null || "".equals(startDate)) {
			startDate = DateUtil.getStringDate(DateUtil.getDateBefore(new Date(), 1), "yyyy-MM-dd");
			endDate = startDate;
		}
		hadoopMainEntrance(startDate, endDate);
	}

	// Accepts yyyyMMdd or yyyy-MM-dd. Compiled once — Pattern is thread-safe
	// and immutable; the original recompiled it on every call.
	private static final Pattern DATE_PARAM_PATTERN =
			Pattern.compile("^((\\d{8})|(\\d{4}-\\d{2}-\\d{2}))$");

	/** Prints the command-line usage banner (was duplicated inline twice). */
	private static void printUsage() {
		System.out.println("************************************************");
		System.out.println("Usage: please input params: startDate [endDate]");
		System.out.println("for example: 2013-01-01 2013-01-06  or  20130106");
		System.out.println("************************************************");
	}

	/**
	 * Validates both date arguments against yyyyMMdd / yyyy-MM-dd.
	 * Prints the usage banner and returns false on the first invalid argument.
	 *
	 * @param startDate first date argument
	 * @param endDate   second date argument
	 * @return true when both dates match an accepted format
	 */
	public static boolean checkParam(String startDate, String endDate) {
		for (String date : new String[] {startDate, endDate}) {
			if (!DATE_PARAM_PATTERN.matcher(date).matches()) {
				printUsage();
				return false;
			}
		}
		return true;
	}
}

