package ec_fourthinfo;


import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.util.Arrays;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class FourthInfoV_2 {
	
	// Mapper
	public static class FourthInfoMapper extends Mapper<LongWritable, Text, Text, Text> {
		private static Pattern p = Pattern.compile("^cate(first|second)(\\d+)_local(first|second|thirt)(\\d+)_(.*?)(\\d{8})_(.*)");
		private Text mapOutputKey = new Text();
		private Text mapOutputValue = new Text();

		public void map(LongWritable key, Text value, Context context) {
			Matcher m = p.matcher(value.toString());
			if (m.find()) {
				String cateid_localid = m.group(2) + "_" + m.group(4);
				String info = m.group(5) + m.group(7);
				mapOutputKey.set(cateid_localid);
				mapOutputValue.set(info);
				try {
					context.write(mapOutputKey, mapOutputValue);
				} catch (Exception e) {
					e.printStackTrace();
					return;
				}
			}
		}
	}

	// Reducer
	public static class FourthInfoReducer extends Reducer<Text, Text, NullWritable, Text> {
		private String[] fields = { "cateid","stat_date", "dispcity1", "dispcity2",
			"dispcity3", "belong_cate1", "belong_cate2", "total_info_count",
			"total_info_users", "valid_info_count", "valid_info_users",
			"new_info_count", "new_info_users", "modify_info_count",
			"modify_info_users", "refresh_info_count", "refresh_info_users",
			"new_valid_info_count", "new_valid_info_users",
			"ordinary_valid_info_count", "ordinary_valid_info_users",
			"ordinary_new_info_count", "ordinary_new_info_users",
			"ordinary_modify_info_count", "ordinary_modify_info_users",
			"ordinary_refresh_info_count", "ordinary_refresh_info_users",
			"ordinary_new_valid_info_count", "ordinary_new_valid_info_users",
			"vip_valid_info_count", "vip_valid_info_users",
			"vip_new_info_count", "vip_new_info_users",
			"vip_modify_info_count", "vip_modify_info_users",
			"vip_refresh_info_count", "vip_refresh_info_users",
			"vip_new_valid_info_count", "vip_new_valid_info_users" };

		private HashMap<String, Integer> fieldIndex = null;
		private HashMap<String, String> dict = new HashMap<String, String>();
		private HashMap<String,LocalInfo> localInfos=null;
		private HashMap<String,CateInfo> cateInfos=null;
		private String date;
		private NullWritable nullKey=NullWritable.get();
		private Text outputValue=new Text();

		public  void init(){
			String[] states = { "state1", "uptype0", "uptype1", "uptype2",
					"state1_uptype0" };
			String[] types = { "valid", "new", "modify", "refresh", "new_valid" };

			String[] src = { "", "source0", "source6" };
			String[] roles = { "", "ordinary", "vip" };

			String[] countType = { "_info_count", "_info_users" };
			String[] counters = { "icount", "ucount" };
			for (int i = 0; i < types.length; i++)
				for (int j = 0; j < roles.length; j++)
					for (int k = 0; k < countType.length; k++) {
						if(j==0)
							dict.put(states[i] + "_" + counters[k], types[i] + "_" + countType[k]);
						else
							dict.put(states[i] + "_" + src[j] + "_" + counters[k],roles[j] + "_" + types[i] + "_" + countType[k]);
					}
			dict.put("icount", "total_info_count");
			dict.put("ucount", "total_info_users");
			fieldIndex = new HashMap<String, Integer>();
			for (int i = 0; i < fields.length; i++) {
				fieldIndex.put(fields[i], i);
			}

		}
		public void setup(Context context) {
			init();
			date = context.getConfiguration().get("DATE");
			Path cateInfoPath=new Path("/dsap/rawdata/cmc_category/"+date+"/part-m-00000");
			Path localInfoPath=new Path("/dsap/rawdata/cmc_diplocal/"+date+"/part-m-00000");
			FileSystem hdfs;
			try {
				hdfs = FileSystem.get(context.getConfiguration());
				localInfos=getLocalInfo(hdfs,localInfoPath);
				cateInfos=getCateInfo(hdfs,cateInfoPath);
				System.out.println(localInfos.size()+" localInfo!");
				System.out.println(cateInfos.size()+" cateInfo!");
			} catch (IOException e) {
				e.printStackTrace();
			}
		}

		public void cleanup(Context context) throws IOException {
		}

		public HashMap<String,LocalInfo> getLocalInfo(FileSystem hdfs,Path path) throws IOException{
			FSDataInputStream hdfsInStream = hdfs.open(path);
			BufferedReader reader=new BufferedReader(new InputStreamReader(hdfsInStream));
			String line=null;
			HashMap<String,LocalInfo> locals=new HashMap<String,LocalInfo>();
			while((line=reader.readLine())!=null){
				System.out.println(line);
				String fields[]=line.split("\t");
				String localid=fields[0];
				String localname=fields[2];
				LocalInfo local=new LocalInfo(localname);
				String levels[]=fields[4].split(",");
				int len=levels.length<3?levels.length:3;
				for(int i=0;i<len;i++){
					local.setLocalids(i, levels[i]);
				}
				locals.put(localid, local);
			}
			reader.close();
			return locals;
		}
		
		public HashMap<String,CateInfo> getCateInfo(FileSystem hdfs,Path path) throws IOException{
			FSDataInputStream hdfsInStream = hdfs.open(path);
			BufferedReader reader=new BufferedReader(new InputStreamReader(hdfsInStream));
			String line=null;
			HashMap<String,CateInfo> cateInfos=new HashMap<String,CateInfo>();
			while((line=reader.readLine())!=null){
				System.out.println(line);
				String fields[]=line.split("\t");
				String cateid=fields[0];
				String catename=fields[2];
				CateInfo cate=new CateInfo(catename);
				String levels[]=fields[4].split(",");
				int len=levels.length<2?levels.length:2;
				for(int i=0;i<len;i++){
					cate.setCateids(i, levels[i]);
				}
				cateInfos.put(cateid, cate);
			}
			reader.close();
			return cateInfos;
		}
		public void reduce(Text key, Iterable<Text> values, Context context) {
			String[] cate_local_id=key.toString().split("_");
			String cateid=cate_local_id[0]; 
			String localid=cate_local_id[1];
			String[] row=new String[fields.length];
			Arrays.fill(row, "0");
			row[0]=cateid;
			row[1]=date;
			CateInfo  cate=cateInfos.get(cateid);
			String[] catesIds=cate.getCateids();
			int index=fieldIndex.get("belong_cate1");
			for(int i=0;i<2;i++){
				if(catesIds[i].equals("0"))
					row[index+i]="";
				else
					row[index+i]=cateInfos.get(catesIds[i]).getCatename();
			}
			LocalInfo local=localInfos.get(localid);
			String[] localIds=local.getLocalids();
			index=fieldIndex.get("dispcity1");
			for(int i=0;i<3;i++){
				if(localIds[i].equals("0"))
					row[index+i]="";
				else
					row[index+i]=localInfos.get(localIds[i]).getLocalname();
			}	
			
//			for(Text t:values){
//				String[] info=t.toString().split(":");
//				String infoKey=info[0];
//				String infoValue=info[1];
//				String f=dict.get(infoKey);
//				int fieldindex=fieldIndex.get(f);
//				row[fieldindex]=infoValue;
//			}
			StringBuilder strbuilder=new StringBuilder();
			for(int i=0;i<fields.length-1;i++){
				strbuilder.append(fields[i]+",");
			}
			strbuilder.append(fields[fields.length-1]);
			outputValue.set(strbuilder.toString());
			try {
				context.write(nullKey, outputValue);
			} catch (IOException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			} catch (InterruptedException e) {
				e.printStackTrace();
				return;
			}
		}
		
	}

	/**
	 * Driver: runs one MapReduce job per date in the inclusive numeric range
	 * given by args[0] ("start[,end]", e.g. "20130101,20130106" or "20130106").
	 * Dates whose input directory does not exist are skipped. Exits 0 when every
	 * job that ran succeeded, 1 when any job failed, 126 when no job ran at all,
	 * and 127 on a usage error.
	 *
	 * @param args one parameter: the date or comma-separated date range
	 * @throws Exception if configuration, HDFS access, or job submission fails
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		conf.set("mapred.job.queue.name", "regular");
		String[] otherArgs = new GenericOptionsParser(conf, args)
				.getRemainingArgs();
		if (otherArgs.length != 1) {
			System.out
					.println("************************************************");
			System.out
					.println("************************************************");
			System.out
					.println("Usage: please input 1 params, for example: file.jar args[0]");
			System.out
					.println("args[0] is dateList: 20130101,20130106 or 20130106");
			System.out
					.println("************************************************");
			System.out
					.println("************************************************");
			System.exit(127);
		}
		conf.setInt("mapred.linerecordreader.maxlength", 81920);
		conf.setInt("io.sort.mb", 500);
		conf.setInt("io.sort.factor", 100);
		// Disable reduce-side speculative execution.
		conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
		String[] dates = otherArgs[0].split(",");
		int startDate = Integer.parseInt(dates[0]);
		int endDate = dates.length == 2 ? Integer.parseInt(dates[1]) : startDate;
		String baseInPath = "/dsap/resultdata/infostat/ZP_InfoJobHDFS";
		String baseOutPath = "/dsap/middata/zhaoxiang";
		boolean anyRan = false;
		boolean anyFailed = false;

		for (int i = startDate; i <= endDate; i++) {
			conf.set("DATE", String.valueOf(i));
			Job job = new Job(conf, "EC_Fourthinfo_Job");
			job.setJarByClass(FourthInfoV_2.class);
			job.setMapperClass(FourthInfoMapper.class);
			job.setReducerClass(FourthInfoReducer.class);
			job.setNumReduceTasks(11);
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(Text.class);
			job.setOutputKeyClass(NullWritable.class);
			job.setOutputValueClass(Text.class);
			// Skip dates with no input data (the int loop also walks through
			// non-calendar values like 20130132, which simply have no directory).
			if (!FileSystem.get(conf).exists(new Path(baseInPath + "/" + i))) {
				continue;
			}
			FileInputFormat.addInputPath(job, new Path(baseInPath + "/" + i));
			System.out.println("input path: " + baseInPath + "/" + i);
			String outPath = baseOutPath + "/" + i;
			// Remove stale output so the job does not fail on an existing dir.
			FileSystem.get(conf).delete(new Path(outPath), true);
			FileOutputFormat.setOutputPath(job, new Path(outPath));
			System.out.println("output path: " + outPath);
			anyRan = true;
			// Remember a failure across iterations; the original overwrote the
			// return code each loop, so a later success masked an earlier failure.
			if (!job.waitForCompletion(true)) {
				anyFailed = true;
			}
		}
		System.out.println("Task completed!");
		System.exit(anyFailed ? 1 : (anyRan ? 0 : 126));
	}

	
	
	/**
	 * Dimension record for a display local (city): its name plus up to three
	 * related local ids. Each id defaults to "0", meaning "not set".
	 */
	public static class LocalInfo implements Serializable{
		private static final long serialVersionUID = 1L;

		private String localname;
		private String[] localids = new String[3];

		public LocalInfo(String localname){
			this.localname = localname;
			Arrays.fill(localids, "0");
		}

		/** Sets all three related local ids at once. */
		public void setLocalIds(String id1,String id2,String id3){
			localids[0] = id1;
			localids[1] = id2;
			localids[2] = id3;
		}

		public String getLocalname() {
			return localname;
		}

		public void setLocalname(String localname) {
			this.localname = localname;
		}

		public String[] getLocalids() {
			return localids;
		}

		public void setLocalids(String[] localids) {
			this.localids = localids;
		}

		/** Sets the related local id at position {@code index} (0..2). */
		public void setLocalids(int index,String id) {
			localids[index] = id;
		}
	}
	/**
	 * Dimension record for a category: its name plus up to two ancestor
	 * category ids. Each id defaults to "0", meaning "not set".
	 */
	public static class CateInfo implements Serializable{
		private static final long serialVersionUID = 2L;

		private String catename;
		private String[] cateids = new String[2];

		public CateInfo(String catename) {
			this.catename = catename;
			Arrays.fill(cateids, "0");
		}

		/** Sets both ancestor category ids at once. */
		public void setCateIds(String id1,String id2){
			cateids[0] = id1;
			cateids[1] = id2;
		}

		public String getCatename() {
			return catename;
		}

		public void setCatename(String catename) {
			this.catename = catename;
		}

		public String[] getCateids() {
			return cateids;
		}

		public void setCateids(String[] cateids) {
			this.cateids = cateids;
		}

		/** Sets the ancestor category id at position {@code index} (0..1). */
		public void setCateids(int index,String id) {
			cateids[index] = id;
		}
	}

}
