package preprocess;

import io.TextInputFormatForSequenceFile;

import java.util.ArrayList;
import java.util.HashMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import trouger.dmt.JoinTableRunner;
import trouger.dmt.JoinType;
import trouger.dmt.OutputRecordFilter;
import utils.Utils;

public class DumpPvLog implements Tool {

	/** Hadoop configuration, stored to honor the {@code Configurable} contract. */
	private Configuration conf;

	/**
	 * Returns the HDFS output directory for the dumped PV log of the given date.
	 *
	 * @param date date string used as a path component (e.g. "20120101")
	 * @return output path under {@code Utils.rootDir}
	 */
	public static String getPvLogPath(String date) {
		//return "D:\\dev\\clickprediction\\data\\PvLog";
		return Utils.rootDir + date + "/PVLog/";
	}

	/**
	 * Output filter that canonicalizes field 8 (the sort-type field) of each
	 * joined record: known raw sort strings are mapped through
	 * {@code Utils.sortFilterSetUp()}, unknown ones become {@code Utils.otherSort}.
	 * Every record is kept ({@code filterRecord} always returns true).
	 */
	public static class OutputFilter implements OutputRecordFilter {
		// Built once per JVM; mapping from raw sort strings to canonical names.
		private static final HashMap<String, String> sortMap = Utils.sortFilterSetUp();

		@Override
		public boolean filterRecord(Object[] record) {
			String sortKey = record[8].toString();
			// Rewrite the sort field in place; fall back to the catch-all
			// "other" sort for values not present in the map.
			record[8] = sortMap.containsKey(sortKey) ? sortMap.get(sortKey) : Utils.otherSort;
			return true; // never drop a record — this filter only rewrites field 8
		}
	}

	@Override
	public Configuration getConf() {
		// BUG FIX: previously returned null unconditionally, violating the
		// Configurable contract that ToolRunner relies on.
		return conf;
	}

	@Override
	public void setConf(Configuration conf) {
		// BUG FIX: previously discarded the configuration ToolRunner passes in.
		this.conf = conf;
	}

	/*
	 * UserFiltered table layout.
	 * Path: /group/tbsc-dev/xzwang/clickprediction/$date/PVDumpedLog/
	 *    1. ACookie cookie id
	 *    2. user id (numeric id)
	 *    3. request time
	 *    4. user id (32-bit)
	 *    5. query
	 *    6. category (the category after redirection)
	 *    7. attribute string
	 *    8. search sort type, format already converted
	 *    9. displayed item ids: "tofu-block" numeric item id list ('|'-separated)
	 *       + main-search numeric item id list ('|'-separated). If only
	 *       tofu-block items exist the value is "tofu list+"; if only
	 *       main-search items exist it is just the main-search list.
	 */
	/**
	 * Joins one day's raw PV logs with the filtered ACookie-to-userId map and
	 * writes the dumped table described above.
	 *
	 * @param args args[0] is the date string selecting the input/output paths
	 * @return 0 on success, -1 when {@code Utils.exitSystem(args)} requests exit
	 * @throws Exception propagated from the underlying join job
	 */
	@Override
	public int run(String[] args) throws Exception {
		String pvLogPrefix = "/group/tbads/logdata/new_search_pv/" + args[0];
		String aCookieUserIdFilteredMapFile = ACookieUserIdMapFilter.getACookieUserIdMapFilteredPath(args[0]);
		String output = getPvLogPath(args[0]);

		System.out.println("start job DumpPVLog...");
		System.out.println("pvLogPrefix: " + pvLogPrefix);
		System.out.println("aCookieUserIdFilteredMapFile: " + aCookieUserIdFilteredMapFile);
		System.out.println("output: " + output);
		if (Utils.exitSystem(args))
			return -1;

		JoinTableRunner r = new JoinTableRunner();

		// Only the "00" minute directory is sampled; logs are sharded across
		// four collector servers, each hour/minute/server being one directory.
		String[] minStrs = {"00"};
		String[] serverStrs = {"ttslave27.sds.cnz.alimama.com",
				"ttslave28.sds.cnz.alimama.com",
				"ttslave29.sds.cnz.alimama.com",
				"ttslave30.sds.cnz.alimama.com"};
		ArrayList<String> pvLogPaths = new ArrayList<String>();

		for (int hour = 0; hour < 24; ++hour) {
			// Zero-pad the hour to two digits to match the directory layout.
			String strHour = String.format("%02d", hour);
			for (String strMin : minStrs) {
				for (String server : serverStrs) {
					pvLogPaths.add(pvLogPrefix + "/" + strHour + "/" + strMin + "/" + server + "/");
				}
			}
		}
		// toArray(new String[0]) already returns String[]; the old explicit
		// cast was redundant and has been removed.
		r.addSourceTable(pvLogPaths.toArray(new String[0]), TextInputFormatForSequenceFile.class,
				pvLogPrefix, "UTF-8", "\t");
		r.addSourceTable(aCookieUserIdFilteredMapFile, "UTF-8", "\t");
		r.addKeyGroup("1.1, 2.1");
		r.setJoinType(JoinType.InnerJoin);
		r.setMaxReduceItemCount(1000);
		r.setOutputRecordFilterClass(OutputFilter.class);
		r.setResultFieldsEx("1.1, 2.2, 1.3, 2.3, 1.5, 1.6, 1.7, 1.8, 1.9");
		r.setResultTable(output);

		r.run("DumpPVLogJob");
		return 0;
	}

	/**
	 * Entry point: validates args then delegates to {@link ToolRunner}.
	 *
	 * @param args command-line arguments; args[0] is the date string
	 */
	public static void main(String[] args) throws Exception {
		Utils.checkArgs(args);

		int res = ToolRunner.run(new DumpPvLog(), args);
		System.exit(res);
	}
}
