package test;

import io.TextInputFormatForSequenceFile;

import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import trouger.dmt.OutputRecordFilter;
import trouger.dmt.SelfAggregationRunner;
import utils.Utils;

public class AcookieFilter implements Tool {

	/** Aggregated records with a PV count at or below this threshold are dropped. */
	private static final int PV_COUNT_THRESHOLD = 300;

	/** Configuration injected by {@link ToolRunner} via {@link #setConf(Configuration)}. */
	private Configuration conf;

	/**
	 * Builds the HDFS output path of the AcookieFiltered table for one date partition.
	 *
	 * @param date date partition string, used verbatim as a path component
	 * @return output directory path ending with a trailing slash
	 */
	public static String getAcookieFilteredPath(String date) {
		return Utils.rootDir + date + "/AcookieFiltered/";
	}

	/**
	 * Output-side filter: keeps only records whose PV count exceeds
	 * {@link #PV_COUNT_THRESHOLD}.
	 */
	public static class OutputFilter implements OutputRecordFilter {
		@Override
		public boolean filterRecord(Object[] record) {
			// NOTE(review): assumes the PV count lives at index 2 of the aggregated
			// record — verify against SelfAggregationRunner's output layout.
			int pvCount = Integer.parseInt(record[2].toString());
			return pvCount > PV_COUNT_THRESHOLD;
		}
	}

	@Override
	public Configuration getConf() {
		// Return the conf handed to us by ToolRunner instead of null, so that
		// generic options (-D, -files, ...) parsed by GenericOptionsParser are
		// actually visible to this tool.
		return conf;
	}

	@Override
	public void setConf(Configuration conf) {
		// Previously this dropped the injected configuration on the floor,
		// breaking the Tool contract; keep it so getConf() can return it.
		this.conf = conf;
	}

	/*
	 * AcookieFiltered Table
	 * Path: /group/tbsc-dev/xzwang/clickprediction/$date/AcookieFiltered/
	 * 1.ACookie
	 * 2.PV count
	 */
	@Override
	public int run(String[] args) throws Exception {
		String pvLogPrefix = "/group/tbads/logdata/new_search_pv/" + args[0];
		String output = getAcookieFilteredPath(args[0]);
		System.out.println("start job AcookieFilter...");
		System.out.println("pvLogPrefix: " + pvLogPrefix);
		System.out.println("output: " + output);
		System.out.println("Continue(0/1)?");
		// Interactive confirmation before launching the job. The Scanner is
		// deliberately left open: closing it would also close System.in.
		Scanner scanner = new Scanner(System.in);
		int c = scanner.nextInt();
		if (c == 0) {
			return -1;
		}
		SelfAggregationRunner r = new SelfAggregationRunner();
		String[] minStrs = {"00"};
		String[] serverStrs = {"ttslave27.sds.cnz.alimama.com",
				"ttslave28.sds.cnz.alimama.com",
				"ttslave29.sds.cnz.alimama.com",
				"ttslave30.sds.cnz.alimama.com"};
		List<String> pvLogPaths = new ArrayList<String>();

		// Enumerate every input directory: 24 hours x sampled minutes x servers.
		for (int hour = 0; hour < 24; ++hour) {
			String strHour = String.format("%02d", hour); // zero-padded, e.g. "07"
			for (String strMin : minStrs) {
				for (String server : serverStrs) {
					pvLogPaths.add(pvLogPrefix + "/" + strHour + "/" + strMin + "/" + server + "/");
				}
			}
		}
		// toArray(new String[0]) is already String[]; the old explicit cast was redundant.
		r.addSourceTable(pvLogPaths.toArray(new String[0]), TextInputFormatForSequenceFile.class,
				pvLogPrefix, "UTF-8", "\t");
		r.addKeyGroup("1.1");
		r.setResultFieldsEx("1.1, Count(1.1)");
		r.setOutputRecordFilterClass(OutputFilter.class);
		r.setResultTable(output);
		r.run("AcookieFilterJob");
		return 0;
	}

	/**
	 * Entry point. Expects a single date argument identifying the input partition.
	 *
	 * @param args command-line arguments: {@code <date>}
	 */
	public static void main(String[] args) throws Exception {
		if (args.length != 1) {
			System.out.println("Usage: hadoop jar xx.jar <date>.");
			System.exit(-1);
		}
		int res = ToolRunner.run(new AcookieFilter(), args);
		System.exit(res);
	}
}
