package com.ccxe.parser;

import java.io.IOException;
import java.util.Iterator;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Partitioner;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

import com.ccxe.io.Data;
import com.ccxe.io.FilterKey;
import com.ccxe.util.DATA;
import com.ccxe.util.DateConvertUtil;
import com.ccxe.util.ParserData;
/**
 * MapReduce driver that filters one day's raw market data down to the
 * sampled trading-time records (original comment was truncated:
 * "process each day's data ...").
 * @author Administrator
 *
 */
public class Filter {
	public static final Log LOG = LogFactory.getLog(Filter.class);

	/************************************************************************************
	 * Driver
	 *******************************************************************************/
	/**
	 * Public driver entry point: runs the filter job.
	 *
	 * @param injectMinPath input path of the raw per-minute data
	 * @param filterPath    output path for the filtered result
	 * @return the job's exit status (0 on success)
	 * @throws IOException if the job cannot be submitted or fails
	 */
	public static int insert(Path injectMinPath, Path filterPath)
			throws IOException {
		int status = run(injectMinPath, filterPath);
		return status;
	}

	/**
	 * Command-line entry point. Expects exactly two arguments: the input
	 * path and the output path; exits with status 2 on misuse.
	 */
	public static void main(String[] args) throws IOException {
		if (args.length != 2) {
			System.err.println("Usage: com.ccxe.driver.Filter <in> <out>");
			System.exit(2);
		}
		Path in = new Path(args[0]);
		Path out = new Path(args[1]);
		System.exit(insert(in, out));
	}

	/**
	 * Configures and synchronously runs the Filter MapReduce job.
	 *
	 * NOTE(review): the original comment said "delete the output path if it
	 * exists", but no deletion is actually performed here -- the job will
	 * fail if the output path already exists; confirm whether cleanup is
	 * done by a caller.
	 *
	 * @param input  input path of the raw daily data
	 * @param output output path for the filtered result
	 * @return 0 on completion ({@code JobClient.runJob} throws on failure)
	 * @throws IOException if the job fails or cannot be submitted
	 */
	private static int run(Path input, Path output) throws IOException {
		JobConf jobConf = new JobConf(Filter.class);
		jobConf.setJobName("Filter_" + DateConvertUtil.getDate());

		FileInputFormat.addInputPath(jobConf, input);
		FileOutputFormat.setOutputPath(jobConf, output);

		// Map stage: parse and filter raw lines into (FilterKey, Data) pairs.
		jobConf.setMapperClass(FilterMapper.class);
		jobConf.setMapOutputKeyClass(FilterKey.class);
		jobConf.setMapOutputValueClass(Data.class);

		// Shuffle: partition, sort and group by stock number and day.
		jobConf.setPartitionerClass(FilterPartitioner.class);
		jobConf.setOutputKeyComparatorClass(FilterComparator.class);
		jobConf.setOutputValueGroupingComparator(FilterGroupComparator.class);

		// Reduce stage: concatenate the prices of each stock-day group.
		jobConf.setReducerClass(FilterReducer.class);
		jobConf.setOutputKeyClass(IntWritable.class);
		jobConf.setOutputValueClass(Text.class);

		JobClient.runJob(jobConf);
		return 0;
	}

	/************************************************************************************
	 * FilterMapper
	 *******************************************************************************/
	public static class FilterMapper extends MapReduceBase implements
			Mapper<LongWritable, Text, FilterKey, Data> {

		/**
		 * Parses one raw input line, drops header and malformed rows as well
		 * as rows outside trading hours, and emits only the sampled records:
		 * the 10:30 snapshot in the morning session and one record per whole
		 * minute (second == 0) in the afternoon session.
		 */
		public void map(LongWritable key, Text value,
				OutputCollector<FilterKey, Data> output, Reporter reporter)
				throws IOException {
			String line = value.toString();
			String[] arrayTmp = ParserData.parser(line);
			// Skip the header row (contains "c_" column names) and rows with
			// too few fields.
			if (line.indexOf("c_") != -1 || arrayTmp.length < 3)
				return;
			Data data = new Data(arrayTmp);
			// HHmm portion of c_date_time (assumes yyyyMMddHHmmss -- TODO confirm).
			int time = (int) ((data.getC_date_time() / 100) % 10000);
			// NOTE(review): key (the byte offset) is never null with
			// TextInputFormat; the null check is kept for compatibility.
			if (key == null
					|| (time >= DATA.OPENINGI_TIME1 && time <= DATA.END_TIME1)
					|| (time >= DATA.OPENINGI_TIME2 && time <= DATA.END_TIME2)) {
				int flag = isValidate(data.getC_date_time() % 1000000 / 100);
				// FIX: the original used two independent "if" statements with a
				// dangling "else return;" bound to the second one, which only
				// behaved correctly by accident. The explicit if / else-if
				// chain below is behaviorally identical but unambiguous.
				if (flag == 1) {
					// Morning session: keep only the 10:30 snapshot.
					if (((data.getC_date_time() / 100) % 10000) == 1030) {
						output.collect(new FilterKey(data.getC_stock_no(), data
								.getC_date_time()), data);
					}
				} else if (flag == 2) {
					// Afternoon session: keep one record per minute (ss == 00).
					if (((data.getC_date_time() / 100) % 100) == 0) {
						output.collect(new FilterKey(data.getC_stock_no(), data
								.getC_date_time()), data);
					}
				}
				// flag == 0: outside both sessions -- drop the record.
			}
		}

		/**
		 * Classifies an HHmm trading time.
		 *
		 * @param l the current hour-and-minute value, e.g. 930 for 09:30
		 * @return 1 if within the morning session, 2 if within the afternoon
		 *         session, 0 if outside trading hours
		 */
		private int isValidate(long l) {
			if (l >= DATA.OPENINGI_TIME1 && l <= DATA.END_TIME1) {
				return 1;
			}
			if (l >= DATA.OPENINGI_TIME2 && l <= DATA.END_TIME2) {
				return 2;
			}
			return 0;
		}
	}

	/************************************************************************************
	 * FilterComparator
	 *******************************************************************************/
	public static class FilterComparator extends WritableComparator {
		protected FilterComparator() {
			super(FilterKey.class, true);
		}

		/**
		 * Sort comparator for the shuffle: delegates entirely to
		 * FilterKey's natural ordering.
		 */
		@Override
		public int compare(Object a, Object b) {
			return ((FilterKey) a).compareTo((FilterKey) b);
		}
	}

	/************************************************************************************
	 * FilterComparator 按天进行分组。
	 *******************************************************************************/
	public static class FilterGroupComparator extends WritableComparator {
		protected FilterGroupComparator() {
			super(FilterKey.class, true);
		}

		/**
		 * Grouping comparator: two keys fall into the same reduce() call when
		 * they share the same stock number and the same day
		 * (c_date_time / 1000000, presumably the yyyyMMdd prefix of a
		 * yyyyMMddHHmmss timestamp -- TODO confirm).
		 */
		@Override
		public int compare(Object a, Object b) {
			// LOG.info("------------------FilterComparator compare()-----------");
			FilterKey gk1 = (FilterKey) a;
			FilterKey gk2 = (FilterKey) b;
			// Builds "stockNo + yyyyMMdd" as a string and re-parses it as a
			// long for each key.
			// NOTE(review): this allocates and parses on every comparison and
			// will throw NumberFormatException if getC_stock_no() is not
			// purely numeric -- consider comparing the components directly.
			long l1 = Long.parseLong(gk1.getC_stock_no() + ""
					+ gk1.getC_date_time() / 1000000);
			long l2 = Long.parseLong(gk2.getC_stock_no() + ""
					+ gk2.getC_date_time() / 1000000);
			// NOTE(review): the sign convention is inverted (smaller value
			// sorts last), the opposite of FilterComparator's delegation to
			// compareTo; for grouping only the 0 / non-0 distinction matters,
			// but verify this stays consistent with the sort order.
			return (l1 == l2) ? 0 : (l1 < l2 ? 1 : -1);
		}
	}

	/************************************************************************************
	 * FilterComparator 按天进行分区
	 *******************************************************************************/
	public static class FilterPartitioner implements
			Partitioner<FilterKey, Data> {
		/**
		 * Routes all records of the same stock on the same day to the same
		 * reducer by hashing "stockNo + yyyyMMdd" (c_date_time / 1000000).
		 *
		 * @return a partition index in [0, numPartitions)
		 */
		@Override
		public int getPartition(FilterKey key, Data value, int numPartitions) {
			int hash = MD5Hash.digest(
					key.getC_stock_no() + "" + key.getC_date_time() / 1000000)
					.quarterDigest();
			// FIX: Math.abs(Integer.MIN_VALUE) is still negative, which would
			// produce an illegal negative partition index; masking the sign
			// bit guarantees a non-negative value for every hash.
			return (hash & Integer.MAX_VALUE) % numPartitions;
		}

		@Override
		public void configure(JobConf jobConfw) {
			// No configuration needed.
		}
	}

	/************************************************************************************
	 * FilterCombiner
	 *******************************************************************************/
	public static class FilterCombiner extends MapReduceBase implements
			Reducer<FilterKey, Data, FilterKey, Data> {

		/**
		 * Identity combiner: re-emits every value under its original key.
		 * NOTE(review): not registered via setCombinerClass in run() above.
		 */
		@Override
		public void reduce(FilterKey key, Iterator<Data> values,
				OutputCollector<FilterKey, Data> output, Reporter reporter)
				throws IOException {
			for (Iterator<Data> it = values; it.hasNext();) {
				output.collect(key, it.next());
			}
		}
	}

	/************************************************************************************
	 * FilterReducer
	 *******************************************************************************/
	public static class FilterReducer extends MapReduceBase implements
			Reducer<FilterKey, Data, FilterKey, Text> {

		/**
		 * Concatenates the prices of all records in one group (one stock,
		 * one day) into a single tab-separated line, emitted under a key of
		 * (stock number, day) where the day is c_date_time / 1000000.
		 */
		@Override
		public void reduce(FilterKey key, Iterator<Data> values,
				OutputCollector<FilterKey, Text> output, Reporter reporter)
				throws IOException {
			LOG.info("---------------FilterReducer run--------------------");
			// FIX: StringBuilder instead of StringBuffer -- no synchronization
			// is needed inside a single reduce() call.
			StringBuilder sb = new StringBuilder();
			Data data = null;
			int count = 0;
			while (values.hasNext()) {
				data = values.next();
				sb.append(data.getC_price()).append('\t');
				count++;
			}
			// FIX: the original guard '"".equals(sb)' compared a String to a
			// StringBuffer and was therefore always false (and 'sb != null'
			// was always true); check the builder's length instead.
			if (sb.length() > 0 && data != null) {
				output.collect(new FilterKey(data.getC_stock_no(), data
						.getC_date_time() / 1000000), new Text(sb.toString()));
			}
			// FIX: log through the class logger instead of System.out.
			LOG.info("StringBuffer size = " + count);
		}
	}

}
