package com.ccxe.parser;

import java.io.IOException;
import java.util.Iterator;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Partitioner;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

import com.ccxe.io.Data;
import com.ccxe.util.DATA;
import com.ccxe.util.DateConvertUtil;
import com.ccxe.util.ParserData;

/**
 * MapReduce job that filters raw tick lines down to CSI-300 constituent
 * records falling inside the two daily trading sessions, de-duplicated to
 * one record per stock per minute.
 *
 * <p>Pipeline: {@link SelectorMapper} parses/filters lines into {@link Data}
 * keys; {@link SelectorPartitioner} partitions by (stock, minute);
 * {@link SelectorComparator} sorts by {@code Data.compareTo};
 * {@link SelectorGroupComparator} groups all keys of the same stock-minute;
 * {@link SelectorReducer} emits a single key per group.
 */
public class Selector {

	public static final Log LOG = LogFactory.getLog(Selector.class);

	/** CSI-300 constituent stock codes; only these pass the mapper's filter. */
	private static int[] cs300 = DATA.CS300;

	/************************************************************************************
	 * Driver
	 *******************************************************************************/

	public Selector() {
	}

	/**
	 * Configures and submits the selection job, blocking until completion.
	 *
	 * @param input  one or more input paths of raw tick files
	 * @param output job output path (must not exist)
	 * @return 0 on success ({@code JobClient.runJob} throws if the job fails)
	 * @throws IOException on submission or I/O failure
	 */
	private static int run(Path[] input, Path output) throws IOException,
			InterruptedException, ClassNotFoundException {
		LOG.info("Selector started!!!" + DateConvertUtil.getDate());
		// Job initialisation.
		JobConf conf = new JobConf(Selector.class);
		FileInputFormat.setInputPaths(conf, input);
		FileOutputFormat.setOutputPath(conf, output);
		conf.setJobName("Selector_" + DateConvertUtil.getDate());
		conf.setMapperClass(SelectorMapper.class);
		conf.setPartitionerClass(SelectorPartitioner.class);
		conf.setOutputKeyComparatorClass(SelectorComparator.class);
		conf.setOutputValueGroupingComparator(SelectorGroupComparator.class);
		// The reducer collapses each group to a single (key, NULL) pair, so it is
		// safe to reuse as a combiner: combining early only discards duplicates sooner.
		conf.setCombinerClass(SelectorReducer.class);
		conf.setReducerClass(SelectorReducer.class);
		conf.setOutputKeyClass(Data.class);
		conf.setOutputValueClass(NullWritable.class);

		JobClient.runJob(conf);
		return 0;
	}

	public static void main(String[] args) throws Exception {
		// BUG FIX: the original checked args.length < 1 but args[1] is read
		// unconditionally below, so a single-argument invocation crashed with
		// ArrayIndexOutOfBoundsException instead of printing the usage message.
		if (args.length < 2) {
			System.err.println("Usage: com.ccxe.driver.Selector <in>[,in2,in3...] <out>");
			System.exit(2);
		}
		// First argument is a comma-separated list of input paths.
		String[] pathString = args[0].split(",");
		Path[] inputs = new Path[pathString.length];
		for (int i = 0; i < pathString.length; i++) {
			inputs[i] = new Path(pathString[i]);
		}
		System.exit(run(inputs, new Path(args[1])));
	}

	/**
	 * Programmatic entry point for running the selection job.
	 *
	 * @param input      input paths of raw tick files
	 * @param selectPath output path for the selected records
	 * @return 0 on success
	 * @throws Exception if the job fails
	 */
	public static int select(Path[] input, Path selectPath) throws Exception {
		return run(input, selectPath);
	}

	/**
	 * @deprecated misspelled name kept for existing callers;
	 *             use {@link #select(Path[], Path)} instead.
	 */
	@Deprecated
	public static int selcet(Path[] input, Path selectPath) throws Exception {
		return select(input, selectPath);
	}

	/*************************************************************************************
	 * SelectorMapper
	 *************************************************************************************/
	public static class SelectorMapper extends MapReduceBase implements
			Mapper<LongWritable, Text, Data, NullWritable> {
		/**
		 * Parses one raw line into a {@link Data} key and emits it (with a NULL
		 * value) when it is a CSI-300 constituent record timestamped inside one
		 * of the two trading sessions.
		 */
		@Override
		public void map(LongWritable key, Text value,
				OutputCollector<Data, NullWritable> output, Reporter reporter)
				throws IOException {
			String line = value.toString();
			// Skip header rows (column names contain "c_") and truncated lines.
			if (line.contains("c_") || line.length() < 10) {
				return;
			}
			// Build a Data record from the file's actual column layout.
			Data data = new Data(ParserData.parser(line));
			// For Shanghai ("sh") data, drop records whose stock code is neither
			// 300 nor >= 600000. NOTE(review): presumably 300 is an index record
			// and 600000+ the SSE main-board range — confirm against the feed spec.
			if (line.contains("sh")) {
				if (data.getC_stock_no() != 300 && data.getC_stock_no() < 600000) {
					return;
				}
			}
			// HHmm portion of c_date_time (dividing by 100 strips the seconds).
			int time = (int) ((data.getC_date_time() / 100) % 10000);
			// Emit only CSI-300 constituents inside the two trading sessions.
			if (inCS300(data.getC_stock_no())) {
				if ((time >= DATA.OPENINGI_TIME1 && time <= DATA.END_TIME1)
						|| (time >= DATA.OPENINGI_TIME2 && time <= DATA.END_TIME2)) {
					output.collect(data, NullWritable.get());
				}
			}
		}

		/** Linear membership test against the CSI-300 constituent table. */
		private static boolean inCS300(int no) {
			for (int i : cs300) {
				if (no == i) {
					return true;
				}
			}
			return false;
		}
	}

	/****************************************************************************
	 * SelectorComparator
	 ****************************************************************************/
	/** Sort comparator: delegates to {@code Data.compareTo} (natural key order). */
	public static class SelectorComparator extends WritableComparator {
		protected SelectorComparator() {
			super(Data.class, true);
		}

		@Override
		public int compare(Object a, Object b) {
			Data sk1 = (Data) a;
			Data sk2 = (Data) b;
			return sk1.compareTo(sk2);
		}
	}

	/****************************************************************************
	 * SelectorPartitioner
	 ****************************************************************************/
	/**
	 * Partitions by (stock code, minute) so all ticks of one stock-minute land
	 * in the same reducer; MD5 spreads the composite key across partitions.
	 */
	public static class SelectorPartitioner implements
			Partitioner<Data, NullWritable> {

		@Override
		public int getPartition(Data key, NullWritable value, int numPartitions) {
			// Partition on the minute bucket (c_date_time / 100 drops seconds).
			return Math.abs(MD5Hash.digest(
					key.getC_stock_no() + "" + key.getC_date_time() / 100)
					.quarterDigest())
					% numPartitions;
		}

		@Override
		public void configure(JobConf jobConfw) {
		}

	}

	/****************************************************************************
	 * SelectorGroupComparator
	 ****************************************************************************/
	/**
	 * Grouping comparator: keys with the same stock code and minute form one
	 * reduce group. The composite is built by string-concatenating the stock
	 * code and the minute timestamp and parsing the result as a long.
	 * NOTE(review): this assumes fixed-width stock codes — variable-width codes
	 * could make distinct (stock, minute) pairs concatenate to the same number;
	 * confirm against the code universe in DATA.CS300.
	 */
	public static class SelectorGroupComparator extends WritableComparator {
		protected SelectorGroupComparator() {
			super(Data.class, true);
		}

		@Override
		public int compare(Object a, Object b) {
			Data gk1 = (Data) a;
			Data gk2 = (Data) b;
			long l1 = Long.parseLong(gk1.getC_stock_no() + ""
					+ gk1.getC_date_time() / 100);
			long l2 = Long.parseLong(gk2.getC_stock_no() + ""
					+ gk2.getC_date_time() / 100);
			return (l1 == l2) ? 0 : (l1 < l2 ? -1 : 1);
		}
	}

	/****************************************************************************
	 * SelectorReducer
	 ****************************************************************************/
	/**
	 * Emits exactly one key per (stock, minute) group — the first key in sort
	 * order. NOTE(review): the original comment says this keeps the record with
	 * the greatest second within the minute, which holds only if Data.compareTo
	 * orders that record first within each group — confirm in Data.
	 */
	public static class SelectorReducer extends MapReduceBase implements
			Reducer<Data, NullWritable, Data, NullWritable> {
		@Override
		public void reduce(Data key, Iterator<NullWritable> value,
				OutputCollector<Data, NullWritable> output, Reporter reporter)
				throws IOException {
			output.collect(key, NullWritable.get());
		}
	}

}
