package com.ccxe.parser;

import java.io.IOException;
import java.util.Iterator;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Partitioner;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

import com.ccxe.io.DataPair;
import com.ccxe.io.FilterKey;
import com.ccxe.util.DATA;
import com.ccxe.util.DateConvertUtil;
import com.ccxe.util.SampleDataUtil;

/**
 * 获取基准日数据
 * 数据格式为：股票号\t基准日长度开始时的价格\t....基准日的价格\t基准日长度结束时的价格
 * 现在只能获取每60分钟一次的数据。
 * @author Administrator
 *
 */
public class MergeData {
	public static final Log LOG = LogFactory.getLog(MergeData.class);

	public static int [] datumDay;

	/************************************************************************************
	 * Driver
	 *******************************************************************************/
	/**
	 * Public entry point for running the merge job.
	 *
	 * @param injectMinPath input path containing the per-minute sampled data lines
	 * @param filterPath    output path for the merged, datum-day-aligned rows
	 * @return the exit status of the job (0 on success; see {@link #run})
	 * @throws IOException if job submission or execution fails
	 */
	public static int insert(Path injectMinPath, Path filterPath)
			throws IOException {
		return run(injectMinPath, filterPath);
	}

	/**
	 * Command-line launcher. Expects exactly two arguments: input path and
	 * output path. Exits with status 2 on bad usage, otherwise with the
	 * job's return code.
	 *
	 * @param args {@code args[0]} = input path, {@code args[1]} = output path
	 * @throws IOException if the job fails to run
	 */
	public static void main(String[] args) throws IOException {
		if (args.length != 2) {
			// Fixed: the usage message previously named the wrong package
			// (com.ccxe.driver.MergeData); this class lives in com.ccxe.parser.
			System.err
					.println("Usage: com.ccxe.parser.MergeData <in> <out>");
			System.exit(2);
		}
		// Precompute the datum-day sample points used by both mapper and reducer.
		datumDay = SampleDataUtil.getDatumDay(DATA.DatumDay, DATA.length);
		System.exit(insert(new Path(args[0]), new Path(args[1])));
	}

	/**
	 * Configures and synchronously runs the MergeData job.
	 *
	 * NOTE(review): an earlier comment claimed the output path is deleted when
	 * it already exists, but no deletion is performed here — confirm whether a
	 * caller handles that, otherwise re-running with the same output will fail.
	 *
	 * @param input  job input path
	 * @param output job output path (must not already exist)
	 * @return 0 once the job has completed
	 * @throws IOException if job execution fails
	 */
	private static int run(Path input, Path output) throws IOException {
		JobConf job = new JobConf(MergeData.class);
		job.setJobName("MergeData_" + DateConvertUtil.getDate());

		FileInputFormat.addInputPath(job, input);
		FileOutputFormat.setOutputPath(job, output);

		// Map stage: emit (FilterKey, DataPair) pairs.
		job.setMapperClass(MergeDataMapper.class);
		job.setMapOutputKeyClass(FilterKey.class);
		job.setMapOutputValueClass(DataPair.class);

		// Shuffle: partition and group by stock code, sort by full key.
		job.setPartitionerClass(MergeDataPartitioner.class);
		job.setOutputKeyComparatorClass(MergeDataComparator.class);
		job.setOutputValueGroupingComparator(MergeDataGroupComparator.class);

		// Reduce stage: emit one merged row per stock.
		job.setReducerClass(MergeDataReducer.class);
		job.setOutputKeyClass(IntWritable.class);
		job.setOutputValueClass(Text.class);

		JobClient.runJob(job);
		return 0;
	}

	/************************************************************************************
	 * MergeDataMapper
	 *******************************************************************************/
	/**
	 * Parses input lines of the form {@code stockNo_dateTime\t<data...>} and
	 * emits a (FilterKey, DataPair) pair for every record whose date-time
	 * falls inside the configured datum-day range.
	 */
	public static class MergeDataMapper extends MapReduceBase implements
			Mapper<LongWritable, Text, FilterKey, DataPair> {

		public void map(LongWritable key, Text value,
				OutputCollector<FilterKey, DataPair> output, Reporter reporter)
				throws IOException {
			String line = value.toString();
			int tabPos = line.indexOf('\t');
			String strKey = line.substring(0, tabPos);
			int sepPos = strKey.indexOf('_');

			// Key is "<stockNo>_<dateTime>"; both halves are parsed as ints.
			FilterKey filterKey = new FilterKey(
					Integer.parseInt(strKey.substring(0, sepPos)),
					Integer.parseInt(strKey.substring(sepPos + 1)));
			// Deliberately keeps the leading tab in the payload; the reducer
			// trims it before concatenating.
			Text text = new Text(line.substring(tabPos));
			long time = filterKey.getC_date_time();
			if (isWithinDatumRange(time)) {
				output.collect(filterKey, new DataPair(time, text));
			}
		}

		/**
		 * Returns true when the time lies in the inclusive range
		 * [datumDay[0], datumDay[last]]. (Renamed from the misspelled
		 * {@code isValiudate}; assumes datumDay is sorted ascending —
		 * TODO confirm against SampleDataUtil.getDatumDay.)
		 */
		private boolean isWithinDatumRange(long t) {
			return datumDay[0] <= t && t <= datumDay[datumDay.length - 1];
		}

	}

	/************************************************************************************
	 * MergeDataComparator
	 *******************************************************************************/
	/**
	 * Sort comparator for map output keys: delegates to FilterKey's own
	 * natural ordering (compareTo), so records arrive at the reducer in
	 * key order.
	 */
	public static class MergeDataComparator extends WritableComparator {
		protected MergeDataComparator() {
			super(FilterKey.class, true);
		}

		@Override
		public int compare(Object a, Object b) {
			return ((FilterKey) a).compareTo((FilterKey) b);
		}
	}

	/************************************************************************************
	 * MergeDataComparator 按股票代码进行分组。
	 *******************************************************************************/
	/**
	 * Grouping comparator: two keys belong to the same reduce group iff they
	 * share a stock code. Only the zero/non-zero result matters for grouping,
	 * but we return a conventional ascending ordering.
	 */
	public static class MergeDataGroupComparator extends
			WritableComparator {
		protected MergeDataGroupComparator() {
			super(FilterKey.class, true);
		}

		@Override
		public int compare(Object a, Object b) {
			int l1 = ((FilterKey) a).getC_stock_no();
			int l2 = ((FilterKey) b).getC_stock_no();
			// Fixed: the original returned an inverted sign (1 when l1 < l2).
			// Harmless for grouping equality, but misleading; use ascending
			// order. (Not l1 - l2, which can overflow.)
			return (l1 < l2) ? -1 : ((l1 == l2) ? 0 : 1);
		}
	}

	/************************************************************************************
	 * MergeDataComparator 按股票代码进行分区
	 *******************************************************************************/
	/**
	 * Partitions map output by stock code so that every record for a given
	 * stock is routed to the same reducer.
	 */
	public static class MergeDataPartitioner implements
			Partitioner<FilterKey, DataPair> {
		@Override
		public int getPartition(FilterKey key, DataPair value, int numPartitions) {
			// Fixed: Math.abs(Integer.MIN_VALUE) is still negative, which
			// would yield an illegal negative partition index. Masking off the
			// sign bit (the HashPartitioner idiom) always gives a value in
			// [0, numPartitions).
			return (key.getC_stock_no() & Integer.MAX_VALUE) % numPartitions;
		}

		@Override
		public void configure(JobConf jobConf) {
			// No configuration needed.
		}
	}

	/************************************************************************************
	 * MergeDataReducer
	 *******************************************************************************/
	/**
	 * Merges all records for one stock into a single tab-separated row aligned
	 * with the global {@code datumDay} schedule. Missing sample days are
	 * padded with "0.0\t0.0\t0.0\t0.0\t" (four values, one per hourly sample).
	 */
	public static class MergeDataReducer extends MapReduceBase implements
			Reducer<FilterKey, DataPair, IntWritable, Text> {

		@Override
		public void reduce(FilterKey key, Iterator<DataPair> values,
				OutputCollector<IntWritable, Text> output, Reporter reporter)
				throws IOException {
			// Fixed: these were mutable static fields reset only at the end of
			// reduce(); an exception mid-call would leak state into the next
			// group, and static state is unsafe if the framework ever runs
			// reducers concurrently. As locals the normal-path behavior is
			// identical.
			int count = 0;   // index of the next expected entry in datumDay
			int flag = 0;    // index in datumDay where the current record's day was found
			StringBuilder content = new StringBuilder();
			while (values.hasNext()) {
				DataPair data = values.next();
				long day = data.getDay().get();
				// NOTE(review): datumDay[count] will throw if more distinct
				// days arrive than datumDay holds — TODO confirm upstream
				// filtering guarantees this cannot happen.
				if (datumDay[count] == day) {
					// Record matches the expected datum day: append directly.
					content.append(data.getText().toString().trim()).append("\t");
					count++;
				} else {
					// Locate this record's day further along the schedule.
					for (int i = count; i < datumDay.length; i++) {
						if (day == datumDay[i]) {
							flag = i;
							break;
						}
					}
					// NOTE(review): if the day is not found, flag keeps its
					// previous value and the row may misalign — TODO confirm
					// every emitted day is a member of datumDay.
					// Zero-fill the skipped days between count and flag.
					int gap = flag - count;
					for (int j = 0; j < gap; j++) {
						content.append("0.0\t0.0\t0.0\t0.0\t");
						count++;
					}
					content.append(data.getText().toString().trim()).append("\t");
					count++;
				}
			}
			// Zero-fill any trailing datum days with no data (4 samples/day).
			for (int remaining = datumDay.length - count; remaining > 0; remaining--) {
				content.append("0.0\t0.0\t0.0\t0.0\t");
			}
			output.collect(new IntWritable(key.getC_stock_no()),
					new Text(content.toString()));
		}

		// NOTE(review): apparently unused; kept because it is public and may
		// have external callers.
		public boolean inject() {
			return false;
		}

	}
}
