package com.taobao.analyse.map;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

import com.taobao.analyse.utils.LCDataParser;
import com.taobao.cmp.citadel.message.PropConstants;
import com.taobao.cmp.citadel.message.SkuUtil;
import com.taobao.cmp.lib.sst.Document;
import com.taobao.cmp.lib.sst.mapred.writable.SSTWritable;
import com.taobao.item.domain.ItemSkuDO;

/**
 * Mapper over the full lifecycle-edit archive dump.
 *
 * <p>Each input record is an {@link SSTWritable} document. Documents whose
 * {@code "level"} field is {@code <= 0} (seller-level index entries and the
 * like) are skipped and counted under {@code LCParser/Level0}. For item-level
 * documents the binary {@code "payload"} field is decoded via
 * {@link LCDataParser#read}. Any failure while handling a single record is
 * counted under {@code LCParser/MapperExe} and the record is dropped, so one
 * bad document cannot fail the whole job.
 *
 * @author xuanwei
 * @version 2011-11-27
 * @Mail-To xuanwei@taobao.com
 */
public class ArchiveMapper extends MapReduceBase implements Mapper<NullWritable, SSTWritable, LongWritable, Text> {
	// Reusable decode buffer, reset for each record so we avoid allocating
	// a fresh buffer on every map() call.
	private DataInputBuffer	payloadBuffer	= new DataInputBuffer();

	@Override
	public void configure(JobConf jobConf) {
		// This mapper needs no job-level configuration.
	}

	/**
	 * Processes one archived document: discards non-item records, then
	 * decodes the item payload for downstream handling.
	 */
	@Override
	public void map(NullWritable key, SSTWritable value, OutputCollector<LongWritable, Text> output, Reporter reporter)
			throws IOException {
		try {
			Document document = value.getDoc();
			Integer level = (Integer) document.getField("level").getValue();

			// Only item-level data is processed; seller index documents
			// and similar entries are thrown away outright.
			if (level <= 0) {
				reporter.incrCounter("LCParser", "Level0", 1);
				return;
			}

			byte[] payloadBytes = (byte[]) document.getField("payload").getValue();
			payloadBuffer.reset(payloadBytes, payloadBytes.length);
			Map<String, Object> record = LCDataParser.read(payloadBuffer, reporter);
			// Do your thing.
		} catch (Exception e) {
			// Best-effort processing: count the failure and move on to the
			// next record rather than failing the task.
			reporter.incrCounter("LCParser", "MapperExe", 1);
		}
	}
}
