package com.pxene.hbase2hdfs;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.util.Arrays;

import org.ansj.recognition.impl.FilterRecognition;
import org.ansj.splitWord.analysis.NlpAnalysis;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

/**
 * Mapper that reads rows from an HBase table, concatenates all column values
 * of a row, runs Chinese word segmentation on the text, and emits
 * (label, segmented text) pairs keyed by the rowkey prefix
 * ("001" -> "1", "002" -> "2", "003" -> "3"; other prefixes are skipped).
 */
public class ReadHBaseMapper extends TableMapper<Text, Text> {

	// Reused output key/value objects (standard Hadoop practice to avoid
	// allocating a new Writable per record).
	private Text resultKey = new Text();
	private Text resultVal = new Text();
	// Ansj filter applied to the segmentation result.
	private FilterRecognition filter = new FilterRecognition();

	@Override
	protected void setup(Mapper<ImmutableBytesWritable, Result, Text, Text>.Context context) throws IOException, InterruptedException {
		// Drop punctuation tokens (Ansj part-of-speech nature "w") from the output.
		filter.insertStopNatures("w");
	}

	/**
	 * Maps one HBase row to a (label, segmented-text) pair.
	 *
	 * @param key     the row key wrapper (unused; the rowkey is read from {@code value})
	 * @param value   the scanned row; all cell values are joined with '|'
	 * @param context MapReduce context the pair is written to
	 */
	@Override
	protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
		// Guard: an empty Result has no cells (listCells() would be null) and
		// deleteCharAt below would throw on an empty builder.
		if (value.isEmpty()) {
			return;
		}

		// Concatenate every column value of the row, separated by '|'.
		StringBuilder sb = new StringBuilder();
		for (Cell cell : value.listCells()) {
			// Decode straight from the backing array — no intermediate
			// Arrays.copyOfRange allocation per cell.
			sb.append(Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())).append('|');
		}

		// Map the rowkey prefix to an output label; other prefixes are skipped.
		String rowkey = Bytes.toString(value.getRow());
		if (rowkey.startsWith("001")) {
			resultKey.set("1");
		} else if (rowkey.startsWith("002")) {
			resultKey.set("2");
		} else if (rowkey.startsWith("003")) {
			resultKey.set("3");
		} else {
			return;
		}

		// Strip the trailing '|', segment with Ansj NLP, drop punctuation via
		// the filter, and join the tokens with spaces.
		resultVal.set(NlpAnalysis.parse(sb.deleteCharAt(sb.length() - 1).toString()).recognition(filter).toStringWithOutNature(" "));
		context.write(resultKey, resultVal);
	}

	/**
	 * Segments a string with the IK analyzer (smart mode) and returns the
	 * tokens joined by single spaces. Returns the empty string for input
	 * that yields no tokens.
	 *
	 * @param str text to segment
	 * @return space-separated tokens, or "" if none
	 */
	public static String IKAnalysis(String str) {
		StringBuffer sb = new StringBuffer();
		try {
			// StringReader feeds the text to IK directly; the previous
			// String#getBytes() + InputStreamReader round-trip used the
			// platform default charset and could corrupt non-ASCII text.
			Reader read = new StringReader(str);
			IKSegmenter iks = new IKSegmenter(read, true); // true = smart (coarse-grained) mode
			Lexeme t;
			while ((t = iks.next()) != null) {
				sb.append(t.getLexemeText()).append(' ');
			}
			// Remove the trailing space; guard against empty output, which
			// previously threw StringIndexOutOfBoundsException.
			if (sb.length() > 0) {
				sb.delete(sb.length() - 1, sb.length());
			}
		} catch (IOException e) {
			// Best-effort: segmentation failure yields whatever was collected.
			e.printStackTrace();
		}
		return sb.toString();
	}

}