package com.etlmaster.datax.plugins.writer.hbasewriter;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hbase.client.HTable;

import com.dp.nebula.wormhole.common.AbstractPlugin;
import com.dp.nebula.wormhole.common.interfaces.ILine;
import com.dp.nebula.wormhole.common.interfaces.ILineReceiver;
import com.dp.nebula.wormhole.common.interfaces.IWriter;
import com.etlmaster.executor.bigdata.hbase.HbaseUtil;
import com.etlmaster.executor.utils.LogWriter;


/**
 * @author Wugf
 * @date 2016-11-10
 */
/**
 * Wormhole/DataX writer plugin that buffers incoming lines and writes them
 * to an HBase table in batches via {@link HbaseUtil#batchPut}.
 *
 * <p>Configuration (see {@code ParamKey}): the target table name, a
 * comma-separated column list mapping line fields to column names, the
 * row-key column spec, and the batch commit size.
 *
 * <p>Not thread-safe: each instance buffers rows in a mutable list.
 *
 * @author Wugf
 * @date 2016-11-10
 */
public class HbaseWriter extends AbstractPlugin implements IWriter {

	/** Column names, in field order, parsed from the comma-separated config value. */
	private String[] columns;
	/** Row-key column spec passed through to {@link HbaseUtil#batchPut}. */
	private String rowKeys;
	/** Buffered rows awaiting a batch put; flushed when {@link #commitSize} is reached. */
	private List<Map<String, Object>> rowList = new ArrayList<Map<String, Object>>();
	private HTable table;
	/** Batch size threshold, read once in init() instead of per line. */
	private int commitSize;

	@Override
	public void init() {
		columns = getParam().getValue(ParamKey.columns).split(",");
		rowKeys = getParam().getValue(ParamKey.row_key_columns);
		commitSize = getParam().getIntValue(ParamKey.commitSize);
		try {
			table = new HTable(HbaseUtil.getConf(), getParam().getValue(ParamKey.tableName));
		} catch (IOException e) {
			// Fail fast: continuing with a null table would only defer the
			// failure to an NPE inside batchPut(). Preserve the cause.
			throw new RuntimeException(
					"Failed to open HBase table: " + getParam().getValue(ParamKey.tableName), e);
		}
	}

	@Override
	public void write(ILineReceiver lineReceiver) {
		ILine line = null;
		while ((line = lineReceiver.receive()) != null) {
			int fieldNum = line.getFieldNum();
			if (fieldNum > columns.length) {
				// A mismatch between configured columns and received fields
				// would otherwise surface as an opaque ArrayIndexOutOfBoundsException.
				throw new IllegalStateException(
						"Received line with " + fieldNum + " fields but only "
								+ columns.length + " columns are configured");
			}
			Map<String, Object> map = new HashMap<String, Object>();
			for (int i = 0; i < fieldNum; i++) {
				map.put(columns[i], line.getField(i));
			}
			rowList.add(map);
			if (rowList.size() >= commitSize) {
				flush();
			}
		}
	}

	@Override
	public void commit() {
		// Flush any remaining buffered rows, then release the table handle.
		if (!rowList.isEmpty()) {
			flush();
		}
		HbaseUtil.closeTable(table);
	}

	/** Writes all buffered rows to HBase in one batch, records them as successes, and clears the buffer. */
	private void flush() {
		LogWriter.addLog("INFO", "提交数据！");
		HbaseUtil.batchPut(table, rowKeys, rowList);
		getMonitor().increaseSuccessLine(rowList.size());
		LogWriter.addLog("INFO", "提交数据完成！");
		rowList.clear();
	}

}
