package com.ultrapower.request;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

import com.ultrapower.bean.AgentLifeCycleBean;
import com.ultrapower.hbase.HbaseUtil;
import com.ultrapower.mapper.AgentLifeCycleMapper;
import com.ultrapower.util.CommonUtils;
import com.ultrapower.util.HBaseTables;
import com.ultrapower.util.RowKeyUtils;
/**
 * Request that scans the HBase agent-lifecycle table for one agent over a
 * time range and prints each mapped lifecycle record to stdout.
 *
 * @author wangy
 */
public class AgentLifeCycleTable extends BaseRequest {

	public static void main(String[] args) {
		BaseRequest agentInfo = new AgentLifeCycleTable();
		agentInfo.execute();
	}

	/**
	 * Scans the agent-lifecycle table using {@link #createScan()} and prints
	 * each row mapped to an {@link AgentLifeCycleBean}. Per-row mapping
	 * failures are caught so one bad row does not abort the scan.
	 */
	@Override
	void execute() {
		TableName agentLifeCycleTableName = HbaseUtil.getTableName(HBaseTables.AGENT_LIFECYCLE_STR);
		this.getHbaseTemplate().find(agentLifeCycleTableName, createScan(), (ResultScanner scanner) -> {
			AgentLifeCycleMapper mapper = new AgentLifeCycleMapper();
			scanner.forEach(result -> {
				try {
					AgentLifeCycleBean lifeCycle = mapper.mapRow(result, 0);
					System.out.println(lifeCycle);
				} catch (Exception e) {
					// NOTE(review): consider routing to a logger instead of stderr;
					// kept as printStackTrace to preserve existing behavior.
					e.printStackTrace();
				}
			});
			return null;
		});
	}

	/**
	 * Builds a single-version scan over rows keyed by
	 * (agentId padded to {@code AGENT_NAME_MAX_LEN}, reversed timestamp),
	 * restricted to the lifecycle-status column.
	 *
	 * @return a configured {@link Scan} bounded by the inherited
	 *         {@code from}/{@code to} window for {@code agentId}
	 */
	@Override
	protected Scan createScan() {
		byte[] agentIdBytes = Bytes.toBytes(agentId);
		long reverseFromTimestamp = CommonUtils.reverseTimeMillis(from);
		long reverseToTimestamp = CommonUtils.reverseTimeMillis(to);
		// Row keys embed a REVERSED timestamp, which inverts sort order:
		// the reversed 'to' bound is the smaller key (scan start) and the
		// reversed 'from' bound is the larger key (scan stop).
		byte[] startKeyBytes = RowKeyUtils.concatFixedByteAndLong(agentIdBytes, HBaseTables.AGENT_NAME_MAX_LEN, reverseToTimestamp);
		byte[] endKeyBytes = RowKeyUtils.concatFixedByteAndLong(agentIdBytes, HBaseTables.AGENT_NAME_MAX_LEN, reverseFromTimestamp);

		Scan scan = new Scan(startKeyBytes, endKeyBytes);
		scan.addColumn(HBaseTables.AGENT_LIFECYCLE_CF_STATUS, HBaseTables.AGENT_LIFECYCLE_CF_STATUS_QUALI_STATES);
		scan.setMaxVersions(1); // only the newest cell version per column
		scan.setCaching(SCANNER_CACHING); // batch rows per RPC to cut round trips
		return scan;
	}

}
