// NextUseRegAllocator.cpp 
// Implement class NextUseRegAllocator
#include "crossbit/NextUseRegAllocator.h"
#include "crossbit/VBlock.h"
#include "crossbit/VInst.h"

using namespace crossbit;

void NextUseRegAllocator::init(VBlock *vb)
{
	// Reset all per-block allocator state, then pre-scan the new block.
	vinst_seq = 0;

	// Every physical register starts out unallocated.
	for( XTRegNum reg = 0 ; reg < ra_num_of_reg ; ++reg )
		ra_reg_table[reg].status = RegAllocator::FREE;

	// Drain any next-use queues left over from the previous block.
	for( XTInt32 vreg = 0 ; vreg < MAX_VREG_NUM ; ++vreg ){
		while( !ra_vreg_used_list[vreg].empty() )
			ra_vreg_used_list[vreg].pop();
	}

	// No vreg value is live in a register yet.
	UTIL::XTMemset( ra_vreg_spilled , 0 , sizeof( ra_vreg_spilled ) );

	// Record, per vreg, the ordered sequence of upcoming uses/defs.
	collectNextUseInfo(vb);
}

// Per-phase hook — presumably invoked by the surrounding translation driver
// between phases (confirm against caller). Currently a no-op: the
// register-release bookkeeping below was disabled and is kept only as
// commented-out reference code.
void NextUseRegAllocator::phaseTask()
{
	// take care of the toFreeRegisters
	/*								std::vector<XTRegNum>::iterator it = ra_torelease_regs.begin();
									for (; it != ra_torelease_regs.end(); ++it)
									{
									XTRegNum treg = *it;
									struct RegUsage &tru = ra_treg_usg_tbl[treg],
									&vru = ra_vreg_usg_tbl[tru.mapped_to];

	//XXX
	ra_free_regs.insert(treg);
	tru.status = RegAllocator::FREE;
	tru.mapped_to = 0;
	vru.status = RegAllocator::UNMAPPED;
	vru.mapped_to = 0;
	}
	ra_torelease_regs.clear();
	*/
}

void NextUseRegAllocator::collectNextUseInfo(VBlock *vb)
{
	// Walk the block once and record, for every virtual register, the ordered
	// (instruction position, USE/DEF) events that lie ahead of it.
	XTUint32 seq = 0;

	VBlock::vb_const_iterator it = vb->begin();
	VBlock::vb_const_iterator end = vb->end();
	for( ; it != end ; ++it , ++seq ){
		VInst *inst = *it;
		XTUint32 operand_count = inst->getOperandNum();
		Opcode op = inst->getOpcode();
		DataType t = inst->getDataType();

		// f64 MUL/ADD instructions are skipped entirely
		// (modified by zhangyichao, 2009-12-1).
		if( (op == OP_MUL || op == OP_ADD) && t == f64 )
			continue;

		// Operand indices are 1-based.
		for( XTInt32 i = 1 ; i <= operand_count ; ++i ){
			Operand opr = inst->getOperand(i);

			switch( opr.type ){
			case Operand::VREG_USE:
			case Operand::FREG_USE:
			case Operand::FREG_DEF:
				// NOTE(review): FREG_DEF is deliberately recorded as a USE
				// event here, exactly as the original bookkeeping did —
				// confirm this is intended for float-register defs.
				ra_vreg_used_list[opr.reg].push(std::make_pair(seq,USE));
				break;
			case Operand::VREG_DEF:
				ra_vreg_used_list[opr.reg].push(std::make_pair(seq,DEF));
				break;
			default:
				// Non-register operands carry no next-use information.
				break;
			}
		}
	}
}

// This is an auxiliary function used for debugging: intended to dump the
// collected next-use information. Intentionally left empty — not implemented.
void NextUseRegAllocator::printNextUseInfo()
{
	// TODO: dump ra_vreg_used_list contents for debugging.
}

// Allocate a target (physical) register for virtual register "vreg".
//
// mode == USE means the current value of vreg is needed, so it is loaded
// from the spill pool unless already live in a register.
// ra_vreg_spilled[v] appears to mean "value of v currently lives in a
// register" (true) vs "lives in the spill pool" (false) — confirm naming.
//
// Eviction policy: prefer a FREE register; else a register whose mapped
// vreg has no upcoming use (or whose next event is a DEF, making the old
// value dead); else the register whose next use is farthest away.
//
// Returns the chosen physical register, or ra_num_of_reg when allocation
// is impossible (an error is logged in that case).
XTRegNum NextUseRegAllocator::regAlloc(XTRegNum vreg, RegAccessMode mode)
{
	// Fast path: vreg is already mapped to a physical register.
	XTRegNum alloc = ra_num_of_reg;
	for( XTRegNum i = 0 ; i < ra_num_of_reg ; i++ ){
		if( ra_reg_table[i].status == ALLOCATED && ra_reg_table[i].mapped_to == vreg ){
			// Reload the value on a USE if it is not live in the register.
			if( ra_vreg_spilled[vreg] == false && mode == USE){
				regSpillIn( vreg , i );
				ra_vreg_spilled[vreg] = true;
			}
			ra_reg_table[i].inst = vinst_seq;
			return i;
		}
	}

	XTInt32 max_next = -1;
	XTRegNum max_ps = ra_num_of_reg;
	for( XTRegNum i = 0 ; alloc == ra_num_of_reg  && i < ra_num_of_reg ; i++ ){
		if( ra_reg_table[i].status == FREE ){
			alloc = i;
			break;
		}else if( ra_reg_table[i].status == ALLOCATED ){
			// Never evict a register touched by the current instruction.
			if( ra_reg_table[i].inst == vinst_seq) continue;
			XTRegNum  temp_reg = ra_reg_table[i].mapped_to;
			// Discard next-use entries that are already behind us.
			while( ra_vreg_used_list[temp_reg].empty() == false 
					&& ra_vreg_used_list[temp_reg].front().first < vinst_seq ){
				ra_vreg_used_list[temp_reg].pop();
			}
			if( ra_vreg_used_list[temp_reg].empty()  
					|| ra_vreg_used_list[temp_reg].front().second == DEF ){
				// Old value is dead (no use before its next def): free to take.
				alloc = i;
				break;
			// BUGFIX: the comparison used to be unsigned-vs-signed; the
			// usual arithmetic conversions promoted max_next (-1) to
			// UINT_MAX, so it was always false and no farthest-next-use
			// candidate was ever recorded. Cast the (small) sequence number
			// to signed so the heuristic actually works.
			}else if( (XTInt32)ra_vreg_used_list[temp_reg].front().first > max_next ){
				max_next = ra_vreg_used_list[temp_reg].front().first;
				max_ps = i;
			}

		}
	}

	if( alloc == ra_num_of_reg ) alloc = max_ps;
	if( alloc == ra_num_of_reg){
		UTIL::XTLOG("ERROR, There is no physical register could be allocated\n");
		// BUGFIX: previously fell through and indexed
		// ra_reg_table[ra_num_of_reg] out of bounds; bail out with the
		// invalid-register sentinel instead.
		return alloc;
	}

	// Evict the previous owner: spill its value out only if it still has a
	// pending USE; either way its value is no longer live in a register.
	if( ra_reg_table[alloc].status == ALLOCATED ){
		XTRegNum temp_reg = ra_reg_table[alloc].mapped_to;
		if( ra_vreg_used_list[temp_reg].empty() == false 
				&& ra_vreg_used_list[temp_reg].front().second == USE ){
			regSpillOut(alloc);
		}
		ra_vreg_spilled[temp_reg] = false;
	}
	// Load the requested value when it is about to be read.
	if( mode == USE ) {
		regSpillIn(vreg,alloc);
		ra_vreg_spilled[vreg] = true;
	}

	ra_reg_table[alloc].mapped_to = vreg;
	ra_reg_table[alloc].inst = vinst_seq;
	ra_reg_table[alloc].status = ALLOCATED;
	return alloc;
}


void NextUseRegAllocator::unmapIfNoNextUse(XTRegNum vreg)
{
	// Intentionally empty: this allocator drops stale mappings lazily inside
	// regAlloc()/regAllocForce() when it pops expired next-use entries, so no
	// eager unmapping happens here. Presumably kept to satisfy the
	// RegAllocator interface — confirm against the base class.
}

// Force allocate target register "expect" to "vreg"
//
// Algorithm:
//  if (vreg is already allocated to expect)
//	return immediately
//  else
//	empty "expect" register for "vreg";
//	if (vreg is currently allocated to another target register)
//	    move it to "expect"
//	if (vreg is spilled)
//	    load vreg from spill pool to expect
//
// NOTICE: regAllocForce just allocates the expect physical reg to vreg.
// (The remainder of this original note was truncated mid-sentence.)
XTRegNum NextUseRegAllocator::regAllocForce(XTRegNum vreg, XTRegNum expect, RegAccessMode mode)
{
	//printf("This is regAllocForce calling!\n");
	// A RESERVED register can never be forcibly taken.
	if( ra_reg_table[expect].status == RESERVED ){
		UTIL::XTLOG("Can't force allocate the expect reg to vreg\n");
		// NOTE(review): XTRegNum is presumably unsigned, so -1 wraps to the
		// maximum value; callers must test for (XTRegNum)-1 — confirm.
		return -1;	
	}else if( ra_reg_table[expect].status == ALLOCATED ){
		if( ra_reg_table[expect].mapped_to == vreg ){
			// Already mapped as requested: refresh bookkeeping and make sure
			// the value is loaded when it is about to be read.
			ra_reg_table[expect].inst = vinst_seq;
			if( ra_vreg_spilled[vreg] == false && mode == USE ){
				regSpillIn( vreg , expect );
				ra_vreg_spilled[vreg] = true;
			}
			return expect;
		}else{
			// Evict the current occupant of "expect": drop next-use entries
			// already behind vinst_seq, then spill the value out only if it
			// still has a pending USE.
			XTRegNum temp_reg = ra_reg_table[expect].mapped_to;
			while( ra_vreg_used_list[temp_reg].empty() == false 
					&& ra_vreg_used_list[temp_reg].front().first < vinst_seq ){
				ra_vreg_used_list[temp_reg].pop();
			}
			if( ra_vreg_used_list[temp_reg].empty() == false
					&& ra_vreg_used_list[temp_reg].front().second == USE ){
				regSpillOut( expect );
			}
			ra_vreg_spilled[temp_reg] = false;
		}
	}
	// If vreg already lives in some other register, move it into "expect"
	// via the register-to-register callback and free the old register.
	for( XTRegNum i = 0 ; i < ra_num_of_reg ; i++ ){
		if( ra_reg_table[i].status == ALLOCATED && ra_reg_table[i].mapped_to == vreg ){
			(*ra_cb_reg_to_reg)( i , expect );
			ra_reg_table[expect].status = ALLOCATED;
			ra_reg_table[expect].inst = vinst_seq;
			ra_reg_table[expect].mapped_to = vreg; 
			ra_reg_table[i].status = FREE;
			return expect;	
		}
	}

	// vreg was not in any register: load it from the spill pool on a USE,
	// then record the new mapping.
	ra_vreg_spilled[vreg] = false;
	if( mode == USE ) {
		regSpillIn( vreg , expect );
		ra_vreg_spilled[vreg] = true;
	}
	ra_reg_table[expect].status = ALLOCATED;
	ra_reg_table[expect].mapped_to = vreg;
	ra_reg_table[expect].inst = vinst_seq;
	return expect;
}

// Force "vreg" NOT to be allocated to "except"
//
// Algorithm:
//	if vreg is already allocated and it's not "except"
//	    return immediately
//	else 
//	    "reserve" "except" register
//	    alloc = regAlloc(vreg, mode)
//	    "release" "except" register
//
XTRegNum NextUseRegAllocator::regAllocForceExcept(XTRegNum vreg, XTRegNum except, RegAccessMode mode)
{
	//  printf("This is regAllocForceExcept calling!\n");
	// Remember "except"'s current status so it can be restored after the
	// regAlloc call below (stored as XTInt32; may be rewritten to FREE).
	XTInt32 temp = ra_reg_table[except].status;	
	// If vreg currently occupies "except", evict it first: drop next-use
	// entries already behind vinst_seq, spill the value out only when it
	// still has a pending USE, and treat "except" as FREE afterwards.
	// NOTE(review): ra_reg_table[except].mapped_to is left pointing at vreg
	// even after the status becomes FREE; status checks elsewhere appear to
	// guard against reading it — confirm.
	if( temp == ALLOCATED && ra_reg_table[except].mapped_to == vreg ){
		while( ra_vreg_used_list[vreg].empty() == false && 
				ra_vreg_used_list[vreg].front().first < vinst_seq )  ra_vreg_used_list[vreg].pop();
		if( ra_vreg_used_list[vreg].empty() == false 
				&& ra_vreg_used_list[vreg].front().second == USE ){
			regSpillOut( except );
		}
		ra_vreg_spilled[vreg] = false;
		temp = FREE;
	}
	// Temporarily RESERVE "except" so regAlloc cannot choose it, then
	// restore the saved status.
	ra_reg_table[except].status = RESERVED;
	XTRegNum alloc = regAlloc( vreg , mode );
	ra_reg_table[except].status = temp;
	return alloc;
}

// Reserve target register "treg" so the allocator will not hand it out.
// If treg currently holds an allocated vreg whose value still has a pending
// USE, that value is spilled back to the spill pool first.
void NextUseRegAllocator::regAllocReserve(XTRegNum treg)
{
	//printf("This is regAllocReserve calling!\n");
	// Not holding a live mapping: reserving is just a status change.
	if(  ra_reg_table[treg].status != ALLOCATED ){
		ra_reg_table[treg].status = RESERVED;
		return ;
	}
	XTRegNum vreg = ra_reg_table[treg].mapped_to;
	// Consistency fix: report through UTIL::XTLOG like every other
	// diagnostic in this file, instead of a raw printf.
	if( ra_reg_table[treg].inst == vinst_seq ) UTIL::XTLOG("Can't reserve the reg allocated in the same vinst_seq\n");
	// Discard next-use entries that are already behind us.
	while( ra_vreg_used_list[vreg].empty() == false 
			&& ra_vreg_used_list[vreg].front().first < vinst_seq ){
		ra_vreg_used_list[vreg].pop();
	}
	// Spill the value out only if it is still going to be read.
	if( ra_vreg_used_list[vreg].empty() == false   
			&& ra_vreg_used_list[vreg].front().second == USE ){
		regSpillOut( treg );
		ra_vreg_spilled[vreg] = false;
	}
	ra_reg_table[treg].status = RESERVED;
}

void NextUseRegAllocator::regAllocRelease(XTRegNum treg)
{
	// Return target register "treg" to the free pool. No spill-out is
	// performed here — this only flips the status flag.
	ra_reg_table[treg].status = RegAllocator::FREE;
}

XTRegNum NextUseRegAllocator::regSpillOut(XTRegNum physical_reg)
{
	// Write the value held in physical_reg back to the spill-pool slot of
	// the virtual register currently mapped to it, via the spill callback.
	XTRegNum mapped_vreg = ra_reg_table[physical_reg].mapped_to;
	XTMemAddr slot = (XTMemAddr)(ra_spill_pool + mapped_vreg);
	(*ra_cb_spill_out)( physical_reg , slot );
	return physical_reg;
}
