#include "StdAfx.h"
#include "TermSet.h"
 

void TokenSet::copyMap(const TokenSet& TS){
	  destroyMap();
	  for(TokenSet::PartitionIterator PPtr = TS.Partition.begin();PPtr != TS.Partition.end(); PPtr++)
		  for(size_t i=0; i<PPtr->second.size();i++)
			  TokenSet::insert(*(PPtr->second[i]));
			
	}

void TokenSet::destroyMap(){
	// Release every cloned Token owned by the set and reset it to empty.
	map<size_t , vecTokenPtr>::iterator PPtr;
	for(PPtr = Partition.begin();PPtr !=Partition.end(); PPtr++)	{
	   for(size_t i=0; i<PPtr->second.size();i++)
		   delete PPtr->second[i];
	   PPtr->second.clear();	
		}
	// Drop the (now empty) complexity buckets as well. The old code left
	// empty vectors behind, so repeated copy/assign cycles accumulated
	// dead map nodes.
	Partition.clear();
	vecSize=0;
	}
// Construct an empty set; vecSize tracks the total Token count.
TokenSet::TokenSet():vecSize(0){
	}
// Deep copy: copyMap() clears this set, then clones every Token of TS
// (vecSize is re-counted through insert(), so the initializer is only
// a starting value).
TokenSet::TokenSet(const TokenSet& TS):vecSize(TS.vecSize){
	 copyMap(TS);
}
// Deletes all cloned Tokens owned by the partition map.
TokenSet::~TokenSet(){
	destroyMap();
	}

const TokenSet& TokenSet::operator=(const TokenSet& To){
	// Self-assignment guard: copyMap() destroys the current contents
	// before cloning, which would otherwise wipe To itself.
	if(this == &To)
		return *this;
	copyMap(To);
	return *this;
	}

const Token* TokenSet::insert(Token::const_reference To){
	// Reject duplicates; otherwise clone the Token and file the clone
	// under its complexity bucket. Returns the stored clone, or NULL
	// when an equal Token was already present.
	if(find(To))
		return NULL;
	Token::pointer Clone = To.clone();
	vecSize++;
	Partition[To.complexity()].push_back(Clone);
	return Clone;
	}

void TokenSet::insertSet(const TokenSet& TS){
    for(size_t i=0; i<TS.size( ); i++)
		this->insert(TS[i]);
	}
	
bool TokenSet::find(Token::const_reference To) const{
	// Only the bucket matching To's complexity can hold an equal Token,
	// so a single map lookup narrows the linear search.
	PartitionIterator Bucket = Partition.find(To.complexity());
	if(Bucket == Partition.end())
		return false;
	const vecTokenPtr& Tokens = Bucket->second;
	for(size_t i = 0; i < Tokens.size(); i++)
		if(*(Tokens[i]) == To)
			return true;
	return false;
	}
Token::const_reference TokenSet::operator[ ](size_t index)const{
	// Random access across the partition: buckets are visited in key
	// order, so element `index` lives in the first bucket whose
	// cumulative size exceeds index.
	// NOTE: index is unsigned, so the old `0 > index` test was always
	// false and has been dropped; the dead count-up loop computing the
	// in-bucket offset is replaced by plain subtraction.
	if(index >= size( )) throw TokenSet::exception("size exception at Token::const_reference TokenSet::operator[ ](size_t )const");
	size_t inCounter = 0;
	PartitionIterator PPtr = Partition.begin();
	while(inCounter + PPtr->second.size() <= index){
		inCounter += PPtr->second.size();
		PPtr++;
		}
	// Offset of the requested element inside the located bucket.
	return *(PPtr->second[index - inCounter]);
}


const Token* TokenSet::getRandToken( ) const{
	// Uniformly pick one stored Token; NULL when the set is empty.
	size_t Count = size( );
	if(Count == 0)
		return NULL;
	return &(operator[ ](rand() % Count));
	}
TokenSet::PartitionIterator TokenSet::beginMinimumComplexity(size_t MinCom) const{
	// First bucket whose complexity key is >= MinCom; Partition.end()
	// when every bucket is simpler.
	PartitionIterator PPtr = Partition.begin();
	for(; PPtr != Partition.end(); PPtr++)
		if(PPtr->first >= MinCom)
			break;
	return PPtr;
}
string TokenSet::toString( )const{
	// Render every stored Token, one "\n\r"-prefixed entry each, in
	// bucket (complexity) order.
	string Res;
	for(PartitionIterator PPtr = Partition.begin(); PPtr != Partition.end(); PPtr++){
		const vecTokenPtr& Tokens = PPtr->second;
		for(size_t i = 0; i < Tokens.size(); i++){
			Res += "\n\r";
			Res += Tokens[i]->toString();
			}
		}
	return Res;
	}
size_t TokenSet::size( ) const{
	// Total Token count across all complexity buckets (maintained by
	// insert()/destroyMap(), so no traversal is needed).
	return vecSize;
	}



// Default: empty term set (base TokenSet and the Types set both empty).
TermSet::TermSet( ){}
// Deep copy: the TokenSet base clones the stored terms, Types copies
// the registered types.
TermSet::TermSet(const TermSet& TS):TokenSet(TS),Types(TS.Types){

}
// Members clean up after themselves; nothing extra to release here.
TermSet::~TermSet(){}

 
// True iff Ty is registered in the Types set.
bool TermSet::hasType(Type::const_reference Ty) const{
	return Types.find(Ty);
}
// True iff an equal Term is stored (delegates to TermSet::find, which
// also checks that the Term's Type is known).
bool TermSet::hasTerm(const Term& Te) const{
	return find(Te);
}

bool TermSet::find(Token::const_reference To) const{
	// Types are looked up in the Types set. Terms are only searched in
	// the base TokenSet when their Type is registered: an unknown Type
	// implies the Term cannot be present.
	if(To.isType ( ))
		return Types.find(To);
	if(!Types.find(_TERM(To).getType()))
		return false;
	return TokenSet::find(To);
}
const Token* TermSet::insert(Token::const_reference To){
	// A Type goes into the Types set. A Term goes into the base
	// TokenSet, after making sure its Type is registered so that
	// find() will later accept it.
	if(To.isType())
		return Types.insert(To);
	Types.insert(_TERM(To).getType());
	return TokenSet::insert(To);
}

string TermSet::toString()const {
	// Listing header, then every registered Type (one per line), then
	// the stored Terms as rendered by the base class.
	string Res("\n\n******TermSet Listing*******");
	for(size_t i = 0; i < Types.size(); i++){
		Res += "\n";
		Res += Types[i].toString();
		}
	Res += TokenSet::toString();
	return Res;
}
const Token* TermSet::getRandToken( ) const{
	// Pick a random Token, biased 4:1 towards Terms over Types.
	// Fall back to the other pool when the chosen one is empty, so a
	// non-empty TermSet never returns NULL (the old code returned NULL
	// ~80% of the time when only Types were present, and ~20% of the
	// time when only Terms were).
	const Token* Res;
	if (rand( )%5){
		Res = TokenSet::getRandToken( );
		if(Res == NULL) Res = Types.getRandToken( );
		}
	else{
		Res = Types.getRandToken( );
		if(Res == NULL) Res = TokenSet::getRandToken( );
		}
	return Res;
	}
const Term* TermSet::getRandTerm() const{
	// Guard against an empty Types set: getRandType() returns NULL in
	// that case and the old code dereferenced it unconditionally (UB).
	// Returns NULL when no Term of the chosen Type exists either.
	const Type* Ty = getRandType();
	if(Ty == NULL)
		return NULL;
	return getRandTermOfType(*Ty);
	}

const Type*  TermSet::getRandType() const{
	// Types only stores Type tokens, so the downcast is safe;
	// NULL when the Types set is empty.
	const Token* Res = Types.getRandToken( );
	return static_cast< const Type* >(Res);
	}

const Term*  TermSet::getRandTermOfType(Type::const_reference VarType) const{
	// Uniformly pick one stored Term whose Type equals VarType.
	// Returns NULL when VarType is unknown or no such Term exists.
	vector <Term::const_pointer> Vars;

	if(Types.find(VarType)){
		// Skip buckets whose complexity is <= VarType's (candidate
		// Terms live in more complex buckets). The old skip loop did
		// not check for end() and could dereference the end iterator
		// when every bucket was that simple.
		TokenSet::PartitionIterator PPtr = Partition.begin();
		while(PPtr != Partition.end() && PPtr->first <= VarType.complexity())
			PPtr++;

		while(PPtr != Partition.end()){
			for(size_t i=0; i<PPtr->second.size();i++){
				Term::const_reference SomeTerm = _TERM(*(PPtr->second[i]));
				if(SomeTerm.getType() == VarType)
					Vars.push_back(&SomeTerm);
				}
			PPtr++;
			}

		if(Vars.size())
			return Vars[rand()%Vars.size()];
		}

	return NULL;
}

const Term*  TermSet::getRandFunctionInDomain(Type::const_reference VarType) const{
	// Uniformly pick a Term of arrow type whose argument (left) type is
	// VarType; NULL when none exists.
	vector <Term::const_pointer>	Vars;
	// Skip buckets up to VarType's complexity. The old skip loop
	// dereferenced the iterator without checking for end() and could
	// run off the map when every bucket was that simple.
	TokenSet::PartitionIterator PPtr = Partition.begin();
	while(PPtr != Partition.end() && PPtr->first <= VarType.complexity())
		PPtr++;
	while(PPtr != Partition.end()){
		for(size_t i=0; i<PPtr->second.size();i++){
			Term::const_reference SomeTerm = _TERM(*(PPtr->second[i]));
			if (SomeTerm.getType().isArrowType()){
				ArrowType::const_reference AT =	_ARROWTYPE(SomeTerm.getType());
				if(AT.getLeft() == VarType)
					Vars.push_back(&SomeTerm);
				}
			}
		PPtr++;
		}
	if(Vars.size()) return Vars[rand()%Vars.size()];
	return NULL;
}


const Term*  TermSet::getRandFunctionToDomain(Type::const_reference VarType) const{
	// Uniformly pick a Term that yields VarType: either its Type is
	// VarType itself, or it is a function whose final result type
	// (FunctionTo) is VarType. NULL when no candidate exists.
	vector <Term::const_pointer> Candidates;
	TokenSet::PartitionIterator PPtr = TokenSet::beginMinimumComplexity(VarType.complexity());
	for(; PPtr != Partition.end(); PPtr++){
		const vecTokenPtr& Tokens = PPtr->second;
		for(size_t i = 0; i < Tokens.size(); i++){
			Term::const_reference SomeTerm = _TERM(*(Tokens[i]));
			if(SomeTerm.getType() == VarType)
				Candidates.push_back(&SomeTerm);
			else if(SomeTerm.getType().isArrowType()){
				ArrowType::const_reference AT = _ARROWTYPE(SomeTerm.getType());
				if(AT.FunctionTo() == VarType)
					Candidates.push_back(&SomeTerm);
				}
			}
		}
	if(Candidates.size())
		return Candidates[rand()%Candidates.size()];
	return NULL;
}






