#include "StdAfx.h"
#include "TermSet.h"
 
/**
 * Collects the free term variables occurring anywhere inside Ter.
 * Recurses structurally over the term; the caller owns the returned set
 * and must delete it.
 */
TokenSet* TokenSet::freeVariablesof(const Term & Ter){
	TokenSet* Result = new TokenSet( );

	if(Ter.isTermVariable()){
		// A variable contributes itself only when it is marked free.
		if(_TERMVARIABLE(Ter).isFreeTermVariable())
			Result->insert(Ter);
		return Result;
		}

	if(Ter.isAbstractTerm()){
		// Free variables of an abstraction are those of its body.
		TokenSet* Inner = TokenSet::freeVariablesof(_ABSTRACTTERM(Ter).getBody( ));
		Result->insertSet(*Inner);
		delete Inner;
		}

	if(Ter.isAbstractTypeTerm()){
		TokenSet* Inner = TokenSet::freeVariablesof(_ABSTRACTTYPETERM(Ter).getBody( ));
		Result->insertSet(*Inner);
		delete Inner;
		}

	if(Ter.isApplicationTerm()){
		// Union of the free variables of both sides of the application.
		TokenSet* Inner = TokenSet::freeVariablesof(_APPLICATIONTERM(Ter).getLeft( ));
		Result->insertSet(*Inner);
		delete Inner;
		if(_APPLICATIONTERM(Ter).getRight( ).isTerm( )){
			Inner = TokenSet::freeVariablesof(_TERM(_APPLICATIONTERM(Ter).getRight( )));
			Result->insertSet(*Inner);
			delete Inner;
			}
		}

	if(Ter.isVirtualInvertedListTerm( )){
		// Visit every element of the list that is itself a term.
		for(size_t i = 0; i < _VIRTUALINVERTEDLISTTERM( Ter ).size( ); i++){
			if(_VIRTUALINVERTEDLISTTERM( Ter )[i].isTerm( )){
				TokenSet* Inner = TokenSet::freeVariablesof(_TERM(_VIRTUALINVERTEDLISTTERM( Ter )[i]));
				Result->insertSet(*Inner);
				delete Inner;
				}
			}
		}

	return Result;
	}
/**
 * Collects every free atomic type occurring in To, where To may be either
 * a Type or a Term. The caller owns the returned set and must delete it.
 */
TokenSet* TokenSet::allFreeAtomicTypesof(Token::const_reference To){
	TokenSet* Types = new TokenSet( );

	if(To.isType( )){
		if(_TYPE( To ).isAtomicType( )){
			// An atomic type contributes itself iff it is a free type variable.
			if(_ATOMICTYPE( To ).isFreeTypeVariable())
				Types->insert(To);
			return Types;
			}
		if(_TYPE( To ).isArrowType( )){
			// Union of the free atomic types of both sides of the arrow.
			TokenSet* Side = TokenSet::allFreeAtomicTypesof(_ARROWTYPE( To ).getLeft( ));
			Types->insertSet(*Side);
			delete Side;
			Side = TokenSet::allFreeAtomicTypesof(_ARROWTYPE( To ).getRight( ));
			Types->insertSet(*Side);
			delete Side;
			return Types;
			}
		if(_TYPE( To ).isAbstractType( )){
			// Delegate entirely to the abstraction body.
			delete Types;
			return TokenSet::allFreeAtomicTypesof(_ABSTRACTTYPE( To ).getBody( ));
			}
		return Types;
		}

	// To is a term: recurse according to its structure.
	if(_TERM( To ).isTermVariable( )){
		delete Types;
		return TokenSet::allFreeAtomicTypesof(_TERM( To ).getType( ));
		}
	if(_TERM( To ).isAbstractTerm( )){
		// Bound variable's type first, then the body's types.
		TokenSet* Part = TokenSet::allFreeAtomicTypesof(_ABSTRACTTERM( To ).getVar( ));
		Types->insertSet(*Part);
		delete Part;
		Part = TokenSet::allFreeAtomicTypesof(_ABSTRACTTERM( To ).getBody( ));
		Types->insertSet(*Part);
		delete Part;
		return Types;
		}
	if(_TERM( To ).isAbstractTypeTerm( )){
		delete Types;
		return TokenSet::allFreeAtomicTypesof(_ABSTRACTTYPETERM( To ).getBody( ));
		}
	if(_TERM( To ).isApplicationTerm( )){
		TokenSet* Part = TokenSet::allFreeAtomicTypesof(_APPLICATIONTERM( To ).getLeft( ));
		Types->insertSet(*Part);
		delete Part;
		Part = TokenSet::allFreeAtomicTypesof(_APPLICATIONTERM( To ).getRight( ));
		Types->insertSet(*Part);
		delete Part;
		return Types;
		}
	if(_TERM( To ).isVirtualInvertedListTerm( )){
		for(size_t i = 0; i < _VIRTUALINVERTEDLISTTERM( To ).size( ); i++){
			TokenSet* Part = TokenSet::allFreeAtomicTypesof(_VIRTUALINVERTEDLISTTERM( To )[i]);
			Types->insertSet(*Part);
			delete Part;
			}
		return Types;
		}

	return Types;
	}

void TokenSet::copyMap(const TokenSet& TS){
	  destroyMap();
	  for(TokenSet::PartitionIterator PPtr = TS.Partition.begin();PPtr != TS.Partition.end(); PPtr++)
		  for(size_t i=0; i<PPtr->second.size();i++)
			  TokenSet::insert(*(PPtr->second[i]));
			
	}

// Frees every owned Token clone and resets the set to the empty state.
// FIX: the original cleared each bucket's vector but never erased the map
// entries, so empty buckets accumulated in Partition and were walked by
// every subsequent lookup/iteration. We now drop the buckets themselves.
void TokenSet::destroyMap(){
	map<size_t , vecTokenPtr>::iterator PPtr;
	for(PPtr = Partition.begin(); PPtr != Partition.end(); PPtr++){
		for(size_t i = 0; i < PPtr->second.size(); i++)
			delete PPtr->second[i];
		PPtr->second.clear();
		}
	Partition.clear();	// remove the now-empty buckets as well
	vecSize = 0;
	}
// Constructs an empty token set (no buckets, zero elements).
TokenSet::TokenSet():vecSize(0){
	}
// Deep-copying constructor: clones every token held by TS.
// copyMap() resets vecSize and recounts while cloning, so the initializer
// value is only a starting point.
TokenSet::TokenSet(const TokenSet& TS):vecSize(TS.vecSize){
	copyMap(TS);
	}
// Releases every Token clone owned by this set.
TokenSet::~TokenSet(){
	destroyMap();
	}

// Deep-copy assignment. Self-assignment is a no-op (copyMap would destroy
// the source before copying it, so the guard is essential).
const TokenSet& TokenSet::operator=(const TokenSet& To){
	if(this == &To)
		return *this;
	copyMap(To);
	return *this;
	}

// Inserts a clone of To unless an equal token is already present.
// Returns the stored clone, or NULL when To was a duplicate.
const Token* TokenSet::insert(Token::const_reference To){
	if(find(To) != NULL)
		return NULL;
	vecSize++;
	Token::pointer Clone = To.clone();
	// Tokens are partitioned into buckets keyed by their complexity.
	Partition[To.complexity()].push_back(Clone);
	return Clone;
	}

// Removes (and frees) every stored token equal to To.
// Returns true when at least one token was removed.
// FIX: the original (a) made an unused full copy of the bucket vector,
// (b) rebuilt the bucket into a second vector instead of erasing in place,
// and (c) when several stored tokens matched, deleted only the last match
// and leaked the earlier ones. Deleting each match as it is erased fixes
// the leak; behavior is otherwise identical (insert() prevents duplicates,
// so normally at most one token matches).
bool TokenSet::remove(Token::const_reference To){
	size_t index = To.complexity();
	map<size_t , vecTokenPtr>::iterator PPtr = Partition.find(index);
	if(PPtr == Partition.end( ))
		return false;

	bool Res = false;
	vecTokenPtr& Tokens = PPtr->second;
	for(vecTokenPtr::iterator it = Tokens.begin(); it != Tokens.end(); ){
		if((*it)->operator==(To)){
			delete *it;
			it = Tokens.erase(it);
			vecSize--;
			Res = true;
			}
		else
			++it;
		}

	// Keep the partition free of empty buckets.
	if(Tokens.empty())
		Partition.erase(index);
	return Res;
	}


void TokenSet::insertSet(const TokenSet& TS){
    for(size_t i=0; i<TS.size( ); i++)
		this->insert(TS[i]);
	}
// Looks up a token equal to To. Only the bucket matching To's complexity
// can contain it. Returns the stored token, or NULL when absent.
const Token* TokenSet::find(Token::const_reference To) const{
	PartitionIterator PPtr = Partition.find(To.complexity());
	if(PPtr == Partition.end())
		return NULL;
	const vecTokenPtr& Bucket = PPtr->second;
	for(size_t i = 0; i < Bucket.size(); i++)
		if((*Bucket[i]) == To)
			return Bucket[i];
	return NULL;
	}
// Returns the index-th token in bucket order (buckets ascend by complexity).
// Throws TokenSet::exception when index is out of range.
// FIX: index is unsigned, so the original "0 > index" test could never fire;
// the trailing counting loop is replaced by a direct offset computation.
Token::const_reference TokenSet::operator[ ](size_t index)const{
	if(index >= size( )) throw TokenSet::exception("size exception at Token::const_reference TokenSet::operator[ ](size_t )const");
	// Skip whole buckets until the one containing position `index`.
	size_t inCounter = 0;
	PartitionIterator PPtr = Partition.begin();
	while(inCounter + PPtr->second.size() <= index){
		inCounter += PPtr->second.size();
		PPtr++;
		}
	// Offset of the requested element within the located bucket.
	return *(PPtr->second[index - inCounter]);
}


// Returns a pseudo-randomly chosen stored token, or NULL when empty.
const Token* TokenSet::getRandToken( ) const{
	if(size( ) == 0)
		return NULL;
	return &(operator[ ](rand( ) % size( )));
	}
// Returns an iterator to the first bucket whose complexity is >= MinCom,
// or Partition.end() when no such bucket exists.
TokenSet::PartitionIterator TokenSet::beginMinimumComplexity(size_t MinCom) const{
	TokenSet::PartitionIterator PPtr;
	for(PPtr = Partition.begin(); PPtr != Partition.end(); PPtr++)
		if(PPtr->first >= MinCom)
			break;
	return PPtr;
}
// Renders every stored token, one per line, in bucket order.
string TokenSet::toString( )const{
	string Res("");
	for(PartitionIterator PPtr = Partition.begin(); PPtr != Partition.end(); PPtr++)
		for(size_t i = 0; i < PPtr->second.size(); i++)
			Res += string("\n\r") + PPtr->second[i]->toString();
	return Res;
	}
// Number of tokens stored across all buckets (maintained incrementally).
size_t TokenSet::size( ) const{
	return this->vecSize;
	}
// Sum over all buckets of (bucket complexity × number of tokens in it).
size_t TokenSet::totalComplexity( ) const{
	size_t Total = 0;
	for(map<size_t , vecTokenPtr>::const_iterator it = Partition.begin( ); it != Partition.end( ); it++)
		Total += it->first * it->second.size( );
	return Total;
	}


// Constructs an empty term set (no terms, no types).
TermSet::TermSet( ){}
// Copy constructor: deep-copies both the term partition (TokenSet base)
// and the associated set of types.
TermSet::TermSet(const TermSet& TS):TokenSet(TS),Types(TS.Types){

}

 
// True iff Ty is registered in this set's type collection.
bool TermSet::hasType(Type::const_reference Ty) const{
	return Types.find(Ty) != NULL;
}
bool TermSet::hasTerm(const Term& Te) const{
	const Token* ToPtr = find(Te);
	if(ToPtr) return true;
	return false;
}

// Looks up To: types are searched in Types; terms are searched in the base
// TokenSet partition, but only when the term's type is registered here.
// Returns the stored token, or NULL when absent.
// FIX: the original returned `false` from a pointer-returning function —
// not a valid null pointer constant since C++11 and misleading regardless.
const Token* TermSet::find(Token::const_reference To) const{
	if(To.isType ( )) return Types.find(To);
	if(Types.find(_TERM(To).getType()))
		return TokenSet::find(To);
	return NULL;
}
// Inserts To. Types go into the Types collection; terms are stored in the
// base TokenSet, with their type registered in Types as a side effect.
const Token* TermSet::insert(Token::const_reference To){
	if(To.isType())
		return Types.insert(To);
	// Make the term's type known first, then store the term itself.
	Types.insert(_TERM(To).getType());
	return TokenSet::insert(To);
}

// Renders a listing header, all registered types, then all stored terms.
string TermSet::toString()const {
	string Res("\n\n******TermSet Listing*******");
	for(size_t i = 0; i < Types.size(); i++){
		Res += "\n";
		Res += Types[i].toString();
		}
	Res += TokenSet::toString();
	return Res;
}
// Picks a pseudo-random token from either the term or the type population,
// weighted by their relative sizes.
// FIX: when both populations are empty the original evaluated rand() % 0,
// which is undefined behavior; return NULL for an empty set instead.
const Token* TermSet::getRandToken( ) const{
	size_t TotalTokenNum = this->TeSize( ) + this->TySize( );
	if(TotalTokenNum == 0)
		return NULL;
	if (rand( ) % TotalTokenNum > this->TySize( ))
		return TokenSet::getRandToken( );
	return Types.getRandToken( );
	}
// Picks a random registered type, then a random stored term of that type.
// FIX: getRandType() returns NULL for an empty type set; the original
// dereferenced it unconditionally. Return NULL in that case.
const Term*  TermSet::getRandTerm() const{
	const Type* TyPtr = getRandType();
	if(TyPtr == NULL)
		return NULL;
	return getRandTermOfType(*TyPtr);
	}

// Returns a random registered type, or NULL when no types are stored.
const Type*  TermSet::getRandType() const{
	const Token* ToPtr = Types.getRandToken( );
	return static_cast< const Type* >(ToPtr);
	}

// Returns a random stored term whose type equals VarType, or NULL when the
// type is unknown or no such term exists.
// FIX: the bucket-skipping loop had no Partition.end() check — it
// dereferenced the end iterator when the partition was empty or every
// bucket's complexity was <= VarType.complexity().
const Term*  TermSet::getRandTermOfType(Type::const_reference VarType) const{
	vector <Term::const_pointer> Vars;

	if(Types.find(VarType)){
		// A term of type VarType is more complex than the type itself, so
		// start past the buckets of complexity <= VarType.complexity().
		TokenSet::PartitionIterator PPtr = Partition.begin();
		while(PPtr != Partition.end() && PPtr->first <= VarType.complexity())
			PPtr++;

		for(; PPtr != Partition.end(); PPtr++)
			for(size_t i = 0; i < PPtr->second.size(); i++){
				Term::const_reference SomeTerm = _TERM(*(PPtr->second[i]));
				if(SomeTerm.getType() == VarType)
					Vars.push_back(&SomeTerm);
				}

		if(Vars.size())
			return Vars[rand() % Vars.size()];
		}

	return NULL;
}

// Returns a random stored arrow-typed term whose argument (left) type is
// VarType, or NULL when none exists.
// FIX: as in getRandTermOfType, the bucket-skipping loop lacked a
// Partition.end() bound and could dereference the end iterator.
const Term*  TermSet::getRandFunctionInDomain(Type::const_reference VarType) const{
	vector <Term::const_pointer>	Vars;
	TokenSet::PartitionIterator PPtr = Partition.begin();
	while(PPtr != Partition.end() && PPtr->first <= VarType.complexity())
		PPtr++;
	for(; PPtr != Partition.end(); PPtr++)
		for(size_t i = 0; i < PPtr->second.size(); i++){
			Term::const_reference SomeTerm = _TERM(*(PPtr->second[i]));
			if (SomeTerm.getType().isArrowType()){
				ArrowType::const_reference AT = _ARROWTYPE(SomeTerm.getType());
				if(AT.getLeft() == VarType)
					Vars.push_back(&SomeTerm);
				}
			}
	if(Vars.size()) return Vars[rand() % Vars.size()];
	return NULL;
}


// Returns a random stored term that either has type VarType itself, or is
// an arrow-typed function whose result (as reported by FunctionTo) equals
// VarType. Returns NULL when no candidate exists.
const Term*  TermSet::getRandFunctionToDomain(Type::const_reference VarType) const{
	vector <Term::const_pointer>	Vars;
	// beginMinimumComplexity is end()-safe, unlike a raw skipping loop.
	TokenSet::PartitionIterator PPtr = TokenSet::beginMinimumComplexity(VarType.complexity());
	for(; PPtr != Partition.end(); PPtr++)
		for(size_t i = 0; i < PPtr->second.size(); i++){
			Term::const_reference SomeTerm = _TERM(*(PPtr->second[i]));
			if(SomeTerm.getType() == VarType)
				Vars.push_back(&SomeTerm);
			else if(SomeTerm.getType().isArrowType()){
				ArrowType::const_reference AT = _ARROWTYPE(SomeTerm.getType());
				if(AT.FunctionTo() == VarType)
					Vars.push_back(&SomeTerm);
				}
			}
	if(Vars.size()) return Vars[rand() % Vars.size()];
	return NULL;
}
// Combined complexity of all registered types and all stored terms.
size_t TermSet::totalComplexity( ) const{
	size_t Total = Types.totalComplexity( );
	Total += TokenSet::totalComplexity( );
	return Total;
	}

// Default-constructs an empty evaluation: no token, no numeric value.
// NOTE(review): first/second look like members of a pair-like base declared
// in the header — confirm before converting these to an initializer list.
SemanticEvaluation::SemanticEvaluation( ){
		this->first = NULL;
		this->second = NULL;
}
// Takes ownership of the given token and value pointers (either may be
// NULL); the destructor deletes both.
SemanticEvaluation::SemanticEvaluation( Token* TPtr,  FTypeName<double>* FDoubPtr){
		this->first = TPtr;
		this->second = FDoubPtr;
}

// Deep copy: clones the owned token and copies the numeric value.
// NOTE(review): second is declared via FTypeName<double>* but the copy
// allocates F<double> — presumably F is an alias/subtype; confirm in header.
SemanticEvaluation::SemanticEvaluation( const SemanticEvaluation& SE){
		this->first = NULL;
		this->second = NULL;
		if(SE.first)      first = SE.first->clone( );
		if(SE.second)	 second = new F<double>(*(SE.second));
	}

// Deep-copy assignment: frees the currently owned token/value, then clones
// SE's. Self-assignment is a no-op.
const SemanticEvaluation& SemanticEvaluation::operator=(const SemanticEvaluation& SE){
	if(this == &SE) return *this;

	// delete on NULL is a no-op, so no guards are needed.
	delete first;
	first = NULL;
	delete second;
	second = NULL;

	if(SE.first)
		first = SE.first->clone( );
	if(SE.second)
		second = new F<double>(*(SE.second));
	return *this;
	}


// Releases the owned token and value; delete tolerates NULL pointers.
SemanticEvaluation::~SemanticEvaluation( ){
	delete first;
	delete second;
}

// Builds a fresh evaluation holding the sum of the two numeric values.
// Both operands must carry a non-NULL value; the caller owns the result.
SemanticEvaluation* SemanticEvaluation::SemanticPlus( SemanticEvaluation* SERight){
	F<double>* Sum = new F<double>( *(this->second) + *(SERight->second) );
	return new SemanticEvaluation(NULL, Sum);
	}

#define MIN0 0.000001
// Protected division: returns a fresh evaluation holding this / SERight.
// When the divisor is within MIN0 of zero the division is skipped and an
// empty evaluation (second == NULL) is returned instead.
// BUG FIX: the original fell off the end of this value-returning function
// after computing the quotient (undefined behavior; result was lost).
SemanticEvaluation* SemanticEvaluation::SemanticDiv( SemanticEvaluation* SERight){
	SemanticEvaluation* SERes = new SemanticEvaluation( );
	double Div = SERight->second->x( );
	if	((Div >= 0.0 && Div < MIN0) 
		||(Div < 0.0 && Div >= -MIN0)) return SERes;
	SERes->second = new F<double>( (*(this->second)) / (*(SERight->second)));
	return SERes;
	}

// Builds a fresh evaluation holding the difference of the numeric values.
// BUG FIX: SERight is a pointer; the original wrote SERight.second, which
// does not compile.
SemanticEvaluation* SemanticEvaluation::SemanticMinus( SemanticEvaluation* SERight){
	return new SemanticEvaluation(
		NULL,
		new F<double>( *(this->second) - *(SERight->second) ));
	}
// Builds a fresh evaluation holding the product of the two numeric values.
SemanticEvaluation* SemanticEvaluation::SemanticMult( SemanticEvaluation* SERight){
	F<double>* Product = new F<double>( (*(this->second)) * (*(SERight->second)) );
	return new SemanticEvaluation(NULL, Product);
	}

// Greater-than comparison of the numeric values: yields a clone of Te_True
// when this value exceeds SERight's, otherwise a clone of Te_False.
// BUG FIX: SERight is a pointer; the original wrote SERight.second, which
// does not compile.
SemanticEvaluation* SemanticEvaluation::SemanticGt( SemanticEvaluation* SERight, const Term& Te_True, const Term& Te_False){
	if(this->second->x( ) > SERight->second->x( ))
		return new SemanticEvaluation(Te_True.clone( ), NULL);
	return new SemanticEvaluation(Te_False.clone( ), NULL);
	}

// Inserts To via TermSet::insert; every newly stored *term* additionally
// gets a fresh, empty semantic-evaluation slot in SEnv.
const Token* EvaluatingTermSet::insert(Token::const_reference To){
	const Token* Res = TermSet::insert( To );
	if(Res != NULL && Res->isTerm( ))
		SEnv[static_cast<const Term*>(Res)] = new SemanticEvaluation( );
	return Res;
	}

// Replaces the semantic evaluation bound to TerPtr (takes ownership of SE),
// freeing the previous one.
// NOTE(review): when TerPtr is not registered in SEnv, SE is neither stored
// nor deleted — the caller keeps ownership; confirm that is intended.
void	EvaluatingTermSet::setValue (const Term* TerPtr, SemanticEvaluation* SE){
	SemanticEnvironment::const_iterator it = SEnv.find(TerPtr);
	if(it != SEnv.end( )){
		if(it->second) delete it->second;
		  SEnv[TerPtr] = SE;
		}
	}
// Returns the semantic evaluation bound to Ter, or NULL when Ter is unknown.
const SemanticEvaluation* EvaluatingTermSet::getValue(const Term* Ter) const{
	SemanticEnvironment::const_iterator it = SEnv.find(Ter);
	if(it == SEnv.end( ))
		return NULL;
	return it->second;
	}
// Linear scan: is To one of the tokens registered as primitives?
bool EvaluatingTermSet::isPrimitive(const Token& To) const{
	for(size_t i = 0; i < Primitives.size( ); i++)
		if((*Primitives[i]) == To)
			return true;
	return false;
	}
void EvaluatingTermSet::setAsPrimitive(const Token& To){
	const Token* ToPtr = TermSet::find(To);
	if((ToPtr) &&	(!isPrimitive(*ToPtr)))	{
		Primitives.push_back(ToPtr);
		}
	}


