/*	Copyright 2007 - Xavier Baro (xbaro@cvc.uab.cat)

	This file is part of eapmlib.

    Eapmlib is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 3 of the License, or any 
	later version.

    Eapmlib is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/
/*! \file NaiveBayesModel.cpp
\brief Naive Bayes Model class source
\author Xavier Baró Solé

This file contains the implementation of the Naive Bayes Model class. This class implements
this type of probabilistic model. 
*/
#include "NaiveBayesModel.h"
#include <time.h>
#include <fstream>

/*! Default constructor: sets the default parameters of the NBE estimation.

The original code initialized m_HoldoutPercentage to 0.1 in the initializer
list and immediately overwrote it with 0.4 in the body; the redundant double
initialization is collapsed into a single one (the effective value, 0.4).
*/
Evolutive::CNaiveBayesModel::CNaiveBayesModel() : CProbModel(),m_Model(NULL),m_HoldoutPercentage(0.4)
{	
	//! Default population-generation method: allocate individuals from cluster weights
	m_GenerationMethod=POPGEN_PROB;	

	//! Set the default parameters for NBE
	m_VarDiv = 2.5;				// Forwarded to the AbsVarAbstractionSet constructor (see EstimateModel)
	m_MinLlChangeEM = 0.0001;	// Min relative log-likelihood change to keep iterating EM
    m_MinLlChangeAdd = 0.001;	// Min relative log-likelihood change to keep adding clusters
	m_MinWeightChange= 0.0001;	// Min cluster-weight change to keep iterating EM
	m_AbsToLoad = 4;			// Initial number of abstractions (clusters) to load
	m_FixedEMIters = -1;		// Negative: no fixed iteration count (run until convergence)
	m_PruneFrac = 0.999;		// Pruning threshold; >=1.0 would disable pruning
	m_PruneFreq = 5;			// Prune every 5 EM iterations
	m_RemoveClusterCenters=false;
	m_SaveBestModel=false;
	m_Rerun=false;
	m_SampledAbs = true;
}

/*! Destructor: releases the probabilistic model, if any.
(delete on a NULL pointer is a no-op, so no explicit guard is needed.)
*/
Evolutive::CNaiveBayesModel::~CNaiveBayesModel()
{
	delete m_Model;
}

/*! Sets the fraction of the observations reserved as hold-out (validation) set.
\param Fraction Expected in [0,1]; a value of 0 disables the hold-out split
(see PrepareData, which only splits when the fraction is > 0).
*/
void Evolutive::CNaiveBayesModel::SetHoldOutFrac(double Fraction)
{
	m_HoldoutPercentage=Fraction;
}

/*! Fixes the number of EM iterations to run per estimation round.
\param NumIters Iteration count; a negative value (the default, -1) disables
the fixed count, letting EM run until the convergence criteria are met
(see EstimateModel).
*/
void Evolutive::CNaiveBayesModel::SetMaxIters(int NumIters)
{
	m_FixedEMIters = NumIters;
}

/*! Sets the minimum cluster-weight change required to keep iterating EM.
\param MinWeight Threshold compared against the weight change returned by each
EM step; below it, EM is considered converged (see EstimateModel).
*/
void Evolutive::CNaiveBayesModel::SetMinWeightChange(double MinWeight)
{
	m_MinWeightChange=MinWeight;
}

/*! Sets the pruning fraction for low-weight clusters.
\param Fraction A value below 1.0 enables periodic pruning during estimation;
1.0 or greater disables it (see EstimateModel, which checks m_PruneFrac < 1.0).
*/
void Evolutive::CNaiveBayesModel::SetPruneFrac(double Fraction)
{
	m_PruneFrac=Fraction;
}

/*! Sets how often pruning is applied during EM.
\param NumIters Number of EM iterations between two pruning passes.
*/
void Evolutive::CNaiveBayesModel::SetPruneFreq(int NumIters)
{
	m_PruneFreq=NumIters;
}

/*! Sets the initial number of abstractions (clusters) to load.
\param NumClusters Number of clusters sampled from the training data at the
start of each estimation round (see EstimateModel).
*/
void Evolutive::CNaiveBayesModel::SetInitialNumClusters(int NumClusters)
{
	m_AbsToLoad = NumClusters;	
}

/*! Sets the minimum relative log-likelihood improvement required to continue EM.
\param Delta Convergence threshold for the inner EM loop (see EstimateModel).
*/
void Evolutive::CNaiveBayesModel::SetEMDelta(double Delta)
{
	m_MinLlChangeEM = Delta;
}

/*! Sets the minimum relative log-likelihood improvement required to keep
adding clusters to the model.
\param Delta Convergence threshold for the outer cluster-adding loop
(see EstimateModel).
*/
void Evolutive::CNaiveBayesModel::SetAddDelta(double Delta)
{
	m_MinLlChangeAdd = Delta;
}

/*! Sets the variance divisor of the model.
\param VarDiv Value forwarded to the AbsVarAbstractionSet constructor when the
model is (re)built in EstimateModel.
*/
void Evolutive::CNaiveBayesModel::SetVarDiv(double VarDiv)
{
	m_VarDiv = VarDiv;
}

/*! Sets whether samples promoted to cluster centres are removed from the
training set.
\param Flag When true, each sample used as a new cluster centre is deleted
from the training set; when false it is only moved behind the already-used
centres so it is not picked again (see EstimateModel).
*/
void Evolutive::CNaiveBayesModel::SetRemoveClusterCentres(bool Flag)
{
	m_RemoveClusterCenters=Flag;
}

/*! Reports whether the model has become static (stopped changing).
\param Tolerance Unused by this implementation.
\return Always false: this model is never considered static.
*/
bool Evolutive::CNaiveBayesModel::IsStatic(double Tolerance)
{	
	return false;
}

/*! Generates a new population by sampling the current probabilistic model.

If the model has not been estimated yet, individuals are generated at random.
Otherwise, the number of individuals drawn from each abstraction (cluster) is
allocated either from the cluster weights (POPGEN_PROB) or from the scores of
the current population (POPGEN_VAL), and each individual is produced by
sampling its assigned cluster.

Fixes over the original implementation:
 - the cluster index j could advance past the last abstraction when the
   per-cluster quotas did not cover the whole population, reading
   NumSamples[] and calling getSample() out of bounds; the remaining
   individuals are now drawn from the most populated cluster (which was the
   original intent, as shown by the never-used BestCluster variable);
 - the trailing "fill from best cluster" loop was dead code (RestSamples was
   always zero after the main loop) and has been removed;
 - the quota vector is zero-initialized so an unexpected generation method
   cannot leave it uninitialized;
 - a zero score sum in POPGEN_VAL no longer divides by zero.

\param Population Population whose first GetPopulationUpdSize() individuals are overwritten.
\param ModelCode Coding method used while transferring samples to the chromosomes.
\param Evaluator Optional validity checker; when provided, generation is retried up to 10 times per individual.
*/
void Evolutive::CNaiveBayesModel::NewPopulation(Evolutive::CPopulation &Population,CODING_METHOD ModelCode,Evolutive::CEvaluator *Evaluator)
{
	int PopSize;
	int *NumSamples=NULL;
	int i,j;
	double *SumList=NULL;
	int RestSamples;
	int Num;
	int Count;
	double NormFact,Prob;
	int BestCluster=-1;
	int NumBest;
	int NumAbs;
	int SampleCluster;
	int GenCount;
	int GenTry=10;
	POBGEN_METHOD GenerationMethod=m_GenerationMethod;
	CODING_METHOD IndCode;
	bool MaxFunction=true;
	VarSet NewSample;

	//! Retrieve the number of individuals to generate
	PopSize=Population.GetPopulationUpdSize();
	RestSamples=PopSize;

	// Initialize the random generator
	srand( (unsigned)time( NULL ) );

	// If the model is not initialized, generate a random population
	if(!m_Model)
	{
		for(i=0;i<PopSize;i++)
		{
			if(Evaluator)
			{
				// Retry until the evaluator accepts the individual or the retry
				// budget is exhausted (the last attempt is kept).
				GenCount=0;
				do{
					Population.GetChromosomePtr(i)->Generate();
					GenCount++;
				} while(!Evaluator->IsValid(&(Population[i])) && GenCount<GenTry);
			}
			else
			{
				Population.GetChromosomePtr(i)->Generate();
			}
		}
		return;
	}

	// Create a vector with the number of samples for each abstraction,
	// zero-initialized so every cluster has a well-defined quota.
	NumAbs=m_Model->getNumAbstractions();
	NumSamples=new int[NumAbs];
	memset(NumSamples,0,sizeof(int)*NumAbs);

	// If the population is not evaluated, force the generation using probabilities
	if(!Population.IsScored())
	{
		// Change the generation type (the other types need the scores)
		GenerationMethod=POPGEN_PROB;
	}
	else
	{
		// Use the best and worse value to guess the sorting order
		MaxFunction=(Population.GetSortedScoreVal(0)>=Population.GetSortedScoreVal(Population.GetPopulationSize()-1));
	}

	// Allocate the per-cluster quotas
	switch(GenerationMethod)
	{
	case POPGEN_VAL:
		// Create a list to accumulate the score mass of each cluster
		SumList=new double[NumAbs];
		memset(SumList,0,sizeof(double)*NumAbs);

		// Add the score of each individual to the sum of its cluster
		NormFact=0;
		for(i=0;i<Population.GetPopulationSize();i++)
		{
			// Obtain the cluster of this individual
			Num=Classify(Population.GetSortedChromosomePtr(i));
			
			// Add the value
			SumList[Num]+=Population.GetSortedScoreVal(i);		
			NormFact+=Population.GetSortedScoreVal(i);
		}

		// Guard against a population whose scores sum to zero (would divide by zero)
		if(NormFact==0.0)
			NormFact=1.0;

		// Obtain the number of individuals for each cluster
		for(i=0;i<NumAbs;i++)
		{
			// Calculate the normalized value
			Prob=SumList[i]/NormFact;

			// When minimizing, invert the proportion so better (lower) scored
			// clusters receive more individuals
			if(!MaxFunction)
				Prob=1.0-Prob;

			// Calculate the number of individuals using the proportion
			Num=static_cast<int>(ceil(Prob*PopSize));
			NumSamples[i]=max(0,min(RestSamples,Num));
			RestSamples=max(0,RestSamples-NumSamples[i]);
		}

		// Remove the list
		delete[] SumList;
		break;
	case POPGEN_PROB:
	default:
		// Allocate individuals proportionally to the cluster weights
		for(i=0;i<NumAbs;i++)
		{
			Num=static_cast<int>(ceil(m_Model->getWeight(i)*PopSize));
			NumSamples[i]=max(0,min(RestSamples,Num));
			RestSamples=max(0,RestSamples-NumSamples[i]);
		}
		break;
	}

	// Generate the new population	
	Count=0;
	NumBest=-1;
	for(i=0,j=0;i<PopSize;i++)
	{
		// Change the coding method of the individual to the coding of model
		IndCode=Population.GetChromosomePtr(i)->GetCodingMethod();
		Population.GetChromosomePtr(i)->SetCode(ModelCode);

		// When the quota of the current cluster is exhausted, advance to the
		// next one, remembering the cluster with the largest quota
		if(j<NumAbs && Count>=NumSamples[j])
		{		
			// Store the cluster with more samples
			if(NumSamples[j]>NumBest)
			{
				BestCluster=j;
				NumBest=NumSamples[j];
			}
			// Go to the next abstraction
			j++;
			Count=0;
		}

		// Select the cluster to sample: the current one, or — once every quota
		// is exhausted — the most populated one (prevents the out-of-bounds
		// access of the original code)
		if(j<NumAbs)
			SampleCluster=j;
		else
			SampleCluster=(BestCluster>=0 ? BestCluster : NumAbs-1);

		// Generate the individual
		if(Evaluator)
		{
			// Retry the sampling until a valid individual is obtained or the
			// retry budget is exhausted (the last sample is kept)
			GenCount=0;
			do{
				NewSample=m_Model->getSample(SampleCluster);
				Population.GetChromosomePtr(i)<<NewSample;
				GenCount++;
			} while(!Evaluator->IsValid(&(Population[i])) && GenCount<GenTry);
		}
		else
		{			
			// Generate the sample
			NewSample=m_Model->getSample(SampleCluster);
			Population.GetChromosomePtr(i)<<NewSample;
		}

		//! Restore the coding method
		Population.GetChromosomePtr(i)->SetCode(IndCode);
		
		// Increment the counter
		Count++;
	}

	//! Remove the quota vector
	delete[] NumSamples;
}

/*! Incremental update of the model with a single chromosome.
NOTE(review): intentionally left empty — this model is only rebuilt through
Estimate()/EstimateModel(); confirm whether per-individual updates are planned.
\param C Chromosome to incorporate (currently ignored).
*/
void Evolutive::CNaiveBayesModel::Update(Evolutive::CChromosome &C)
{
	
}

/*! Classifies a raw data vector, returning the abstraction (cluster) with
maximum likelihood.

Fix over the original implementation: ProbVal[a] was only written when cluster
a improved the running best, leaving the remaining entries of the output array
uninitialized; the probability is now stored for every cluster.

\param Data Input vector; must hold at least as many doubles as the schema has variables.
\param ProbVal Optional output array with one entry per abstraction, receiving
the prior-weighted probability of the sample under each abstraction; may be NULL.
\return Index of the abstraction with maximum likelihood.
*/
int Evolutive::CNaiveBayesModel::Classify(double *Data,double *ProbVal)
{
	int a;
	int Dimension;
	int BestAbs=-1;
	double BestLl=-1;
	Prob p=0.0;

	// Copy the data vector to a VarSet object
	Dimension=m_ModelSchema.getNumVars();
	VarSet q=VarSet(Dimension);
	memcpy(q.getArray(),Data,Dimension*sizeof(double));

	// Return the abstraction (cluster) with maximum Likelihood
	for(a=0;a<m_Model->getNumAbstractions(); a++) 
	{
		//! Calculate the prior-weighted probability for the abstraction a
		p = m_Model->getWeight(a) * m_Model->getProb(q, a);

		//! Store the probability for EVERY abstraction
		if(ProbVal)
			ProbVal[a]=p.val();

		//! Compare with the best (the first abstraction always initializes it)
		if(p.ln()>BestLl || a==0)
		{
			BestAbs=a;
			BestLl=p.ln();
		}
	}

	return BestAbs;
}

/*! Classifies a chromosome, returning the index of the abstraction (cluster)
under which it has maximum likelihood.
\param C Chromosome to classify.
\return Index of the most likely abstraction.
*/
int Evolutive::CNaiveBayesModel::Classify(Evolutive::CChromosome *C)
{
	VarSet Sample;
	Prob ClusterProb=0.0;
	double BestLikelihood=-1;
	int BestAbstraction=-1;

	//! Convert the chromosome into a variable set
	*C>>Sample;

	//! Scan every abstraction, keeping the one with the highest log-likelihood
	for(int Abs=0;Abs<m_Model->getNumAbstractions();Abs++)
	{
		//! Prior-weighted probability of the sample under this abstraction
		ClusterProb = m_Model->getWeight(Abs) * m_Model->getProb(Sample, Abs);

		//! The first abstraction always initializes the running best
		if(ClusterProb.ln()>BestLikelihood || Abs==0)
		{
			BestAbstraction=Abs;
			BestLikelihood=ClusterProb.ln();
		}
	}

	return BestAbstraction;
}

/*! Initializes the model structures for a given number of variables.
Simply delegates to the base class; no Naive-Bayes-specific setup is needed.
\param NumVars Number of variables of the problem.
\param Population Population forwarded to the base class initialization.
*/
void Evolutive::CNaiveBayesModel::InitializeModel(int NumVars,Evolutive::CPopulation &Population)
{

	//! Call the base class implementation
	CProbModel::InitializeModel(NumVars,Population);
}

/*! Builds the observation, training and hold-out sets from a population.

The observations are taken from the best NumIndividuals individuals; when
m_EstimationWorsePer is positive and the population is scored, a fraction of
them is taken from the worst individuals instead. The observations are then
split at random into training and hold-out sets according to
m_HoldoutPercentage.

\param NumIndividuals Total number of individuals used as observations.
\param Population Source population (sorted access is used when it is scored).
*/
void Evolutive::CNaiveBayesModel::PrepareData(int NumIndividuals,Evolutive::CPopulation *Population)
{
	CChromosome *pC=NULL;
	VarSet ChromosomeData;	
	register int i;
	int NumBest,NumWorse;

	// Initialize the random generator
	// NOTE(review): reseeding with time(NULL) on every call means two calls in
	// the same second produce the same hold-out split — confirm intended.
	srand((unsigned)time( NULL ));

	// Clear old data from any previous estimation
	m_Observations.clear();
	m_TrainSet.clear();
	m_HoldoutSet.clear();

	// If we have to use bad individuals, separate the number of good and bad
	NumBest=NumIndividuals;
	NumWorse=0;
	if(m_EstimationWorsePer>0.0 && Population->IsScored())
	{
		NumWorse=cvRound(m_EstimationWorsePer*NumIndividuals);
		NumBest=NumIndividuals-NumWorse;		
	}

	// Create the observations set from the best individuals of the population
	for(i=0;i<NumBest;i++)
	{
		// Point to the Chromosome (sorted access requires scores)
		if(Population->IsScored())
			pC=Population->GetSortedChromosomePtr(i);
		else
			pC=Population->GetChromosomePtr(i);

		// Convert the data
		*pC>>ChromosomeData;

		// Add to the observations set
		m_Observations.append(ChromosomeData);
	}
    
	// Create the observations set from the worse individuals of the population
	for(i=1;i<=NumWorse;i++)
	{
		// Point to the i-th worst Chromosome
		pC=Population->GetSortedChromosomePtr(Population->GetPopulationSize()-i);		

		// Convert the data
		*pC>>ChromosomeData;

		// Add to the observations set
		m_Observations.append(ChromosomeData);
	}

	//! Split the data into the training and validation sets: each observation
	//! goes to the hold-out set with probability m_HoldoutPercentage
    if (m_HoldoutPercentage > 0.0) 
	{
        for(i=0;i<NumIndividuals;i++) 
		{
            if(rand() < m_HoldoutPercentage * RAND_MAX) 
			{
                m_HoldoutSet.append(m_Observations[i]);
            } 
			else 
			{
                m_TrainSet.append(m_Observations[i]);
            }
        }
    } 
	else 
	{
		// No validation requested: train on every observation
        m_TrainSet = m_Observations;
    }

	//cout << "\nNumObs : " << m_Observations.size() << "\n" << endl;
}

void Evolutive::CNaiveBayesModel::PrepareData(int NumIndividuals,int Dimension,double *Data)
{
	register int i;	

	// Initialize the random generator
	srand((unsigned)time( NULL ));

	// Clear old data
	m_Observations.clear();
	m_TrainSet.clear();
	m_HoldoutSet.clear();

	// Create the observations set from the population
	for(i=0;i<NumIndividuals;i++)
	{
		// Create a new array of variables
		VarSet SampleData=VarSet(Dimension);
		

		// Copy the data into the varset
		memcpy(SampleData.getArray(),&(Data[i*Dimension]),Dimension*sizeof(double));
		

		// Add to the observations set
		m_Observations.append(SampleData);
	}
    
	
	//! Split the data into the training and validation sets
    if (m_HoldoutPercentage > 0.0) 
	{
        for(i=0;i<NumIndividuals;i++) 
		{
            if(RAND_VALUE() < m_HoldoutPercentage) 
			{
                m_HoldoutSet.append(m_Observations[i]);
            } 
			else 
			{
                m_TrainSet.append(m_Observations[i]);
            }
        }
    } 
	else 
	{
        m_TrainSet = m_Observations;
    }
}

/*! Derives the model schema from the stored observations.

For every variable, the schema entry ends up as -1 when the variable takes any
non-integral value (treated as continuous), or as (max observed value + 1)
when all observed values are integral. Fixes the inner-loop redeclaration of j
that shadowed the outer loop variable, and drops the deprecated `register`.
PrepareData() must have been called first (m_Observations[0] is accessed).
*/
void Evolutive::CNaiveBayesModel::ExtractSchema(void)
{
	int i,j;
	NumSet<int> TempSchema(m_Observations[0].getNumNums());

	//! Initialize the schema 
	for(j=0;j<TempSchema.getNumNums();j++) 
	{
		TempSchema[j] = 0;
	}

	//! Use the observations to define the schema
	for(i=0;i<m_Observations.length(); i++) 
	{
		for(j=0;j<TempSchema.getNumNums();j++) 
		{
			// Once a variable is flagged continuous (-1) it stays continuous
			if (TempSchema[j] >= 0) 
			{
				if (floor(m_Observations[i][j]) != m_Observations[i][j]) 
				{
					// Non-integral value: mark the variable as continuous
					TempSchema[j] = -1;
				} 
				else if (m_Observations[i][j] + 1 > TempSchema[j]) 
				{
					// Track the largest observed value + 1 (number of states)
					TempSchema[j] = (int)m_Observations[i][j] + 1;
				}
			}
		}
	}

	//! Store the final schema
	m_ModelSchema = TempSchema;
}

/*! Estimates the Naive Bayes model from the prepared data sets (NBE algorithm).

Extracts the schema from the observations, builds a fresh AbsVarAbstractionSet,
and then alternates between (a) loading new clusters centred on randomly chosen
training samples and (b) running EM until the hold-out log-likelihood stops
improving, pruning low-weight clusters periodically. PrepareData() must be
called first so that m_Observations, m_TrainSet and m_HoldoutSet are filled.
*/
void Evolutive::CNaiveBayesModel::EstimateModel(void)
{
	int EMitersLeft;
    double weightChange = 1.0;
    double bestOverall  = -1e100;
    double bestll       = -1e100;
    double currll       = -1e100;    
    double bestModelll = bestOverall;
    int bestAbsLoaded = 0;
    int bestEMSteps = 0;
    int totalEMSteps = 0;
    int origAbsToLoad = m_AbsToLoad;
    // NOTE(review): randomRestarts is never set above 0 in this function, so
    // the random-restart branches below are currently dead code — confirm
    // whether restarts were meant to be configurable.
    int randomRestarts = 0;
	int TotalAbsLoaded = 0;

	//! Obtain the initial parameters
	double MinLlChangeEM = m_MinLlChangeEM;
    double MinLlChangeAdd = m_MinLlChangeAdd;
	int AbsToLoad = m_AbsToLoad;
	int SampledAbs=m_SampledAbs;

	//! Extract the schema from the data
	ExtractSchema();	

	//! Create the model, based on schema
	if(m_Model)
		delete m_Model;
	m_Model=new AbsVarAbstractionSet(m_ModelSchema, true, m_VarDiv);

	//! Train the model     
	//vout << "\n\nLearning model...\n";
    m_Model->resetWeights();
    m_Model->initParameters(m_TrainSet.getArray(), m_TrainSet.size());

    //! Start with one intial cluster, representing variable marginals
    m_Model->addNullAbstraction();

	//! Without a hold-out set there is no validation likelihood, so disable
	//! the relative log-likelihood stopping criteria (thresholds of -1)
    if (m_HoldoutSet.size() == 0) 
	{
        MinLlChangeEM = -1;
        MinLlChangeAdd = -1;
    }

	//! Initialize the estimation process (snapshots used for restart/rerun)
	AbsVarAbstractionSet bestModel = *m_Model;
    GrowArray<VarSet> bestTrainSet = m_TrainSet;
    AbsVarAbstractionSet origModel = *m_Model;
    GrowArray<VarSet> origTrainSet = m_TrainSet;

	//! Begins the NBE algorithm
    do {
        /*! When we have a fixed number of EM iterations to run, we
          run exactly that number, no more no less.  We will only reenter
          this loop if we're doing random restarts.  We will never extend
          the model by adding more clusters.
        */ 
        EMitersLeft = m_FixedEMIters;

        if (randomRestarts > 0) 
		{
            randomRestarts--;
        }        
        bestOverall = bestModelll;

        //! Do multiple random restarts.
        if(randomRestarts > 0) 
		{
            m_TrainSet = origTrainSet;
            *m_Model = origModel;
            AbsToLoad = origAbsToLoad;
			TotalAbsLoaded = 0;
            bestll = -1e100;      
			//vout << randomRestarts << " random restarts left.\n";
        }
        
        // * Load additional abstractions incrementally: each new cluster is
        //   centred on a randomly chosen training sample.
        // 
        int addedAbs = 0;
        while (SampledAbs && addedAbs != AbsToLoad) 
		{
			Abstraction NewAbs;

			if(m_RemoveClusterCenters)
			{
				// Remove the chosen sample from the training set (swap with
				// the last element and drop it)
				int absIndex = rand() % m_TrainSet.size();
				NewAbs = m_TrainSet[absIndex];
				m_TrainSet[absIndex] = m_TrainSet[m_TrainSet.size() - 1];
				m_TrainSet.deleteLast();
			}
			else
			{
				// Keep the sample, but move it behind the already-used centres
				// at the end of the array so it cannot be picked twice
				int absIndex = rand() % (m_TrainSet.size() - TotalAbsLoaded);
				NewAbs = m_TrainSet[absIndex];
				m_TrainSet[absIndex] = m_TrainSet[m_TrainSet.size() 
					- TotalAbsLoaded - 1];
				m_TrainSet[m_TrainSet.size() - TotalAbsLoaded - 1] = NewAbs;
			}
			
            m_Model->addAbstraction(new Abstraction(NewAbs));
            TotalAbsLoaded++;
            addedAbs++;

			// Stop sampling new centres once the training data is exhausted
			if(m_RemoveClusterCenters)
			{
				if (m_TrainSet.size() == 0) 
				{
					SampledAbs = false;
				}
			}
			else
			{
				if (TotalAbsLoaded >= m_TrainSet.size()) 
				{
					SampledAbs = false;
				}
			}
        }

        if (addedAbs > 0) 
		{
			AbsToLoad += addedAbs;    
			//vout << "  Added " << addedAbs << " clusters.\n";
		}
        else 
		{
            AbsToLoad = 0;
		}
                
        if (m_HoldoutSet.size() > 0.0) 
		{
			//! Calculate the initial Likelihood of the validation set
            currll = m_Model->getLogLikelihood(m_HoldoutSet.getArray(),m_HoldoutSet.size());
			//vout << "    Initial ll: " << currll << endl;
			                   
            // NOTE(review): this assignment looks inverted — it overwrites the
            // freshly computed likelihood with the previous best instead of
            // updating the best. Probably intended "bestll = currll"; confirm
            // against the reference NBE implementation.
            if (currll > bestll) 
			{
                currll = bestll;
            }
            // Snapshot the model whenever the hold-out likelihood improves
            if (currll > bestModelll) 
			{
                bestModelll = currll;
                bestModel = *m_Model;
                bestTrainSet = m_TrainSet;
                bestAbsLoaded = TotalAbsLoaded;
                bestEMSteps = totalEMSteps;
            }
        }

        //! Run EM with these abstractions until we begin to converge (or overfit)
        int iter = 0;
        do {            
            //! Run one step of EM
            weightChange = m_Model->EMStep(m_TrainSet.getArray(),m_TrainSet.size());            
            totalEMSteps++;
         
            //! Compute the log likelihood for the holdout set
            if (m_HoldoutSet.size() > 0) 
			{
                bestll = currll;
                currll = m_Model->getLogLikelihood(m_HoldoutSet.getArray(),m_HoldoutSet.size());
				//vout << "    Hold-out ll: " << currll << endl;
                
                // Snapshot the model whenever the hold-out likelihood improves
                if (currll > bestModelll) 
				{
                    bestModelll = currll;
                    bestModel = *m_Model;
                    bestTrainSet = m_TrainSet;
                    bestAbsLoaded = TotalAbsLoaded;
                    bestEMSteps = totalEMSteps;
                }
            } 
			else
			{
				//vout << '.';
			}

			//! Prune abstractions with low weight, every so often
            if (m_PruneFrac < 1.0 && (++iter % m_PruneFreq == 0)) 
			{               
				//vout << "    Pruning model...";
                m_Model->prune(m_PruneFrac);   
				//vout << m_Model->getNumAbstractions() << " clusters left.\n";
            }
        } while ((--EMitersLeft != 0) 
                && (currll - bestll)/fabs(currll) >= MinLlChangeEM
                && weightChange >= m_MinWeightChange);

		//! If the flag to save the best is enabled restore that
		if(m_SaveBestModel)
		{
			//! Restore best model
			if (m_HoldoutSet.size() > 0.0) 
			{
				*m_Model = bestModel;
				m_TrainSet = bestTrainSet;
				totalEMSteps = bestEMSteps;
			}
		}

        if (m_PruneFrac < 1.0) 
		{        
			//vout << "  Pruning model...";
            m_Model->prune(m_PruneFrac);
			//vout << m_Model->getNumAbstractions() << " clusters left.\n";
        }
    } while (randomRestarts || ((m_FixedEMIters < 0) && SampledAbs
            && (bestll - bestOverall)/fabs(bestll) >= MinLlChangeAdd
            && weightChange >= m_MinWeightChange));

	//! Final pass: fold the hold-out data back into the model
    if (m_HoldoutSet.size() > 0.0) 
	{
        if (m_Rerun) 
		{
            // Retrain from scratch on ALL observations, replaying the number
            // of cluster loads and EM steps that produced the best model
            m_TrainSet = m_Observations;
            *m_Model = origModel;     

			//vout << "...RERUNNING...\n";
            //vout << "Clusters to load: " << bestAbsLoaded << endl;
            //vout << "EM steps to run: " << bestEMSteps << endl;
            
            while (bestAbsLoaded--) 
			{
                int absIndex = rand() % m_TrainSet.size();
                Abstraction abs = m_TrainSet[absIndex];
                m_TrainSet[absIndex] = m_TrainSet[m_TrainSet.size() - 1];
                m_TrainSet.deleteLast();

                m_Model->addAbstraction(new Abstraction(abs));
            }

            while (bestEMSteps--) 
			{
                m_Model->EMStep(m_TrainSet.getArray(), m_TrainSet.size());
            }     
			//vout << "Done.\n";
        } 
		else 
		{            
            // Merge the hold-out samples into the training set and run two
            // final EM steps over the combined data
            for (int i = 0; i < m_HoldoutSet.size(); i++) 
			{
                m_TrainSet.append(m_HoldoutSet[i]);
            }
            m_Model->EMStep(m_TrainSet.getArray(), m_TrainSet.size());
            m_Model->EMStep(m_TrainSet.getArray(), m_TrainSet.size());         
        }
    }    
}


/*! Estimates the model from a raw data matrix.
\param NumIndividuals Number of samples (rows).
\param Dimension Number of variables per sample (columns).
\param Data Row-major matrix with NumIndividuals*Dimension doubles.
*/
void Evolutive::CNaiveBayesModel::Estimate(int NumIndividuals,int Dimension,double *Data)
{
	// Prepare the data to learn the model
	PrepareData(NumIndividuals,Dimension,Data);

	// Estimate the model
	EstimateModel();
}


/*! Estimates the model from the individuals of a population.
\param NumIndividuals Number of individuals used for the estimation.
\param Population Source population.
*/
void Evolutive::CNaiveBayesModel::Estimate(int NumIndividuals,Evolutive::CPopulation &Population)
{
	// Prepare the data to learn the model
	PrepareData(NumIndividuals,&Population);

	// Estimate the model
	EstimateModel();
}


/*! Selects how NewPopulation distributes individuals among clusters.
\param GenMethod POPGEN_PROB allocates from the cluster weights; POPGEN_VAL
allocates from the scores of the current population (requires a scored
population — otherwise NewPopulation falls back to POPGEN_PROB).
*/
void Evolutive::CNaiveBayesModel::SetGenerationMethod(POBGEN_METHOD GenMethod)
{
	m_GenerationMethod=GenMethod;
}

/*! Saves the model (schema, cluster priors and parameters) to a text file.
\param FileName Path of the file to create (truncated if it already exists).
\throws CEvolutiveLibException When the file cannot be opened.
*/
void Evolutive::CNaiveBayesModel::Save(string FileName)
{
	fstream fout;

	// Open the output file, truncating any previous content
	fout.open(FileName.data(),ios_base::out | ios_base::trunc);
	if(fout.fail())
		throw CEvolutiveLibException("Cannot open output file",__FILE__,__LINE__,"Save");

	// Write the parameters of the schema
	fout<<m_ModelSchema<<endl;

	// Write the cluster priors as a bracketed, space-separated list
	fout<<"[";
	for(int Cluster=0;Cluster<m_Model->getNumAbstractions();Cluster++)
	{
		if(Cluster)
			fout<<" ";
		fout<<m_Model->getWeight(Cluster);
	}
	fout<<"]"<<endl;

	// Write the parameters of the model
	m_Model->writeParameters(fout);

	// Close the file
	fout.close();
}

/*! Loads a model from a file previously written by Save().

Fixes over the original implementation: the file was opened with
ios_base::in | ios_base::trunc — an invalid mode combination for reading that
would also destroy the file contents — and the error message wrongly said
"output file". Note that loading the cluster probabilities is still not
implemented, so this method currently throws after reading the schema.

\param FileName Path of the file to read.
\throws CEvolutiveLibException When the file cannot be opened, and (always, at
present) because reading the cluster probabilities is not implemented.
*/
void Evolutive::CNaiveBayesModel::Load(string FileName)
{
	fstream fin;

	// Open the file to read (read-only mode; no truncation)
	fin.open(FileName.data(),ios_base::in);
	if(fin.fail())
		throw CEvolutiveLibException("Cannot open input file",__FILE__,__LINE__,"Load");

	// Load the schema
	fin >> m_ModelSchema;

	// Load the cluster probabilities
	throw CEvolutiveLibException("Not implemented option (TODO)",__FILE__,__LINE__,"Load");

	// Read the parameters of the model (unreachable until the TODO above is done)
	m_Model->readParameters(fin);

	// Close the file
	fin.close();
}

/*! Returns the number of abstractions (clusters) in the current model.
\return Number of clusters. The model must have been estimated first:
m_Model is dereferenced without a NULL check.
*/
int Evolutive::CNaiveBayesModel::GetNumClusters(void)
{
	return m_Model->getNumAbstractions();
}

void Evolutive::CNaiveBayesModel::GetParameters(double *Priors,double *Centres,double *Variances)
{
	register int a,v;
	int Dimension;
	AbsData* Data = NULL;


	// Check the pointers
	
	
	// Get the dimension
	Dimension=m_ModelSchema.getNumVars();

	// Copy the parameters
	for(a=0;a<m_Model->getNumAbstractions(); a++) 
	{
		// Get the abstraction
		Abstraction Abs=m_Model->getAbstraction(a);

		// Get the prior value
		Priors[a]=m_Model->getWeight(a);

		// Point to the cluster structure
		Data=(AbsData*)Abs.getData();

		// Get the centers
		for(v=0;v<Dimension;v++)
		{
			Centres[Dimension*a+v]=Data->varValProbs[v][0];
			Variances[Dimension*a+v]=Data->varValProbs[v][1];
		}
	}
}
