/*
 * Copyright (C) 2010-2011, Mathieu Labbe and IntRoLab - Universite de Sherbrooke
 *
 * This file is part of SeMoLearning.
 *
 * SeMoLearning is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * SeMoLearning is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with SeMoLearning.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "semol/core/BayesFilter.h"
#include "semol/core/Memory.h"
#include "semol/core/Node.h"
#include "semol/core/Parameters.h"
#include <iostream>

#include "utilite/UTimer.h"
#include "utilite/UConversion.h"

namespace semol {

// Construct the filter with the library's default parameter values, then
// apply any overrides found in "parameters" via parseParameters().
BayesFilter::BayesFilter(const ParametersMap & parameters) :
	_virtualPlacePrior(Parameters::defaultBayesVirtualPlacePriorThr()),
	_predictionOnNonNullActionsOnly(Parameters::defaultBayesPredictionOnNonNullActionsOnly()),
	_fullPredictionUpdate(Parameters::defaultBayesFullPredictionUpdate()),
	_epsilon(Parameters::defaultBayesEpsilon()),
	_totalPredictionLCValues(0.0f)
{
	// setPredictionLC() parses the default model string and also refreshes
	// _totalPredictionLCValues.
	this->setPredictionLC(Parameters::defaultBayesPredictionLC());
	this->parseParameters(parameters);
}

// Nothing to release explicitly; all members clean up themselves.
BayesFilter::~BayesFilter()
{
}

void BayesFilter::parseParameters(const ParametersMap & parameters)
{
	ParametersMap::const_iterator iter;
	if((iter=parameters.find(Parameters::kBayesVirtualPlacePriorThr())) != parameters.end())
	{
		this->setVirtualPlacePrior(std::atof((*iter).second.c_str()));
	}
	if((iter=parameters.find(Parameters::kBayesPredictionLC())) != parameters.end())
	{
		this->setPredictionLC((*iter).second);
	}
	Parameters::parse(parameters, Parameters::kBayesPredictionOnNonNullActionsOnly(), _predictionOnNonNullActionsOnly);
	Parameters::parse(parameters, Parameters::kBayesFullPredictionUpdate(), _fullPredictionUpdate);
	Parameters::parse(parameters, Parameters::kBayesEpsilon(), _epsilon);
}

// Store the virtual place prior, clamped into [0,1]. A warning is logged
// when the requested value lies outside the valid range.
void BayesFilter::setVirtualPlacePrior(float virtualPlacePrior)
{
	float clamped = virtualPlacePrior;
	if(clamped < 0)
	{
		ULOGGER_WARN("virtualPlacePrior=%f, must be >=0 and <=1", virtualPlacePrior);
		clamped = 0;
	}
	else if(clamped > 1)
	{
		ULOGGER_WARN("virtualPlacePrior=%f, must be >=0 and <=1", virtualPlacePrior);
		clamped = 1;
	}
	_virtualPlacePrior = clamped;
}

// Set the prediction model from a space-separated string of probabilities.
// format = {Virtual place, Loop closure, level1, level2, l3, l4...}
// Each value must be in [0,1] and at least two values are required. On any
// parse error the previous model is kept unchanged. In all cases
// _totalPredictionLCValues is recomputed from the resulting model.
void BayesFilter::setPredictionLC(const std::string & prediction)
{
	std::list<std::string> strValues = uSplit(prediction, ' ');
	if(strValues.size() < 2)
	{
		UERROR("The number of values < 2 (prediction=\"%s\")", prediction.c_str());
	}
	else
	{
		std::vector<double> tmpValues(strValues.size());
		int i=0;
		bool valid = true;
		for(std::list<std::string>::iterator iter = strValues.begin(); iter!=strValues.end(); ++iter)
		{
			tmpValues[i] = std::atof((*iter).c_str());
			//UINFO("%d=%e", i, tmpValues[i]);
			if(tmpValues[i] < 0.0 || tmpValues[i]>1.0)
			{
				valid = false;
				break;
			}
			++i;
		}

		if(!valid)
		{
			// BUGFIX: the message previously claimed values must be ">0"
			// although the check above accepts 0 (valid range is [0,1]).
			UERROR("The prediction is not valid (values must be >=0 && <=1) prediction=\"%s\"", prediction.c_str());
		}
		else
		{
			_predictionLC = tmpValues;
		}
	}
	// Cache the sum of the model values; used by normalize().
	_totalPredictionLCValues = 0.0f;
	for(unsigned int j=0; j<_predictionLC.size(); ++j)
	{
		_totalPredictionLCValues += _predictionLC[j];
	}
}

// Accessor for the prediction model:
// {Virtual place, Loop closure, level 1, level 2, level 3, ...}
const std::vector<double> & BayesFilter::getPredictionLC() const
{
	return _predictionLC;
}

// Serialize the prediction model as a space-separated string, the inverse of
// setPredictionLC().
std::string BayesFilter::getPredictionLCStr() const
{
	std::string values;
	for(unsigned int i=0; i<_predictionLC.size(); ++i)
	{
		if(i)
		{
			values.append(" ");
		}
		values.append(uNumber2Str(_predictionLC[i]));
	}
	return values;
}

// Forget all filter state: the cached prediction map and the last posterior.
void BayesFilter::reset()
{
	_predictionMap.clear();
	_posterior.clear();
}

// Recursive Bayes estimation over place ids.
// STEP 1 (prediction): build or incrementally refresh the sparse prediction
// map, adjust the previous posterior to the current likelihood ids, then
// compute the prior as prediction * posterior (the virtual-place row/column,
// which is not stored in the prediction map, is applied explicitly).
// STEP 2 (update): multiply the prior by the likelihood and normalize.
// Returns a reference to _posterior. If memory is null, the likelihood is
// empty, or the prediction model is invalid, the previous posterior is
// returned unchanged.
const std::map<int, float> & BayesFilter::update(
		const Memory * memory,
		const std::map<int, float> & likelihood)
{
	ULOGGER_DEBUG("");

	if(!memory)
	{
		ULOGGER_ERROR("Memory is Null!");
		return _posterior;
	}

	if(!likelihood.size())
	{
		ULOGGER_ERROR("likelihood is empty!");
		return _posterior;
	}

	if(_predictionLC.size() < 2)
	{
		// Model needs at least {Virtual place, Loop closure}.
		ULOGGER_ERROR("Prediction is not valid!");
		return _posterior;
	}

	UTimer timer;
	timer.start();

	cv::Mat posterior;
	std::set<int> ids = uKeysSet(likelihood);

	float sum = 0;
	int j=0;
	// Recursive Bayes estimation...
	// STEP 1 - Prediction : Prior*lastPosterior
	if(!_fullPredictionUpdate && _predictionMap.size() != 0)
	{
		// Incremental mode: patch the cached prediction for added/removed ids.
		updatePrediction(_predictionMap, memory, uKeysSet(_posterior), ids);
	}
	else
	{
		// Full rebuild of the prediction map.
		_predictionMap = this->generatePrediction(memory, ids);
	}
	ULOGGER_DEBUG("STEP1-generate prior=%fs, rows=%d", timer.ticks(), _predictionMap.size());
	//printPredictionMap();

	// Adjust the last posterior if some images were
	// reactivated or removed from the working memory
	posterior = cv::Mat(likelihood.size(), 1, CV_32FC1);
	this->updatePosterior(memory, ids);
	j=0;
	// Copy the (id-ordered) posterior values into a column vector.
	for(std::map<int, float>::const_iterator i=_posterior.begin(); i!= _posterior.end(); ++i)
	{
		((float*)posterior.data)[j++] = (*i).second;
	}
	ULOGGER_DEBUG("STEP1-update posterior=%fs, posterior=%d, _posterior size=%d", timer.ticks(), posterior.rows, _posterior.size());
	//std::cout << "LastPosterior=" << posterior << std::endl;

	// Multiply prediction matrix with the last posterior
	// (m,m) X (m,1) = (m,1)
	cv::Mat prior = cv::Mat::zeros(posterior.rows, 1, CV_32F);
	// Map each id to its vector index; std::set iterates in ascending order,
	// matching the order used to fill the posterior vector above.
	std::map<int, int> idToIndexMap;
	int i=0;
	for(std::set<int>::iterator iter=ids.begin(); iter!=ids.end(); ++iter)
	{
		UASSERT_MSG(*iter != 0, "Signature id is null ?!?");
		idToIndexMap.insert(idToIndexMap.end(), std::make_pair(*iter, i++));
	}
	ULOGGER_DEBUG("STEP1-matrix indexMap map=%fs", timer.ticks());
	// Sparse matrix-vector product: for each column id of the prediction map,
	// scatter column_value * posterior[column] into the prior entries.
	bool error = false;
	for(std::map<int, std::map<int, float> >::iterator iter=_predictionMap.begin(); iter != _predictionMap.end(); ++iter)
	{
		std::map<int, int>::iterator findIter = idToIndexMap.find(iter->first);
		if(findIter == idToIndexMap.end())
		{
			// The prediction references an id absent from the likelihood;
			// flag it and dump the map below for debugging.
			error = true;
			UERROR("id=%d", iter->first);
		}
		else
		{
			int col = findIter->second;
			for(std::map<int, float>::iterator jter=iter->second.begin(); jter != iter->second.end(); ++jter)
			{
				findIter = idToIndexMap.find(jter->first);
				if(findIter == idToIndexMap.end())
				{
					UERROR("iter->%d jter->%d", iter->first, jter->first);
					error = true;
				}
				else
				{
					prior.at<float>(findIter->second) += jter->second * posterior.at<float>(col);
				}
			}
		}
	}
	ULOGGER_DEBUG("STEP1-matrix mult time map=%fs", timer.ticks());
	if(error)
	{
		printPredictionMap();
	}

	// virtual place: its row/column is not stored in the prediction map, so
	// apply its contribution here explicitly.
	cv::Mat vpRow(posterior.rows, 1, CV_32F, _predictionLC[0]);
	vpRow.at<float>(0) = _virtualPlacePrior;
	prior.at<float>(0) = vpRow.dot(posterior);
	// Spread the virtual place's previous probability over all loop closures,
	// unless the per-row share is below epsilon.
	float val = (1.0-_virtualPlacePrior)/(posterior.rows-1);
	if(val >= _epsilon)
	{
		for(int i=1; i<posterior.rows; ++i)
		{
			prior.at<float>(i) += val*posterior.at<float>(0);
		}
	}
	else
	{
		UWARN("Ignored vp prior on loop closures, value=%f < epsilon(%f), rows=%d", val, _epsilon, posterior.rows);
	}


	/*std::map<int, std::map<int, float> > prediction  =_predictionMap;
	prediction.insert(std::make_pair(-1, std::map<int, float>()));
	cv::Mat dense;
	convertMapToSparse(prediction).convertTo(dense, CV_32F);
	dense.at<float>(0,0) = _virtualPlacePrior;
	float v = (1.0-_virtualPlacePrior)/(dense.rows-1);
	for(int i=1; i<dense.rows; ++i)
	{
		dense.at<float>(i, 0) = v;
		dense.at<float>(0, i) = _predictionLC[0];
	}
	std::cout << "PredictionMap=" << dense << std::endl;
	ULOGGER_DEBUG("STEP1-convert=%fs", timer.ticks());
	cv::Mat priorDense = dense * posterior;*/

	//std::cout << "ResultingPriorDense=" << priorDense << std::endl;
	//std::cout << "ResultingPriorMap=" << prior << std::endl;

	ULOGGER_DEBUG("STEP1-matrix mult time=%fs", timer.ticks());
	std::vector<float> likelihoodValues = uValues(likelihood);
	//std::cout << "Likelihood=" << cv::Mat(likelihoodValues) << std::endl;

	// STEP 2 - Update : Multiply with observations (likelihood)
	j=0;
	for(std::map<int, float>::const_iterator i=likelihood.begin(); i!= likelihood.end(); ++i)
	{
		std::map<int, float>::iterator p =_posterior.find((*i).first);
		if(p!= _posterior.end())
		{
			// posterior = likelihood * prior; both maps iterate in the same
			// ascending-id order, so the j-th prior entry matches this id.
			(*p).second = (*i).second * ((float*)prior.data)[j++];
			sum+=(*p).second;
		}
		else
		{
			ULOGGER_ERROR("Problem1! can't find id=%d", (*i).first);
		}
	}
	ULOGGER_DEBUG("STEP2-likelihood time=%fs", timer.ticks());
	//std::cout << "Posterior (before normalization)=" << _posterior << std::endl;

	// Normalize so the posterior sums to 1 (skipped when everything is zero).
	ULOGGER_DEBUG("sum=%f", sum);
	if(sum != 0)
	{
		for(std::map<int, float>::iterator i=_posterior.begin(); i!= _posterior.end(); ++i)
		{
			(*i).second /= sum;
		}
	}
	ULOGGER_DEBUG("normalize time=%fs", timer.ticks());
	//std::cout << "Posterior=" << _posterior << std::endl;

	return _posterior;
}

// Build the sparse prediction "matrix" as a map of id -> (neighbor id ->
// probability): for each id, the model values _predictionLC are spread over
// its graph neighbors (a neighbor at margin k receives _predictionLC[k+1];
// see addNeighborProbMap). Neighbors currently in the STM are skipped.
// Ids found at margin 0 of an already-processed id share the same neighbor
// tree and are filled in one pass (idsDone tracks them).
// Requires a non-null memory, a model with >= 2 values, and non-empty ids.
std::map<int, std::map<int, float> > BayesFilter::generatePrediction(const Memory * memory, const std::set<int> & ids) const
{
	UDEBUG("");
	UASSERT(memory &&
		   _predictionLC.size() >= 2 &&
		   ids.size());

	UTimer timer;
	timer.start();
	UTimer timerGlobal;
	timerGlobal.start();

	// Index map built only to pass its key set to addNeighborProbMap below.
	std::map<int, int> idToIndexMap;
	int i=0;
	for(std::set<int>::const_iterator iter=ids.begin(); iter!=ids.end(); ++iter)
	{
		UASSERT_MSG(*iter != 0, "Signature id is null ?!?");
		idToIndexMap.insert(idToIndexMap.end(), std::make_pair(*iter, i++));
	}

	std::map<int, std::map<int, float> > predictionMap;

	// Each prior is a column vector
	UDEBUG("_predictionLC.size()=%d",_predictionLC.size());
	std::set<int> idsDone;

	for(std::set<int>::const_iterator jter=ids.begin(); jter!=ids.end(); ++jter)
	{
		if(idsDone.find(*jter) == idsDone.end())
		{
			// Negative ids (e.g. the virtual place) are not predicted.
			if(*jter > 0)
			{
				// Set high values (gaussians curves) to loop closure neighbors

				// ADD prob for each neighbors
				std::map<int, int> neighbors = memory->getNeighborsId(*jter, _predictionLC.size()-1, 0, _predictionOnNonNullActionsOnly, true);
				std::list<int> idsLoopMargin;
				//filter neighbors in STM
				for(std::map<int, int>::iterator iter=neighbors.begin(); iter!=neighbors.end();)
				{
					if(memory->isInSTM(iter->first))
					{
						// Post-increment keeps the iterator valid across erase.
						neighbors.erase(iter++);
					}
					else
					{
						if(iter->second == 0)
						{
							idsLoopMargin.push_back(iter->first); // all neighbors on margin 0 have all same neighbors
						}
						++iter;
					}
				}

				// should at least have 1 id in idsMarginLoop
				if(idsLoopMargin.size() == 0)
				{
					UFATAL("No 0 margin neighbor for signature %d !?!?", *jter);
				}

				// same neighbor tree for loop signatures (margin = 0)
				for(std::list<int>::iterator iter = idsLoopMargin.begin(); iter!=idsLoopMargin.end(); ++iter)
				{
					this->addNeighborProbMap(predictionMap, *iter, neighbors, uKeysSet(idToIndexMap));
					idsDone.insert(*iter);
				}
			}
		}
	}

	ULOGGER_DEBUG("time = %fs", timerGlobal.ticks());

	return predictionMap;
}

// Build the full dense prediction matrix: generate the sparse prediction for
// "ids", add a virtual-place entry (id -1, which sorts first), convert to a
// dense CV_32F matrix, and fill the virtual-place row/column explicitly.
cv::Mat BayesFilter::generatePredictionMatrix(const Memory * memory, const std::set<int> & ids) const
{
	std::map<int, std::map<int, float> > predictionMap = generatePrediction(memory, ids);
	predictionMap.insert(std::make_pair(-1, std::map<int, float>())); // add virtual place

	cv::Mat dense;
	convertMapToSparse(predictionMap).convertTo(dense, CV_32F);

	// Virtual place probabilities: prior at (0,0), the remaining mass spread
	// evenly down column 0, and the model's vp value across row 0.
	dense.at<float>(0,0) = _virtualPlacePrior;
	float spread = (1.0-_virtualPlacePrior)/(dense.rows-1);
	for(int r=1; r<dense.rows; ++r)
	{
		dense.at<float>(r, 0) = spread;
		dense.at<float>(0, r) = _predictionLC[0];
	}

	return dense;
}

// Normalize one prior of the dense prediction matrix so that it sums to ~1.
// NOTE(review): the addressing "[index + j*cols]" walks element (row j,
// column index) of the row-major buffer, i.e. column "index" — consistent
// with "each prior is a column vector" in generatePrediction().
// Steps:
// - probability mass of neighbors not found is added to the diagonal entry;
// - if the model sums to < 1, the remainder is spread over zero entries;
// - the entries are rescaled to sum to 1 - P(virtual place);
// - when virtualPlaceUsed, the virtual place probability is set at index 0.
// addedProbabilitiesSum is the sum already written by addNeighborProb().
void BayesFilter::normalize(cv::Mat & prediction, unsigned int index, float addedProbabilitiesSum, bool virtualPlaceUsed) const
{
	UASSERT(index < (unsigned int)prediction.rows && index < (unsigned int)prediction.cols);

	int cols = prediction.cols;
	// ADD values of not found neighbors to loop closure
	if(addedProbabilitiesSum < _totalPredictionLCValues-_predictionLC[0])
	{
		float delta = _totalPredictionLCValues-_predictionLC[0]-addedProbabilitiesSum;
		((float*)prediction.data)[index + index*cols] += delta;
		addedProbabilitiesSum+=delta;
	}

	// Mass left over when the model itself sums to less than 1.
	float allOtherPlacesValue = 0;
	if(_totalPredictionLCValues < 1)
	{
		allOtherPlacesValue = 1.0f - _totalPredictionLCValues;
	}

	// Set all loop events to small values according to the model
	if(allOtherPlacesValue > 0 && cols>1)
	{
		float value = allOtherPlacesValue / float(cols - 1);
		for(int j=virtualPlaceUsed?1:0; j<cols; ++j)
		{
			// Only fill entries not already set by addNeighborProb().
			if(((float*)prediction.data)[index + j*cols] == 0)
			{
				((float*)prediction.data)[index + j*cols] = value;
				addedProbabilitiesSum += ((float*)prediction.data)[index + j*cols];
			}
		}
	}

	// Rescale so the non-virtual entries sum to 1 - P(virtual place)
	// (tolerance of 1e-4 avoids needless rescaling).
	float maxNorm = 1 - (virtualPlaceUsed?_predictionLC[0]:0); // 1 - virtual place probability
	if(addedProbabilitiesSum<maxNorm-0.0001 || addedProbabilitiesSum>maxNorm+0.0001)
	{
		for(int j=virtualPlaceUsed?1:0; j<cols; ++j)
		{
			((float*)prediction.data)[index + j*cols] *= maxNorm / addedProbabilitiesSum;
		}
		addedProbabilitiesSum = maxNorm;
	}

	// ADD virtual place prob
	if(virtualPlaceUsed)
	{
		((float*)prediction.data)[index] = _predictionLC[0];
		addedProbabilitiesSum += ((float*)prediction.data)[index];
	}

	//debug
	//for(int j=0; j<cols; ++j)
	//{
	//	ULOGGER_DEBUG("test col=%d = %f", i, prediction.data.fl[i + j*cols]);
	//}

	// Sanity check: everything together should sum to ~1.
	if(addedProbabilitiesSum<0.99 || addedProbabilitiesSum > 1.01)
	{
		UWARN("Prediction is not normalized sum=%f", addedProbabilitiesSum);
	}
}

// Incrementally patch an existing prediction map instead of rebuilding it:
// - drop entries of ids removed since the last update, collecting the
//   surviving ids they referenced (their neighborhoods changed);
// - add entries for brand new ids, collecting existing neighbors they touch;
// - recompute the entry of every collected id.
// Preconditions: oldIds must correspond to the previous posterior, i.e.
// prediction.size() + 1 (the virtual place is not stored in the prediction);
// newIds are the current likelihood ids.
void BayesFilter::updatePrediction(
		std::map<int, std::map<int, float> > & prediction,
		const Memory * memory,
		const std::set<int> & oldIds,
		const std::set<int> & newIds) const
{
	UTimer timer;
	UDEBUG("");

	UASSERT(memory &&
		oldIds.size() &&
		newIds.size() &&
		oldIds.size() == prediction.size()+1); // + virtual place not saved in prediction

	//Get removed ids
	std::set<int> removedIds;
	for(std::set<int>::const_iterator iter=oldIds.begin(); iter!=oldIds.end(); ++iter)
	{
		if(!uContains(newIds, *iter))
		{
			removedIds.insert(removedIds.end(), *iter);
			UDEBUG("removed id=%d", *iter);
		}
	}
	UDEBUG("time getting removed ids = %fs", timer.restart());

	int added = 0;
	// get ids to update
	std::set<int> idsToUpdate;
	for(std::set<int>::const_iterator kter=oldIds.begin(); kter!=oldIds.end(); ++kter)
	{
		if(removedIds.find(*kter) != removedIds.end())
		{
			std::map<int, std::map<int, float> >::iterator iter = prediction.find(*kter);
			UASSERT(iter != prediction.end());

			// Surviving ids referenced by a removed entry must be recomputed.
			for(std::map<int, float>::iterator jter=iter->second.begin(); jter!=iter->second.end(); ++jter)
			{
				if( jter->first != iter->first &&
					removedIds.find(jter->first) == removedIds.end() &&
					idsToUpdate.find(jter->first) == idsToUpdate.end())
				{
					idsToUpdate.insert(jter->first);
					UDEBUG("update id=%d of removed %d", jter->first, iter->first);
				}
			}
			prediction.erase(iter);
		}
	}

	// Add entries for new ids and mark their surviving old neighbors dirty.
	for(std::set<int>::const_iterator kter=newIds.begin(); kter!=newIds.end(); ++kter)
	{
		if(!uContains(oldIds, *kter))
		{
			std::map<int, int> neighbors = memory->getNeighborsId(*kter, _predictionLC.size()-1, 0, _predictionOnNonNullActionsOnly, true);
			this->addNeighborProbMap(prediction, *kter, neighbors, newIds);
			++added;
			for(std::map<int,int>::iterator iter=neighbors.begin(); iter!=neighbors.end(); ++iter)
			{
				if(uContains(oldIds, iter->first) &&
				   iter->first != *kter &&
				   removedIds.find(iter->first) == removedIds.end())
				{
					idsToUpdate.insert(iter->first);
				}
			}
		}
	}
	UDEBUG("time getting ids (%d) to update = %fs", idsToUpdate.size(), timer.restart());

	// update modified/added ids
	int modified = 0;
	UTimer timer1;
	float total1=0.0f, total2=0.0f;
	for(std::set<int>::iterator iter = idsToUpdate.begin(); iter!=idsToUpdate.end(); ++iter)
	{
		std::map<int, int> neighbors = memory->getNeighborsId(*iter, _predictionLC.size()-1, 0, _predictionOnNonNullActionsOnly, true);
		total1+=timer1.ticks();
		this->addNeighborProbMap(prediction, *iter, neighbors, newIds);
		total2+=timer1.ticks();
		++modified;
	}
	UDEBUG("total1=%fs, total2=%fs", total1, total2);
	UDEBUG("time updating modified/added ids = %fs", timer.restart());

	UDEBUG("Modified=%d, Added=%d", modified, added);
}

void BayesFilter::updatePosterior(const Memory * memory, const std::set<int> & likelihoodIds)
{
	ULOGGER_DEBUG("");
	std::map<int, float> newPosterior;
	for(std::set<int>::const_iterator i=likelihoodIds.begin(); i != likelihoodIds.end(); ++i)
	{
		std::map<int, float>::iterator post = _posterior.find(*i);
		if(post == _posterior.end())
		{
			if(_posterior.size() == 0)
			{
				newPosterior.insert(std::pair<int, float>(*i, 1));
			}
			else
			{
				newPosterior.insert(std::pair<int, float>(*i, 0));
			}
		}
		else
		{
			newPosterior.insert(std::pair<int, float>((*post).first, (*post).second));
		}
	}
	_posterior = newPosterior;
}

// Write the model probability of each indexed neighbor into column "col" of
// the dense square prediction matrix (row-major), and return the sum of the
// values written. Neighbors missing from idToIndexMap are skipped.
float BayesFilter::addNeighborProb(cv::Mat & prediction, unsigned int col, const std::map<int, int> & neighbors, const std::map<int, int> & idToIndexMap) const
{
	UASSERT((unsigned int)prediction.cols == idToIndexMap.size() &&
			(unsigned int)prediction.rows == idToIndexMap.size() &&
			col < (unsigned int)prediction.cols &&
			col < (unsigned int)prediction.rows);

	float * cells = (float*)prediction.data;
	float sum = 0;
	std::map<int, int>::const_iterator nter;
	for(nter = neighbors.begin(); nter != neighbors.end(); ++nter)
	{
		int row = uValue(idToIndexMap, nter->first, -1);
		if(row >= 0)
		{
			// nter->second is the neighbor's margin; margin k maps to model
			// value _predictionLC[k+1].
			float prob = _predictionLC[nter->second+1];
			cells[col + row*prediction.cols] = prob;
			sum += prob;
		}
	}
	return sum;
}

// Same as addNeighborProb() but for a sparse square matrix, and with the
// column normalized in place to sum to 1 - P(virtual place).
void BayesFilter::addNeighborProbSparse(cv::SparseMat & prediction, unsigned int col, const std::map<int, int> & neighbors, const std::map<int, int> & idToIndexMap) const
{
	UASSERT((unsigned int)prediction.size(0) == idToIndexMap.size() &&
			(unsigned int)prediction.size(1) == idToIndexMap.size() &&
			col < (unsigned int)prediction.size(0) &&
			col < (unsigned int)prediction.size(1));

	// First pass: write the raw model probabilities, accumulating their sum.
	float sum = 0.0f;
	std::map<int, int>::const_iterator nter;
	for(nter = neighbors.begin(); nter != neighbors.end(); ++nter)
	{
		int row = uValue(idToIndexMap, nter->first, -1);
		if(row >= 0)
		{
			prediction.ref<float>(row, col) = _predictionLC[nter->second+1];
			sum += _predictionLC[nter->second+1];
		}
	}

	// Second pass: rescale the written cells to the target mass.
	if(sum)
	{
		float maxNorm = 1 - _predictionLC[0]; // 1 - virtual place probability
		for(nter = neighbors.begin(); nter != neighbors.end(); ++nter)
		{
			int row = uValue(idToIndexMap, nter->first, -1);
			if(row >= 0)
			{
				prediction.ref<float>(row, col) *= maxNorm/sum;
			}
		}
	}
}

// Replace the prediction entry of "id" with the model probabilities of its
// neighbors (restricted to currentIds), then normalize that entry to sum to
// 1 - P(virtual place), dropping values that fall under epsilon.
void BayesFilter::addNeighborProbMap(std::map<int, std::map<int, float> > & prediction, int id, const std::map<int, int> & neighbors, const std::set<int> & currentIds) const
{
	UASSERT(id > 0);

	// Start from a fresh, empty entry for this id.
	prediction.erase(id);
	std::map<int, float> & entry = prediction[id];

	float sum = 0.0f;
	std::map<int, int>::const_iterator nter;
	for(nter = neighbors.begin(); nter != neighbors.end(); ++nter)
	{
		if(currentIds.count(nter->first))
		{
			// Margin k maps to model value _predictionLC[k+1].
			entry.insert(std::make_pair(nter->first, _predictionLC[nter->second+1]));
			sum += _predictionLC[nter->second+1];
		}
	}

	// Normalize and prune values smaller than epsilon.
	if(sum)
	{
		float maxNorm = 1 - _predictionLC[0]; // 1 - virtual place probability
		int ignored = 0;
		std::map<int, float>::iterator jter = entry.begin();
		while(jter != entry.end())
		{
			jter->second *= maxNorm/sum;
			if(jter->second < _epsilon)
			{
				// Post-increment keeps the iterator valid across erase.
				entry.erase(jter++);
				++ignored;
			}
			else
			{
				++jter;
			}
		}
		if(ignored)
		{
			UWARN("ignored %d values under epsilon(%f) for id=%d", ignored, _epsilon, id);
		}
	}
}

// Convert a map-of-maps prior into a square cv::SparseMat. Ids are assigned
// row/column indexes in their (sorted) map iteration order; the outer map key
// selects the column and the inner keys select the rows.
cv::SparseMat BayesFilter::convertMapToSparse(const std::map<int, std::map<int, float> > & prior) const
{
	UASSERT(prior.size());

	// Square matrix: one row/column per id.
	int dims[2]; // rows and cols in two dimensions
	dims[0] = prior.size();
	dims[1] = prior.size();
	cv::SparseMat sparse(2, dims, CV_32F);

	// Assign each id an index following sorted-map order.
	std::map<int, int> indexOf;
	int nextIndex = 0;
	for(std::map<int, std::map<int, float> >::const_iterator iter = prior.begin(); iter != prior.end(); ++iter)
	{
		UASSERT_MSG(iter->first != 0, "Signature id is null ?!?");
		indexOf.insert(indexOf.end(), std::make_pair(iter->first, nextIndex++));
	}

	// Scatter every value into the sparse matrix.
	for(std::map<int, std::map<int, float> >::const_iterator iter = prior.begin(); iter != prior.end(); ++iter)
	{
		UASSERT(indexOf.find(iter->first) != indexOf.end());
		for(std::map<int, float>::const_iterator jter = iter->second.begin(); jter != iter->second.end(); ++jter)
		{
			UASSERT(indexOf.find(jter->first) != indexOf.end());
			sparse.ref<float>(indexOf.at(jter->first), indexOf.at(iter->first)) = jter->second;
		}
	}
	return sparse;
}

// Debug helper: print, for each source id, the ids it predicts to
// (probability values are omitted).
void BayesFilter::printPredictionMap() const
{
	printf("Prediction map:\n");
	std::map<int, std::map<int, float> >::const_iterator rowIter;
	for(rowIter = _predictionMap.begin(); rowIter != _predictionMap.end(); ++rowIter)
	{
		printf("ID=%d:", rowIter->first);
		std::map<int, float>::const_iterator colIter;
		for(colIter = rowIter->second.begin(); colIter != rowIter->second.end(); ++colIter)
		{
			printf(" %d", colIter->first);
		}
		printf("\n");
	}
}

} // namespace semol
