/**
 * Copyright 2012 Brigham Young University
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.byu.nlp.cluster.em;

import edu.byu.nlp.cluster.Dataset;
import edu.byu.nlp.cluster.em.ExpectationMaximization.ParameterPair;
import edu.byu.nlp.math.optimize.IterativeOptimizer;
import edu.byu.nlp.math.optimize.IterativeOptimizer.Optimizable;
import edu.byu.nlp.math.optimize.IterativeOptimizer.ReturnType;
import edu.byu.nlp.math.optimize.ValueAndObject;

/**
 * @author rah67
 *
 */
/**
 * Coordinate-ascent style optimizer that alternates between two phases:
 * fitting model parameters with EM (for fixed hyper-parameters) and
 * re-estimating the hyper-parameters (for fixed parameters). The outer
 * alternation is driven by the supplied {@link IterativeOptimizer}.
 *
 * @author rah67
 */
public class AlternatingEM {
	
	private final ExpectationMaximization em;
	private final IterativeOptimizer io;
	
	/**
	 * @param em performs the parameter-fitting (EM) phase of each round
	 * @param io drives the outer alternation loop and decides convergence
	 */
	public AlternatingEM(ExpectationMaximization em, IterativeOptimizer io) {
		this.em = em;
		this.io = io;
	}

	/**
	 * A single round of the alternation, packaged as an {@link Optimizable}
	 * so the outer {@link IterativeOptimizer} can iterate it: first the
	 * hyper-parameter step, then the EM (parameter) step.
	 */
	private static class AlternatingStep<H, P> implements Optimizable<P> {

		private final ExpectationMaximization em;
		private final Dataset data;
		private final HyperParameterOptimizer<H, P> hyperOptimizer;
		private final AlternatingEMAble<H, P> emAble;
		private final ParameterPair<P> params;
		private final ParameterPair<H> hypers;
		
		public AlternatingStep(ExpectationMaximization em, Dataset data,
				AlternatingEMAble<H, P> emAble,
				HyperParameterOptimizer<H, P> hyperOptimizer,
				ParameterPair<P> params,
				ParameterPair<H> hypers) {
			this.em = em;
			this.data = data;
			this.hyperOptimizer = hyperOptimizer;
			this.emAble = emAble;
			this.params = params;
			this.hypers = hypers;
		}

		/** {@inheritDoc} */
		@Override
		public ValueAndObject<P> computeNext(P curParams) {
			// Record the incoming parameter estimate as "current".
			params.setCurParams(curParams);
			
			// Hyper-parameter step: re-estimate hypers against the current
			// parameters, then rotate the pair so the new estimate becomes
			// current (swap must precede setCurParams — order matters).
			H newHypers = hyperOptimizer.optimizeHyperParameters(params.getCurParams(), hypers);
			hypers.swap();
			hypers.setCurParams(newHypers);
			
			// Parameter step: run EM on the data under the freshly
			// estimated hyper-parameters, then rotate the parameter pair
			// the same way.
			ValueAndObject<P> result =
					em.em(data, emAble.expectableForHyperParameters(newHypers), params);
			params.swap();
			params.setCurParams(result.getObject());

			return result;
		}
		
	}
	
	/**
	 * Runs one initial EM pass under the starting hyper-parameters, then
	 * hands the alternating step to the iterative optimizer until it stops.
	 *
	 * @return the value and parameter object produced by the final round
	 */
	public <H, P> ValueAndObject<P> optimize(Dataset data, AlternatingEMAble<H, P> hyperParameterOptimizable,
			HyperParameterOptimizer<H, P> hyperParameterOptimizer, ParameterPair<H> hyperPair,
			ParameterPair<P> paramPair) {
		// Bootstrap: one EM pass with the caller-supplied hyper-parameters.
		Expectable<P> initialExpectable =
				hyperParameterOptimizable.expectableForHyperParameters(hyperPair.getCurParams());
		ValueAndObject<P> initial = em.em(data, initialExpectable, paramPair);
		paramPair.swap();
		Optimizable<P> step = new AlternatingStep<H, P>(em, data, hyperParameterOptimizable,
				hyperParameterOptimizer, paramPair, hyperPair);
		return io.optimize(step, ReturnType.LAST, true, initial.getObject());
	}
	
}
