package value;
import globals.CentralStatics;
import problems.rdmthreeobjectives.RDMActionsThree;

import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;


public class ValueAtBeliefVector<A> extends SimpleVector<A> implements Vector, Taggable<A> {
	
	private A theTag;
	
	public ArrayList<AlphaMatrix<A>> alphas;
	//public converted from private have to change it
	public double[] weight; 

	public ValueAtBeliefVector(int d, double[] b, ArrayList<LazyScalarizedVector<A>> matrices){
		value = new double[d];
		this.weight = matrices.get(0).getWeights();
		double best = Double.NEGATIVE_INFINITY;
		for(int i=0; i<value.length;i++){
			value[i]=0;
		}
		
		this.alphas = new ArrayList<AlphaMatrix<A>>(matrices.size());
		
		for(int i=0; i<matrices.size(); i++){
			LazyScalarizedVector<A> current = matrices.get(i);
			//System.out.println("current tag"+current.retrieveTags());
			double vl = current.linearScalValue(b);
			if(best<vl){
				best = vl;
				value = current.getMatrix().scalarizeWithBelief(b);
				this.theTag=current.retrieveTags().get(0);
				//alphas.add(current.getMatrix());
				
				
			}
			alphas.add(current.getMatrix());
			
			
		}
		
		//this.theTag=null;
		//System.out.println("check tag checking    "+this.theTag);
	}
		
	public ValueAtBeliefVector(int d){
		value = new double[d];
		for(int i=0; i<value.length;i++){
			value[i]=0;
		}
		
		this.theTag=null;
		
	}
	
	public ValueAtBeliefVector(ValueAtBeliefVector<A> p1, ValueAtBeliefVector<A> p2){
		if(p1.length()==p2.length()){
			value = new double[p1.value.length];
			for(int i=0; i<value.length;i++){
				value[i]=p1.value[i]+p2.value[i];
			}
		} else {
			System.err.println("Unequal payoff dimensions in constructor Payoff.");
			System.exit(1);
		}
		
		this.theTag=null;
	}
	
	
	public ValueAtBeliefVector(ValueAtBeliefVector<A> p1){
		value = new double[p1.value.length];
		for(int i=0; i<value.length;i++){
			value[i]=p1.value[i];
		}
		this.theTag=p1.retrieveTags().get(0);
		
	}
	
	public ValueAtBeliefVector(double[] v){
		value = new double[v.length];
		for(int i=0; i<value.length;i++){
			value[i]=v[i];
		}
		//System.out.println("checking null value tag");
		this.theTag=null;
	}
	
	public boolean weakParetoDominates(ValueAtBeliefVector<A> p){
		if(p.value.length!=this.value.length){
			System.err.println("Comparing payoffs with unequal dimensions (Pareto)");
			System.exit(2);
		}
		for(int i=0; i<value.length;i++){
			if(this.value[i]<p.value[i]){
				return false;
			}
		}
		return true;
	}
	
	public String toString(){
		String result = "";
		for(int s=0; s<this.length(); s++){
			result+=this.getValue(s);
			if(s!=this.length()-1){
				result+=",";
			}
		}
		return result;
	}
	
	public double linearScalValue(double[] wvec){
		double result;
		if(wvec.length==this.value.length){
			 //System.out.println(this.toString()+" ["+wvec[0]+","+wvec[1]+"]");
			 result = CentralStatics.innerProduct(this.value, wvec);
		} else {
			result = Double.MIN_VALUE;
			System.err.println("Trying to linearly scalarize payoff vector with a weight vector of different length.");
			System.exit(0);
		}
		return result;
	}
	
	public boolean equalValues(ValueAtBeliefVector<A> p){
		for(int i=0; i<this.value.length; i++){
			if(p.value[i]!=this.value[i]){
				return false;
			}
		}
		return true;
	}
	
	@Override
	public void tag(A t) {
		this.theTag = t;
	}

	@Override
	public void tag(ArrayList<A> tl) {
		System.err.println("Value should only be tagged with one action in A.");
		System.err.println("Tagging with the first action in the list.");
		System.err.println("Tagging this type of vector should never be done manually.");
		//System.out.println("Tagging this type of vector should never be done manually.");
		this.theTag = tl.get(0);
	}

	@Override
	public ArrayList<A> retrieveTags() {
		ArrayList<A> lst = new ArrayList<A>(1);
		//System.out.println("Tag Value"+theTag);
		lst.add(theTag);
		return lst;
	}
}
