package jeconbond.experiment.natural.prodpossibfrontier.generate;

import jeconbond.automata.INode;
import jeconbond.automata.behaviour.NaturalAuditWalkStrategy;
import jeconbond.economic.agent.AutomataAgent;
import jeconbond.economic.agent.IEconomicProducer;
import jeconbond.economic.agent.ActIFFinRangeOfStepsAgent;
import jeconbond.economic.natural.strategy.NaturalMerchant;
import jeconbond.economic.resources.ResourceStorrage;
import jeconbond.economic.systems.natural.NaturalResourceRedestributionSystem;
import jeconbond.experiment.natural.prodpossibfrontier.process.IConneryExperimentContext;
import jeconbond.experiment.natural.prodpossibfrontier.ConneryExperimentSettings;
import jeconbond.experiment.generate.IAgentsCreator;
import laboratoryQ.experiment.process.IExperimentContext;

/**
 * Turns raw automata into economic producers for the Connery (production
 * possibility frontier) experiment.
 * <p>
 * Each automaton is wrapped in an {@link AutomataAgent}, equipped with a
 * {@link NaturalAuditWalkStrategy} and a {@link NaturalMerchant}, and finally
 * decorated with an {@link ActIFFinRangeOfStepsAgent} so that each agent is
 * only active during its own sliding window of simulation steps.
 */
public class SetAuditStrategyBasedAgentsCreator implements IAgentsCreator {
	// NOTE(review): a static initializer that printed the leftover debug line
	// "\nOLD_EXPERIMENT settings.getResources()[1]" on class load was removed;
	// it produced stdout noise only and had no behavioral effect.

	/**
	 * Wraps every automaton into a step-windowed economic producer.
	 * <p>
	 * Agent {@code i} receives the activation range
	 * {@code [i - agentPerStep + 1, i]}: the windows are staggered by one step
	 * per agent and each spans {@code settings.agentPerStep} steps.
	 * NOTE(review): the exact activation semantics (range inclusivity) live in
	 * {@code ActIFFinRangeOfStepsAgent} — confirm there.
	 *
	 * @param automatas          automata to wrap; one producer is created per node
	 * @param iexperimentContext must be an {@link IConneryExperimentContext},
	 *                           otherwise a {@code ClassCastException} is thrown
	 * @return decorated producers, one per input automaton, in input order
	 */
	@Override
	public IEconomicProducer[] automata2agents(INode[] automatas, IExperimentContext iexperimentContext) {
		IConneryExperimentContext experimentContext =
			(IConneryExperimentContext) iexperimentContext;
		ConneryExperimentSettings settings = experimentContext.getExperimentSettings();
		double[] goalsArray = settings.getGoalsArray();
		// Loop-invariant lookup hoisted out of the loop; assumes
		// getNextStepCommander() is a plain getter (it is also called
		// repeatedly by fillAgent, so this matches existing usage).
		int[] stepCounter = experimentContext.getNextStepCommander().stepCounter;

		IEconomicProducer[] result = new IEconomicProducer[automatas.length];
		// Sliding activation window: starts at [-agentPerStep + 1, 0] and
		// shifts forward by one step for each subsequent agent.
		int rangeBegin = -settings.agentPerStep + 1;
		int rangeEnd = 0;
		for (int i = 0; i < result.length; i++) {
			AutomataAgent agent = new AutomataAgent(automatas[i]);
			fillAgent(agent, goalsArray, experimentContext);
			result[i] = new ActIFFinRangeOfStepsAgent(agent, stepCounter, rangeBegin, rangeEnd);
			rangeBegin++;
			rangeEnd++;
		}
		return result;
	}

	/**
	 * Installs the natural-audit walk strategy and the natural merchant on
	 * {@code agent}, wiring both to the agent's own input/output resource
	 * storages and to the experiment-wide step counter.
	 *
	 * @param agent             agent to configure (mutated in place)
	 * @param goalsArray        production goals consumed by the walk strategy
	 * @param experimentContext source of settings, step counter and the
	 *                          resource-redistribution system (its RRS must be
	 *                          a {@link NaturalResourceRedestributionSystem})
	 */
	private static void fillAgent(
			AutomataAgent agent,
			double[] goalsArray,
			IConneryExperimentContext experimentContext
	) {
		ConneryExperimentSettings settings = experimentContext.getExperimentSettings();
		// Shared mutable counter: the walk strategy observes the current step
		// through this array reference, not through a snapshot value.
		int[] stepCounter = experimentContext.getNextStepCommander().stepCounter;

		ResourceStorrage inputStorage = agent.getInputRS();
		ResourceStorrage outputStorage = agent.getOutputRS();
		NaturalAuditWalkStrategy walkStrategy = new NaturalAuditWalkStrategy(
				inputStorage,
				outputStorage,
				agent.getRoot(),
				goalsArray,
				stepCounter,
				settings.tankResource,
				settings.traktorResource
		);

		NaturalResourceRedestributionSystem redistributionSystem =
			(NaturalResourceRedestributionSystem) experimentContext.getRRS();
		NaturalMerchant merchant = new NaturalMerchant(
				inputStorage,
				outputStorage,
				walkStrategy,
				redistributionSystem
		);
		agent.setMerchant(merchant);
		agent.setWalkStrategy(walkStrategy);
	}
}
