#include "StdAfx.h"
#include "parmela_input.h"
#include <fstream>
#include <iomanip>
#include <sstream>


// Default constructor: the class is a stateless collection of file-writing
// helpers, so there is nothing to initialize.
parmela_distribution_input::parmela_distribution_input(void)
{
}


// Destructor: no owned resources; files are opened and closed locally
// inside the write_input_* methods.
parmela_distribution_input::~parmela_distribution_input(void)
{
}

// Writes the phase-space distribution to "input0.txt" in PARMELA input-0
// format: one particle per line, six fixed-width "%13.4e" fields
// (x px y py z pz) followed by a literal trailing column of 1.
// Returns 0 on success, non-zero if the output file could not be opened.
int parmela_distribution_input::write_input_0(phase_space_dist dist)
{
	std::ofstream file;
	file.open("input0.txt");
	if(!file.is_open())
		return 1; // cannot create the output file; report failure instead of writing nothing

	char buffer[256];
	buffer[255] = 0; // guarantee termination even if the formatted line fills the buffer

	for(int i=0;i<dist.size;i++)
	{
		// Fixed-width scientific formatting via sprintf_s (MSVC-specific);
		// stream manipulators alone do not reproduce the exact "%13.4e" layout.
		sprintf_s(buffer,255,"%13.4e %13.4e %13.4e %13.4e %13.4e %13.4e 1\n",dist.dataset[i].x ,dist.dataset[i].px,dist.dataset[i].y,dist.dataset[i].py, dist.dataset[i].z ,dist.dataset[i].pz);
		file << buffer;
	}
	file.close();

	return 0;
}

// Writes the trace-space distribution to "input40.txt", with phase and energy
// expressed relative to the first particle (phi0, w0), then writes a companion
// "info.txt" holding phi0, w0 and the particle count so the absolute values
// can be reconstructed.
// Returns 0 on success, non-zero on an empty distribution or an unopenable file.
int parmela_distribution_input::write_input_40(trace_space_dist dist)
{
	if(dist.size <= 0)
		return 1; // dataset[0] is dereferenced below; guard against an empty distribution

	std::ofstream file;

	// Reference particle: all phi/w values are written as offsets from these.
	double phi0 = dist.dataset[0].phi;
	double w0 = dist.dataset[0].w;

	file.open("input40.txt");
	if(!file.is_open())
		return 1;

	file << std::setprecision(9);

	for(int i=0;i<dist.size;i++)
	{
		file << dist.dataset[i].x << ' ' << dist.dataset[i].xp << ' ' << dist.dataset[i].y << ' ' << dist.dataset[i].yp << ' ' << dist.dataset[i].phi - phi0 << ' ' << dist.dataset[i].w - w0 << '\n';
	}
	file.close();

	file.open("info.txt");
	if(!file.is_open())
		return 1;

	file << "phi0=" << phi0 << std::endl << "w0=" << w0 << std::endl << "n=" << dist.size;

	file.close();

	return 0;
}

namespace lattice
{
	// Assigns every parameter from a value vector.
	// Returns false (and changes nothing) when the vector size does not match
	// the parameter count; otherwise stores each value, clears its ignore
	// flag, and returns true.
	bool keyword_base::set_value(const std::vector<double>& value_list)
	{//set parameters by values stored in a vector
		if(value_list.size() != parameters.size())
			return false;
		for(std::size_t i=0;i<value_list.size();i++)
		{
			parameters[i].value = value_list[i];
			parameters[i].is_ignore = false; // an explicit value overrides the ignore default
		}
		return true;
	}

	// Parses a whitespace-delimited lattice line of the form
	// "<alias> v1 v2 ... vk" (k <= number of parameters) and stores each
	// value into the corresponding parameter slot. A token starting with a
	// letter is kept as a symbolic string; anything else is parsed as a
	// double (falling back to 0 when the token is not a valid number).
	// Returns false on an empty string, a token-count mismatch, or an
	// alias mismatch; returns true otherwise, including the degenerate
	// "alias only, no values" case.
	bool keyword_base::set_value(const std::string& value_string)
	{
		if(value_string.empty())
			return false;

		// --- Tokenize on spaces and tabs -------------------------------
		std::vector<std::string> tokenized;
		bool on_search = false;    // true while scanning the interior of a token
		std::size_t start = 0;     // index of the current token's first character

		for(std::size_t i = 0;i < value_string.size();i++)
		{
			const char c = value_string.at(i);
			if(c == ' ' || c == '\t')
			{
				if(on_search)
				{//end of token: store substring, stop scanning
					tokenized.push_back(value_string.substr(start,i-start));
					on_search = false;
				}
				// else: run of whitespace, nothing to do
			}
			else if(!on_search)
			{//start of a new token
				start = i;
				on_search = true;
			}
		}
		if(on_search)
		{//flush the final token when the line does not end in whitespace
		 //(the original scan dropped it, losing the last value on the line)
			tokenized.push_back(value_string.substr(start));
		}

		// --- Validate token count and keyword --------------------------
		if(tokenized.size() < 1 || tokenized.size() > parameters.size()+1)
		{
			return false;
		}

		if(tokenized.size() == 1)
		{//alias only, no values to assign
			return true;
		}

		if(_stricmp(tokenized[0].c_str(),alias_name.c_str()) != 0)
		{//case-insensitive alias/keyword check; _stricmp is MSVC-specific,
		 //could be replaced by a portable manual comparison loop
			return false;
		}

		// --- Assign values ---------------------------------------------
		for(std::size_t i=1;i<tokenized.size();i++)
		{
			// Cast avoids UB: isalpha takes an int representable as
			// unsigned char, and plain char may be negative.
			if(isalpha(static_cast<unsigned char>(tokenized[i][0])))
			{//symbolic value: keep the raw string for later resolution
				parameters[i-1].prm_string = tokenized[i];
			}
			else
			{
				try
				{
					parameters[i-1].value = std::stod(tokenized[i]);
				}
				catch(const std::exception&)
				{//invalid_argument or out_of_range: fall back to zero
					parameters[i-1].value = 0;
				}
			}
			parameters[i-1].is_ignore = false;
		}

		return true;
	}

	// Builds the full lattice line "<keyword> v1 v2 ...", running the
	// optional special-process hook first so derived values are up to date.
	std::string keyword_base::get_complete_line()
	{
		if(special_process != nullptr)
		{
			special_process->process();
		}

		std::stringstream line_builder;
		line_builder << keyword;

		for(const auto& prm : parameters)
		{
			line_builder << ' ' << prm.value;
		}

		return line_builder.str();
	}
}