// queryEngine.h

#pragma once

#include "../searchEngine/dictionary.h"
#include "../searchEngine/hasher.h"
#include "../searchEngine/huffman.h"
#include "../searchEngine/io.h"
#include "../searchEngine/myVector.h"
#include "../searchEngine/parser.h"
#include "../searchEngine/logger.h"
#include <string>


using namespace System;
using namespace Compression;
using namespace Hashing;
using namespace MyVector;
using namespace Stream;
using namespace SearchEngine;
using namespace System::Collections::Generic;

namespace Custom {
	// One hit of a query result: which document (title) matched and where.
	public value struct PostingItem
	{
	public:
		// Number of the document/title this posting refers to.
		int TitleNumber;
		// Position of the matched word inside that document.
		int WordPosition;

		// Build a posting entry from a title number and a word position.
		PostingItem(int titleNr, int wordPosition)
		{
			TitleNumber = titleNr;
			WordPosition = wordPosition;
		}
	};

	// Managed mirror of the native Parser token kinds, so .NET callers do not
	// need to reference the native Parser enum directly. Values are pinned to
	// the native constants to keep the two enums interchangeable.
	public enum class TokenType
	{
		Bad = Parser::Bad,     // no valid token was produced (see GetToken below)
		Text = Parser::Text,   // token from body text
		Title = Parser::Title  // token from a document title
	};



	// A single token produced by the wiki parser, paired with its word form.
	public value struct TokenItem
	{
	public:
		TokenType Type;  // kind of token (Bad / Text / Title)
		String ^Token;   // raw token text as read from the stream
		String ^Word;    // second string produced by the parser for this token
		                 // (presumably the normalized/stemmed form — confirm)

		// Construct a token item; all three members come straight from the arguments.
		TokenItem(String ^s, String ^word, TokenType type) : Type(type), Token(s), Word(word)
		{
		}
	};

	// Managed facade over the native search-engine dictionary. Owns native
	// memory (the dictionary and scratch buffers), so it implements both a
	// destructor (Dispose) and a finalizer — the C++/CLI pattern that
	// guarantees the native resources are released even when Dispose is
	// never called.
	public ref class QueryEngine
	{
	private:
		dictionary *SE;            // native search-engine dictionary (owned)
		System::String ^filename;  // index file name handed to the dictionary
		String ^basePath;          // base directory for index and log files
		char* buffer;              // scratch buffer for parser output (owned)
		char* buffer2;             // second scratch buffer (owned)
		char* charBasePath;        // native copy of basePath (owned)

		// Encode a managed string into a freshly allocated, NUL-terminated
		// char array via StreamWriter (its default encoding, i.e. UTF-8
		// without preamble). Caller must release the result with delete[].
		char* ConvertToCharArray(System::String ^s)
		{
			System::IO::MemoryStream ^ms = gcnew System::IO::MemoryStream();
			System::IO::StreamWriter ^sw = gcnew System::IO::StreamWriter(ms);
			sw->Write(s);
			sw->Flush();
			cli::array<unsigned char> ^bytes = ms->ToArray();
			char* result = new char[bytes->Length + 15];
			for (int i = 0; i < bytes->Length; ++i)
				result[i] = bytes[i];
			result[bytes->Length] = 0;
			return result;
		}

		// Same conversion as ConvertToCharArray, but returns a std::string
		// and frees the temporary buffer itself. Reuses the single
		// conversion path instead of duplicating it.
		string ConvertToString(String ^s)
		{
			char* tmp = ConvertToCharArray(s);
			string res = tmp;
			delete[] tmp;
			return res;
		}

		// Copy a native posting list into a managed List<PostingItem>.
		List<Custom::PostingItem>^ ToList(const postingList &list)
		{
			List<Custom::PostingItem> ^pList = gcnew List<Custom::PostingItem>();
			for (int i = 0; i < list.size(); ++i)
				pList->Add(Custom::PostingItem(list[i].Title, list[i].WordPos));
			return pList;
		}

	public:
		// Name of the index file this engine was created with.
		property String ^Filename
		{
			String^ get()
			{
				return filename;
			}
		}

		// Base directory used for index and log files.
		property String ^BasePath
		{
			String^ get()
			{
				return basePath;
			}
		}

		// Create the engine: builds the native dictionary, loads the
		// PageRank index selected by ppb and configures the score weights.
		//   pageRankScore     - weight of the PageRank component
		//   titleCountScore   - weight of title-word matches
		//   textCountScore    - weight of body-text matches
		//   wordDistanceScore - weight of the word-distance component
		QueryEngine(System::String ^basePath, System::String ^filename, double ppb, double pageRankScore, double titleCountScore, double textCountScore, double wordDistanceScore) : SE(0), charBasePath(0), buffer(0), buffer2(0)
		{
			this->filename = filename;
			this->basePath = basePath;
			char *t = ConvertToCharArray(filename);
			charBasePath = ConvertToCharArray(basePath);
			Logger logger(string(charBasePath) + "logs\\" + Logger::DefaultLogName(), LogLevel::Info);
			logger.Log(LogLevel::Info, "konstruktor queryEngine [filename %s]", t);
			SE = new dictionary(string(charBasePath), t);
			SE->loadIndex(SearchEngine::PageRankFileName(ppb));
			SE->ScoreSystem.PageRank = pageRankScore;
			SE->ScoreSystem.Text = textCountScore;
			SE->ScoreSystem.Title = titleCountScore;
			SE->ScoreSystem.WordDistance = wordDistanceScore;
			delete[] t;
			buffer = new char[30000];
			buffer2 = new char[30000];
		}

		// Destructor (Dispose): release native resources deterministically
		// by delegating to the finalizer.
		~QueryEngine()
		{
			this->!QueryEngine();
		}

		// Finalizer: safety net so the native memory is released even when
		// Dispose is never called. delete/delete[] on a null pointer is a
		// no-op, so no null checks are needed; pointers are cleared to make
		// the release idempotent (destructor and finalizer may both run).
		!QueryEngine()
		{
			delete SE;
			SE = 0;
			delete[] charBasePath;
			charBasePath = 0;
			delete[] buffer;
			buffer = 0;
			delete[] buffer2;
			buffer2 = 0;
		}

		// Run a query with an effectively unlimited result count.
		List<PostingItem>^ Query(System::String ^q)
		{
			return Query(q, 1000000000);
		}

		// Run a query and return at most maxCount postings.
		List<PostingItem>^ Query(System::String ^q, int maxCount)
		{
			char *t = ConvertToCharArray(q);
			Logger logger(string(charBasePath) + "logs\\" + Logger::DefaultLogName(), LogLevel::Info);
			logger.Log(LogLevel::Info, "query: [%s] - maxCount = %d", t, maxCount);
			List<PostingItem> ^pList = ToList(SE->Query(t, maxCount));
			delete[] t;
			return pList;
		}

		// Score of a title from the loaded PageRank data; 0 when no
		// PageRank data is present.
		double GetScore(int titleNr)
		{
			if (SE->pageRank.size())
				return SE->GetScore(titleNr);
			else
				return 0;
		}

		// Point the wiki parser at the given stream (interpretation of the
		// string — path vs. content — is up to the native parser).
		void InitStream(String ^s)
		{
			string stream = ConvertToString(s);
			SE->wikiParser->InitStream(stream);
		}

		// Read the next title from the current stream, decoded as UTF-8.
		String^ GetTitle()
		{
			SE->wikiParser->GetTitleFromStream(buffer);
			return gcnew String(buffer, 0, strlen(buffer), System::Text::Encoding::UTF8);
		}

		// Read the next token from the current stream. Returns empty strings
		// with Type == Bad when no valid token is available. `type` is
		// initialized and a default case added so an unexpected native
		// result can never leave it uninitialized (was UB before).
		TokenItem GetToken()
		{
			TokenType type = TokenType::Bad;
			Parser::TokenType result = SE->wikiParser->GetTokenFromStream(buffer, buffer2);
			switch (result)
			{
			case Parser::Text:
				type = TokenType::Text;
				break;
			case Parser::Title:
				type = TokenType::Title;
				break;
			default:
				type = TokenType::Bad;
				break;
			}
			if (result != Parser::Bad)
				return TokenItem(gcnew String(buffer, 0, strlen(buffer), System::Text::Encoding::UTF8),
					gcnew String(buffer2, 0, strlen(buffer2), System::Text::Encoding::UTF8), type);
			else
				return TokenItem(String::Empty, String::Empty, type);
		}

		// Normalize a single word through the dictionary's tokenizer.
		// Returns String::Empty when the word yields no token.
		// NOTE(review): gcnew String(t) here uses the default (ANSI)
		// conversion while GetTitle/GetToken decode UTF-8 — confirm whether
		// that asymmetry is intentional before unifying.
		String^ Tokenize(String ^s)
		{
			char *t = ConvertToCharArray(s);
			String ^result;
			if (!SE->getToken(t))
				result = String::Empty;
			else
				result = gcnew String(t);
			delete[] t;
			return result;
		}

		// File offset of the given title in the wiki data file.
		inline int GetFilePosition(int titleNumber)
		{
			return SE->getWikiFilePosition(titleNumber);
		}

	};
}
