using System;
using System.Collections;
using System.IO;
using System.Reflection;
using System.Text;
using libsvm;
using Slaks.DocumentSerialization.TermIdInfo;
using Slaks.Readers;
using Slaks.Web;
using Slaks.WebServer.Core.Configuration;
using Slaks.WebServer.Core.DocumentParsing;
using TestSvmRealTimePrediction;

namespace Slaks.DataMining.Svm
{
	/// <summary>
	/// Thesis experiment driver: exports the top-ranked vocabulary terms to a text
	/// file, then downloads and parses a single document, converts it into a libsvm
	/// sparse feature vector and classifies it with a pre-trained SVM model.
	/// NOTE(review): all input/output paths are hard-coded to the author's machine.
	/// </summary>
	class TestSvmRealTimePrediction
	{
		[STAThread]
		static void Main(string[] args)
		{
			// Export the top terms of the ranked term list to "arab-terms.txt".
			TermIdInfoList termIdInfoList =
				TermIdInfo.ReadXml(@"C:\Lang\projects\THESIS_RELATED\Files\ParsedSeries\ArabTerrorNonTerror\term_id_info.xml", 0);
			termIdInfoList.Sort();

			// using-statement guarantees the file handle is released even if a
			// write throws (the original leaked it on an exception path).
			using (StreamWriter writer = new StreamWriter("arab-terms.txt"))
			{
				int count = 0;
				foreach (TermIdInfoElement element in termIdInfoList)
				{
					writer.WriteLine(element.Term);
					++count;
					// FIX: the original tested "count > 300" after the increment and
					// therefore exported 301 terms; export exactly the top 300.
					if (count >= 300) break;
				}
			}

			// Full term-id map (no pruning argument) used to build feature vectors.
			TermIdInfoMap termIdInfoMap =
				TermIdInfo.ReadXml(@"C:\Lang\projects\THESIS_RELATED\Files\ParsedSeries\ArabTerrorNonTerror\term_id_info.xml");

			// Load the pre-trained static SVM model file.
			svm_model model =
				svm.svm_load_model(@"C:\Lang\development\SVM\libsvm-2.81\windows\arabterrornonterror\arab.txt.model");

			// Document parser configuration and hyperlink downloader.
			DocumentParserConfiguration documentParserConfiguration =
				Slaks.WebServer.Core.Configuration.DocumentParserConfiguration.GetConfig();
			HyperlinkDownload download = new HyperlinkDownload();

			// Single-document mode. To classify every file in the raw-series
			// directory, restore the loop:
			//   string[] files = Directory.GetFiles(@"C:\Lang\projects\THESIS_RELATED\Files\RawSeries\ArabTerrorNonTerror\");
			//   foreach (string filename in files) { byte[] file = download.GetFile(filename); ... }
			byte[] file = download.GetFile(@"C:\Lang\projects\THESIS_RELATED\Files\RawSeries\ArabTerrorNonTerror\898.htm");

			ParserFactory factory = ParserFactory.CreateParserFactoryInstance(documentParserConfiguration, "parsers");

			// Prefer a parser keyed on the file extension; fall back to content type.
			DocumentParser parser = factory.GetParser(download.GetFileExtension());
			if (parser == null) parser = factory.GetParser(download.GetContentType());
			if (parser == null)
			{
				Console.WriteLine("Unable to parse the file " + download.GetLinkPathString());
				return;
			}

			parser.SetBuffer(file, download.GetLinkPathString(), download.GetLinkPathString(), 0);
			ParsedPage page = parser.GetPage();

			TermIdInfoList idInfoList = GetTermList(page.Text, page.Title, termIdInfoMap);
			if (idInfoList.Count == 0)
			{
				Console.WriteLine("No terms...");
				return;
			}

			svm_predict svmpredict = new svm_predict();
			string svmVector = GetSvmVector(idInfoList, 50);
			double target = svmpredict.PredictSingleVector(svmVector, model, 0);

			Console.WriteLine(download.GetLinkPathString() + " : " + target);
		}

		/// <summary>
		/// Builds the per-document term frequency list: counts every term of the
		/// title and of the body text that appears in <paramref name="infoMap"/>.
		/// </summary>
		/// <param name="text">Document body text.</param>
		/// <param name="title">Document title (fed to the reader first).</param>
		/// <param name="infoMap">Vocabulary: term string to term-id element.</param>
		/// <returns>The accumulated list, sorted via TermIdInfoList.Sort().</returns>
		private static TermIdInfoList GetTermList(string text, string title, TermIdInfoMap infoMap)
		{
			ReaderState state = ReaderState.None;
			TermIdInfoList idInfoList = new TermIdInfoList();
			TermId2TermIdInfoElement termId2TermIdInfoElement = new TermId2TermIdInfoElement();

			ArabSpecNormTermBufferReader bufferReader =
				new ArabSpecNormTermBufferReader(new Slaks.Filters.ArabTermFilter(), title);
			bufferReader.OpenStream();

			// Count the title terms, then re-use the same reader for the body text.
			// "state" is passed by ref so it carries over between the two passes,
			// exactly as in the original inline loops.
			AccumulateTermFrequencies(bufferReader, infoMap, idInfoList, termId2TermIdInfoElement, ref state);
			bufferReader.AssingNewBuffer(text);
			AccumulateTermFrequencies(bufferReader, infoMap, idInfoList, termId2TermIdInfoElement, ref state);

			bufferReader.CloseReader();

			idInfoList.Sort();
			return idInfoList;
		}

		// Reads terms from the reader until EOF and accumulates their frequencies.
		// (This loop was duplicated verbatim for title and text in the original.)
		private static void AccumulateTermFrequencies(ArabSpecNormTermBufferReader bufferReader,
			TermIdInfoMap infoMap, TermIdInfoList idInfoList,
			TermId2TermIdInfoElement termId2TermIdInfoElement, ref ReaderState state)
		{
			while (!bufferReader.Eof())
			{
				string term = bufferReader.NextTerm(ref state);
				if (state != ReaderState.Term) continue;       // token was not a term
				if (infoMap.Contains(term) == false) continue; // term not in vocabulary

				TermIdInfoElement element = infoMap[term];
				if (termId2TermIdInfoElement.Contains(element.Id) == false)
				{
					// First occurrence of this term id: start its frequency at 1.
					// The same element instance is shared by the map and the list.
					TermIdInfoElement newElement = new TermIdInfoElement(term, element.Id, 1);
					termId2TermIdInfoElement.Add(element.Id, newElement);
					idInfoList.Add(newElement);
				}
				else
				{
					TermIdInfoElement existing = termId2TermIdInfoElement[element.Id];
					existing.Frequency += 1;
				}
			}
		}

		/// <summary>
		/// Formats the top <paramref name="pruning"/> terms as one libsvm sparse
		/// vector line ("id:weight id:weight ...\n"), each weight normalized by the
		/// first element's frequency. Assumes <paramref name="idInfoList"/> is
		/// sorted with the most frequent term first — TODO confirm Sort() order.
		/// </summary>
		private static string GetSvmVector(TermIdInfoList idInfoList, int pruning)
		{
			// Robustness: an empty list caused an index-out-of-range in the original.
			if (idInfoList.Count == 0) return Environment.NewLine;

			int length = Math.Min(idInfoList.Count, pruning);
			double maxFrequency = idInfoList[0].Frequency;

			// Copy the pruned slice and re-sort it by term id, presumably because
			// libsvm expects feature indices in ascending order.
			TermIdInfoList newIdInfoList = new TermIdInfoList();
			for (int i = 0; i < length; i++)
			{
				newIdInfoList.Add(idInfoList[i]);
			}
			newIdInfoList.Sort(new IdComparer());

			StringBuilder vector = new StringBuilder();
			for (int i = 0; i < newIdInfoList.Count; i++)
			{
				vector.Append(newIdInfoList[i].Id);
				vector.Append(":");
				// FIX: format with the invariant culture; the default culture may
				// use ',' as the decimal separator, which libsvm cannot parse.
				vector.Append(((double)newIdInfoList[i].Frequency / maxFrequency)
					.ToString(System.Globalization.CultureInfo.InvariantCulture));
				vector.Append(" ");
			}

			vector.Append(Environment.NewLine);
			return vector.ToString();
		}
	}


}
