﻿using System;
using System.Collections.Generic;
using System.Configuration;
using System.Data.SqlClient;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using Sphinx.Client.Commands.Search;
using Sphinx.Client.Connections;
using Match = Sphinx.Client.Commands.Search.Match;

namespace LarasMutiaraDiva.SkripsiLaras.Lib
{
    public class Program
    {
        // Tuning knobs for the query-expansion pipeline. Nothing in this file assigns
        // them, so they default to 0 — presumably the hosting application sets them
        // before calling GetNewQuery; verify against the caller.
        public static int NUMBER_OF_TOP_PASSAGES { get; set; }        // how many top-ranked passages feed concept mining
        public static int NUMBER_OF_TOP_RELEVAN_DOCS { get; set; }    // how many top documents are fetched from Sphinx
        public static int NUMBER_OF_TOP_CONCEPTS { get; set; }        // how many expansion terms are appended to the query
        //public static string QUERY_SPARATOR1 = "|";
        //public static string QUERY_SPARATOR2 = "&";

        // Absolute path of the corpus directory holding the raw documents
        // (read by GetDocPreview and GetContentDoc).
        public static string pathToCorpusDir = @"D:\temp\LarasSkripsi\LarasMutiaraDiva.SkripsiLaras\LarasMutiaraDiva.SkripsiLaras.UI.Web\documents";
        
        private static void Main(string[] args)
        {
            // Manual smoke test: fetch passages for a sample query and dump the result.
            string allText = string.Empty;

            var passages = GetPassagesByQuery("riset tani", 2, out allText);
            Console.WriteLine(passages);
            Console.ReadKey();
        }


        /// <summary>
        /// Expands an already-stemmed user query with the top-ranked concept terms
        /// mined from passages of the top-N relevant documents (BM25 passage ranking
        /// followed by belief scoring of candidate concepts).
        /// </summary>
        /// <param name="oldQuery">User query, already stemmed (see GetStemQuery).</param>
        /// <returns>
        /// The original query followed by the selected expansion terms, or the
        /// original query unchanged when no passages were found.
        /// </returns>
        public static string GetNewQuery(string oldQuery)
        {
            string userInput = oldQuery;

            string textAllPassages;   // concatenated title + content of the top-N relevant docs

            int numberOfTopNRelevanceDoc = NUMBER_OF_TOP_RELEVAN_DOCS;
            // Passages (pairs of consecutive sentences, plus title) built from the top-N docs.
            var passages = GetPassagesByQuery(userInput, numberOfTopNRelevanceDoc, out textAllPassages);

            // BUGFIX(robustness): with no passages every statistic below is undefined
            // (division by zero, ElementAt out of range) — fall back to the original query.
            if (passages.Count == 0)
                return userInput;

            var uniqueTerm = GetUniqueTerms(textAllPassages);   // all distinct non-numeric terms

            int numberPassages = passages.Count;
            int numberTerm = uniqueTerm.Count;

            // TF[t, p] = frequency of term t in passage p.
            var TF = new int[numberTerm, numberPassages];
            for (int baris = 0; baris < numberTerm; baris++)
            {
                for (int kolom = 0; kolom < numberPassages; kolom++)
                {
                    TF[baris, kolom] = GetTfInPassage2(passages[kolom], uniqueTerm[baris]);
                }
            }

            var IDF = new double[numberTerm];
            var IDFBM25 = new double[numberTerm];
            for (int j = 0; j < numberTerm; j++)
            {
                var df = GetDocFreqByTerm(TF, j, numberPassages);

                // BUGFIX: the original computed Math.Log10(numberPassages / df) with
                // INTEGER division, truncating the ratio before the log. Cast to double,
                // and guard df == 0 (term absent from every passage) against dividing by zero.
                IDF[j] = df > 0 ? Math.Log10((double)numberPassages / df) : 0;

                IDFBM25[j] = Math.Log10((numberPassages - df + 0.5) / (df + 0.5));
            }

            // Passage lengths (Ld) and their average (Ldave).
            var DOCLENGTH = new int[numberPassages];
            double DocLengthSUM = 0;
            for (int j = 0; j < numberPassages; j++)
            {
                DocLengthSUM += DOCLENGTH[j] = GetLengthDoc(TF, j, numberTerm);
            }
            DocLengthSUM = DocLengthSUM / numberPassages;   // average passage length

            // BM25 length-normalisation factor K per passage.
            var KD = new double[numberPassages];
            var k1 = 1.2;
            var b = 0.75;
            var k3 = 7;
            for (int j = 0; j < numberPassages; j++)
            {
                KD[j] = k1 * ((1 - b) + b * (DOCLENGTH[j] / DocLengthSUM));
            }

            var termsQuery = GetWords(userInput);

            // BUGFIX: the original kept a fixed-size int array initialised to 0, so a
            // query term missing from the corpus silently aliased the term at index 0.
            // Keep only the query terms that actually occur in the term collection.
            var usedIndexQuery = new List<int>();
            foreach (var termQuery in termsQuery)
            {
                var index = uniqueTerm.GetIndex(termQuery);
                if (index >= 0)
                    usedIndexQuery.Add(index);
            }
            var numberOfQuery = usedIndexQuery.Count;

            // BM25 similarity between the query and each passage.
            var SIM = new double[numberPassages];
            for (int j = 0; j < numberPassages; j++)
            {
                foreach (var index in usedIndexQuery)
                {
                    // The query-side factor ((k3+1)*1)/(k3+1) is 1 for single-occurrence
                    // query terms; kept to mirror the full BM25 formula.
                    SIM[j] += IDFBM25[index] * ((k1 + 1) * TF[index, j]) / (KD[j] + TF[index, j]) * ((k3 + 1) * 1) / (k3 + 1);
                }
            }
            var dictSIM = SIM.toDictionary();
            var sortedSIM = dictSIM.OrderByDescending(o => o.Value);   // passages, best first

            // BUGFIX: clamp to the number of available passages so ElementAt below
            // cannot run past the end of the ranking.
            var numberOfTopNPassages = Math.Min(NUMBER_OF_TOP_PASSAGES, numberPassages);

            string textConcept = string.Empty;
            for (int j = 0; j < numberOfTopNPassages; j++)
            {
                textConcept += passages[sortedSIM.ElementAt(j).Key];   // text of the top-N passages
            }

            var termsConcept = GetUniqueTerms(textConcept);   // candidate concept terms (no numbers)

            // Indexes (into uniqueTerm) of concept terms that are not query terms.
            var indexConceptNonQuery = new List<int>();
            for (int j = 0; j < termsConcept.Count; j++)
            {
                if (!termsQuery.Contains(termsConcept[j]))
                {
                    indexConceptNonQuery.Add(uniqueTerm.GetIndex(termsConcept[j]));
                }
            }

#region bel
            // BEL(concept) = PROD_k (delta + co_degree(concept, query_k)) ^ idf(query_k)
            var BEL = new double[indexConceptNonQuery.Count];
            var delta = 0.1;
            for (int j = 0; j < indexConceptNonQuery.Count; j++)
            {
                BEL[j] = 1;

                for (int k = 0; k < numberOfQuery; k++)
                {
                    // fck(concept, query_k) = sum over top passages of tf(concept)*tf(query_k)
                    double fck = 0;
                    for (int l = 0; l < numberOfTopNPassages; l++)
                    {
                        var x = TF[indexConceptNonQuery[j], sortedSIM.ElementAt(l).Key];
                        var y = TF[usedIndexQuery[k], sortedSIM.ElementAt(l).Key];
                        fck += x * y;
                    }

                    // co_degree = log(fck + 1) * (idf(concept) / 5) / log(top-N passages)
                    // NOTE(review): when numberOfTopNPassages == 1 the denominator is
                    // log10(1) == 0 and co_degree becomes Infinity — same as the original.
                    var co_degree = Math.Log10(fck + 1) * (IDF[indexConceptNonQuery[j]] / 5) /
                                    Math.Log10(numberOfTopNPassages);
                    BEL[j] = BEL[j] * Math.Pow(delta + co_degree, IDF[usedIndexQuery[k]]);
                }
            }

            var dictBEL = BEL.toDictionary();
            var sortedConcept = dictBEL.OrderByDescending(o => o.Value);   // expansion candidates, best first
#endregion

            // BUGFIX: clamp to the number of candidates so ElementAt cannot overrun.
            var topNConcept = Math.Min(NUMBER_OF_TOP_CONCEPTS, indexConceptNonQuery.Count);

            string concept = string.Empty;
            for (int j = 0; j < topNConcept; j++)
            {
                var temp = uniqueTerm[sortedConcept.ElementAt(j).Key];
                // NOTE(review): IndexOf is a substring test, so a concept that happens to
                // be a substring of the query (e.g. "tan" in "tani") is also skipped.
                if (userInput.IndexOf(temp) < 0)
                    concept += temp + " ";
            }

            var newQuery = userInput + " " + concept;
            return newQuery;
        }

        /// <summary>
        /// Runs <paramref name="userQuery"/> against the local Sphinx index
        /// (localhost:3312) and returns the "docno" attribute of the top
        /// <paramref name="numbersOfRelevanDoc"/> matches, best first.
        /// </summary>
        /// <param name="userQuery">Query in Sphinx extended syntax.</param>
        /// <param name="numbersOfRelevanDoc">Maximum number of documents to return.</param>
        public static List<string> GetTopNDocByQuery(string userQuery, int numbersOfRelevanDoc)
        {
            // (BUGFIX: removed an unused `listOfPassages` local left over from copy/paste.)
            using (ConnectionBase connection = new PersistentTcpConnection("localhost", 3312))
            {
                SearchCommand search = new SearchCommand(connection);
                SearchQuery query = new SearchQuery(userQuery);

                // Allow boolean expressions and advanced query syntax in the search string.
                query.MatchMode = MatchMode.Extended2;

                // Rank with proximity BM25.
                query.RankingMode = MatchRankMode.ProximityBM25;

                // Sort by document weight.
                query.SortBy = "@relevance DESC, average DESC";
                query.SortMode = ResultsSortMode.Relevance;
                query.AttributeFilters.Clear();
                query.Limit = 200;

                search.QueryList.Add(query);
                search.Execute();

                var docResult = new List<string>();
                int docNumber = 0;
                foreach (SearchQueryResult result in search.Result.QueryResults)
                {
                    foreach (Match match in result.Matches)
                    {
                        if (docNumber < numbersOfRelevanDoc)
                        {
                            string docno = match.AttributesValues["docno"].GetValue().ToString();
                            docResult.Add(docno);
                        }
                        else
                            break;   // only exits the inner loop; the outer result loop continues

                        docNumber++;
                    }
                }
                return docResult;
            }
        }


        /// <summary>
        /// Builds an HTML preview of a corpus document: starting at the first word that
        /// matches any query term (case-insensitive), copies up to
        /// <paramref name="wordsNumber"/> words, wrapping matched terms in &lt;b&gt; tags.
        /// On any error (e.g. missing file) the exception message is returned instead.
        /// </summary>
        /// <param name="docName">File name inside <see cref="pathToCorpusDir"/>.</param>
        /// <param name="kueri">Query whose terms are highlighted.</param>
        /// <param name="wordsNumber">Maximum number of words in the preview.</param>
        public static string GetDocPreview(string docName, string kueri, int wordsNumber)
        {
            var textPreview = string.Empty;
            var arrayQuery = GetWords(kueri.ToLower());
            var pathDocKorpus = pathToCorpusDir;
            try
            {
                string contentDoc;
                // BUGFIX: the original never disposed the StreamReader, leaking the
                // file handle; `using` guarantees it is closed.
                using (var readDoc = new StreamReader(pathDocKorpus + "\\" + docName))
                {
                    contentDoc = readDoc.ReadToEnd();
                }

                var arrayText = GetWords(contentDoc);
                int i = 0;
                var read = false;
                foreach (var word in arrayText)
                {
                    if (i == wordsNumber)
                        return textPreview;

                    string tempWord;
                    if (arrayQuery.Contains(word.ToLower()))
                    {
                        read = true;                                   // start copying at the first query hit
                        tempWord = string.Format("<b>{0}</b>", word);  // highlight the matched term
                    }
                    else
                    {
                        tempWord = word;
                    }

                    if (read)
                    {
                        textPreview += tempWord + " ";
                        i++;
                    }
                }
            }
            catch (Exception err)
            {
                textPreview = err.Message;
            }
            return textPreview;
        }

        /// <summary>
        /// Returns the full text of a corpus document.
        /// </summary>
        /// <param name="docName">File name inside <see cref="pathToCorpusDir"/>.</param>
        public static string GetContentDoc(string docName)
        {
            var pathDocKorpus = pathToCorpusDir;
            // BUGFIX: the original leaked a StreamReader (never disposed);
            // File.ReadAllText opens and closes the file itself.
            return File.ReadAllText(pathDocKorpus + "\\" + docName);
        }

        /// <summary>
        /// Removes stopwords from and stems a query sentence by calling the
        /// dbo.isStopword SQL function per token ("0" marks a stopword; any other
        /// result is the stemmed token).
        /// </summary>
        /// <param name="querySentence">Raw user query.</param>
        /// <returns>Space-separated stemmed tokens (with a trailing space when non-empty).</returns>
        public static string GetStemQuery(string querySentence)
        {
            string connectionString = ConfigurationManager.AppSettings["connectionString"];
            var queryStem = string.Empty;

            // BUGFIX: the connection, commands and readers were never disposed;
            // `using` guarantees cleanup even on exceptions.
            using (SqlConnection connection = new SqlConnection(connectionString))
            {
                connection.Open();
                foreach (var word in GetWords(querySentence))
                {
                    // SECURITY FIX: the original concatenated the raw token into the SQL
                    // text (SQL injection). Pass it as a parameter instead.
                    using (SqlCommand command = new SqlCommand("SELECT dbo.isStopword(@word) as term", connection))
                    {
                        command.Parameters.AddWithValue("@word", word);
                        using (SqlDataReader reader = command.ExecuteReader())
                        {
                            reader.Read();
                            var term = reader["term"].ToString();   // "0" = stopword, otherwise the stemmed word
                            if (!term.Equals("0"))
                                queryStem += term + " ";
                        }
                    }
                }
            }

            return queryStem;
        }


        /// <summary>
        /// Length of a passage in tokens: the sum of column <paramref name="kolom"/>
        /// of the term-frequency matrix over its first <paramref name="numTerm"/> rows.
        /// </summary>
        public static int GetLengthDoc(int[,] TF, int kolom, int numTerm)
        {
            return Enumerable.Range(0, numTerm).Sum(termIndex => TF[termIndex, kolom]);
        }

        /// <summary>
        /// Document frequency (df) of a term: in how many of the first
        /// <paramref name="numPassages"/> passages (columns) the term at row
        /// <paramref name="baris"/> occurs at least once.
        /// </summary>
        public static int GetDocFreqByTerm(int[,] arrayTermPassage, int baris, int numPassages)
        {
            return Enumerable.Range(0, numPassages)
                             .Count(passage => arrayTermPassage[baris, passage] > 0);
        }

       

        /// <summary>
        /// Runs <paramref name="userQuery"/> against the local Sphinx index
        /// (localhost:3312) and builds passages from the top
        /// <paramref name="numbersOfRelevanDoc"/> matching documents. Each passage is a
        /// pair of consecutive sentences; the first passage of a document is
        /// title + first sentence, and the last pairs the penultimate sentence with the title.
        /// </summary>
        /// <param name="userQuery">Query in Sphinx extended syntax.</param>
        /// <param name="numbersOfRelevanDoc">How many top-ranked documents to use.</param>
        /// <param name="textAllDocs">Out: concatenated "title content" of every used document.</param>
        /// <returns>The list of passages in document order.</returns>
        public static List<string> GetPassagesByQuery(string userQuery, int numbersOfRelevanDoc, out string textAllDocs)
        {
            var listOfPassages = new List<string>();
            using (ConnectionBase connection = new PersistentTcpConnection("localhost", 3312))
            {
                SearchCommand search = new SearchCommand(connection);
                SearchQuery query = new SearchQuery(userQuery);

                // Allow boolean expressions and advanced query syntax in the search string.
                query.MatchMode = MatchMode.Extended2;

                // Rank with proximity BM25.
                query.RankingMode = MatchRankMode.ProximityBM25;

                // Sort by document weight.
                query.SortBy = "@relevance DESC, average DESC";
                query.SortMode = ResultsSortMode.Relevance;
                query.AttributeFilters.Clear();
                query.Limit = 200;
                search.QueryList.Add(query);

                search.Execute();

                textAllDocs = string.Empty;
                int docNumber = 0;
                foreach (SearchQueryResult result in search.Result.QueryResults)
                {
                    foreach (Match match in result.Matches)
                    {
                        if (docNumber < numbersOfRelevanDoc)
                        {
                            Console.WriteLine("Document ID: {0}", match.DocumentId);
                            string content = match.AttributesValues["content"].GetValue().ToString();
                            string title = match.AttributesValues["title"].GetValue().ToString();

                            // Ensure the '.'-split below does not lose the final sentence.
                            if (notEndedWithDot(content))
                                content += ".";

                            textAllDocs += title + " " + content;

                            var contentSentences = content.Split('.');
                            for (int i = 0; i < contentSentences.Length; i++)
                            {
                                string passage;
                                if (i == 0)
                                {
                                    passage = title + " " + contentSentences[i];
                                }
                                else if (i == contentSentences.Length - 1)
                                {
                                    // BUGFIX(dead code): the original branched on
                                    // contentSentences[i].Length < 5 but BOTH branches were
                                    // identical; collapsed to the single statement.
                                    // NOTE(review): the long-fragment branch may have been
                                    // intended to pair the last two sentences instead —
                                    // confirm before changing ranking behaviour.
                                    passage = contentSentences[i - 1] + " " + title;
                                }
                                else
                                {
                                    passage = contentSentences[i - 1] + " " + contentSentences[i];
                                }
                                listOfPassages.Add(passage);
                            }
                        }
                        else
                            break;   // only exits the inner loop; the outer result loop continues

                        docNumber++;
                    }
                }
            }

            return listOfPassages;
        }

        /// <summary>
        /// True when <paramref name="content"/> does NOT end with '.' after ignoring
        /// trailing spaces and newlines; empty or whitespace-only input counts as true.
        /// </summary>
        private static bool notEndedWithDot(string content)
        {
            // BUGFIX: the original scanned with `i > 0` and therefore never examined
            // index 0, so the one-character string "." was wrongly reported as not
            // ending with a dot. TrimEnd over the same whitespace set fixes that
            // while keeping all other inputs identical.
            var trimmed = content.TrimEnd(' ', '\n');
            return !trimmed.EndsWith(".", StringComparison.Ordinal);
        }

        /// <summary>
        /// Distinct tokens of <paramref name="allText"/> in first-seen order,
        /// excluding purely numeric tokens.
        /// </summary>
        public static List<string> GetUniqueTerms(string allText)
        {
            var uniqueWord = new List<string>();
            // PERF FIX: the original called List.Contains per word (O(n^2) over large
            // corpora); a HashSet makes the seen-check O(1) while preserving order.
            var seen = new HashSet<string>();
            var words = GetWords(allText);
            foreach (var word in words)
            {
                if (!word.isNumber() && seen.Add(word))
                    uniqueWord.Add(word);
            }

            return uniqueWord;
        }


        /// <summary>
        /// Tokenises <paramref name="isiFile"/>: splits on runs of non-word characters
        /// (whitespace and punctuation) and drops the empty fragments Regex.Split
        /// produces at the boundaries.
        /// </summary>
        public static List<string> GetWords(string isiFile)
        {
            var fragments = Regex.Split(isiFile, @"\W+");
            return fragments.Where(token => token.Length > 0).ToList();
        }

        /// <summary>
        /// Counts non-overlapping maximal runs of <paramref name="term"/> as a raw
        /// substring of the passage ("abcabc" counts once). See GetTfInPassage2 for
        /// whole-word counting.
        /// </summary>
        public static int GetTfInPassage(string passage, string term)
        {
            // BUGFIX: the term was interpolated into the pattern unescaped, so a term
            // containing regex metacharacters ("(", "+", ".") threw or mismatched.
            MatchCollection myList = Regex.Matches(passage, "(" + Regex.Escape(term) + ")+");
            return myList.Count;
        }

        /// <summary>
        /// Whole-word term frequency: how many tokens of <paramref name="passage"/>
        /// are exactly equal to <paramref name="term"/>.
        /// </summary>
        public static int GetTfInPassage2(string passage, string term)
        {
            return GetWords(passage).Count(token => token.Equals(term));
        }


        /// <summary>
        /// Term frequency of <paramref name="word"/> within the token list
        /// <paramref name="words"/> (exact string equality).
        /// </summary>
        public static int GetTermFrekuensi(string word, List<string> words)
        {
            return words.Count(candidate => candidate == word);
        }
    }


}
