﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using SharpICTCLAS;
using System.Diagnostics;

namespace mb
{
    class Corpus
    {
        // Token lists of each training document, grouped by sentiment.
        private List<List<string>> negfiletoken; //negative
        private List<List<string>> posfiletoken; //positive

        // tf-idf feature vector of each training document, grouped by sentiment.
        private List<List<double>> negEigenvectors; //negative 
        private List<List<double>> posEigenvectors; //positive

        // The home directory; combined with folder names to build corpus paths.
        private string homedirect;

        // The number of negative files in the corpus, initialized to 0.
        private int negfile;
        // The number of positive files in the corpus, initialized to 0.
        private int posfile;

        // The tokens that appear in the positive files.
        private Token postokens;
        // The tokens that appear in the negative files.
        private Token negtokens;

        // The information gain value of every token.
        private InfoGain igg;

        // Scratch buffers filled by Segmentsplit: all tokens / per-file distinct tokens.
        private List<string> texts;
        private List<string> disttexts;

        // Tokens selected as machine-learning features.
        private List<string> validTokens;

        // The top similarity values together with their document sentiment type.
        private List<Similar> maxsim;
        // Sub path of the training corpus relative to the home directory.
        private string subPath;

        // Diagnostic placeholder compiled only into DEBUG builds; currently empty.
        [Conditional("DEBUG")]
        private void CheckState()
        {
           
        }
        // Diagnostic (DEBUG builds only): print how many tokens survived feature selection.
        [Conditional("DEBUG")]
        private void ShowValidTokens()
        {
            Console.WriteLine("Valid Tokens Number: {0}", validTokens.Count);
        }
        // Diagnostic (DEBUG builds only): print the trained file count per sentiment.
        [Conditional("DEBUG")]
        private void ShowFileNumber()
        {
            Console.WriteLine("Positive :   {0}     Negative :  {1}", posfile, negfile);
        }
        // Create an empty corpus rooted at the given sub path (relative to the
        // project home directory); training happens later via TrainCorpus().
        public Corpus(string path)
        {
            subPath = path;
            homedirect = GetHomePath();
            negfile = 0;
            posfile = 0;

            // Per-document token lists and feature vectors.
            negfiletoken = new List<List<string>>();
            posfiletoken = new List<List<string>>();
            negEigenvectors = new List<List<double>>();
            posEigenvectors = new List<List<double>>();

            // Token statistics and information gain.
            postokens = new Token();
            negtokens = new Token();
            igg = new InfoGain();

            // Scratch buffers and classification state.
            texts = new List<string>();
            disttexts = new List<string>();
            validTokens = new List<string>();
            maxsim = new List<Similar>();
        }

        // Train the corpus:
        // 1. Segment every document into word tokens using ICTCLAS.
        // 2. Count the file number per sentiment and each token's appearances.
        // 3. Compute information gain per token and select the feature tokens.
        // 4. Build and L2-normalize the tf-idf vector of every document.
        public void TrainCorpus()
        {
            Console.WriteLine("START TRAIN CORPUS: " + System.DateTime.Now.ToString());
            // Negative half. texts/disttexts are scratch buffers shared between
            // the two halves, so they must be cleared before each pass.
            disttexts.Clear();
            texts.Clear();
            Segmentsplit(homedirect, subPath, "neg", ref negfile, SentimentType.Negative);
            CountDist(disttexts, negtokens); // per-document (distinct) appearances
            Count(texts, negtokens);         // raw occurrence counts

            // Positive half.
            disttexts.Clear();
            texts.Clear();
            Segmentsplit(homedirect, subPath, "pos", ref posfile, SentimentType.Positive);
            CountDist(disttexts, postokens);
            Count(texts, postokens);
            ShowFileNumber();  //Diagnostics

            IGCount();            // information gain of every token
            SelectValidTokens();  // feature selection based on the IG table
            ShowValidTokens();   //Diagnostics
            CountEigenvectors();  // tf-idf vector per document
            EigenvectorsSquare(); // L2 normalization of those vectors
            Console.WriteLine("END  TRAIN CORPUS: " + System.DateTime.Now.ToString());
        }

        // Normalize every feature vector to unit length (L2 norm) so that
        // documents with large raw tf-idf values do not dominate the
        // cosine-similarity comparison.
        private void EigenvectorsSquare()
        {
            // The neg/pos loops were identical; share one helper.
            NormalizeL2(negEigenvectors);
            NormalizeL2(posEigenvectors);
        }

        // Divide each vector by its Euclidean length, in place.
        private static void NormalizeL2(List<List<double>> vectors)
        {
            foreach (List<double> v in vectors)
            {
                double sum = 0;
                for (int j = 0; j < v.Count; j++)
                {
                    sum = sum + v[j] * v[j];
                }
                sum = Math.Sqrt(sum);
                // Guard the all-zero vector: the original divided by zero here,
                // filling the vector with NaN. Leaving it as zeros yields the
                // same downstream result (its similarities come out NaN and are
                // skipped) without propagating NaN through the data.
                if (sum == 0)
                {
                    continue;
                }
                for (int j = 0; j < v.Count; j++)
                {
                    v[j] = v[j] / sum;
                }
            }
        }

        // Calculate the tf-idf value of every valid token for every training
        // document and store the resulting vectors in neg/posEigenvectors.
        private void CountEigenvectors()
        {
            int N = negfile + posfile;
            // Document frequency of a token does not depend on the document being
            // vectorized, so compute it once instead of once per document
            // (the original recomputed it inside the per-document loop).
            int[] df = new int[validTokens.Count];
            for (int j = 0; j < validTokens.Count; j++)
            {
                df[j] = (negtokens.Tokens.ContainsKey(validTokens[j]) ? negtokens.Tokens[validTokens[j]].TAppearance : 0)
                    + (postokens.Tokens.ContainsKey(validTokens[j]) ? postokens.Tokens[validTokens[j]].TAppearance : 0);
            }
            BuildEigenvectors(negfiletoken, negEigenvectors, N, df);
            BuildEigenvectors(posfiletoken, posEigenvectors, N, df);
        }

        // Append one tf-idf vector per document in 'docs' to 'target'.
        private void BuildEigenvectors(List<List<string>> docs, List<List<double>> target, int N, int[] df)
        {
            foreach (List<string> doc in docs)
            {
                // Count term frequencies once per document instead of rescanning
                // the whole document for every valid token (was O(tokens x words)).
                // CurrentCultureIgnoreCase matches the original
                // string.Compare(a, b, true) comparison semantics.
                Dictionary<string, int> tfMap = new Dictionary<string, int>(StringComparer.CurrentCultureIgnoreCase);
                foreach (string word in doc)
                {
                    int c;
                    tfMap.TryGetValue(word, out c);
                    tfMap[word] = c + 1;
                }
                List<double> vector = new List<double>(validTokens.Count);
                for (int j = 0; j < validTokens.Count; j++)
                {
                    int tf;
                    tfMap.TryGetValue(validTokens[j], out tf);
                    // idf = ln(N / df); df is never 0 because every valid token was
                    // observed in at least one training document.
                    vector.Add(tf * Math.Log(N * 1.0 / df[j]));
                }
                target.Add(vector);
            }
        }

        // Filter the information-gain table down to tokens usable as features.
        // Rejected: NaN gain values, tokens starting with a digit, with an ASCII
        // letter (i.e. English words), or with punctuation.
        private void SelectValidTokens()
        {
            //TODO: algorithm to select the valid token 
            foreach (string s in igg.Ig.Keys)
            {
                if (double.IsNaN(igg.Ig[s]))
                {
                    continue;
                }
                bool asciiLetter = (s[0] >= 'a' && s[0] <= 'z') || (s[0] >= 'A' && s[0] <= 'Z');
                if (char.IsDigit(s, 0) || asciiLetter || char.IsPunctuation(s, 0))
                {
                    continue;
                }
                validTokens.Add(s);
            }
        }

        // Calculate the Information Gain value of every token:
        //   IG(t) = H(C) - [ P(t)·H(C|t) + P(~t)·H(C|~t) ]
        // written out in expanded form, where H is the entropy over the two
        // sentiment classes and natural logarithms are used throughout.
        private void IGCount()
        {
            int total = negfile + posfile;
            // Class entropy H(C) is the same for every token; hoist it out of the loops.
            double classEntropy = -(negfile * 1.0 / total * Math.Log(negfile * 1.0 / total)
                + posfile * 1.0 / total * Math.Log(posfile * 1.0 / total));

            // Tokens that appear in at least one negative document
            // (they may also appear in positive documents).
            foreach (string s in negtokens.Tokens.Keys)
            {
                int negApp = negtokens.Tokens[s].TAppearance;
                int posApp = (postokens.Tokens.ContainsKey(s) == true) ? postokens.Tokens[s].TAppearance : 0;

                double igt = classEntropy;
                int temp = negApp + posApp;
                double pt = temp * 1.0 / total;                 // P(t): fraction of docs containing the token
                double pctneg = negApp * 1.0 / temp;            // P(neg | t)
                double pctpos = posApp * 1.0 / temp;            // P(pos | t)
                igt = igt + pt * (pctneg * Math.Log(pctneg) + pctpos * Math.Log(pctpos));

                double npt = 1 - pt;                            // P(~t)
                temp = negfile - negApp + posfile - posApp;
                double npctneg = (negfile - negApp) * 1.0 / temp;
                double npctpos = (posfile - posApp) * 1.0 / temp;
                igt = igt + npt * (npctneg * Math.Log(npctneg) + npctpos * Math.Log(npctpos));
                igg.Ig.Add(s, igt);
            }

            // Tokens that appear only in positive documents.
            foreach (string s in postokens.Tokens.Keys)
            {
                if (negtokens.Tokens.ContainsKey(s) == false)
                {
                    int posApp = postokens.Tokens[s].TAppearance;
                    double igt = classEntropy;
                    double pt = posApp * 1.0 / total;
                    double pctpos = 1; // the token never occurs in a negative document
                    double npt = 1 - pt;
                    // BUG FIX: the original computed this with integer division
                    // ("negfile / (...)"), truncating npctneg to 0 (or 1) and
                    // corrupting the IG value of every positive-only token.
                    double npctneg = negfile * 1.0 / (total - posApp);
                    double npctpos = (posfile - posApp) * 1.0 / (total - posApp);
                    igt = igt + pt * (pctpos * Math.Log(pctpos));
                    igt = igt + npt * (npctpos * Math.Log(npctpos) + npctneg * Math.Log(npctneg));
                    igg.Ig.Add(s, igt);
                }
            }
        }

        // Walk up from the current directory looking for an ancestor folder
        // named "xiaoming-source" (the project home). Falls back to the
        // current directory when no such ancestor exists.
        private string GetHomePath()
        {
            for (string dir = Environment.CurrentDirectory; dir != null; dir = Path.GetDirectoryName(dir))
            {
                if (string.Compare(Path.GetFileName(dir), "xiaoming-source", true) == 0)
                {
                    return dir;
                }
            }
            return Environment.CurrentDirectory;
        }

        // Count every token occurrence in the corpus, duplicates included.
        // Tokens seen for the first time are registered with count 1.
        private void Count(List<string> content, Token tk)
        {
            foreach (string word in content)
            {
                if (tk.Tokens.ContainsKey(word))
                {
                    tk.increaseCount(word);
                }
                else
                {
                    // First sighting: initialize both counters to 1,
                    // mirroring CountDist's initialization.
                    tk.Tokens.Add(word, new TotalCount(1, 1));
                }
            }
        }

        // Count in how many documents each token appears: 'content' holds the
        // per-document distinct tokens, so each document contributes at most
        // one appearance per token.
        private void CountDist(List<string> content, Token tk)
        {
            foreach (string word in content)
            {
                if (tk.Tokens.ContainsKey(word))
                {
                    tk.increaseAppearence(word);
                }
                else
                {
                    // First sighting: initialize both counters to 1.
                    tk.Tokens.Add(word, new TotalCount(1, 1));
                }
            }
        }

        // Segment every file under path/subPath1/subpath2 into word tokens using ICTCLAS.
        // Side effects: increments filecount once per file, appends every token to
        // 'texts', the per-file distinct tokens to 'disttexts', and stores the
        // per-file token list in negfiletoken or posfiletoken depending on 'type'.
        private void Segmentsplit(string path, string subPath1, string subpath2, ref int filecount, SentimentType type)
        {
            // The segmenter loads its dictionary data from <home>/mb/Data/.
            WordSegmentSample sample = new WordSegmentSample(Path.Combine(path, "mb", "Data") + Path.DirectorySeparatorChar, 2);
            DirectoryInfo di = new DirectoryInfo(Path.Combine(path, subPath1, subpath2));
            foreach (FileInfo di1 in di.GetFiles())
            {
                filecount++;
                // Corpus files are gb2312-encoded; join all non-empty trimmed
                // lines into one string before segmenting.
                string[] lines = File.ReadAllLines(di1.FullName, Encoding.GetEncoding("gb2312"));
                string line = string.Empty;
                for (int i = 0; i < lines.Length; i++)
                {
                    if (string.IsNullOrEmpty(lines[i].Trim()) == false)
                    {
                        line += lines[i].Trim();
                    }
                }
                List<WordResult[]> result = sample.Segment(line.Trim());
                List<string> dist = new List<string>();      // distinct tokens of this file
                List<string> texttoken = new List<string>(); // all tokens of this file, in order
                for (int i = 0; i < result.Count; i++)
                {
                    // j runs 1..Length-2: the first and last entries appear to be
                    // ICTCLAS sentence begin/end markers -- TODO confirm against
                    // WordSegmentSample.Segment's output format.
                    for (int j = 1; j < result[i].Length - 1; j++)
                    {
                        texts.Add(result[i][j].sWord);
                        texttoken.Add(result[i][j].sWord);
                        if (dist.Contains(result[i][j].sWord) == false)
                        {
                            dist.Add(result[i][j].sWord);
                        }
                    }
                }
                disttexts.AddRange(dist);
                if (type == SentimentType.Negative)
                {
                    negfiletoken.Add(texttoken);
                }
                else
                {
                    posfiletoken.Add(texttoken);
                }
            }
        }


        // Segment the sentence into tokens using ICTCLAS, then convert it into an
        // L2-normalized tf-idf vector over the valid-token vocabulary, ready for
        // cosine-similarity comparison against the trained document vectors.
        private List<double> Classify(string sentence, string path)
        {
            WordSegmentSample sample = new WordSegmentSample(Path.Combine(path, "mb", "Data") + Path.DirectorySeparatorChar, 2);
            List<WordResult[]> result = sample.Segment(sentence.Trim());

            // Term frequencies of the sentence. The dictionary replaces the
            // original O(validTokens x sentenceTokens) rescan; the original also
            // built an unused 'disttoken' list, removed here.
            // CurrentCultureIgnoreCase matches string.Compare(a, b, true) semantics.
            Dictionary<string, int> tfMap = new Dictionary<string, int>(StringComparer.CurrentCultureIgnoreCase);
            for (int i = 0; i < result.Count; i++)
            {
                // Skip the first and last entries of each segment, mirroring the
                // token window used at training time (presumably ICTCLAS
                // begin/end markers -- TODO confirm).
                for (int j = 1; j < result[i].Length - 1; j++)
                {
                    int c;
                    tfMap.TryGetValue(result[i][j].sWord, out c);
                    tfMap[result[i][j].sWord] = c + 1;
                }
            }

            int N = negfile + posfile;
            List<double> vector = new List<double>(validTokens.Count);
            for (int i = 0; i < validTokens.Count; i++)
            {
                int tf;
                tfMap.TryGetValue(validTokens[i], out tf);
                // df is smoothed with +1 so an unseen token never divides by zero.
                int df = 1 + ((negtokens.Tokens.ContainsKey(validTokens[i]) == true) ? negtokens.Tokens[validTokens[i]].TAppearance : 0)
                     + ((postokens.Tokens.ContainsKey(validTokens[i]) == true) ? postokens.Tokens[validTokens[i]].TAppearance : 0);
                vector.Add(tf * Math.Log(N * 1.0 / df));
            }

            // L2-normalize. Guard the all-zero vector: the original divided by
            // zero and returned all-NaN; a zero vector produces the same final
            // classification (its similarities come out NaN and are skipped).
            double sum = 0;
            for (int i = 0; i < vector.Count; i++)
            {
                sum = sum + vector[i] * vector[i];
            }
            sum = Math.Sqrt(sum);
            if (sum > 0)
            {
                for (int i = 0; i < vector.Count; i++)
                {
                    vector[i] = vector[i] / sum;
                }
            }
            return vector;
        }

        // Cosine similarity of two vectors: dot(v1, v2) / (|v1| * |v2|).
        // Returns NaN when either vector has zero length; callers filter NaN out.
        // Marked static because it touches no instance state (CA1822).
        private static double Sim(List<double> v1, List<double> v2)
        {
            double dot = 0;
            double len1 = 0;
            double len2 = 0;
            // Walk only the common prefix in case the lengths ever differ.
            int n = Math.Min(v1.Count, v2.Count);
            for (int i = 0; i < n; i++)
            {
                dot = dot + v1[i] * v2[i];
                len1 = len1 + v1[i] * v1[i];
                len2 = len2 + v2[i] * v2[i];
            }
            return dot / (Math.Sqrt(len1) * Math.Sqrt(len2));
        }

        // Keep 'maxsim' as an ascending list of at most 10 similarity entries
        // (smallest first). When the list is full, the smallest entry is
        // evicted to admit a strictly larger newcomer; NaN values are ignored.
        private void MaintainMaxSim(Similar newsim)
        {
            if (double.IsNaN(newsim.SimValue))
            {
                return;
            }
            if (maxsim.Count >= 10) // capacity of the top-similarity list
            {
                // Full: reject anything not better than the current minimum,
                // otherwise evict the minimum to make room.
                if (newsim.SimValue <= maxsim[0].SimValue)
                {
                    return;
                }
                maxsim.RemoveAt(0);
            }
            // Insert before the first element that is >= the new value,
            // or append when the new value is the largest so far.
            int pos = maxsim.FindIndex(s => newsim.SimValue <= s.SimValue);
            if (pos < 0)
            {
                maxsim.Add(newsim);
            }
            else
            {
                maxsim.Insert(pos, newsim);
            }
        }

        // Compare the test vector against every trained document vector and
        // retain only the top similarity values for the final vote.
        private void CalculateSim(List<double> input)
        {
            maxsim.Clear(); // stale values from a previous query must not leak in
            foreach (List<double> v in posEigenvectors)
            {
                MaintainMaxSim(new Similar(Sim(input, v), SentimentType.Positive));
            }
            foreach (List<double> v in negEigenvectors)
            {
                MaintainMaxSim(new Similar(Sim(input, v), SentimentType.Negative));
            }
        }

        // Sum the stored top-similarity values belonging to the given sentiment
        // type, skipping any NaN entries.
        private double Score(SentimentType type)
        {
            return maxsim
                .Where(s => s.Type == type && !double.IsNaN(s.SimValue))
                .Sum(s => s.SimValue);
        }

        // Decide the sentiment of the test content from the two accumulated
        // scores: the larger score wins, a tie is neutral. The NaN guards are
        // kept for safety even though Score() already skips NaN entries.
        private SentimentType Result()
        {
            double posScore = Score(SentimentType.Positive);
            double negScore = Score(SentimentType.Negative);
            bool posNaN = double.IsNaN(posScore);
            bool negNaN = double.IsNaN(negScore);
            if (posNaN && negNaN)
            {
                return SentimentType.Neutral;
            }
            if (posNaN)
            {
                return SentimentType.Negative;
            }
            if (negNaN)
            {
                return SentimentType.Positive;
            }
            if (posScore > negScore)
            {
                return SentimentType.Positive;
            }
            return posScore < negScore ? SentimentType.Negative : SentimentType.Neutral;
        }

        // Classify a single line of text entered on the console:
        // vectorize it, rank it against the trained corpus, and vote.
        public SentimentType GetSentimentTypeFromConsole(string line)
        {
            CalculateSim(Classify(line, homedirect));
            return Result();
        }

        // Classify the content of a gb2312-encoded text file: join its
        // non-empty trimmed lines into one string, then classify that text.
        public SentimentType GetSentimentTypeFromFile(string filename)
        {
            string[] lines = File.ReadAllLines(filename, Encoding.GetEncoding("gb2312"));
            StringBuilder joined = new StringBuilder();
            foreach (string raw in lines)
            {
                string trimmed = raw.Trim();
                if (trimmed.Length > 0)
                {
                    joined.Append(trimmed);
                }
            }
            List<double> vector = Classify(joined.ToString(), homedirect);
            CalculateSim(vector);
            return Result();
        }

        // Classify every file under pos_test and neg_test and print how many
        // of each folder's files were judged positive vs negative.
        public void GetSentimentTypeStatistics(string testFilePath)
        {
            // The original duplicated the whole loop for the two folders;
            // share one helper instead.
            ClassifyFolder(Path.Combine(homedirect, testFilePath, "pos_test"));
            ClassifyFolder(Path.Combine(homedirect, testFilePath, "neg_test"));
            Console.WriteLine("-------------------------");
        }

        // Classify every file in 'folder' and print the positive/negative tallies.
        private void ClassifyFolder(string folder)
        {
            int pos = 0;
            int neg = 0;
            DirectoryInfo di = new DirectoryInfo(folder);
            foreach (FileInfo fi in di.GetFiles())
            {
                // Join the file's non-empty trimmed lines (gb2312-encoded text).
                string[] lines = File.ReadAllLines(fi.FullName, Encoding.GetEncoding("gb2312"));
                string line = string.Empty;
                for (int i = 0; i < lines.Length; i++)
                {
                    if (string.IsNullOrEmpty(lines[i].Trim()) == false)
                    {
                        line += lines[i].Trim();
                    }
                }
                List<double> testVector = Classify(line, homedirect);
                CalculateSim(testVector);
                // Evaluate Result() once per file: the original called it twice,
                // recomputing both scores each time.
                SentimentType verdict = Result();
                if (verdict == SentimentType.Positive)
                {
                    pos++;
                }
                else if (verdict == SentimentType.Negative)
                {
                    neg++;
                }
            }
            Console.WriteLine("POSITIVE: {0}    NEGATIVE---{1}", pos, neg);
        }
    }
}
