﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Diagnostics;
using System.IO;

namespace CNSegmenter
{
    /// <summary>
    /// Dictionary- and bigram-model-based Chinese word segmenter.
    /// Tokenization runs forward and backward maximum match against a word
    /// dictionary, then picks the candidate segmentation with the highest
    /// character-bigram score.
    /// </summary>
    class CNSegmenter
    {
        // Singleton. Lazy<T> makes first-use creation thread-safe, unlike the
        // previous unsynchronized "if (_segmenter == null)" check.
        static private Lazy<CNSegmenter> _lazyInstance =
            new Lazy<CNSegmenter>(() => new CNSegmenter());

        /// <summary>
        /// Returns the process-wide segmenter, creating it (and loading the
        /// dictionary and model files) on first use.
        /// </summary>
        static public CNSegmenter Instance()
        {
            return _lazyInstance.Value;
        }

        // Protected so the type is only created through Instance() (or a test
        // subclass). Construction loads the dictionary and the bigram model.
        protected CNSegmenter()
        {
            Initialize();
        }

        private void Initialize()
        {
            // Load dictionary for mechanical (dictionary-based) tokenization;
            // one word per line.
            if (_dict == null)
            {
                // NOTE(review): Encoding.Default is platform dependent; the data
                // files must have been written with the same encoding
                // (SaveModelToFile uses Encoding.Default too).
                var lines = File.ReadAllLines(@".\Model\dict.txt", Encoding.Default);
                _dict = new HashSet<string>(lines);

                // Load model to support possibility evaluation.
                LoadModelFromFile();
            }
            Debug.Assert(_dict != null);
        }

        // API.
        /// <summary>
        /// Tokenize an input string. Builds a backward and a forward
        /// maximum-match segmentation and returns the one the bigram model
        /// scores highest.
        /// </summary>
        /// <param name="input">Non-null string to segment.</param>
        /// <returns>The best segmentation; null if the input is empty after
        /// trimming or no candidate segmentation survives filtering.</returns>
        public List<string> Tokenize(string input)
        {
            Debug.Assert(input != null);

            // Strip surrounding spaces and common Chinese punctuation.
            input = input.Trim(' ');
            input = input.Trim("，。？！；……“”".ToCharArray());
            if (input.Length == 0)
            {
                return null;
            }

            List<List<string>> tokenizeSLNs = new List<List<string>>
            {
                BasicTokenizer(input, true), // backward
                BasicTokenizer(input),       // forward
            };

            // Filter those slns that are absolutely incorrect.
            tokenizeSLNs = SLNFilter(tokenizeSLNs);

            // Choose the best sln from model. Scores are sums of non-negative
            // probabilities, so -1 guarantees the first candidate is accepted.
            List<string> tokenizedWords = null;
            double maxScore = -1;
            foreach (var sln in tokenizeSLNs)
            {
                double score = sln.Sum(word => WordScore(word));
                if (score > maxScore)
                {
                    maxScore = score;
                    tokenizedWords = sln;
                }
                Debug.Print("sln score: " + score.ToString());
            }

            // Fix: previously an empty candidate list left tokenizedWords null
            // and the debug dump below threw a NullReferenceException.
            if (tokenizedWords == null)
            {
                return null;
            }

            // Output Debug Info
            Debug.Print("<" + string.Join(", ", tokenizedWords) + ">");

            return tokenizedWords;
        }

        // Reserved for single-character filtering rules; not used yet.
        static private List<char> _oneWordRule = new List<char>();

        /// <summary>
        /// Filters candidate segmentations that are definitely incorrect.
        /// Currently a pass-through placeholder.
        /// </summary>
        private List<List<string>> SLNFilter(List<List<string>> slns)
        {
            Debug.Assert(slns != null);

            // TODO: implementation.

            return slns;
        }

        // Word dictionary; loaded once in Initialize().
        private HashSet<string> _dict = null;

        // Longest word (in characters) the dictionary matcher will consider.
        private int _maxWordLength = 5;
        public int MaxWordLength
        {
            get
            {
                return _maxWordLength;
            }
            set
            {
                _maxWordLength = value;
            }
        }

        /// <summary>
        /// Maximum-match tokenization against the dictionary, forward (left to
        /// right) or backward (right to left). At each position the longest
        /// dictionary word of up to MaxWordLength characters is taken; a single
        /// character is the fallback when nothing matches.
        /// </summary>
        /// <param name="input">non-null string.</param>
        /// <param name="isBackward">True to match from the end of the string.</param>
        /// <returns>The tokenized words in original text order.</returns>
        private List<string> BasicTokenizer(string input, bool isBackward = false)
        {
            List<string> tokenizedWords = new List<string>();
            if (isBackward)
            {
                // Backward: grow candidate words leftwards from startIndex.
                int startIndex = input.Length - 1;
                while (startIndex >= 0)
                {
                    int maxLength = 1;
                    // Fix: "length <= _maxWordLength" so words of exactly
                    // MaxWordLength characters can match (was an off-by-one
                    // that capped matches at MaxWordLength - 1 characters).
                    for (int length = 1;
                         startIndex - length + 1 >= 0 && length <= _maxWordLength;
                         length++)
                    {
                        string tempWord = input.Substring(startIndex - length + 1, length);
                        if (_dict.Contains(tempWord))
                        {
                            maxLength = length;
                        }
                    }
                    tokenizedWords.Add(input.Substring(startIndex - maxLength + 1, maxLength));
                    startIndex -= maxLength;
                }
                // Words were collected right-to-left; restore text order.
                tokenizedWords.Reverse();
            }
            else
            {
                // Forward: grow candidate words rightwards from startIndex.
                int startIndex = 0;
                while (startIndex < input.Length)
                {
                    int maxLength = 1;
                    // Same off-by-one fix as the backward pass.
                    for (int length = 1;
                         startIndex + length <= input.Length && length <= _maxWordLength;
                         length++)
                    {
                        string tempWord = input.Substring(startIndex, length);
                        if (_dict.Contains(tempWord))
                        {
                            maxLength = length;
                        }
                    }
                    tokenizedWords.Add(input.Substring(startIndex, maxLength));
                    startIndex += maxLength;
                }
            }
            return tokenizedWords;
        }

        // Separators used to split training text into independent runs:
        // Chinese/ASCII punctuation, symbols, whitespace, and line breaks.
        static private string _punctuactions = "。：、？“”！，.《》 %@#$^&*()-_+=|\\\r\n ";
        static public string Punctuactions
        {
            get
            {
                return _punctuactions;
            }
        }

        // Path of the raw text consumed by LearnFromTrainingFile().
        private string _trainingFilePath = @".\Model\trainingFile.txt";
        public string TrainingFilePath
        {
            get
            {
                return _trainingFilePath;
            }
            set
            {
                _trainingFilePath = value;
            }
        }

        /// <summary>
        /// One node of the character-bigram model: a character plus the
        /// observed counts of every character that followed it in training.
        /// </summary>
        public class Node
        {
            public char ch;
            // Total number of observed successors (sum of outEdge values).
            public int totalCount = 0;
            // Successor character -> number of times it followed ch.
            public Dictionary<char, int> outEdge = new Dictionary<char, int>();
            // Probability assigned to a successor never seen in training.
            static private double _veryLowPossibility = 0.0001;

            /// <summary>
            /// Records one observation of character c following this node.
            /// </summary>
            public void AddAfterChar(char c)
            {
                // TryGetValue avoids the ContainsKey + indexer double lookup.
                int count;
                outEdge[c] = outEdge.TryGetValue(c, out count) ? count + 1 : 1;
                totalCount++;
            }

            /// <summary>
            /// Estimated probability that c follows this node's character,
            /// computed from the observed counts; an unseen successor gets a
            /// small non-zero probability instead of zero.
            /// </summary>
            public double MeAfterCPossibility(char c)
            {
                int count;
                if (outEdge.TryGetValue(c, out count))
                {
                    return count * 1.0 / totalCount;
                }
                return _veryLowPossibility;
            }
        }

        // Bigram model: leading character -> node holding its successor counts.
        private Dictionary<char, Node> _model = new Dictionary<char, Node>();
        public Dictionary<char, Node> Model
        {
            get
            {
                return _model;
            }
        }

        /// <summary>
        /// (Re)builds the bigram model from the training file: the text is
        /// split on punctuation/whitespace and every adjacent character pair
        /// inside each fragment is counted. The result is saved to the model
        /// file via SaveModelToFile().
        /// </summary>
        public void LearnFromTrainingFile()
        {
            _model.Clear();

            var lines = File.ReadAllText(_trainingFilePath, Encoding.Default).Split(_punctuactions.ToArray());
            foreach (var line in lines)
            {
                for (int i = 0; i < line.Length - 1; i++)
                {
                    // Create the node on first sight of this character, then
                    // count the following character in either case.
                    Node node;
                    if (!_model.TryGetValue(line[i], out node))
                    {
                        node = new Node();
                        node.ch = line[i];
                        _model.Add(node.ch, node);
                    }
                    node.AddAfterChar(line[i + 1]);
                }
            }
            Debug.Print(_model.Count.ToString() + " characters are built from file.");

            // Save the training result.
            SaveModelToFile();
        }

        // Path of the serialized model; one node per line, formatted as
        // "<ch> <totalCount>|<succ> <count>|<succ> <count>...".
        static private string _modelFilePath = @".\Model\model.txt";
        static public string ModelFilePath
        {
            get
            {
                return _modelFilePath;
            }
            set
            {
                _modelFilePath = value;
            }
        }

        /// <summary>
        /// Writes the current model to ModelFilePath, one node per line.
        /// </summary>
        public void SaveModelToFile()
        {
            using (StreamWriter writer = new StreamWriter(_modelFilePath, false, Encoding.Default))
            {
                foreach (var item in _model)
                {
                    // StringBuilder avoids quadratic string concatenation when a
                    // node has many outgoing edges.
                    var record = new StringBuilder();
                    record.Append(item.Key).Append(' ').Append(item.Value.totalCount);
                    foreach (var edge in item.Value.outEdge)
                    {
                        record.Append('|').Append(edge.Key).Append(' ').Append(edge.Value);
                    }
                    writer.WriteLine(record.ToString());
                }
            }
            Debug.Print(string.Format("{0} records are saved to file.", _model.Count));
        }

        /// <summary>
        /// Replaces the current model with the contents of ModelFilePath
        /// (the format written by SaveModelToFile).
        /// </summary>
        public void LoadModelFromFile()
        {
            _model.Clear();
            var records = File.ReadAllLines(_modelFilePath, Encoding.Default);
            foreach (var record in records)
            {
                // Record layout: "<ch> <count>" head, then "|<ch> <count>" per edge.
                // NOTE(review): space-delimited parsing assumes no key character
                // is itself a space; training text is split on spaces, so none
                // should be — confirm if the model file is ever hand-edited.
                var parts = record.Split('|');
                var head = parts[0].Split(' ');
                Node node = new Node();
                node.ch = Convert.ToChar(head[0]);
                node.totalCount = Convert.ToInt32(head[1]);
                foreach (var part in parts.Skip(1))
                {
                    var pair = part.Split(' ');
                    node.outEdge.Add(Convert.ToChar(pair[0]), Convert.ToInt32(pair[1]));
                }
                _model.Add(node.ch, node);
            }
            Debug.Print(string.Format("{0} records are loaded from file.", _model.Count));
        }

        // Probability used for a leading character absent from the model;
        // mirrors Node's unseen-successor fallback value.
        private const double _unknownCharPossibility = 0.0001;

        /// <summary>
        /// Score of a candidate word: the product of the bigram probabilities
        /// of its adjacent character pairs. Single-character words score 1.
        /// </summary>
        public double WordScore(string word)
        {
            double possibility = 1;
            for (int i = 0; i < word.Length - 1; i++)
            {
                Node node;
                if (_model.TryGetValue(word[i], out node))
                {
                    possibility *= node.MeAfterCPossibility(word[i + 1]);
                }
                else
                {
                    // Fix: a character absent from the model previously threw
                    // KeyNotFoundException and aborted tokenization; treat it
                    // as a very unlikely bigram instead.
                    possibility *= _unknownCharPossibility;
                }
            }
            return possibility;
        }
    }
}
