﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using HtmlAgilityPack;
using LingDong.HtmlParser;

namespace LingDong.HtmlClassification
{
    /// <summary>
    /// Feature select
    /// <![CDATA[
    // feature 1: TLR
    // tlr = (double)url_text_length / text_length * 100
    // in [0, 100], interval: 10
    // 0:[0, 9], 1:[10, 19], ..., 9:[90+]

    // feature 2: URL_NUM
    // in [0, +], interval: 20
    // 0:[0, 19], 1:[20, 29], ..., 8:[160, 179], 9:[180+]

    // feature 3: CLR
    // clr = (double)url_num / text_length * 1000
    // intervals are the same as TLR

    // feature 4: MAX_TEXT_SEG_LEN
    // in [0, +], interval: 100
    // 0:[0, 99], 1:[100, 199], ..., 9:[1000+]

    // feature 5: PERIOD_NUM
    // in [0, +], interval: 3
    // 0:[0, 2], 1:[3, 5], ..., 9:[27+]

    // feature 6: TITLE LENGTH
    // in [0, +], interval: 5
    // 0:[0, 4], 1:[5, 9], ..., 9:[45+]

    // feature 7: Link Distribution Uniformity
    // in [0, +], interval: 10
    // in [0, 100], interval: 10
    // 0:[0, 9], 1:[10, 19], ..., 9:[90+]

    // feature 8: resfile_block_num
    // in [0, +], interval: 10
    // in [0, 100], interval: 10
    // 0:[0, 9], 1:[10, 19], ..., 9:[90+]

    // feature 9: the max area of resfile block area, width * height
    // in [0, +], interval: 10
    // in [0, 100], interval: 10
    // 0:[0, 9], 1:[10, 19], ..., 9:[90+]
    /// ]]>
    /// </summary>
    public class BayesUtility
    {
        /// <summary>
        /// Extracts the discretized feature vector (see the class remarks above)
        /// from a raw HTML page.
        /// </summary>
        /// <param name="html">Raw HTML of the page to classify.</param>
        /// <returns>
        /// An array of <see cref="FeatureNumber"/> bucket indices, each in
        /// [0, <see cref="Dimensionality"/> - 1]. Slots 0 and 9 are currently
        /// constant placeholders (always bucket 0).
        /// </returns>
        internal static int[] GetHtmlFeature(string html)
        {
            HtmlDocument doc = new HtmlDocument();
            doc.LoadHtml(html);

            string title;
            string content = HtmlContent.ParseHtml(html, out title);
            // +1 guards the integer divisions below against a page with no extracted text.
            int textLength = content.Length + 1;

            HtmlNodeCollection hrefs = doc.DocumentNode.SelectNodes("//a[@href]");
            int urlNumber = (hrefs == null) ? 0 : hrefs.Count;
            int urlTextLength = (hrefs == null) ? 0 : hrefs.Sum(h => h.InnerText.Length);

            // Offsets of each anchor from the first anchor's stream position.
            // NOTE(review): this measures distance from the FIRST link, not the gap
            // between consecutive links — confirm that is the intended uniformity metric.
            int[] linkDistance = new int[urlNumber];
            if (urlNumber > 0)
            {
                linkDistance[0] = hrefs[0].StreamPosition;
                for (int i = 1; i < urlNumber; i++)
                {
                    linkDistance[i] = hrefs[i].StreamPosition - linkDistance[0];
                }
            }

            HtmlNodeCollection imageNodeList = doc.DocumentNode.SelectNodes("//img[@src]");
            int imageNumber = (imageNodeList == null) ? 0 : imageNodeList.Count;
            int maxImageSize = 0;
            if (imageNumber > 0)
            {
                foreach (HtmlNode node in imageNodeList)
                {
                    maxImageSize = Math.Max(maxImageSize, GetImageSize(node));
                }
            }

            string[] paras = HtmlContent.GetHtmlContent(html)
                .Split(new string[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries);

            int[] features = new int[FeatureNumber];
            features[0] = NormalizeFeature(0, 100);                               // reserved: always bucket 0
            features[1] = NormalizeFeature(urlTextLength * 100 / textLength, 10); // TLR
            features[2] = NormalizeFeature(urlNumber * 1000 / textLength, 10);    // CLR
            int maxParaLen = paras.Length == 0 ? 0 : paras.Max(p => p.Length);
            features[3] = NormalizeFeature(maxParaLen, 200);                      // MAX_TEXT_SEG_LEN
            features[4] = NormalizeFeature(html.Count(c => c == '。'), 3);        // PERIOD_NUM (CJK full stop)
            features[5] = NormalizeFeature(title.Length, 5);                      // title length
            // Coefficient of variation of the link offsets, scaled and shifted;
            // clamped at 0 so degenerate/sparse pages fall into the lowest bucket.
            int sd = (int)(GetStandardDeviation(linkDistance) * 100) - 50;
            features[6] = NormalizeFeature(Math.Max(0, sd), 2);                   // link distribution uniformity
            features[7] = NormalizeFeature(imageNumber, 10);                      // image block count
            features[8] = NormalizeFeature(maxImageSize, 2000);                   // max declared image area
            // features[9] is intentionally left at 0 (FeatureNumber reserves a tenth slot).

            return features;
        }

        /// <summary>
        /// Computes the coefficient of variation (standard deviation divided by the
        /// mean) of <paramref name="values"/>.
        /// </summary>
        /// <returns>
        /// 0.0 for an empty array or when the mean is 0. The zero-mean guard is a
        /// fix: the previous code divided by zero in that case, producing NaN that
        /// the caller then cast to <c>int</c> with unspecified results.
        /// </returns>
        private static double GetStandardDeviation(int[] values)
        {
            int num = values.Length;
            if (num == 0)
                return 0.0;

            double mean = 0.0;
            for (int i = 0; i < num; i++)
                mean += values[i];
            mean /= num;

            // Stream positions are non-negative, so mean == 0 means every value is 0.
            if (mean == 0.0)
                return 0.0;

            double sumSquares = 0.0;
            for (int i = 0; i < num; i++)
            {
                double diff = values[i] - mean;
                sumSquares += diff * diff;
            }

            return Math.Sqrt(sumSquares / num) / mean;
        }

        /// <summary>
        /// Reads the declared pixel area (width * height) of an img element.
        /// A missing or unparseable dimension counts as 0, making the area 0.
        /// </summary>
        private static int GetImageSize(HtmlNode imageNode)
        {
            int width = 0;
            if (imageNode.Attributes.Contains("width"))
                Int32.TryParse(imageNode.Attributes["width"].Value, out width);
            int height = 0;
            if (imageNode.Attributes.Contains("height"))
                Int32.TryParse(imageNode.Attributes["height"].Value, out height);
            return width * height;
        }

        /// <summary>
        /// Discretizes a raw feature value into a bucket index by integer division,
        /// clamped to the last bucket (<see cref="Dimensionality"/> - 1).
        /// Callers are expected to pass a non-negative <paramref name="feature"/>.
        /// </summary>
        private static int NormalizeFeature(int feature, int interval)
        {
            return Math.Min(feature / interval, Dimensionality - 1);
        }

        /// <summary>Length of the feature vector (slots 0 and 9 are currently constant).</summary>
        public static readonly int FeatureNumber = 10;

        /// <summary>Number of buckets each feature is discretized into.</summary>
        public static readonly int Dimensionality = 10;

        /// <summary>Number of classification categories.</summary>
        public static readonly int CategoryNumber = 2;

        /// <summary>Prior probabilities for the two categories, in category order.</summary>
        internal static readonly double[] PriorProbability = new double[2] { 0.7, 0.3 };
    }
}
