﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Collections;
using WebCrawler.HtmlParser.Interfaces;
using WebCrawler.HtmlParser;
using WebCrawler.Graph.Interfaces;
using WebCrawler.Graph;
using WebCrawler.Crawler.Interfaces;
using System.Threading;
using GlobalInterfaces;

namespace WebCrawler.Crawler
{
    /// <summary>
    /// Crawls a web site starting from a root URL and records the pages it
    /// visits as vertices/edges in an <see cref="IBasicGraph"/> site map.
    /// Progress is reported to registered <see cref="IObserver"/>s via
    /// pipe-delimited status strings ("2|...", "3|..."); the exact string
    /// format is preserved because observers parse it.
    /// </summary>
    public class Crawler : ICrawler //, IObservable, IObserver
    {
        // NOTE(review): static — this observer list is shared across ALL Crawler
        // instances. Left unchanged because HtmlDoc instances capture the list.
        private static IList<IObserver> _observers = new List<IObserver>();

        // URLs already added to the site map; guards against revisiting pages.
        private List<string> _visitedLinks;
        private IBasicGraph _siteMap;
        private IHtmlDoc _startPage;
        private Vertex _rootVertex;

        /// <summary>The vertex representing the crawl's start page.</summary>
        public IVertex RootVertex
        {
            get { return (IVertex)_rootVertex; }
            set { _rootVertex = value as Vertex; }
        }
        private string _url;

        // Current recursion depth inside BuildSiteMapDFS; reset per branch.
        private int _depthCount = 0;

        /// <summary>Maximum crawl breadth. NOTE(review): not enforced anywhere in
        /// this class; name kept (including spelling) for interface compatibility.</summary>
        public int MAXBREDTH { get; set; }

        /// <summary>Maximum link depth followed by the depth-first crawl.</summary>
        public int MAXDEPTH { get; set; }

        /// <summary>When true, already-visited links are re-added to the graph as
        /// duplicate vertices. NOTE(review): name is misspelled but kept because
        /// it is part of the public surface used by callers.</summary>
        public bool Dubplicate { get; set; }

        /// <summary>
        /// The crawl's start URL. Setting it loads the page (observers are
        /// notified by HtmlDoc) and normalizes the URL to its default page
        /// via <see cref="RegexParser.GetDefaultPage"/>.
        /// </summary>
        public string Url
        {
            get { return _url; }
            set
            {
                _startPage = new HtmlDoc(value, _observers);
                _url = RegexParser.GetDefaultPage(_startPage);
            }
        }

        /// <summary>The site map built by the most recent crawl.</summary>
        public IBasicGraph Graph
        {
            get { return _siteMap; }
        }

        /// <summary>Creates an empty crawler; set <see cref="Url"/> before crawling.</summary>
        public Crawler()
        {
            _visitedLinks = new List<string>();
            _siteMap = new BasicGraph();
            Dubplicate = false;
        }

        /// <summary>Creates a crawler and immediately loads the start page.</summary>
        public Crawler(string url)
            : this()
        {
            _startPage = new HtmlDoc(url);
            _url = RegexParser.GetDefaultPage(_startPage);
        }

        /// <summary>Creates a crawler, registers an observer, and loads the start page.</summary>
        public Crawler(string url, IObserver o)
            : this()
        {
            _observers.Add(o);
            _startPage = new HtmlDoc(url, _observers);
            _url = RegexParser.GetDefaultPage(_startPage);
        }

        /// <summary>Creates a crawler with a depth limit for the DFS crawl.</summary>
        public Crawler(string url, int maxDepth)
            : this(url)
        {
            MAXDEPTH = maxDepth;
        }

        /// <summary>
        /// Starts a crawl from the root page on a background thread.
        /// </summary>
        /// <param name="t">Receives the started crawler thread so the caller can
        /// monitor or join it.</param>
        public void Traversal(out Thread t)
        {
            _depthCount = 0;
            _rootVertex = new Vertex(_startPage);
            _visitedLinks.Clear();

            // If the URL names a default document ("index..."/"default..."), also
            // mark the bare directory URL as visited so the root page is not
            // re-crawled under its alternate spelling.
            // FIX: the original SUMMED LastIndexOf("index") and
            // LastIndexOf("default") (wrong offset when both occur, possibly past
            // the end of the string) and called Remove(0) — adding an empty
            // string to the visited list — when neither occurred.
            int i = Url.Contains("index")
                ? Url.LastIndexOf("index", StringComparison.Ordinal)
                : Url.Contains("default")
                    ? Url.LastIndexOf("default", StringComparison.Ordinal)
                    : 0;
            if (i > 0)
            {
                _visitedLinks.Add(_rootVertex.Url.Remove(i));
            }
            _visitedLinks.Add(_rootVertex.Url);
            _visitedLinks.AddRange(_rootVertex.Document.GetAllLinks());
            _siteMap.AddVertex(_rootVertex);

            t = new Thread(BuildSiteMapRoots);
            t.Name = "CrawlerThread";
            t.IsBackground = true;
            t.Start(_rootVertex);
        }

        /// <summary>
        /// Thread entry point: builds the first level of the site map from the
        /// root vertex, descending depth-first into each child that has links.
        /// </summary>
        /// <param name="obj">The root <see cref="IVertex"/> (Thread.Start argument).</param>
        private void BuildSiteMapRoots(object obj)
        {
            IVertex vertex = (IVertex)obj;
            // Hoisted out of the loop; assumes GetAllLinks() returns a stable
            // list for a loaded document — TODO confirm against HtmlDoc.
            IList<string> links = vertex.Document.GetAllLinks();
            for (int i = 0; i < links.Count; i++)
            {
                // FIX: dropped a redundant "v != null" check — new cannot return null.
                IVertex v = new Vertex(new HtmlDoc(links[i]));
                if (v.Url != _rootVertex.Url)
                {
                    _siteMap.AddVertex(v);
                    _siteMap.AddEdge(vertex, v);

                    if (v.Document.GetAllLinks().Count > 0)
                    {
                        BuildSiteMapDFS(vertex, v);
                        _depthCount = 0; // reset the depth budget for the next branch
                    }
                }
                else if (Dubplicate)
                {
                    // Link points back at the root: optionally record it as a duplicate.
                    v = new Vertex() { Url = links[i] };
                    v.Dubplicated = true;
                    _siteMap.AddVertex(v);
                    _siteMap.AddEdge(vertex, v);
                }
            }
            Update("3|Crawling done."); // status protocol: "3|" = crawl finished
        }

        /// <summary>
        /// Depth-first: adds each of <paramref name="current"/>'s outgoing links
        /// to the site map and recurses until <see cref="MAXDEPTH"/> is reached.
        /// </summary>
        /// <param name="parent">Recursion context; not referenced in the body
        /// (kept so existing call sites are unchanged).</param>
        /// <param name="current">Vertex whose outgoing links are processed.</param>
        private void BuildSiteMapDFS(IVertex parent, IVertex current)
        {
            if (current == null || current.Document == null)
                return;
            else if (current.Document.GetAllLinks().Count > 0)
                _depthCount++;

            if (_depthCount >= MAXDEPTH)
                return;

            IList<string> links = current.Document.GetAllLinks();
            for (int i = 0; i < links.Count; i++)
            {
                // FIX: removed a no-op scanning loop over the child's links and
                // the "skip" flag it was meant to drive — the flag was never
                // assigned true, and the loop's "links != null" guard executed
                // only AFTER links.Count had already been evaluated.
                IVertex v = new Vertex(new HtmlDoc(links[i]));

                if (!_visitedLinks.Contains(links[i]))
                {
                    // NOTE(review): membership is tested on the raw link text but
                    // v.Url is what gets recorded; if Vertex normalizes URLs the
                    // two can diverge — confirm against Vertex/RegexParser.
                    _visitedLinks.Add(v.Url);
                    Update("2|New lvl 1 root added."); // status protocol: "2|" = progress
                }
                else if (Dubplicate)
                {
                    v = new Vertex() { Url = links[i] };
                    v.Dubplicated = true;
                }

                // FIX: dropped an always-true "v != null" check; every link is
                // linked into the graph and descended into, as before.
                _siteMap.AddVertex(v);
                _siteMap.AddEdge(current, v);
                BuildSiteMapDFS(current, v);
            }
        }

        /// <summary>
        /// Builds the site map breadth-first on the calling thread.
        /// The crawl stops once the visited-link count has grown
        /// <c>numberOfPages</c> times (a crude "web depth" limit).
        /// </summary>
        public void BuildSiteMap()
        {
            Queue<IHtmlDoc> qPages = new Queue<IHtmlDoc>();
            Queue<Vertex> qVertices = new Queue<Vertex>();
            IList<string> _visitedLinksCompare = new List<string>();
            _rootVertex = new Vertex(_startPage);
            Vertex root = _rootVertex;
            Vertex leaf = null;
            _siteMap.AddVertex(root);
            qVertices.Enqueue(root);
            qPages.Enqueue(_startPage);
            int numberOfPages = 2; // defines webdepth
            int unvisitedLinksCount = 0; // compare with _visitedLinks
            int count = 0;

            Console.WriteLine(root.Url); // trace
            while (qPages.Count > 0 && count < numberOfPages)
            {
                // Expand every not-yet-visited link of the page at the queue head.
                while ((leaf = GetNextUnvisited(qPages.Peek().GetAllLinks())) != null)
                {
                    Console.WriteLine(leaf.Url); // trace
                    _siteMap.AddVertex(leaf);
                    _siteMap.AddEdge(root, leaf);
                    qVertices.Enqueue(leaf);
                    qPages.Enqueue(leaf.Document);
                } // end while

                root = qVertices.Dequeue();
                qPages.Dequeue();
                // If the number of visited links grew, count one completed "level".
                if (unvisitedLinksCount < _visitedLinks.Count)
                {
                    unvisitedLinksCount = _visitedLinks.Count;
                    count++;
                }
            } // end while
        }

        /// <summary>
        /// Returns a vertex for the first link in <paramref name="links"/> that
        /// has not been visited yet (marking it visited), or null when all of
        /// them have been seen.
        /// </summary>
        private Vertex GetNextUnvisited(IList<string> links)
        {
            Vertex v = null;
            int i = 0;
            bool foundUnvisited = false;
            while (!foundUnvisited && i < links.Count)
            {
                if (!_visitedLinks.Contains(links[i]))
                {
                    _visitedLinks.Add(links[i]);
                    v = new Vertex(new HtmlDoc(links[i]));
                    foundUnvisited = true;
                } // end if

                i++;
            } // end while

            return v;
        }

        /// <summary>
        /// Diagnostic helper: prints the site map in DFS order, then BFS order.
        /// </summary>
        public void GraphTraversalTest()
        {
            Console.WriteLine("*** Dfs ***");
            IList list = _siteMap.DfsIterative(_rootVertex);
            foreach (Vertex v in list)
            {
                Console.WriteLine(v.Url);
            } // end foreach

            Console.WriteLine();

            Console.WriteLine("*** Bfs ***");
            list = _siteMap.BfsIterative(_rootVertex);
            foreach (Vertex v in list)
            {
                Console.WriteLine(v.Url);
            }
        }

        /// <summary>
        /// Traverses the graph iteratively depth-first from the root vertex and
        /// returns the visited vertices. (The original summary said BFS, but
        /// DfsIterative is what is called.)
        /// </summary>
        public IList GraphTraverse()
        {
            IList list = _siteMap.DfsIterative(_rootVertex);
            return list;
        }

        /// <summary>Forwards a status object to all registered observers.</summary>
        public void Update(object obj)
        { NotifyAll(obj); }

        /// <summary>Notifies every registered observer with the given status object.</summary>
        public void NotifyAll(object obj)
        {
            foreach (IObserver o in _observers)
            {
                o.Update(obj);
            }
        }
    }
}
