﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GraphPackage;
using HTMLPackage;

namespace WebCrawler
{
    /// <summary>
    /// Crawls a web site starting from a single URL and builds a graph
    /// (<see cref="BasicGraphImpl"/>) whose vertices are downloaded pages and
    /// whose edges are the hyperlinks between them.
    /// </summary>
    public class Crawler
    {
        private BasicGraphImpl _sitemap = new BasicGraphImpl();
        private IHTMLDoc _startpage = null;
        private string _url = null;

        /// <summary>
        /// Creates a crawler rooted at <paramref name="url"/>.
        /// </summary>
        /// <param name="url">Absolute URL of the page to start crawling from.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="url"/> is null.</exception>
        public Crawler(string url)
        {
            if (url == null)
            {
                throw new ArgumentNullException("url");
            }
            this._url = url;
            // NOTE(review): HtmlDocRegEx presumably fetches the document in its
            // constructor, so constructing a Crawler performs network I/O — confirm.
            this._startpage = new HtmlDocRegEx(url);
        }

        /// <summary>The downloaded start page.</summary>
        public IHTMLDoc StartPage
        {
            get { return this._startpage; }
        }

        /// <summary>The site graph built by <see cref="BuildSitemap"/>.</summary>
        public BasicGraphImpl Sitemap
        {
            get { return this._sitemap; }
        }

        /// <summary>The URL the crawl was started from.</summary>
        public string Url
        {
            get { return this._url; }
        }

        /// <summary>
        /// Populates <see cref="Sitemap"/> by following links from the start
        /// page up to <paramref name="depth"/> levels deep.
        /// </summary>
        /// <param name="depth">Maximum link depth to follow; values &lt;= 0 build nothing.</param>
        public void BuildSitemap(int depth)
        {
            Populate(null, _startpage.GetAllLinks(), depth);
        }

        /// <summary>
        /// Recursive worker: registers one vertex per unique page URL and one
        /// edge per hyperlink traversed.
        /// </summary>
        /// <param name="previous">Vertex of the page the links came from; null on the initial call.</param>
        /// <param name="links">Links found on the previous page.</param>
        /// <param name="depth">Remaining link depth to follow.</param>
        private void Populate(Vertex previous, IList<Uri> links, int depth)
        {
            if (depth <= 0)
            {
                return;
            }

            if (previous == null)
            {
                // First call: register the root page, then recurse with the
                // root as the "previous" vertex. Depth is not consumed here,
                // matching the original semantics (root is layer zero).
                Vertex root = new Vertex(_startpage.URL.AbsoluteUri, _startpage);
                _sitemap.AddVertex(root);
                Populate(root, _startpage.GetAllLinks(), depth);
                return;
            }

            // Descending one layer down the link tree.
            depth--;

            foreach (Uri u in links)
            {
                // Reuse an already-registered vertex so each page appears in
                // the graph exactly once.
                Vertex existing = FindVertex(u.AbsoluteUri);
                if (existing != null)
                {
                    // BUG FIX: the original code found the existing vertex but
                    // never used it, so links to already-crawled pages were
                    // silently dropped from the sitemap. Connect it here; no
                    // re-download and no re-recursion for a known page.
                    _sitemap.AddEdge(previous, existing);
                    continue;
                }

                // New page: download, register, link and recurse. The download
                // now happens only when actually needed (the original fetched
                // every link even when its vertex already existed).
                IHTMLDoc webSite = new HtmlDocRegEx(u.OriginalString);
                Vertex vert = new Vertex(webSite.URL.AbsoluteUri, webSite);
                _sitemap.AddVertex(vert);
                _sitemap.AddEdge(previous, vert);
                Populate(vert, webSite.GetAllLinks(), depth);
            }
        }

        /// <summary>
        /// Returns the sitemap vertex whose name equals <paramref name="name"/>,
        /// or null if none exists.
        /// </summary>
        private Vertex FindVertex(string name)
        {
            IEnumerator<Vertex> vertices = _sitemap.Vertices();
            while (vertices.MoveNext())
            {
                if (vertices.Current.Name == name)
                {
                    return vertices.Current;
                }
            }
            return null;
        }

        // NOTE(review): unused stub left over from development — it ignores its
        // argument and always returns a placeholder vertex. Kept so the class
        // surface is unchanged; remove once confirmed nothing reflects on it.
        private Vertex GetNextUnvisited(IList<HtmlDocRegEx> links)
        {
            return new Vertex("blah");
        }

        /// <summary>Enumerates all vertices currently in the sitemap.</summary>
        public IEnumerator<Vertex> TraverseMap()
        {
            return this._sitemap.Vertices();
        }
    }
}
