﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using HTMLDoc;
using GraphList;
using System.Xml;
using System.IO;
using System.Windows.Forms;

namespace Controller
{
    public class CrawlerCtr
    {
        private TreeCtr treeCtr;        // GUI tree controller (singleton); currently only initialised here, never populated.
        private IGraph graph;           // The graph where the html sites (vertices) and links (edges) are stored.
        private string newUrl;          // Base url remembered across recursive CrawlMyAss(...) calls so relative links
                                        // resolve against the start site instead of the current page. Without it a link
                                        // came out as e.g. ".../internal02.htmleaf01.htm" instead of ".../leaf01.htm".
        private int depthOfGraph = 0;   // Maximum crawl depth requested by the caller.
        private int counter;            // Approximate current recursion depth, compared against depthOfGraph.

        public CrawlerCtr()
        {
            graph = new Graph();
            newUrl = "";
        }

        // Entry point called from the GUI. 'url' is the website to be examined for links.
        // The first vertex must be created here before crawling, because it takes two
        // vertices to create an edge (link) in the graph.
        public void StartCrawl(string url, int depth)
        {
            treeCtr = TreeCtr.GetInstance(url);
            HtmlDoc htmlDoc = new HtmlDoc(url);
            Vertex firstVertex = new Vertex(htmlDoc);
            graph.AddVertex(firstVertex);
            this.depthOfGraph = depth;
            counter = 0;    // FIX: reset BEFORE the crawl. The original reset it afterwards, so a
                            // crawl aborted by an exception left a stale depth count for the next run.
            CrawlMyAss(url, firstVertex, depthOfGraph);
        }

        // A link is treated as absolute only when it begins with an http(s) scheme.
        // FIX: the original used Contains("http://"), which misclassified relative links
        // that merely mention "http://" in their text and treated https links as relative.
        private static bool IsAbsoluteLink(string link)
        {
            return link.StartsWith("http://", StringComparison.OrdinalIgnoreCase)
                || link.StartsWith("https://", StringComparison.OrdinalIgnoreCase);
        }

        // Recursive depth-first crawl, called from StartCrawl(...) after the first vertex exists.
        // Uses the call stack (no explicit stack structure) to realise the DFS.
        // 'inputVertex' is the page that linked to 'url'; 'depthOfGraph' is the depth limit.
        // NOTE(review): assumes a connected graph reachable from the start url — confirm.
        public void CrawlMyAss(string url, Vertex inputVertex, int depthOfGraph)
        {
            // Remember the very first url seen; relative links are resolved against it.
            if (newUrl.Length == 0)
            {
                newUrl = url;
            }
            counter++;

            if (!graph.ContainsVertex(url))
            {
                // Unseen page: create its vertex, link it to the referrer, then recurse into its links.
                HtmlDoc htmlDoc = new HtmlDoc(url);
                Vertex vertex = new Vertex(htmlDoc);
                graph.AddVertex(vertex);
                graph.AddEdge(inputVertex, vertex);

                foreach (string link in htmlDoc.Links)
                {
                    // Absolute links are followed as-is; relative links are prefixed with the start url.
                    string target = IsAbsoluteLink(link) ? link : newUrl + link;
                    if (depthOfGraph > counter)
                    {
                        CrawlMyAss(target, vertex, depthOfGraph);
                        counter--;  // The recursive call incremented counter on entry; undo it here.
                    }
                }
            }
            else
            {
                // Known page (e.g. the root vertex added by StartCrawl): walk its stored links.
                foreach (string link in graph.Vertices[url].HtmlDoc.Links)
                {
                    string target = IsAbsoluteLink(link) ? link : newUrl + link;
                    HtmlDoc html = new HtmlDoc(target);
                    Vertex newVertex = new Vertex(html);
                    if (graph.ContainsVertex(newVertex.Url))
                    {
                        // Already crawled: just record the link — a previous pass
                        // has already recursed into this page, so no recursion here.
                        graph.AddEdge(inputVertex, newVertex);
                    }
                    else
                    {
                        graph.AddVertex(newVertex);
                        graph.AddEdge(inputVertex, newVertex);
                        if (counter < depthOfGraph)
                        {
                            CrawlMyAss(target, newVertex, depthOfGraph);
                            counter--;  // Undo the increment done by the recursive call on entry.
                        }
                    }
                }
            }
        }

        // Prints every vertex in the crawled graph to the console.
        public void PrintVerticesInGraph()
        {
            graph.PrintGraph();
        }

        // Removes all vertices from the graph so a fresh crawl can be started.
        public void ClearGraph()
        {
            graph.Vertices.Clear();
        }

        // Clears the console output.
        public void ClearConsole()
        {
            Console.Clear();
        }

        // Forgets the remembered base url so the next crawl resolves relative links anew.
        public void ClearNewUrl()
        {
            newUrl = "";
        }
    }
}
