﻿// Author: Iulian Lita
// Project: Robots
// Path: C:\Users\Tym\Documents\Visual Studio 2008\Projects\WebCrawlerProject\Robots
// Creation date: 1/10/2009 11:04 PM
// Last modified: 1/11/2009 1:55 PM

using System;
using System.Collections.Generic;
using Database;
using WebPage.Client;

namespace Robots
{
    /// <summary>
    /// Downloads (or loads from the database cache) the robots.txt file for the
    /// domain of a given link and parses the rules of its "User-agent: *" section
    /// into allowed/disallowed URL lists.
    /// </summary>
    public class ParseRobots
    {
        #region Fields
        // Absolute URL of the domain's robots.txt file, e.g. "http://example.com/robots.txt".
        private readonly string _link;
        private readonly List<string> _allowedZones;
        private readonly List<string> _disallowedZones;
        #endregion

        /// <summary>
        /// Fetches and parses the robots.txt file for the domain of <paramref name="link"/>.
        /// </summary>
        /// <param name="link">the link of the page crawled for the first time on a new domain</param>
        /// <exception cref="InvalidOperationException">
        /// when the robots.txt file contains no "User-agent: *" section
        /// </exception>
        public ParseRobots(string link)
        {
            _link = BuildLink(link);
            _allowedZones = new List<string>();
            _disallowedZones = new List<string>();
            BuildLists();
        }

        /// <summary>
        /// List of absolute URLs that represent the allowed zones to crawl
        /// </summary>
        public List<string> AllowedZones { get { return _allowedZones; } }

        /// <summary>
        /// List of absolute URLs that represent the disallowed zones to crawl
        /// </summary>
        public List<string> DisallowedZones { get { return _disallowedZones; } }

        /// <summary>
        /// Builds the robots.txt link from any page URL of the domain.
        /// </summary>
        /// <param name="link">the link of the page crawled for the first time on a new domain</param>
        /// <returns>a string containing the absolute URL of the robots.txt file</returns>
        private static string BuildLink(string link)
        {
            // The authority part ends at the first '/' that follows the "://" separator.
            int schemeSlash = link.IndexOf('/');
            int pathStart = link.IndexOf('/', schemeSlash + 2);
            if (pathStart == -1)
            {
                // No path component (e.g. "http://example.com"): the original code
                // returned a relative "robots.txt" here, which is a broken link.
                return link + "/robots.txt";
            }
            return link.Substring(0, pathStart + 1) + "robots.txt";
        }

        /// <summary>
        /// Populates the allowed/disallowed lists from the "User-agent: *" section
        /// of the robots.txt file. The file content is cached in the database and
        /// re-downloaded only when its Last-Modified date changes.
        /// </summary>
        private void BuildLists()
        {
            if (_link == null)
            {
                return;
            }

            // 'using' guarantees the database handle is released even when an
            // exception is thrown (the original disposed it only on success paths).
            using (DatabaseWork dw = new DatabaseWork())
            {
                MyWebClient mwc = new MyWebClient(_link);
                string content = LoadContent(dw, mwc);
                string rules = ExtractUserAgentSection(content);
                ParseRules(rules);
            }
        }

        /// <summary>
        /// Returns the robots.txt content: from the database cache while it is
        /// still up to date, otherwise freshly downloaded (and re-cached).
        /// </summary>
        private string LoadContent(DatabaseWork dw, MyWebClient mwc)
        {
            string content;
            if (dw.RobotExists(_link))
            {
                DateTime lastModifiedDate = mwc.GetLastModifiedDate();
                if (dw.RobotisModified(lastModifiedDate, _link))
                {
                    // The remote file changed: download it and update the database.
                    content = mwc.DownloadDataString();
                    dw.UpdateRobots(_link, content);
                }
                else
                {
                    // The cache is current: read the content from the database.
                    content = dw.GetRobotsContent(_link);
                }
            }
            else
            {
                // First visit on this domain: download and add it to the database.
                content = mwc.DownloadDataString();
                dw.AddRobot(_link, content, mwc.GetLastModifiedDateString());
            }
            return content;
        }

        /// <summary>
        /// Extracts the rule lines that apply to every crawler: the text between
        /// "User-agent: *" and the next "User-agent:" header (or end of file).
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// when no "User-agent: *" section exists
        /// </exception>
        private static string ExtractUserAgentSection(string content)
        {
            // Accept both the "user-agent: *" and "user-agent:*" spellings.
            int start = content.IndexOf("user-agent: *", StringComparison.OrdinalIgnoreCase);
            int headerLength = 13;
            if (start == -1)
            {
                start = content.IndexOf("user-agent:*", StringComparison.OrdinalIgnoreCase);
                headerLength = 12;
            }
            if (start == -1)
            {
                // The original called Substring(start + 12) before this guard, which
                // could raise ArgumentOutOfRangeException instead of this diagnostic.
                throw new InvalidOperationException(
                    "No specification found for our bot (no \"User-agent: *\" section).");
            }

            string section = content.Substring(start + headerLength).Trim();
            // Our section ends where the rules for the next specific agent begin.
            int nextAgent = section.IndexOf("user-agent:", StringComparison.OrdinalIgnoreCase);
            if (nextAgent != -1)
            {
                section = section.Substring(0, nextAgent).Trim();
            }
            return section;
        }

        /// <summary>
        /// Splits the rule section into lines and collects "Allow:"/"Disallow:"
        /// paths as absolute URLs rooted at the domain.
        /// </summary>
        private void ParseRules(string rules)
        {
            // Rule paths are relative to the domain root, so strip the trailing
            // "/robots.txt" from the cached link. (The original appended paths to
            // the robots.txt URL itself, producing "http://host/robots.txt/path".)
            string root = _link.Substring(0, _link.Length - "/robots.txt".Length);
            foreach (string line in rules.Split('\n'))
            {
                if (line.StartsWith("disallow:", StringComparison.OrdinalIgnoreCase))
                {
                    // "disallow:" is 9 characters long.
                    _disallowedZones.Add(root + line.Substring(9).Trim());
                }
                else if (line.StartsWith("allow:", StringComparison.OrdinalIgnoreCase))
                {
                    // "allow:" is 6 characters long.
                    _allowedZones.Add(root + line.Substring(6).Trim());
                }
            }
        }
    }
}
