//Copyright (c) Microsoft Corporation.  All rights reserved.

using System;
using System.Collections.Generic;
using System.Text;

namespace Eas.WebCrawler.Interfaces {
    
    /// <summary>The different states of processing in which a worker can be.</summary>
    /// <summary>Enumerates the processing states a worker can occupy.</summary>
    public enum WorkerState {
        /// <summary>The worker has not yet started processing.</summary>
        Unstarted = 0,
        /// <summary>The worker is actively processing.</summary>
        Working = 1,
        /// <summary>The worker is temporarily suspended.</summary>
        Paused = 2,
        /// <summary>The worker has finished processing.</summary>
        Stopped = 3
    }
    
    /// <summary>Context used by web crawler during processing.</summary>
    /// <summary>Context consulted by the web crawler while it processes requests.</summary>
    /// <remarks>Supplies the collaborators (extractor, selector, storage, history, events)
    /// and tuning values (worker count, sleep/timeout intervals) that drive a crawl.</remarks>
    public interface WebCrawlerContext : IDisposable {

        /// <summary>The comparer used to determine the order in which requests are processed.</summary>
        IComparer<Request> RequestPriorityComparer { get; }

        /// <summary>The <see cref="Eas.WebCrawler.Interfaces.LinkExtractor"/> used to extract links from web responses.</summary>
        LinkExtractor LinkExtractor { get; }

        /// <summary>The number of workers to use for processing requests.</summary>
        /// <remarks>This value determines how many requests can be processed at the same time.</remarks>
        int WorkerCount { get; }

        /// <summary>The collection of URIs with which to begin processing.</summary>
        /// <remarks>The URIs in this list are processed first; the crawler then follows any
        /// appropriate links found in those pages, and so on.</remarks>
        IEnumerable<string> StartingUris { get; }

        /// <summary>The <see cref="Eas.WebCrawler.Interfaces.LinkSelector"/> used to select which links to follow while processing.</summary>
        LinkSelector Selector { get; }

        /// <summary>The factory used to obtain a <see cref="Eas.WebCrawler.Interfaces.TemporaryStorage"/> for storing the responses of requests.</summary>
        TemporaryStorageFactory StorageFactory { get; }

        /// <summary>The <see cref="Eas.WebCrawler.Interfaces.CompletedRequestHistory"/> used to check whether a request was processed in a previous run.</summary>
        CompletedRequestHistory CompletedRequestHistory { get; }

        /// <summary>The number of milliseconds to sleep during those actions that require intermittent sleeping.</summary>
        int SleepTimeInMilliseconds { get; }

        /// <summary>The number of milliseconds to wait without getting a response from the request's server before a request times out.</summary>
        int TimeoutInMilliseconds { get; }

        /// <summary>The <see cref="RequestStore"/> used to store requests.</summary>
        RequestStore Store { get; }

        /// <summary>The <see cref="Eas.WebCrawler.Interfaces.WebCrawlerEvents"/> which will fire relevant events during processing.</summary>
        WebCrawlerEvents Events { get; }
    }
}
