<?xml version="1.0"?>
<doc>
    <assembly>
        <name>RobotRules</name>
    </assembly>
    <members>
        <member name="T:RobotRules.RobotException">
            <summary>
            The base class for exceptions thrown when an error occurs that is related to Internet robots.
            </summary>
        </member>
        <member name="T:RobotRules.ContentTypeException">
            <summary>
            The exception that is thrown when a robot control file has an inappropriate content type.
            </summary>
        </member>
        <member name="T:RobotRules.DownloadFailedException">
            <summary>
            The exception that is thrown when a robot control file cannot be downloaded due to
            a transfer problem (not just an HTTP status code indicating failure).
            </summary>
        </member>
        <member name="T:RobotRules.InvalidUserAgentException">
            <summary>
            The exception that is thrown when an invalid user agent token is specified.
            </summary>
        </member>
        <member name="T:RobotRules.SiteMismatchException">
            <summary>
            The exception that is thrown when the specified URI does not match the site to which
            the robot control file referred.
            </summary>
        </member>
        <member name="T:RobotRules.ParseOptions">
            <summary>
            Indicates how a robot control file is to be parsed.
            </summary>
        </member>
        <member name="F:RobotRules.ParseOptions.None">
            <summary>
            None of the options are set.
            </summary>
        </member>
        <member name="F:RobotRules.ParseOptions.IgnoreFieldNameCase">
            <summary>
            Field names in robot control files (such as "Allow" and "User-agent") are accepted
            in any combination of upper and lower case, not just the standard casing.
            </summary>
        </member>
        <member name="F:RobotRules.ParseOptions.AcceptBlankDisallow">
            <summary>
            Blank Disallow lines are understood to mean that the user agent is allowed to access
            any URI on the site, as defined in 'A Standard for Robot Exclusion' (1994) but not
            the 1996 RFC Draft Memo.
            </summary>
        </member>
        <member name="F:RobotRules.ParseOptions.AsteriskWildcard">
            <summary>
            The asterisk character ('*') in robot control files should be interpreted as a wildcard
            representing any character sequence.
            </summary>
            <remarks>
            Wildcard matching is not part of the robot control standard, under which '*' is a
            valid path character, but it is supported by Googlebot and possibly others.
            </remarks>
        </member>
        <member name="F:RobotRules.ParseOptions.All">
            <summary>
            All of the options are set.
            </summary>
        </member>
        <member name="F:RobotRules.ParseOptions.Defaults">
            <summary>
            The default options are set.
            </summary>
        </member>
        <member name="T:RobotRules.RobotsFileParser">
            <summary>
            Parses robot control files and indicates which resources are accessible to named user agents.
            </summary>
            <remarks>
            Based on Martijn Koster's 1996 RFC Draft Memo on Web Robots Control, with optional support
            for the blank Disallow lines of 'A Standard for Robot Exclusion' (1994). At the time of
            writing (2005), the 1994 document is still current, but the 1996 document is essentially
            backwards-compatible.
            </remarks>
        </member>
        <member name="F:RobotRules.RobotsFileParser.ROBOTS_FILE_NAME">
            <summary>
            The filename used for robot control files.
            </summary>
        </member>
        <member name="F:RobotRules.RobotsFileParser.TOKEN_ALL_USER_AGENTS">
            <summary>
            The user agent token that represents all user agents.
            </summary>
        </member>
        <member name="F:RobotRules.RobotsFileParser.ROBOTS_CONTENT_TYPE">
            <summary>
            The content type used for robot control files.
            </summary>
        </member>
        <member name="F:RobotRules.RobotsFileParser.RFC_1945_INVALID_CHARS">
            <summary>
            The set of characters (excluding control characters, i.e. those whose ASCII value
            is lower than that of the space character, 0x20) that are not permitted in a token, as defined in RFC 1945.
            </summary>
        </member>
        <member name="F:RobotRules.RobotsFileParser.RFC_1808_SINGLE_PCHARS">
            <summary>
            The set of characters (excluding '%', which is only valid as part of a hex triplet)
            that are permitted in a "pchar", as defined by RFC 1808.
            </summary>
            <remarks>
            This includes digits, alphabetic characters (regardless of case), "safe" characters
            <![CDATA[
            ('$', '-', '_', '.', '+'), "extra" characters ('!', '*', "'", '(', ')', ','),
            "national" characters ('{', '}', '|', '\', '^', '~', '[', ']', '`'), and the
            other characters that are specifically allowed in "pchar" (':', '@', '&', '=').
            ]]>
            </remarks>
        </member>
        <member name="M:RobotRules.RobotsFileParser.#ctor">
            <summary>
            Initialises a new RobotsFileParser.
            </summary>
        </member>
        <member name="M:RobotRules.RobotsFileParser.#ctor(System.Uri)">
            <summary>
            Reads and interprets the contents of a Web site's robot control file.
            </summary>
            <param name="site">The URI of the Web site or any file on it.</param>
        </member>
        <member name="M:RobotRules.RobotsFileParser.#ctor(System.Uri,System.Int32)">
            <summary>
            Reads and interprets the contents of a Web site's robot control file.
            </summary>
            <param name="site">The URI of the Web site or any file on it.</param>
            <param name="timeout">The time, in milliseconds, after which the Web request should
            be aborted if no response has been received, or zero for the default timeout.</param>
        </member>
        <member name="M:RobotRules.RobotsFileParser.#ctor(System.IO.FileInfo,System.Uri)">
            <summary>
            Reads and interprets the contents of a robot control file.
            </summary>
            <param name="file">The robot control file.</param>
            <param name="site">The URI of the Web site or any file on it.</param>
        </member>
        <member name="M:RobotRules.RobotsFileParser.#ctor(System.String[],System.Uri)">
            <summary>
            Reads and interprets the contents of a robot control file.
            </summary>
            <param name="robotsFileLines">The lines of the file in sequential order.</param>
            <param name="site">The URI of the Web site or any file on it.</param>
        </member>
        <member name="M:RobotRules.RobotsFileParser.Clear">
            <summary>
            Resets the parser to its original empty state.
            </summary>
        </member>
        <member name="M:RobotRules.RobotsFileParser.GetRegexOptions">
            <summary>
            Returns the regular expression options to be used when validating a field name
            in a robot control file.
            </summary>
            <returns>The regular expression options to be used.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.IsUserAgentLine(System.String,System.String@)">
            <summary>
            Determines whether a normalised line from a robot control file is a User-agent line.
            </summary>
            <param name="line">The line.</param>
            <param name="userAgent">The user agent string, or null.</param>
            <returns>True if the line is a User-agent line, else false.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.IsAllowOrDisallowLine(System.String,System.Boolean@,System.String[]@)">
            <summary>
            Determines whether a normalised line from a robot control file is an Allow or
            Disallow line with at least one valid path.
            </summary>
            <param name="line">The line.</param>
            <param name="allow">Whether the line is an Allow line.</param>
            <param name="paths">The valid paths to which the rule refers, or null.</param>
            <returns>True if the line is an Allow or Disallow line, else false.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.ContainsInvalidCharEscapes(System.String)">
            <summary>
            Determines whether a URI contains any incorrectly formed hexadecimal character escape.
            </summary>
            <param name="uri">The URI to be tested.</param>
            <returns>True if the URI contains any incorrectly formed hexadecimal character escape, else false.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.IsValidPathForRule(System.String,System.Boolean)">
            <summary>
            Determines whether a string is a suitable path for an Allow or Disallow rule.
            </summary>
            <remarks>
            The path must be the leftmost non-empty part of a relative URI, considered to be
            rooted at the host, such as "/" or "/docum" or "/documents/" or "/~bob/index.htm".
            Complete URIs (with scheme and authority) such as "http:// ..." will not work;
            however, RFC 1808 allows ':' in path segments and permits empty path segments
            (so that '//' is legal), so we can't actually exclude anything that looks like a
            complete URI but must treat it as a relative one ("http://site.com/http://...").
            Any fragment identifier ("#section") will have been removed as a comment -
            and is meaningless to robots anyway - so we can ignore those.
            </remarks>
            <param name="s">The string to be tested.</param>
            <param name="isAllowRule">Whether the rule is an Allow rule.</param>
            <returns>True if the string is a valid path, else false.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.GetRfc1945TokenValidLength(System.String)">
            <summary>
            Returns the number of leading characters in a string that form a valid token, as
            defined by RFC 1945.
            </summary>
            <param name="s">The string to be tested.</param>
            <returns>The number of leading characters that form a valid token.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.IsRfc1945Token(System.String)">
            <summary>
            Determines whether a string is a token, as defined by RFC 1945.
            </summary>
            <param name="s">The string to be tested.</param>
            <returns>True if the string is a valid token, else false.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.IsExtensionLine(System.String)">
            <summary>
            Determines whether a normalised line from a robot control file matches the
            specification for an extension (i.e. some future addition to the format).
            </summary>
            <param name="line">The line.</param>
            <returns>True if the line is an extension, else false.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.NormaliseLine(System.String)">
            <summary>
            Converts a line from a robot control file into a normal form by removing
            leading and trailing whitespace and comments.
            </summary>
            <param name="line">The line.</param>
            <returns>The normalised line.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.CommitRules(System.Collections.ArrayList,System.Collections.ArrayList,System.Collections.Hashtable)">
            <summary>
            Adds a set of rules for a set of user agents to a hashtable.
            </summary>
            <param name="userAgents">The set of user agent tokens (ArrayList of string).</param>
            <param name="rules">The set of rules (ArrayList of Rule).</param>
            <param name="target">The hashtable to be added to (Hashtable of string -> Rule[]).</param>
        </member>
        <member name="M:RobotRules.RobotsFileParser.IsRfc1808Path(System.String)">
            <summary>
            Determines whether a string is a valid "path", as defined by RFC 1808.
            </summary>
            <param name="s">The string to be tested.</param>
            <returns>True if the string is a "path", else false.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.IsRfc1808Pchar(System.String)">
            <summary>
            Determines whether a string is a valid "pchar" (path character), as defined by RFC 1808.
            </summary>
            <param name="s">The string to be tested.</param>
            <returns>True if the string is a "pchar", else false.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.DownloadRobotControlFile(System.Uri,System.Int32)">
            <summary>
            Retrieves a robot control file from a Web server.
            </summary>
            <param name="uri">The URI of the robot control file.</param>
            <param name="timeout">The time, in milliseconds, after which the Web request should
            be aborted if no response has been received, or zero for the default timeout.</param>
            <returns>The lines from the robot control file, or an empty array if the file
            was not found.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.Parse(System.Uri)">
            <summary>
            Reads and interprets the contents of a Web site's robot control file.
            </summary>
            <param name="site">The URI of the Web site or any file on it.</param>
        </member>
        <member name="M:RobotRules.RobotsFileParser.Parse(System.Uri,System.Int32)">
            <summary>
            Reads and interprets the contents of a Web site's robot control file. If the site has
            no robot control file, it is assumed that robots may access any part of the site.
            </summary>
            <param name="site">The URI of the Web site or any file on it.</param>
            <param name="timeout">The time, in milliseconds, after which the Web request should
            be aborted if no response has been received, or zero for the default timeout.</param>
        </member>
        <member name="M:RobotRules.RobotsFileParser.Parse(System.IO.FileInfo,System.Uri)">
            <summary>
            Reads and interprets the contents of a robot control file.
            </summary>
            <param name="file">The robot control file.</param>
            <param name="site">The URI of the Web site or any file on it.</param>
        </member>
        <member name="M:RobotRules.RobotsFileParser.Parse(System.String[],System.Uri)">
            <summary>
            Reads and interprets the contents of a robot control file.
            </summary>
            <param name="robotsFileLines">The lines of the file in sequential order.</param>
            <param name="site">The URI of the Web site or any file on it.</param>
        </member>
        <member name="M:RobotRules.RobotsFileParser.IsAllowed(System.String,System.Uri)">
            <summary>
            Determines whether a named user agent is permitted to access the resource
            at a specified URI.
            </summary>
            <param name="userAgent">The user agent token.</param>
            <param name="resource">The URI for the resource that the user agent wishes to access.</param>
            <returns>True if the user agent is permitted to access the resource, else false.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.GetRegexFromWildcardPath(System.String)">
            <summary>
            Generates a regular expression from a path that contains the asterisk wildcard ('*'),
            replacing the asterisk with '.*' (indicating any character sequence) and all other
            metacharacters with their escape codes.
            </summary>
            <param name="path">The path.</param>
            <returns>The corresponding regular expression.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.UnescapeForPathComparison(System.String)">
            <summary>
            Converts hexadecimal character escapes in a path to their corresponding single
            characters, except those that would interfere with path matching.
            </summary>
            <param name="path">The path containing the hexadecimal escape sequences.</param>
            <returns>The path with any hexadecimal escape sequences replaced by their characters.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.IsPathMatch(System.String,System.String)">
            <summary>
            Determines whether one absolute path is considered to match another absolute path
            from a robot control file.
            </summary>
            <param name="ourPath">The absolute path to be tested.</param>
            <param name="rulePath">The path from the robot control file.</param>
            <returns>True if the first path is considered to match the second, else false.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.FindMatchingUserAgent(System.String)">
            <summary>
            Finds the user agent token from the robot control file that applies to a named robot.
            </summary>
            <param name="userAgent">The robot's user agent token.</param>
            <returns>The matching user agent token from the file, or null if nothing matched.</returns>
        </member>
        <member name="M:RobotRules.RobotsFileParser.GetRobotsFileUri(System.Uri)">
            <summary>
            Returns the URI at which the robot control file for a Web site is expected to reside.
            </summary>
            <param name="site">The URI of the Web site or any file on it.</param>
            <returns>The expected URI of the robot control file.</returns>
        </member>
        <member name="P:RobotRules.RobotsFileParser.Options">
            <summary>
            Gets or sets a value indicating how robot control files should be parsed.
            </summary>
        </member>
        <member name="P:RobotRules.RobotsFileParser.AllRestricted">
            <summary>
            Gets a value indicating whether access to the current site is completely forbidden
            because access to the robot control file itself was denied.
            </summary>
        </member>
        <member name="P:RobotRules.RobotsFileParser.SiteBase">
            <summary>
            Gets the URI of the Web site whose robot control file has been parsed.
            </summary>
            <remarks>
            The URI includes the scheme and authority but no path segments. This would usually
            correspond to the Web site's home page.
            </remarks>
        </member>
        <member name="T:RobotRules.Rule">
            <summary>
            Represents an Allow or Disallow rule associated with a partial URI.
            </summary>
        </member>
        <member name="M:RobotRules.Rule.#ctor(System.String,System.Boolean,System.Int32)">
            <summary>
            Initialises a new Rule.
            </summary>
            <param name="partialUri">The partial URI to which the rule applies.</param>
            <param name="allow">True if this is an Allow rule, or false for a Disallow rule.</param>
            <param name="priority">The zero-based index of the rule's parent record in the file.</param>
        </member>
        <member name="P:RobotRules.Rule.PartialUri">
            <summary>
            The partial URI to which the rule applies.
            </summary>
        </member>
        <member name="P:RobotRules.Rule.Allow">
            <summary>
            True if this is an Allow rule, or false for a Disallow rule.
            </summary>
        </member>
        <member name="P:RobotRules.Rule.Priority">
            <summary>
            The zero-based index of the rule's parent record in the robot control file,
            a lower number indicating an earlier record (which takes priority over any
            later records that also partially match a given user agent token).
            </summary>
        </member>
    </members>
</doc>
