using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Web;
using System.Xml;
using System.Xml.XPath;
using Monti.MVVM;

namespace WikiUploader.Classes.DotNetWikiBot
{
   /// <summary>Class defines a set of wiki pages (constructed inside as List object).</summary>
   [ClassInterface(ClassInterfaceType.AutoDispatch)]
   [Serializable]
   public class WikiPageList : ViewModelBase
   {
      // Backing field for ListName. NOTE: the class is [Serializable], so this field
      // name is part of the binary-serialized form — do not rename it casually.
      private string m_ListName;

      /// <summary>
      /// Gets or sets the display name of this page list. Setting the value raises
      /// PropertyChanged (via ViewModelBase) so bound UI elements refresh.
      /// </summary>
      public string ListName
      {
         get { return this.m_ListName; }
         set
         {
            this.m_ListName = value;
            this.RaisePropertyChanged ("ListName");
         }
      }
      /// <summary>Internal generic list holding the collection of pages. Exposed with a
      /// public setter; most callers should prefer the indexers and Add/Insert/Remove
      /// helpers on this class instead of replacing the list wholesale.</summary>
      public List<WikiPage> Pages { get; set; }

      /// <summary>Site on which the pages are located. All fill/query methods build
      /// their request URLs from this object.</summary>
      public WikiSite WikiSite { get; set; }

      /// <summary>Creates a WikiPageList bound to the given site and pre-populated with
      /// one Page object per supplied title. The created Page objects hold no text yet:
      /// call Load() to fetch text from the live wiki, or LoadEx() to get both text and
      /// metadata via the XML export interface.</summary>
      /// <param name="wikiSite">Site object, it must be constructed beforehand.</param>
      /// <param name="pageNames">Page titles as array of strings.</param>
      /// <returns>Returns the WikiPageList object.</returns>
      public WikiPageList(WikiSite wikiSite, IEnumerable <string> pageNames)
      {
         WikiSite = wikiSite;
         // Materialize one stub page per title, then normalize namespace prefixes
         // so later title comparisons work against canonical local prefixes.
         Pages = pageNames.Select(title => new WikiPage(wikiSite, title)).ToList();
         CorrectNsPrefixes();
      }

      /// <summary>Creates a WikiPageList bound to the given site and pre-populated with
      /// one Page object per supplied title. The created Page objects hold no text yet:
      /// call Load() to fetch text from the live wiki, or LoadEx() to get both text and
      /// metadata via the XML export interface.</summary>
      /// <param name="wikiSite">Site object, it must be constructed beforehand.</param>
      /// <param name="pageNames">Page titles as StringCollection object.</param>
      /// <returns>Returns the WikiPageList object.</returns>
      public WikiPageList(WikiSite wikiSite, StringCollection pageNames)
      {
         WikiSite = wikiSite;
         // StringCollection is a non-generic IEnumerable, so Cast<string> bridges it
         // into LINQ; one stub page is created per title.
         Pages = pageNames.Cast<string>()
                          .Select(title => new WikiPage(wikiSite, title))
                          .ToList();
         CorrectNsPrefixes();
      }

      /// <summary>Creates an empty WikiPageList bound to the specified
      /// Site object.</summary>
      /// <param name="wikiSite">Site object, it must be constructed beforehand.</param>
      /// <returns>Returns the WikiPageList object.</returns>
      public WikiPageList(WikiSite wikiSite)
      {
         WikiSite = wikiSite;
         Pages = new List<WikiPage>();
      }

      /// <summary>Creates an empty WikiPageList; a Site object with default properties
      /// is constructed internally (and logged in by that construction). Building a new
      /// Site object is slow, so avoid this constructor when a site already exists.</summary>
      /// <returns>Returns the WikiPageList object.</returns>
      public WikiPageList()
      {
         this.WikiSite = new WikiSite();
         this.Pages = new List<WikiPage>();
      }

      /// <summary>Indexer allowing pageList[i] instead of pageList.Pages[i].
      /// Simply delegates to the underlying list, so an out-of-range index throws
      /// ArgumentOutOfRangeException from List&lt;T&gt;.</summary>
      /// <param name="index">Zero-based index.</param>
      /// <returns>Returns the Page object.</returns>
      public WikiPage this[int index]
      {
         get { return Pages[index]; }
         set { Pages[index] = value; }
      }

      /// <summary>Returns the page at the given position. Equivalent to the
      /// pageList[i] indexer, which is the preferred form where possible.</summary>
      /// <param name="index">Zero-based index.</param>
      /// <returns>Returns the Page object.</returns>
      public WikiPage GetPageAtIndex(int index)
      {
         return Pages[index];
      }

      /// <summary>Replaces the page at the given position. Equivalent to the
      /// pageList[i] indexer, which is the preferred form where possible.</summary>
      /// <param name="wikiPage">Page object to set in this WikiPageList.</param>
      /// <param name="index">Zero-based index.</param>
      public void SetPageAtIndex(WikiPage wikiPage, int index)
      {
         Pages[index] = wikiPage;
      }

      /// <summary>Indexer allowing pageList["title"]. Titles are compared verbatim, so
      /// use correct local namespace prefixes; call CorrectNsPrefixes() to normalize
      /// prefixes across the whole WikiPageList first. The setter replaces every page
      /// whose title matches.</summary>
      /// <param name="index">Title of page to get.</param>
      /// <returns>Returns the Page object, or null if there is no page with the specified
      /// title in this WikiPageList.</returns>
      public WikiPage this[string index]
      {
         get
         {
            // First page whose title matches exactly, or null when none does.
            foreach (WikiPage page in Pages)
               if (page.Title == index)
                  return page;
            return null;
         }
         set
         {
            // Overwrite every entry carrying this title (duplicates included).
            int position = 0;
            while (position < Pages.Count) {
               if (Pages[position].Title == index)
                  Pages[position] = value;
               position++;
            }
         }
      }

      /// <summary>Allows WikiPageList objects to be used directly in "foreach"
      /// statements by exposing the underlying list's enumerator (non-generic
      /// IEnumerator, so "foreach (WikiPage p in list)" relies on the implicit cast).</summary>
      /// <returns>Returns IEnumerator object.</returns>
      public IEnumerator GetEnumerator()
      {
         return Pages.GetEnumerator();
      }

      /// <summary>Appends the specified page to the end of this WikiPageList.
      /// No duplicate or namespace checks are performed.</summary>
      /// <param name="wikiPage">Page object to add.</param>
      public void Add(WikiPage wikiPage)
      {
         Pages.Add(wikiPage);
      }

      /// <summary>Inserts an element into this WikiPageList at the specified index.
      /// Note the parameter order (page first, index second) is the reverse of
      /// List&lt;T&gt;.Insert.</summary>
      /// <param name="wikiPage">Page object to insert.</param>
      /// <param name="index">Zero-based index.</param>
      public void Insert(WikiPage wikiPage, int index)
      {
         Pages.Insert(index, wikiPage);
      }

      /// <summary>Returns true if this WikiPageList contains a page with the same title
      /// as the specified page. CAUTION: this is not a pure query — it mutates both the
      /// argument (CorrectNsPrefix) and every page in the list (CorrectNsPrefixes) to
      /// normalize namespace prefixes before comparing titles.</summary>
      /// <param name="wikiPage">Page whose title is looked for; its namespace prefix
      /// is corrected in place.</param>
      /// <returns>Returns bool value.</returns>
      public bool Contains(WikiPage wikiPage)
      {
         wikiPage.CorrectNsPrefix();
         CorrectNsPrefixes();
         return Pages.Any (p => p.Title == wikiPage.Title);
      }

      /// <summary>Returns true if a page with the specified title exists in this
      /// WikiPageList. CAUTION: not a pure query — it normalizes namespace prefixes of
      /// every page in the list (CorrectNsPrefixes) before comparing, and builds a
      /// temporary WikiPage just to canonicalize the given title.</summary>
      /// <param name="title">Title of page to check.</param>
      /// <returns>Returns bool value.</returns>
      public bool Contains(string title)
      {
         var wikiPage = new WikiPage(WikiSite, title);
         wikiPage.CorrectNsPrefix();
         CorrectNsPrefixes();
         return Pages.Any (p => p.Title == wikiPage.Title);
      }

      /// <summary>Returns the number of pages in this WikiPageList.
      /// (A method rather than a property for historical/COM-interface reasons.)</summary>
      /// <returns>Number of pages as positive integer value.</returns>
      public int Count()
      {
         return Pages.Count;
      }

      /// <summary>Removes the page at the specified index from this WikiPageList.
      /// An out-of-range index throws ArgumentOutOfRangeException from List&lt;T&gt;.</summary>
      /// <param name="index">Zero-based index.</param>
      public void RemoveAt(int index)
      {
         Pages.RemoveAt(index);
      }

      /// <summary>Removes all pages with the specified title from this WikiPageList.
      /// Titles are compared verbatim (no namespace-prefix normalization here).</summary>
      /// <param name="title">Title of page(s) to remove.</param>
      public void Remove(string title)
      {
         // RemoveAll deletes every match in one pass. The previous implementation
         // called RemoveAt inside a forward index loop, which shifted the remaining
         // elements left and then incremented the index, skipping the element right
         // after each removal — two consecutive entries with the same title left the
         // second one behind.
         Pages.RemoveAll(p => p.Title == title);
      }

      /// <summary>Gets page titles for this WikiPageList from "Special:Allpages" MediaWiki page,
      /// i.e. a list of pages in alphabetical order. Delegates to the bot query API
      /// ("api.php") when available; otherwise scrapes the HTML page portion by portion.
      /// Does not clear the existing list — new titles are appended.</summary>
      /// <param name="firstPageTitle">Title of page to start enumerating from. The title
      /// must have no namespace prefix (like "Talk:"), just the page title itself. Or you can
      /// specify just a letter or two instead of full real title. Pass the empty string or null
      /// to start from the very beginning.</param>
      /// <param name="neededNSpace">Integer, presenting the key of namespace to get pages
      /// from. Only one key of one namespace can be specified (zero for default).</param>
      /// <param name="acceptRedirects">Set this to "false" to exclude redirects.</param>
      /// <param name="quantity">Maximum allowed quantity of pages in this WikiPageList.</param>
      public void FillFromAllPages(string firstPageTitle, int neededNSpace, bool acceptRedirects,
                                   int quantity)
      {
         if (quantity <= 0)
            throw new ArgumentOutOfRangeException("quantity",
                                                  WikiBot.Msg("Quantity must be positive."));
         // Preferred path: the "allpages" API list, when the site exposes api.php.
         if (WikiBot.UseBotQuery && WikiSite.botQuery) {
            FillFromCustomBotQueryList("allpages", "apnamespace=" + neededNSpace +
                                                   (acceptRedirects ? "" : "&apfilterredir=nonredirects") +
                                                   (string.IsNullOrEmpty(firstPageTitle) ? "" : "&apfrom=" +
                                                                                                HttpUtility.UrlEncode(firstPageTitle)), quantity);
            return;
         }
         Console.WriteLine(
            WikiBot.Msg("Getting {0} page titles from \"Special:Allpages\" MediaWiki page..."),
            quantity);
         // "quantity" becomes an absolute target size, since titles are appended
         // after whatever the list already holds.
         int count = Pages.Count;
         quantity += Pages.Count;
         // Redirect rows are wrapped in <div class="allpagesredirect">, so the
         // redirect-accepting pattern optionally matches that wrapper.
         Regex linkToPageRe;
         if (acceptRedirects)
            linkToPageRe = new Regex("<td[^>]*>(?:<div class=\"allpagesredirect\">)?" +
                                     "<a href=\"[^\"]*?\" title=\"([^\"]*?)\">");
         else
            linkToPageRe = new Regex("<td[^>]*><a href=\"[^\"]*?\" title=\"([^\"]*?)\">");
         do {
            // "!" sorts before regular titles, so it serves as "start from the top".
            string res = WikiSite.SiteUrl + WikiSite.IndexPath +
                         "index.php?title=Special:Allpages&from=" +
                         HttpUtility.UrlEncode(
                            string.IsNullOrEmpty(firstPageTitle) ? "!" : firstPageTitle) +
                         "&namespace=" + neededNSpace.ToString(CultureInfo.InvariantCulture);
            MatchCollection matches = linkToPageRe.Matches(WikiSite.GetPageHtm(res));
            // Fewer than 2 matches means no page rows beyond the navigation link:
            // the listing is exhausted.
            if (matches.Count < 2)
               break;
            // Skip match 0 (navigation/header link), collect the rest.
            for (int i = 1; i < matches.Count; i++)
               Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(matches[i].Groups[1].Value)));
            // Resume right after the last collected title; "!" nudges past it so the
            // same page is not fetched twice.
            firstPageTitle = WikiSite.RemoveNsPrefix(Pages[Pages.Count - 1].Title, neededNSpace) +
                             "!";
         }
         while(Pages.Count < quantity);
         // A portion may overshoot the target; trim the excess.
         if (Pages.Count > quantity)
            Pages.RemoveRange(quantity, Pages.Count - quantity);
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with {0} page titles from " +
                                   "\"Special:Allpages\" MediaWiki page."), (Pages.Count - count).ToString(CultureInfo.InvariantCulture));
      }

      /// <summary>Gets page titles for this WikiPageList from the specified special page,
      /// e.g. "Deadendpages", by regex-scraping its HTML. Namespaces are not filtered,
      /// and the existing WikiPageList is not cleared — new titles are appended.</summary>
      /// <param name="pageTitle">Title of special page, e.g. "Deadendpages".</param>
      /// <param name="quantity">Maximum number of page titles to get. Usually
      /// MediaWiki provides not more than 1000 titles.</param>
      /// <exception cref="WikiBotException">Thrown when no page titles are found.</exception>
      public void FillFromCustomSpecialPage(string pageTitle, int quantity)
      {
         if (string.IsNullOrEmpty(pageTitle))
            throw new ArgumentNullException("pageTitle");
         if (quantity <= 0)
            throw new ArgumentOutOfRangeException("quantity",
                                                  WikiBot.Msg("Quantity must be positive."));
         Console.WriteLine(WikiBot.Msg("Getting {0} page titles from \"Special:{1}\" page..."),
                           quantity, pageTitle);
         string res = WikiSite.SiteUrl + WikiSite.IndexPath + "index.php?title=Special:" +
                      HttpUtility.UrlEncode(pageTitle) + "&limit=" + quantity.ToString(CultureInfo.InvariantCulture);
         string src = WikiSite.GetPageHtm(res);
         MatchCollection matches;
         // Image/file listing pages use a different markup (gallery), so they need
         // a dedicated link pattern.
         if (pageTitle == "Unusedimages" || pageTitle == "Uncategorizedimages" ||
             pageTitle == "UnusedFiles" || pageTitle == "UncategorizedFiles")
            matches = WikiSite.LinkToPageRe3.Matches(src);
         else
            matches = RegExes.LinkToPage2.Matches(src);
         if (matches.Count == 0)
            throw new WikiBotException(string.Format(
               WikiBot.Msg("Page \"Special:{0}\" does not contain page titles."), pageTitle));
         foreach (Match match in matches)
            Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(match.Groups[1].Value)));
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with {0} page titles from " +
                                   "\"Special:{1}\" page."), matches.Count, pageTitle);
      }

      /// <summary>Gets page titles for this WikiPageList from the specified special page,
      /// e.g. "Deadendpages". Namespaces are not filtered, and the existing WikiPageList
      /// is not cleared — new titles are appended. Unlike FillFromCustomSpecialPage this
      /// variant parses the page as XML (XHTML) via XPath instead of regex matching,
      /// which is slower but less sensitive to markup details.</summary>
      /// <param name="pageTitle">Title of special page, e.g. "Deadendpages".</param>
      /// <param name="quantity">Maximum number of page titles to get. Usually
      /// MediaWiki provides not more than 1000 titles.</param>
      /// <exception cref="WikiBotException">Thrown when nothing is found on the page.</exception>
      public void FillFromCustomSpecialPageEx(string pageTitle, int quantity)
      {
         if (string.IsNullOrEmpty(pageTitle))
            throw new ArgumentNullException("pageTitle");
         if (quantity <= 0)
            throw new ArgumentOutOfRangeException("quantity",
                                                  WikiBot.Msg("Quantity must be positive."));
         Console.WriteLine(WikiBot.Msg("Getting {0} page titles from \"Special:{1}\" page..."),
                           quantity, pageTitle);
         string res = WikiSite.SiteUrl + WikiSite.IndexPath + "index.php?title=Special:" +
                      HttpUtility.UrlEncode(pageTitle) + "&limit=" + quantity.ToString(CultureInfo.InvariantCulture);
         // Strip the page chrome first so the XPath query only sees the content list.
         string src = WikiSite.StripContent(WikiSite.GetPageHtm(res), null, null, true, true);
         // Special pages render results as an ordered list of links; the title
         // attribute of each anchor is the page title.
         XPathNodeIterator ni = WikiSite.GetXmlIterator(src, "//ns:ol/ns:li/ns:a[@title != '']");
         if (ni.Count == 0)
            throw new WikiBotException(string.Format(
               WikiBot.Msg("Nothing was found on \"Special:{0}\" page."), pageTitle));
         while (ni.MoveNext ())
         {
            if (ni.Current != null)
               Pages.Add (new WikiPage (WikiSite, HttpUtility.HtmlDecode (ni.Current.GetAttribute ("title", ""))));
         }
         // NOTE(review): the reported count is ni.Count (all XPath matches), which may
         // exceed the number actually added if any iterator position was null.
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with {0} page titles from " +
                                   "\"Special:{1}\" page."), ni.Count, pageTitle);
      }

      /// <summary>Gets page titles for this WikiPageList from the specified MediaWiki events
      /// log by regex-scraping "Special:Log". Namespaces are not filtered, and the
      /// existing WikiPageList is not cleared — new titles are appended.</summary>
      /// <param name="logType">Type of log, it could be: "block" for blocked users log;
      /// "protect" for protected pages log; "rights" for users rights log; "delete" for
      /// deleted pages log; "upload" for uploaded files log; "move" for renamed pages log;
      /// "import" for transwiki import log; "renameuser" for renamed accounts log;
      /// "newusers" for new users log; "makebot" for bot status assignment log.</param>
      /// <param name="userName">Select log entries only for specified account. Pass empty
      /// string, if that restriction is not needed.</param>
      /// <param name="pageTitle">Select log entries only for specified page. Pass empty
      /// string, if that restriction is not needed.</param>
      /// <param name="quantity">Maximum number of page titles to get.</param>
      /// <exception cref="WikiBotException">Thrown when the log contains no page titles.</exception>
      public void FillFromCustomLog(string logType, string userName, string pageTitle,
                                    int quantity)
      {
         if (string.IsNullOrEmpty(logType))
            throw new ArgumentNullException("logType");
         if (quantity <= 0)
            throw new ArgumentOutOfRangeException("quantity",
                                                  WikiBot.Msg("Quantity must be positive."));
         Console.WriteLine(WikiBot.Msg("Getting {0} page titles from \"{1}\" log..."),
                           quantity.ToString(CultureInfo.InvariantCulture), logType);
         string res = WikiSite.SiteUrl + WikiSite.IndexPath + "index.php?title=Special:Log&type=" +
                      logType + "&user=" + HttpUtility.UrlEncode(userName) + "&page=" +
                      HttpUtility.UrlEncode(pageTitle) + "&limit=" + quantity.ToString(CultureInfo.InvariantCulture);
         string src = WikiSite.GetPageHtm(res);
         MatchCollection matches = RegExes.LinkToPage2.Matches(src);
         if (matches.Count == 0)
            throw new WikiBotException(
               string.Format(WikiBot.Msg("Log \"{0}\" does not contain page titles."), logType));
         foreach (Match match in matches)
            Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(match.Groups[1].Value)));
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with {0} page titles from \"{1}\" log."),
                           matches.Count, logType);
      }

      /// <summary>Gets page titles for this WikiPageList from the specified list, produced by
      /// the bot query interface ("api.php" MediaWiki extension), fetching up to 500 titles
      /// per request and following the API's continue tokens until done. The function
      /// does not clear the existing WikiPageList, so new titles will be added.</summary>
      /// <param name="listType">Title of list, the following values are supported: 
      /// "allpages", "alllinks", "allusers", "backlinks", "categorymembers",
      /// "embeddedin", "imageusage", "logevents", "recentchanges", 
      /// "usercontribs", "watchlist", "exturlusage". Detailed documentation
      /// can be found at "http://en.wikipedia.org/w/api.php".</param>
      /// <param name="queryParams">Additional query parameters, specific to the
      /// required list, e.g. "cmtitle=Category:Physical%20sciences&amp;cmnamespace=0|2".
      /// Parameter values must be URL-encoded with HttpUtility.UrlEncode function
      /// before calling this function.</param>
      /// <param name="quantity">Maximum number of page titles to get.</param>
      /// <exception cref="WikiBotException">Thrown when api.php is unavailable or the
      /// requested list type is not supported by the site.</exception>
      /// <example><code>
      /// pageList.FillFromCustomBotQueryList("categorymembers",
      /// 	"cmcategory=Physical%20sciences&amp;cmnamespace=0|14",
      /// 	int.MaxValue);
      /// </code></example>
      public void FillFromCustomBotQueryList(string listType, string queryParams, int quantity)
      {
         if (!WikiSite.botQuery)
            throw new WikiBotException(
               WikiBot.Msg("The \"api.php\" MediaWiki extension is not available."));
         if (string.IsNullOrEmpty(listType))
            throw new ArgumentNullException("listType");
         if (!WikiSite.botQueryLists.Contains(listType))
            throw new WikiBotException(
               string.Format(WikiBot.Msg("The list \"{0}\" is not supported."), listType));
         if (quantity <= 0)
            throw new ArgumentOutOfRangeException("quantity",
                                                  WikiBot.Msg("Quantity must be positive."));
         // Each list type has its own parameter prefix (e.g. "cm" for categorymembers);
         // continuation tokens come back as either "<prefix>from" or "<prefix>continue"
         // depending on MediaWiki version.
         string prefix = WikiSite.botQueryLists[listType].ToString();
         string continueAttrTag1 = prefix + "from";
         string continueAttrTag2 = prefix + "continue";
         // "allusers" results carry a "name" attribute instead of "title".
         string attrTag = (listType != "allusers") ? "title" : "name";
         // The API caps one request at 500 items, so larger quantities are gathered
         // across multiple continued requests.
         string queryUri = WikiSite.IndexPath + "api.php?action=query&list=" + listType +
                           "&format=xml&" + prefix + "limit=" +
                           ((quantity > 500) ? "500" : quantity.ToString(CultureInfo.InvariantCulture));
         string next = "";
         int count = Pages.Count;
         // Turn "quantity" into an absolute target size (titles are appended), except
         // for int.MaxValue where the addition would overflow.
         if (quantity != int.MaxValue)
            quantity += Pages.Count;
         do {
            string queryFullUri = queryUri;
            if (next != "")
               queryFullUri += "&" + prefix + "continue=" + HttpUtility.UrlEncode(next);
            string src = WikiSite.PostDataAndGetResultHtm(queryFullUri, queryParams);
            using (var reader = new XmlTextReader(new StringReader(src))) {
               next = "";
               while (reader.Read()) {
                  // Result items are empty elements; pick up titles and whichever
                  // continuation attribute this MediaWiki version emits.
                  if (reader.IsEmptyElement && reader[attrTag] != null)
                     Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(reader[attrTag])));
                  if (reader.IsEmptyElement && reader[continueAttrTag1] != null)
                     next = reader[continueAttrTag1];
                  if (reader.IsEmptyElement && reader[continueAttrTag2] != null)
                     next = reader[continueAttrTag2];
               }
            }
         }
         while (next != "" && Pages.Count < quantity);
         // The last portion may overshoot the target; trim the excess.
         if (Pages.Count > quantity)
            Pages.RemoveRange(quantity, Pages.Count - quantity);
         // HACK: suppress the progress message when called from FillAllFromCategoryEx,
         // detected by inspecting the current stack trace.
         if (!string.IsNullOrEmpty(Environment.StackTrace) &&
             !Environment.StackTrace.Contains("FillAllFromCategoryEx"))
            Console.WriteLine(WikiBot.Msg("WikiPageList filled with {0} page titles " +
                                      "from \"{1}\" bot interface list."),
                              (Pages.Count - count).ToString(CultureInfo.InvariantCulture), listType);
      }

      /// <summary>Gets page titles for this WikiPageList from the recent changes page,
      /// "Special:Recentchanges". File uploads, page deletions and page renamings are
      /// not included; use FillFromCustomLog function instead to fill from respective logs.
      /// The function does not clear the existing WikiPageList, so new titles will be added.
      /// Use FilterNamespaces() or RemoveNamespaces() functions to remove
      /// pages from unwanted namespaces.</summary>
      /// <param name="hideMinor">Ignore minor edits.</param>
      /// <param name="hideBots">Ignore bot edits.</param>
      /// <param name="hideAnons">Ignore anonymous users edits.</param>
      /// <param name="hideLogged">Ignore logged-in users edits.</param>
      /// <param name="hideSelf">Ignore edits of this bot account.</param>
      /// <param name="limit">Maximum number of changes to get.</param>
      /// <param name="days">Get changes for this number of recent days.</param>
      public void FillFromRecentChanges(bool hideMinor, bool hideBots, bool hideAnons,
                                        bool hideLogged, bool hideSelf, int limit, int days)
      {
         if (limit <= 0)
            throw new ArgumentOutOfRangeException("limit", WikiBot.Msg("Limit must be positive."));
         if (days <= 0)
            throw new ArgumentOutOfRangeException("days",
                                                  WikiBot.Msg("Number of days must be positive."));
         Console.WriteLine(WikiBot.Msg("Getting {0} page titles from " +
                                   "\"Special:Recentchanges\" page..."), limit);
         // MediaWiki query-string parameter names are case-sensitive and all-lowercase.
         // The previous URL sent "hideBots" and "hideAnons", which the server did not
         // recognize, so those two filters were silently ignored.
         string uri = string.Format("{0}{1}index.php?title=Special:Recentchanges&" +
                                    "hideminor={2}&hidebots={3}&hideanons={4}&hideliu={5}&hidemyself={6}&" +
                                    "limit={7}&days={8}", WikiSite.SiteUrl, WikiSite.IndexPath,
                                    hideMinor ? "1" : "0", hideBots ? "1" : "0", hideAnons ? "1" : "0",
                                    hideLogged ? "1" : "0", hideSelf ? "1" : "0",
                                    limit.ToString(CultureInfo.InvariantCulture), days.ToString(CultureInfo.InvariantCulture));
         string respStr = WikiSite.GetPageHtm(uri);
         MatchCollection matches = RegExes.LinkToPage2.Matches(respStr);
         foreach (Match match in matches)
            Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(match.Groups[1].Value)));
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with {0} page titles from " +
                                   "\"Special:Recentchanges\" page."), matches.Count);
      }

      /// <summary>Gets page titles for this WikiPageList from the specified wiki category
      /// page, excluding subcategories. Use FillSubsFromCategory to get the
      /// subcategories themselves. Titles are appended to the existing list.</summary>
      /// <param name="categoryName">Category name, with or without prefix.</param>
      public void FillFromCategory(string categoryName)
      {
         int before = Pages.Count;
         // Collect everything in the category into a scratch list, then drop the
         // subcategory entries (namespace 14) before merging.
         var scratch = new WikiPageList(WikiSite);
         scratch.FillAllFromCategory(categoryName);
         scratch.RemoveNamespaces(new[] {14});
         Pages.AddRange(scratch.Pages);
         int added = Pages.Count - before;
         if (added == 0) {
            Console.Error.WriteLine(
               WikiBot.Msg("Nothing was found in \"{0}\" category."), categoryName);
            return;
         }
         Console.WriteLine(
            WikiBot.Msg("WikiPageList filled with {0} page titles, found in \"{1}\" category."),
            added.ToString(CultureInfo.InvariantCulture), categoryName);
      }

      /// <summary>Gets subcategory titles for this WikiPageList from the specified wiki
      /// category page, excluding ordinary pages. Use FillFromCategory to get the other
      /// pages. Titles are appended to the existing list.</summary>
      /// <param name="categoryName">Category name, with or without prefix.</param>
      public void FillSubsFromCategory(string categoryName)
      {
         int before = Pages.Count;
         // Collect everything in the category into a scratch list, then keep only the
         // subcategory entries (namespace 14) before merging.
         var scratch = new WikiPageList(WikiSite);
         scratch.FillAllFromCategory(categoryName);
         scratch.FilterNamespaces(new[] {14});
         Pages.AddRange(scratch.Pages);
         int added = Pages.Count - before;
         if (added == 0) {
            Console.Error.WriteLine(
               WikiBot.Msg("Nothing was found in \"{0}\" category."), categoryName);
            return;
         }
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with {0} subcategory page titles, " +
                                   "found in \"{1}\" category."),
                           added.ToString(CultureInfo.InvariantCulture), categoryName);
      }

      /// <summary>This internal function gets all page titles for this WikiPageList from the
      /// specified category page, including subcategories, by scraping the category's HTML
      /// portion by portion. Delegates to FillAllFromCategoryEx when the bot query
      /// interface ("api.php") is available.</summary>
      /// <param name="categoryName">Category name, with or without prefix.</param>
      public void FillAllFromCategory(string categoryName)
      {
         if (string.IsNullOrEmpty(categoryName))
            throw new ArgumentNullException("categoryName");
         // Normalize the name: strip brackets/whitespace, drop any namespace prefix,
         // then re-attach the site's local "Category:" prefix (namespace key 14).
         categoryName = categoryName.Trim("[]\f\n\r\t\v ".ToCharArray());
         categoryName = WikiSite.RemoveNsPrefix(categoryName, 14);
         categoryName = WikiSite.namespaces["14"] + ":" + categoryName;
         Console.WriteLine(WikiBot.Msg("Getting category \"{0}\" contents..."), categoryName);
         //RemoveAll();
         if (WikiBot.UseBotQuery && WikiSite.botQuery) {
            FillAllFromCategoryEx(categoryName);
            return;
         }
         string src = "";
         // Matches the "next page" link's &from= token, which drives pagination.
         var nextPortionRe = new Regex("&(?:amp;)?from=([^\"=]+)\" title=\"");
         do {
            // On the first pass src is empty, so the match yields an empty "from"
            // token and the request starts from the category's beginning.
            string res = WikiSite.SiteUrl + WikiSite.IndexPath + "index.php?title=" +
                         HttpUtility.UrlEncode(categoryName) +
                         "&from=" + nextPortionRe.Match(src).Groups[1].Value;
            src = WikiSite.GetPageHtm(res);
            src = HttpUtility.HtmlDecode(src);
            // Ordinary member pages.
            MatchCollection matches = RegExes.LinkToPage.Matches(src);
            foreach (Match match in matches)
               Pages.Add(new WikiPage(WikiSite, match.Groups[1].Value));
            // Files shown in the category's gallery section.
            if (src.Contains("<div class=\"gallerytext\">\n")) {
               matches = RegExes.LinkToImage.Matches(src);
               foreach (Match match in matches)
                  Pages.Add(new WikiPage(WikiSite, match.Groups[1].Value));
            }
            // Subcategories rendered by the CategoryTree extension; titles come
            // without a prefix, so the local "Category:" prefix is re-added.
            if (src.Contains("<div class=\"CategoryTreeChildren\"")) {
               matches = RegExes.LinkToSubCategory.Matches(src);
               foreach (Match match in matches)
                  Pages.Add(new WikiPage(WikiSite, WikiSite.namespaces["14"] + ":" +
                                                   match.Groups[1].Value));
            }
         }
         while(nextPortionRe.IsMatch(src));
      }

      /// <summary>This internal function gets all page titles for this WikiPageList from the
      /// specified category using the "api.php" MediaWiki extension (bot interface), if it
      /// is available. Subcategories are included. Falls back to the obsolete "query.php"
      /// interface, or to plain HTML scraping, when api.php support is incomplete.</summary>
      /// <param name="categoryName">Category name, with or without prefix.</param>
      public void FillAllFromCategoryEx(string categoryName)
      {
         if (string.IsNullOrEmpty(categoryName))
            throw new ArgumentNullException("categoryName");
         // Normalize: strip brackets/whitespace and any namespace prefix.
         categoryName = categoryName.Trim("[]\f\n\r\t\v ".ToCharArray());
         categoryName = WikiSite.RemoveNsPrefix(categoryName, 14);
         if (WikiSite.botQueryVersions.ContainsKey("ApiQueryCategoryMembers.php")) {
            // Revision 30533 of ApiQueryCategoryMembers switched the parameter from
            // "cmcategory" (bare name) to "cmtitle" (prefixed title).
            if (int.Parse(
               WikiSite.botQueryVersions["ApiQueryCategoryMembers.php"].ToString()) >= 30533)
               FillFromCustomBotQueryList("categorymembers", "cmtitle=" + 
                                                             HttpUtility.UrlEncode(WikiSite.namespaces["14"] + ":" +
                                                                                   categoryName), int.MaxValue);
            else
               FillFromCustomBotQueryList("categorymembers", "cmcategory=" +  
                                                             HttpUtility.UrlEncode(categoryName), int.MaxValue);
         }
         else if (WikiSite.botQueryVersions.ContainsKey("query.php"))
            FillAllFromCategoryExOld(categoryName);
         else {
            // No usable bot interface: disable botQuery so FillAllFromCategory does
            // not route back here, then scrape the HTML instead.
            Console.WriteLine(WikiBot.Msg("Can't get category members using bot interface.\n" +
                                      "Switching to common user interface (\"site.botQuery\" is set to \"false\")."));
            WikiSite.botQuery = false;
            FillAllFromCategory(categoryName);
         }
      }

      /// <summary>This internal function is kept for backwards compatibility only.
      /// It gets all pages and subcategories in the specified category using the obsolete
      /// "query.php" bot interface and adds everything found to this WikiPageList object,
      /// fetching titles portion by portion (500 per request). The "query.php" interface
      /// was superseded by "api.php" in MediaWiki 1.8.</summary>
      /// <param name="categoryName">Category name, with or without prefix.</param>
      public void FillAllFromCategoryExOld(string categoryName)
      {
         if (string.IsNullOrEmpty(categoryName))
            throw new ArgumentNullException("categoryName");
         string src = "";
         // query.php signals more results with a <category next="..."> element whose
         // token seeds the next request's "cpfrom" parameter.
         var nextPortionRe = new Regex("<category next=\"(.+?)\" />");
         do {
            // First pass: src is empty, the match yields an empty token, and the
            // request starts from the category's beginning.
            string res = WikiSite.SiteUrl + WikiSite.IndexPath + "query.php?what=category&cptitle=" +
                         HttpUtility.UrlEncode(categoryName) + "&cpfrom=" +
                         nextPortionRe.Match(src).Groups[1].Value + "&cplimit=500&format=xml";
            src = WikiSite.GetPageHtm(res);
            MatchCollection matches = RegExes.PageTitleTag.Matches(src);
            foreach (Match match in matches)
               Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(match.Groups[1].Value)));
         }
         while(nextPortionRe.IsMatch(src));
      }

      /// <summary>Gets all levels of subcategories of some wiki category (subcategories,
      /// sub-subcategories, and so on) and fills this WikiPageList with the titles of all
      /// pages found on all levels, with duplicates removed and category pages themselves
      /// excluded. Use FillSubsFromCategoryTree to get subcategory titles instead.
      /// This operation may be very time- and traffic-consuming.
      /// The function clears the WikiPageList before filling.</summary>
      /// <param name="categoryName">Category name, with or without prefix.</param>
      public void FillFromCategoryTree(string categoryName)
      {
         FillAllFromCategoryTree(categoryName);
         // Drop the category pages (namespace 14) themselves, keep only member pages.
         RemoveNamespaces(new [] {14});
         if (Pages.Count == 0)
            Console.Error.WriteLine(
               WikiBot.Msg("Nothing was found in \"{0}\" category."), categoryName);
         else
            Console.WriteLine(
               WikiBot.Msg("WikiPageList filled with {0} page titles, found in \"{1}\" category."),
               Count().ToString(CultureInfo.InvariantCulture), categoryName);
      }

      /// <summary>Gets all levels of subcategories of some wiki category (subcategories,
      /// sub-subcategories, and so on) and fills this WikiPageList with the subcategory
      /// titles found, with duplicates removed. Use FillFromCategoryTree to get pages of
      /// the other namespaces instead. This operation may be very time- and
      /// traffic-consuming. The function clears the WikiPageList before filling.</summary>
      /// <param name="categoryName">Category name, with or without prefix.</param>
      public void FillSubsFromCategoryTree(string categoryName)
      {
         FillAllFromCategoryTree(categoryName);
         // Keep only category pages (namespace 14).
         FilterNamespaces(new [] {14});
         if (Pages.Count == 0)
            Console.Error.WriteLine(
               WikiBot.Msg("Nothing was found in \"{0}\" category."), categoryName);
         else
            Console.WriteLine(
               WikiBot.Msg("WikiPageList filled with {0} subcategory page titles, found in \"{1}\" category."),
               Count().ToString(CultureInfo.InvariantCulture), categoryName);
      }

      /// <summary>Gets all levels of subcategories of some wiki category (subcategories,
      /// sub-subcategories, and so on) and fills this WikiPageList with the titles of all
      /// pages found on all levels, including the subcategory titles themselves.
      /// Duplicates of recurring pages and subcategories are removed. This operation may
      /// be very time- and traffic-consuming. The function clears the WikiPageList
      /// before filling.</summary>
      /// <param name="categoryName">Category name, with or without prefix.</param>
      public void FillAllFromCategoryTree(string categoryName)
      {
         Clear();
         categoryName = WikiSite.CorrectNsPrefix(categoryName);
         var visitedCategories = new StringCollection();
         FillAllFromCategory(categoryName);
         visitedCategories.Add(categoryName);
         // Breadth-first walk: Pages grows while we scan it, so iterate by index
         // and re-check Count() every pass instead of using foreach.
         int index = 0;
         while (index < Count()) {
            WikiPage candidate = Pages[index];
            if (candidate.GetNamespace() == 14 && !visitedCategories.Contains(candidate.Title)) {
               FillAllFromCategory(candidate.Title);
               visitedCategories.Add(candidate.Title);
            }
            index++;
         }
         RemoveRecurring();
      }

      /// <summary>Gets page history and fills this WikiPageList with the specified number of
      /// recent page revisions, scraped from the HTML of the "action=history" page.
      /// Only revision identifiers, user names, timestamps, comments, minor-edit flags
      /// and byte deltas are loaded, not the texts. Call Load() (but not LoadEx) to load
      /// the texts of page revisions. The function combines XML (XHTML) parsing and
      /// regular expressions matching.</summary>
      /// <param name="pageTitle">Page to get history of.</param>
      /// <param name="lastRevisions">Number of last page revisions to get.</param>
      public void FillFromPageHistory(string pageTitle, int lastRevisions)
      {
         if (string.IsNullOrEmpty(pageTitle))
            throw new ArgumentNullException("pageTitle");
         if (lastRevisions <= 0)
            throw new ArgumentOutOfRangeException ("lastRevisions", WikiBot.Msg("Quantity must be positive."));
         Console.WriteLine (WikiBot.Msg("Getting {0} last revisons of \"{1}\" page..."), lastRevisions, pageTitle);
         string res = WikiSite.SiteUrl + WikiSite.IndexPath + "index.php?title=" +
                      HttpUtility.UrlEncode(pageTitle) + "&limit=" + lastRevisions.ToString(CultureInfo.InvariantCulture) + 
                      "&action=history";
         string src = WikiSite.GetPageHtm(res);
         // Cut the markup down to just the <ul id="pagehistory">...</ul> list; Substring
         // throws if either marker is missing from the returned HTML.
         src = src.Substring(src.IndexOf("<ul id=\"pagehistory\">", StringComparison.Ordinal));
         src = src.Substring(0, src.IndexOf("</ul>", StringComparison.Ordinal) + 5);
         // One <li> per revision; "p" accumulates fields until the closing </li>.
         WikiPage p = null;
         using (XmlReader reader = WikiSite.GetXmlReader(src)) {
            while (reader.Read()) {
               if (reader.Name == "li" && reader.NodeType == XmlNodeType.Element) {
                  p = new WikiPage (WikiSite, pageTitle) {LastMinorEdit = false, Comment = ""};
               }
               else if (reader.Name == "span" && reader["class"] == "mw-history-histlinks" && p != null)
               {
                  // The first <a> inside this span links to the revision ("oldid=NNN").
                  reader.ReadToFollowing("a");
                  string href = reader ["href"];
                  if (!string.IsNullOrEmpty (href))
                     p.LastRevisionId = href.Substring (href.IndexOf("oldid=", StringComparison.Ordinal) + 6);
                  // The link text is the localized revision timestamp; on parse failure
                  // Timestamp silently stays DateTime.MinValue (TryParse result is ignored).
                  DateTime timeStamp;
                  DateTime.TryParse (reader.ReadString (), WikiSite.regCulture, DateTimeStyles.AssumeLocal, out timeStamp);
                  p.Timestamp = timeStamp;
               }
               else if (reader.Name == "span" && reader["class"] == "history-user" && p != null)
               {
                  reader.ReadToFollowing("a");
                  p.LastUser = reader.ReadString();
               }
               else if (reader.Name == "abbr" && p != null)
                  // The history page marks minor edits with an <abbr> element.
                  p.LastMinorEdit = true;
               else if (reader.Name == "span" && reader["class"] == "history-size" && p != null)
               {
                  // Keep only digits and sign characters, e.g. "(+1,234 bytes)" -> "+1234".
                  int lastBytesModified;
                  int.TryParse (Regex.Replace (reader.ReadString (), @"[^-+\d]", ""), out lastBytesModified);
                  p.LastBytesModified = lastBytesModified;
               }
               else if (reader.Name == "span" && reader["class"] == "comment" && p != null)
               {
                  // Strip inner tags, then the surrounding bracket characters.
                  p.Comment = Regex.Replace (reader.ReadInnerXml ().Trim (), "<.+?>", "");
                  p.Comment = p.Comment.Substring (1, p.Comment.Length - 2);	// brackets
               }
               if(reader.Name == "li" && reader.NodeType == XmlNodeType.EndElement)
                  Pages.Add(p);
            }
         }
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with {0} last revisons of \"{1}\" page..."),
                           Pages.Count, pageTitle);
      }

      /// <summary>Gets page history using the bot query interface ("api.php" MediaWiki
      /// extension) and fills this WikiPageList with the specified number of last page
      /// revisions, optionally loading revision texts as well. On most sites not more
      /// than 50 last revisions can be obtained.
      /// Thanks to Jutiphan Mongkolsuthree for idea and outline of this function.</summary>
      /// <param name="pageTitle">Page to get history of.</param>
      /// <param name="lastRevisions">Number of last page revisions to obtain.</param>
      /// <param name="loadTexts">Load revision texts right away.</param>
      /// <exception cref="WikiBotException">The "api.php" interface is unavailable.</exception>
      public void FillFromPageHistoryEx(string pageTitle, int lastRevisions, bool loadTexts)
      {
         if (!WikiSite.botQuery)
            throw new WikiBotException(
               WikiBot.Msg("The \"api.php\" MediaWiki extension is not available."));
         if (string.IsNullOrEmpty(pageTitle))
            throw new ArgumentNullException("pageTitle");
         if (lastRevisions <= 0)
            throw new ArgumentOutOfRangeException("lastRevisions",
                                                  WikiBot.Msg("Quantity must be positive."));
         Console.WriteLine(
            WikiBot.Msg("Getting {0} last revisons of \"{1}\" page..."), lastRevisions, pageTitle);
         string queryUri = WikiSite.SiteUrl + WikiSite.IndexPath +
                           "api.php?action=query&prop=revisions&titles=" +
                           HttpUtility.UrlEncode(pageTitle) + "&rvprop=ids|user|comment|timestamp" +
                           (loadTexts ? "|content" : "") + "&format=xml&rvlimit=" + lastRevisions.ToString(CultureInfo.InvariantCulture);
         string src = WikiSite.GetPageHtm(queryUri);
         using (XmlReader reader = XmlReader.Create(new StringReader(src))) {
            reader.ReadToFollowing("api");
            reader.Read();
            if (reader.Name == "error")
               Console.Error.WriteLine(WikiBot.Msg("Error: {0}"), reader.GetAttribute("info"));
            while (reader.ReadToFollowing("rev")) {
               var p = new WikiPage (WikiSite, pageTitle)
                          {
                             LastRevisionId = reader.GetAttribute ("revid"),
                             LastUser = reader.GetAttribute ("user"),
                             Comment = reader.GetAttribute ("comment"),
                             // The API emits ISO 8601 UTC timestamps; parse them
                             // culture-invariantly and convert to UTC directly (the
                             // original used the current culture, which could fail or
                             // mis-parse on some locales — CA1305).
                             Timestamp = DateTime.Parse (reader.GetAttribute ("timestamp"),
                                                         CultureInfo.InvariantCulture,
                                                         DateTimeStyles.AdjustToUniversal)
                          };
               if (loadTexts)
                  p.Text = reader.ReadString();
               Pages.Add(p);
            }
         }
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with {0} last revisons of \"{1}\" page."),
                           Pages.Count, pageTitle);
      }

      /// <summary>Gets page titles for this WikiPageList from links in some wiki page.
      /// Only links to articles and to pages from the Project, Template and Help
      /// namespaces are retained, and no interwiki links. Use FillFromAllPageLinks
      /// instead to filter namespaces manually.</summary>
      /// <param name="pageTitle">Page name to get links from.</param>
      public void FillFromPageLinks(string pageTitle)
      {
         if (string.IsNullOrEmpty(pageTitle))
            throw new ArgumentNullException("pageTitle");
         FillFromAllPageLinks(pageTitle);
         // 0 = main, 4 = Project, 10 = Template, 12 = Help.
         int[] keptNamespaces = {0, 4, 10, 12};
         FilterNamespaces(keptNamespaces);
      }

      /// <summary>Gets page titles for this WikiPageList from all links in some wiki page.
      /// All links from all standard namespaces are retrieved, except interwiki links to
      /// other sites. Use FillFromPageLinks instead to filter namespaces
      /// automatically.</summary>
      /// <param name="pageTitle">Page title as string.</param>
      /// <example><code>pageList.FillFromAllPageLinks("Art");</code></example>
      public void FillFromAllPageLinks(string pageTitle)
      {
         if (string.IsNullOrEmpty(pageTitle))
            throw new ArgumentNullException("pageTitle");
         // The interwiki filter below needs the Wikimedia language-code list.
         if (string.IsNullOrEmpty(WikiSite.WMLangsStr))
            WikiSite.GetWikimediaWikisList();
         var sourcePage = new WikiPage(WikiSite, pageTitle);
         sourcePage.Load();
         var wikiLinkRe = new Regex(@"\[\[:*(.+?)(]]|\|)");
         // Matches titles starting with a known language prefix, i.e. interwiki links.
         var interwikiRe = new Regex("^(" + WikiSite.WMLangsStr + "):");
         foreach (Match match in wikiLinkRe.Matches(sourcePage.Text)) {
            string target = match.Groups[1].Value;
            if (!interwikiRe.IsMatch(target))
               Pages.Add(new WikiPage(WikiSite, target));
         }
         Console.WriteLine(
            WikiBot.Msg("WikiPageList filled with links, found on \"{0}\" page."), pageTitle);
      }

      /// <summary>Gets page titles for this WikiPageList from the "Special:Whatlinkshere"
      /// MediaWiki page of the specified page, i.e. the titles of pages referring to the
      /// specified page — but not more than 5000 titles. The function does not remove
      /// redirecting pages from the results; call RemoveRedirects() manually if you need
      /// that. The existing WikiPageList is not cleared; new titles are added.</summary>
      /// <param name="pageTitle">Page title as string.</param>
      public void FillFromLinksToPage(string pageTitle)
      {
         if (string.IsNullOrEmpty(pageTitle))
            throw new ArgumentNullException("pageTitle");
         string url = WikiSite.SiteUrl + WikiSite.IndexPath +
                      "index.php?title=Special:Whatlinkshere/" +
                      HttpUtility.UrlEncode(pageTitle) + "&limit=5000";
         string html = WikiSite.GetPageHtm(url);
         foreach (Match match in RegExes.LinkToPage.Matches(html))
            Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(match.Groups[1].Value)));
         Console.WriteLine(
            WikiBot.Msg("WikiPageList filled with titles of pages, referring to \"{0}\" page."),
            pageTitle);
      }

      /// <summary>Gets titles of pages in which the specified image file is included.
      /// Also works with non-image files.</summary>
      /// <param name="imageFileTitle">File title. With or without "Image:" or
      /// "File:" prefix.</param>
      public void FillFromPagesUsingImage(string imageFileTitle)
      {
         if (string.IsNullOrEmpty(imageFileTitle))
            throw new ArgumentNullException("imageFileTitle");
         // Remember the size so we can tell whether anything was actually found.
         int countBefore = Count();
         imageFileTitle = WikiSite.RemoveNsPrefix(imageFileTitle, 6);
         string url = WikiSite.SiteUrl + WikiSite.IndexPath + "index.php?title=" +
                      HttpUtility.UrlEncode(WikiSite.namespaces["6"].ToString()) + ":" +
                      HttpUtility.UrlEncode(imageFileTitle);
         string html = WikiSite.GetPageHtm(url);
         // Only scan the "File links" section of the file description page.
         int sectionStart = html.IndexOf("<h2 id=\"filelinks\">", StringComparison.Ordinal);
         int sectionEnd = html.IndexOf("<div class=\"printfooter\">", StringComparison.Ordinal);
         if (sectionStart == -1 || sectionEnd == -1) {
            Console.Error.WriteLine(WikiBot.Msg("No page contains \"{0}\" image."), imageFileTitle);
            return;
         }
         string linksSection = html.Substring(sectionStart, sectionEnd - sectionStart);
         foreach (Match match in RegExes.LinkToPage.Matches(linksSection))
            Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(match.Groups[1].Value)));
         if (Count() == countBefore)
            Console.Error.WriteLine(WikiBot.Msg("No page contains \"{0}\" image."), imageFileTitle);
         else
            Console.WriteLine(
               WikiBot.Msg("WikiPageList filled with titles of pages, that contain \"{0}\" image."),
               imageFileTitle);
      }

      /// <summary>Gets page titles for this WikiPageList from the contributions of the
      /// specified user. The function does not remove redirecting pages from the results;
      /// call RemoveRedirects() manually if you need that. The existing WikiPageList is
      /// not cleared; new titles are added.</summary>
      /// <param name="userName">User's name.</param>
      /// <param name="limit">Maximum number of page titles to get.</param>
      public void FillFromUserContributions(string userName, int limit)
      {
         if (string.IsNullOrEmpty(userName))
            throw new ArgumentNullException("userName");
         if (limit <= 0)
            throw new ArgumentOutOfRangeException("limit", WikiBot.Msg("Limit must be positive."));
         string url = WikiSite.SiteUrl + WikiSite.IndexPath +
                      "index.php?title=Special:Contributions&target=" + HttpUtility.UrlEncode(userName) +
                      "&limit=" + limit.ToString(CultureInfo.InvariantCulture);
         string html = WikiSite.GetPageHtm(url);
         foreach (Match match in RegExes.LinkToPage2.Matches(html))
            Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(match.Groups[1].Value)));
         Console.WriteLine(
            WikiBot.Msg("WikiPageList filled with user's \"{0}\" contributions."), userName);
      }

      /// <summary>Gets page titles for this WikiPageList from the bot account's watchlist.
      /// The function does not remove redirecting pages from the results; call
      /// RemoveRedirects() manually if you need that. It neither filters namespaces nor
      /// clears the existing WikiPageList, so new titles are added to the existing
      /// ones.</summary>
      public void FillFromWatchList()
      {
         // NOTE(review): unlike most other Fill* methods this URL omits WikiSite.SiteUrl —
         // confirm that GetPageHtm resolves such relative paths.
         string html = WikiSite.GetPageHtm(WikiSite.IndexPath + "index.php?title=Special:Watchlist/edit");
         foreach (Match match in RegExes.LinkToPage2.Matches(html))
            Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(match.Groups[1].Value)));
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with bot account's watchlist."));
      }

      /// <summary>Gets page titles for this WikiPageList from the list of recently changed
      /// watched articles (watched by the bot account). The function does not remove
      /// redirecting pages from the results; call RemoveRedirects() manually if you need
      /// that. It neither filters namespaces nor clears the existing WikiPageList, so new
      /// titles are added to the existing ones.</summary>
      public void FillFromChangedWatchedPages()
      {
         // NOTE(review): this fetches "Special:Watchlist/edit" (the full watchlist editor),
         // the same URL as FillFromWatchList — verify this is the intended source for
         // *recently changed* watched pages. Also, the URL omits WikiSite.SiteUrl like
         // FillFromWatchList does; confirm GetPageHtm resolves relative paths.
         string src = WikiSite.GetPageHtm(WikiSite.IndexPath + "index.php?title=Special:Watchlist/edit");
         MatchCollection matches = RegExes.LinkToPage2.Matches(src);
         // (Removed a leftover debug Console.WriteLine(src) that dumped the whole
         // page HTML to stdout.)
         foreach (Match match in matches)
            Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(match.Groups[1].Value)));
         Console.WriteLine(
            WikiBot.Msg("WikiPageList filled with changed pages from bot account's watchlist."));
      }

      /// <summary>Gets page titles for this WikiPageList from the wiki site's internal
      /// search results. The function does not filter namespaces and does not clear the
      /// existing WikiPageList, so new titles are added.</summary>
      /// <param name="searchStr">String to search.</param>
      /// <param name="limit">Maximum number of page titles to get.</param>
      public void FillFromSearchResults(string searchStr, int limit)
      {
         if (string.IsNullOrEmpty(searchStr))
            throw new ArgumentNullException("searchStr");
         if (limit <= 0)
            throw new ArgumentOutOfRangeException("limit", WikiBot.Msg("Limit must be positive."));
         string url = WikiSite.SiteUrl + WikiSite.IndexPath +
                      "index.php?title=Special:Search&fulltext=Search&search=" +
                      HttpUtility.UrlEncode(searchStr) + "&limit=" + limit.ToString(CultureInfo.InvariantCulture);
         string html = WikiSite.GetPageHtm(url);
         // Restrict matching to the results list; Substring throws if the marker is absent.
         html = html.Substring(html.IndexOf("<ul class='mw-search-results'>", StringComparison.Ordinal));
         foreach (Match match in RegExes.LinkToPage2.Matches(html))
            Pages.Add(new WikiPage(WikiSite, HttpUtility.HtmlDecode(match.Groups[1].Value)));
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with search results."));
      }

      /// <summary>Gets page titles for this WikiPageList from www.google.com search
      /// results restricted to this site. The function does not filter namespaces and
      /// does not clear the existing WikiPageList, so new titles are added.</summary>
      /// <param name="searchStr">String to search.</param>
      /// <param name="limit">Maximum number of page titles to get.</param>
      public void FillFromGoogleSearchResults(string searchStr, int limit)
      {
         if (string.IsNullOrEmpty(searchStr))
            throw new ArgumentNullException("searchStr");
         if (limit <= 0)
            throw new ArgumentOutOfRangeException("limit", WikiBot.Msg("Limit must be positive."));
         // Site host without the scheme, for Google's "site:" operator.
         string siteHost = WikiSite.SiteUrl.Substring(
            WikiSite.SiteUrl.IndexOf("://", StringComparison.Ordinal) + 3);
         var searchUri = new Uri("http://www.google.com/search?q=" + HttpUtility.UrlEncode(searchStr) +
                                 "+site:" + siteHost +
                                 "&num=" + limit.ToString(CultureInfo.InvariantCulture));
         string html = WikiBot.GetWebResource(searchUri, "");
         // Result links may use either the short wiki path or the index.php form.
         string wikiPathAlternative = string.IsNullOrEmpty(WikiSite.WikiPath)
            ? "" : Regex.Escape(WikiSite.WikiPath) + "|";
         var resultLinkRe = new Regex("<a href=\"" + Regex.Escape(WikiSite.SiteUrl) + "(" +
                                      wikiPathAlternative + Regex.Escape(WikiSite.IndexPath) +
                                      @"index\.php\?title=)" + "([^\"]+?)\" class=\"?l\"?>");
         foreach (Match match in resultLinkRe.Matches(html))
            Pages.Add(new WikiPage(WikiSite,
                                   HttpUtility.UrlDecode(match.Groups[2].Value).Replace("_", " ")));
         Console.WriteLine(WikiBot.Msg("WikiPageList filled with www.google.com search results."));
      }

      /// <summary>Gets page titles from a UTF8-encoded file. Each title must be on a new
      /// line; surrounding spaces and wiki-link brackets are trimmed, empty lines are
      /// skipped. The existing WikiPageList is not cleared, so new pages are added.</summary>
      /// <param name="filePathName">Path of the file to read titles from.</param>
      public void FillFromFile(string filePathName)
      {
         // "using" guarantees the reader is disposed even if reading throws
         // (the original closed it only on the success path, leaking on exceptions).
         using (var reader = new StreamReader(filePathName)) {
            string line;
            while ((line = reader.ReadLine()) != null) {
               line = line.Trim(" []".ToCharArray());
               if (!string.IsNullOrEmpty(line))
                  Pages.Add(new WikiPage(WikiSite, line));
            }
         }
         Console.WriteLine(
            WikiBot.Msg("WikiPageList filled with titles, found in \"{0}\" file."), filePathName);
      }

      /// <summary>Protects or unprotects every page in this WikiPageList, so that only the
      /// chosen category of users can edit or rename it. Changing page protection modes
      /// requires administrator (sysop) rights on the target wiki.</summary>
      /// <param name="editMode">Protection mode for editing (0 = everyone allowed to edit,
      /// 1 = only registered users, 2 = only administrators).</param>
      /// <param name="renameMode">Protection mode for renaming (0 = everyone allowed to
      /// rename, 1 = only registered users, 2 = only administrators).</param>
      /// <param name="cascadeMode">In cascading mode, all pages included into this page
      /// (e.g., templates or images) are also fully automatically protected.</param>
      /// <param name="expiryDate">Date and time, expressed in UTC, when the protection
      /// expires and the page becomes fully unprotected. Use DateTime.ToUniversalTime()
      /// to convert local time to UTC if necessary. Pass DateTime.MinValue to make the
      /// protection indefinite.</param>
      /// <param name="reason">Reason for protecting this page.</param>
      /// <exception cref="WikiBotException">The list is empty.</exception>
      public void Protect(int editMode, int renameMode, bool cascadeMode,
                          DateTime expiryDate, string reason)
      {
         if (IsEmpty())
            throw new WikiBotException(WikiBot.Msg("The WikiPageList is empty. Nothing to protect."));
         Pages.ForEach(page => page.Protect(editMode, renameMode, cascadeMode, expiryDate, reason));
      }

      /// <summary>Adds every page in this WikiPageList to the bot account's watchlist.</summary>
      /// <exception cref="WikiBotException">The list is empty.</exception>
      public void Watch()
      {
         if (IsEmpty())
            throw new WikiBotException(WikiBot.Msg("The WikiPageList is empty. Nothing to watch."));
         Pages.ForEach(page => page.Watch());
      }

      /// <summary>Removes every page in this WikiPageList from the bot account's watchlist.</summary>
      /// <exception cref="WikiBotException">The list is empty.</exception>
      public void Unwatch()
      {
         if (IsEmpty())
            throw new WikiBotException(WikiBot.Msg("The WikiPageList is empty. Nothing to unwatch."));
         Pages.ForEach(page => page.Unwatch());
      }

      /// <summary>Removes the pages that are not in the given namespaces.</summary>
      /// <param name="neededNSs">Array of integers, presenting keys of namespaces
      /// to retain.</param>
      /// <example><code>pageList.FilterNamespaces(new int[] {0,3});</code></example>
      public void FilterNamespaces(int[] neededNSs)
      {
         // Single O(n) pass; the original removed items one by one with RemoveAt.
         Pages.RemoveAll(page => Array.IndexOf(neededNSs, page.GetNamespace()) == -1);
      }

      /// <summary>Removes the pages that are in the given namespaces.</summary>
      /// <param name="needlessNSs">Array of integers, presenting keys of namespaces
      /// to remove.</param>
      /// <example><code>pageList.RemoveNamespaces(new int[] {2,4});</code></example>
      public void RemoveNamespaces(int[] needlessNSs)
      {
         // Single O(n) pass; the original removed items one by one with RemoveAt.
         Pages.RemoveAll(page => Array.IndexOf(needlessNSs, page.GetNamespace()) != -1);
      }

      /// <summary>Sorts all pages in this WikiPageList alphabetically by title,
      /// using the site's language culture for comparison.</summary>
      /// <exception cref="WikiBotException">The list is empty.</exception>
      public void Sort()
      {
         if (IsEmpty())
            throw new WikiBotException(WikiBot.Msg("The WikiPageList is empty. Nothing to sort."));
         Pages.Sort(new Comparison<WikiPage>(ComparePagesByTitles));
      }

      /// <summary>Compares two pages by title (alphabetically, case-sensitive,
      /// using the site's language culture).</summary>
      /// <param name="x">First page to compare.</param>
      /// <param name="y">Second page to compare.</param>
      /// <returns>Returns 1 if x is greater, -1 if y is greater, 0 if equal.</returns>
      public int ComparePagesByTitles(WikiPage x, WikiPage y)
      {
         // Math.Sign normalizes to -1/0/1 directly, replacing the original's
         // "r / Math.Abs(r)" division guarded by a ternary against r == 0.
         return Math.Sign(string.Compare(x.Title, y.Title, false, WikiSite.langCulture));
      }

      /// <summary>Removes every page in this WikiPageList from the specified category by
      /// deleting links to that category in the pages' texts.</summary>
      /// <param name="categoryName">Category name, with or without prefix.</param>
      public void RemoveFromCategory(string categoryName)
      {
         Pages.ForEach(page => page.RemoveFromCategory(categoryName));
      }

      /// <summary>Adds every page in this WikiPageList to the specified category by adding
      /// links to that category in the pages' texts.</summary>
      /// <param name="categoryName">Category name, with or without prefix.</param>
      public void AddToCategory(string categoryName)
      {
         Pages.ForEach(page => page.AddToCategory(categoryName));
      }

      /// <summary>Adds the specified template to the end of every page in this WikiPageList.</summary>
      /// <param name="templateText">Template text, like "{{template_name|...|...}}".</param>
      public void AddTemplate(string templateText)
      {
         Pages.ForEach(page => page.AddTemplate(templateText));
      }

      /// <summary>Removes the specified template from every page in this WikiPageList.</summary>
      /// <param name="templateTitle">Title of template to remove.</param>
      public void RemoveTemplate(string templateTitle)
      {
         Pages.ForEach(page => page.RemoveTemplate(templateTitle));
      }

      /// <summary>Loads text for the pages in this WikiPageList from the site via the
      /// common web interface. Please don't use this function when going to edit big
      /// amounts of pages on popular public wikis, as it compromises edit conflict
      /// detection; in that case, each page's text should be loaded individually right
      /// before its processing and saving.</summary>
      /// <exception cref="WikiBotException">The list is empty.</exception>
      public void Load()
      {
         if (IsEmpty())
            throw new WikiBotException(WikiBot.Msg("The WikiPageList is empty. Nothing to load."));
         Pages.ForEach(page => page.Load());
      }

      /// <summary>Loads texts and metadata for the pages in this WikiPageList via the XML
      /// export interface. Non-existent pages are automatically removed from the list
      /// (the list is rebuilt from the export response). Please don't use this function
      /// when going to edit big amounts of pages on popular public wikis, as it
      /// compromises edit conflict detection; in that case, each page's text should be
      /// loaded individually right before its processing and saving.</summary>
      /// <exception cref="WikiBotException">The list is empty.</exception>
      public void LoadEx()
      {
         if (IsEmpty())
            throw new WikiBotException(WikiBot.Msg("The WikiPageList is empty. Nothing to load."));
         Console.WriteLine(WikiBot.Msg("Loading {0} pages..."), Pages.Count);
         string res = WikiSite.SiteUrl + WikiSite.IndexPath +
                      "index.php?title=Special:Export&action=submit";
         // Encode the "\r\n" separator together with each title, as LoadEx2 does:
         // a raw CR/LF inside an application/x-www-form-urlencoded body is invalid
         // (the original appended the separator unencoded).
         string postData = Pages.Aggregate ("curonly=True&pages=",
            (current, page) => current + HttpUtility.UrlEncode (page.Title + "\r\n"));
         // "using" disposes the reader even on exceptions (the original only
         // called Close() on the success path).
         using (XmlReader reader = XmlReader.Create(
            new StringReader(WikiSite.PostDataAndGetResultHtm(res, postData)))) {
            Clear();
            while (reader.ReadToFollowing("page")) {
               var p = new WikiPage(WikiSite, "");
               p.ParsePageXml(reader.ReadOuterXml());
               Pages.Add(p);
            }
         }
      }

      /// <summary>Loads text and metadata for pages in WikiPageList via XML export interface.
      /// The function uses XPathNavigator and is less efficient than LoadEx().</summary>
      public void LoadEx2()
      {
         if (IsEmpty())
            throw new WikiBotException(WikiBot.Msg("The WikiPageList is empty. Nothing to load."));
         Console.WriteLine(WikiBot.Msg("Loading {0} pages..."), Pages.Count);
         // Special:Export endpoint; "curonly" requests only the latest revision.
         string res = WikiSite.SiteUrl + WikiSite.IndexPath +
                      "index.php?title=Special:Export&action=submit";
         string postData = Pages.Aggregate ("curonly=True&pages=", (current, page) => current + HttpUtility.UrlEncode (page.Title + "\r\n"));
         string src = WikiSite.PostDataAndGetResultHtm(res, postData);
         // Strip root attributes (namespaces) so the XPath queries below need no prefixes.
         src = WikiBot.RemoveXmlRootAttributes(src);
         XPathDocument doc;
         using (var strReader = new StringReader(src))
            doc = new XPathDocument(strReader);
         XPathNavigator nav = doc.CreateNavigator();
         foreach (WikiPage page in Pages)
         {
            // An apostrophe in the title would break the single-quoted XPath string
            // literal below, so such pages are loaded individually instead.
            if (page.Title.Contains("'")) {
               page.LoadEx();
               continue;
            }
            string query = "//page[title='" + page.Title + "']/";
            var node = nav.SelectSingleNode (query + "revision/text");
            if (node != null)
               page.Text = node.InnerXml;
            // Decode entities; this also re-decodes any pre-existing text when the
            // node was absent, preserving the original behavior.
            page.Text = HttpUtility.HtmlDecode(page.Text);
            var selectSingleNode = nav.SelectSingleNode (query + "id");
            if (selectSingleNode != null) page.PageId = selectSingleNode.InnerXml;
            // Registered editors expose a username (and possibly a numeric id)...
            var lastUser = nav.SelectSingleNode (query + "revision/contributor/username");
            if (lastUser != null)
            {
               page.LastUser = lastUser.InnerXml;
               var lastUserId = nav.SelectSingleNode (query + "revision/contributor/id");
               if (lastUserId != null)
                  page.LastUserId = lastUserId.InnerXml;
            }
            else
            {
               // ...anonymous editors are identified by IP address instead.
               lastUser = nav.SelectSingleNode (query + "revision/contributor/ip");
               if (lastUser != null)
                  page.LastUser = lastUser.InnerXml;
            }
            page.LastUser = HttpUtility.HtmlDecode(page.LastUser);
            var revision = nav.SelectSingleNode (query + "revision/id");
            if (revision != null)
               page.LastRevisionId = revision.InnerXml;
            // A <minor/> element is present only when the last edit was marked minor.
            page.LastMinorEdit = nav.SelectSingleNode(query + "revision/minor") != null;
            var comment = nav.SelectSingleNode (query + "revision/comment");
            if (comment != null)
               page.Comment = comment.InnerXml;
            page.Comment = HttpUtility.HtmlDecode(page.Comment);
            var timeStamp = nav.SelectSingleNode (query + "revision/timestamp");
            if (timeStamp != null)
               page.Timestamp = timeStamp.ValueAsDateTime;
         }
         Console.WriteLine(WikiBot.Msg("Pages download completed."));
      }

      /// <summary>Loads text and metadata for pages in WikiPageList via XML export interface.
      /// The function loads pages one by one, it is slightly less efficient
      /// than LoadEx().</summary>
      public void LoadEx3()
      {
         if (IsEmpty())
            // Route the message through WikiBot.Msg for localization, consistent
            // with Load() and LoadEx().
            throw new WikiBotException(WikiBot.Msg("The WikiPageList is empty. Nothing to load."));
         foreach (WikiPage p in Pages)
            p.LoadEx();
      }

      /// <summary>Gets page titles and page text from local XML dump.
      /// This function consumes much resources.</summary>
      /// <param name="filePathName">The path to and name of the XML dump file as string.</param>
      public void FillAndLoadFromXmlDump(string filePathName)
      {
         Console.WriteLine(WikiBot.Msg("Loading pages from XML dump..."));
         // "using" guarantees the reader (and the underlying file handle) is released
         // even if parsing throws; the original leaked the handle on error.
         using (XmlReader reader = XmlReader.Create(filePathName))
         {
            while (reader.ReadToFollowing("page")) {
               var p = new WikiPage(WikiSite, "");
               p.ParsePageXml(reader.ReadOuterXml());
               Pages.Add(p);
            }
         }
         Console.WriteLine(WikiBot.Msg("XML dump loaded successfully."));
      }

      /// <summary>Gets page titles and page texts from all ".txt" files in the specified
      /// directory (folder). Each file becomes a page: the title comes from the file name
      /// and the text from the file contents. Unicode numeric character references (NCRs)
      /// of characters forbidden in filenames are converted back to the characters
      /// themselves (e.g. "&amp;#x7c;" becomes "|").</summary>
      /// <param name="dirPath">The path and name of a directory (folder)
      /// to load files from.</param>
      public void FillAndLoadFromFiles(string dirPath)
      {
         // NCR escape sequence -> forbidden-in-filenames character it stands for.
         string[,] ncrMap = {
            { "&#x22;", "\"" }, { "&#x3c;", "<" },  { "&#x3e;", ">" },
            { "&#x3f;", "?" },  { "&#x3a;", ":" },  { "&#x5c;", "\\" },
            { "&#x2f;", "/" },  { "&#x2a;", "*" },  { "&#x7c;", "|" }
         };
         foreach (string fileName in Directory.GetFiles(dirPath, "*.txt"))
         {
            var page = new WikiPage(WikiSite, Path.GetFileNameWithoutExtension(fileName));
            for (int i = 0; i < ncrMap.GetLength(0); i++)
               page.Title = page.Title.Replace(ncrMap[i, 0], ncrMap[i, 1]);
            page.LoadFromFile(fileName);
            Pages.Add(page);
         }
      }

      /// <summary>Saves every page in this WikiPageList to the live wiki site using the
      /// default bot edit comment and the default minor edit mark setting ("true" by
      /// default). The saving speed is not throttled; on public wikis prefer the
      /// SaveSmoothly function so the server is not overloaded (HTTP or framework
      /// errors may arise otherwise).</summary>
      public void Save()
      {
         this.Save(WikiBot.EditComment, WikiBot.IsMinorEdit);
      }

      /// <summary>Saves every page in this WikiPageList to the live wiki site. The saving
      /// speed is not throttled; on public wikis prefer the SaveSmoothly function so the
      /// server is not overloaded (HTTP or framework errors may arise otherwise).</summary>
      /// <param name="comment">Your edit comment.</param>
      /// <param name="isMinorEdit">Minor edit mark (true = minor edit).</param>
      public void Save(string comment, bool isMinorEdit)
      {
         Pages.ForEach(page => page.Save(page.Text, comment, isMinorEdit));
      }

      /// <summary>Saves all pages in WikiPageList to live wiki site, waiting for 5 seconds
      /// between each page save operation in order not to overload the server. Uses default
      /// bot edit comment and default minor edit mark setting ("true" by default).</summary>
      public void SaveSmoothly()
      {
         SaveSmoothly(5, WikiBot.EditComment, WikiBot.IsMinorEdit);
      }

      /// <summary>Saves every page in this WikiPageList to the live wiki site, pausing for
      /// the specified number of seconds between saves so as not to overload the server.
      /// Uses the default bot edit comment and the default minor edit mark setting
      /// ("true" by default).</summary>
      /// <param name="intervalSeconds">Number of seconds to wait between each
      /// save operation.</param>
      public void SaveSmoothly(int intervalSeconds)
      {
         this.SaveSmoothly(intervalSeconds, WikiBot.EditComment, WikiBot.IsMinorEdit);
      }

      /// <summary>Saves all pages in WikiPageList to live wiki site. The function waits for specified
      /// number of seconds between each page save operation in order not to overload
      /// server.</summary>
      /// <param name="intervalSeconds">Number of seconds to wait between each
      /// save operation; values below 1 are clamped to 1.</param>
      /// <param name="comment">Your edit comment.</param>
      /// <param name="isMinorEdit">Minor edit mark (true = minor edit).</param>
      public void SaveSmoothly(int intervalSeconds, string comment, bool isMinorEdit)
      {
         // Clamp non-positive intervals: the original only handled 0, and a negative
         // value would make Thread.Sleep throw ArgumentOutOfRangeException.
         if (intervalSeconds <= 0)
            intervalSeconds = 1;
         foreach (WikiPage page in Pages) {
            // Note: the delay is applied before every save, including the first one.
            Thread.Sleep(intervalSeconds * 1000);
            page.Save(page.Text, comment, isMinorEdit);
         }
      }

      /// <summary>Undoes the last edit of every page in this WikiPageList, reverting each
      /// page text to its previous contents. The function doesn't affect other operations
      /// like renaming.</summary>
      /// <param name="comment">Your edit comment.</param>
      /// <param name="isMinorEdit">Minor edit mark (true = minor edit).</param>
      public void Revert(string comment, bool isMinorEdit)
      {
         Pages.ForEach(page => page.Revert(comment, isMinorEdit));
      }

      /// <summary>Saves the titles of all pages in this WikiPageList to the specified file,
      /// one title per line, without square brackets. An existing target file
      /// is overwritten.</summary>
      /// <param name="filePathName">The path to and name of the target file as string.</param>
      public void SaveTitlesToFile(string filePathName)
      {
         this.SaveTitlesToFile(filePathName, false);
      }

      /// <summary>Saves the titles of all pages in this WikiPageList to the specified file,
      /// one title per line. An existing target file is overwritten.</summary>
      /// <param name="filePathName">The path to and name of the target file as string.</param>
      /// <param name="useSquareBrackets">If true, each page title is enclosed
      /// in square brackets.</param>
      public void SaveTitlesToFile(string filePathName, bool useSquareBrackets)
      {
         IEnumerable<string> lines = Pages.Select(
            page => useSquareBrackets ? "[[" + page.Title + "]]" : page.Title);
         // Trim() mirrors the original's removal of surrounding whitespace.
         File.WriteAllText(filePathName, string.Join("\r\n", lines).Trim(), Encoding.UTF8);
         Console.WriteLine(WikiBot.Msg("Titles in WikiPageList saved to \"{0}\" file."), filePathName);
      }

      /// <summary>Saves the contents of all pages in pageList to ".txt" files in specified
      /// directory. Each page is saved to separate file, the name of that file is constructed
      /// from page title. Forbidden characters in filenames are replaced with their
      /// Unicode numeric codes (also known as numeric character references or NCRs).
      /// If the target file already exists, it is overwritten.</summary>
      /// <param name="dirPath">The path and name of a directory (folder)
      /// to save files to.</param>
      public void SaveToFiles(string dirPath)
      {
         string curDirPath = Directory.GetCurrentDirectory();
         Directory.SetCurrentDirectory(dirPath);
         try {
            foreach (WikiPage page in Pages)
               page.SaveToFile();
         }
         finally {
            // Always restore the original working directory, even if a save throws;
            // the original left the process in dirPath on failure.
            Directory.SetCurrentDirectory(curDirPath);
         }
      }

      /// <summary>Loads the contents of all pages in pageList from live site via XML export
      /// and saves the retrieved XML content to the specified file. The functions just dumps
      /// data, it does not load pages in WikiPageList itself, call LoadEx() or
      /// FillAndLoadFromXMLDump() to do that. Note, that on some sites, MediaWiki messages
      /// from standard namespace 8 are not available for export.</summary>
      /// <param name="filePathName">The path to and name of the target file as string.</param>
      public void SaveXmlDumpToFile(string filePathName)
      {
         Console.WriteLine(WikiBot.Msg("Loading {0} pages for XML dump..."), this.Pages.Count);
         string res = WikiSite.SiteUrl + WikiSite.IndexPath +
                      "index.php?title=Special:Export&action=submit";
         string postData = Pages.Aggregate ("catname=&curonly=true&action=submit&pages=", (current, page) => current + HttpUtility.UrlEncode (page.Title + "\r\n"));
         string rawXml = WikiSite.PostDataAndGetResultHtm(res, postData);
         rawXml = WikiBot.RemoveXmlRootAttributes(rawXml).Replace("\n", "\r\n");
         // File.WriteAllText overwrites an existing file and closes the stream even on
         // error; the original FileStream leaked on exception. UTF8Encoding(true)
         // preserves the UTF-8 BOM the original wrote.
         File.WriteAllText(filePathName, rawXml, new UTF8Encoding(true));
         Console.WriteLine(
            WikiBot.Msg("XML dump successfully saved in \"{0}\" file."), filePathName);
      }

      /// <summary>Removes all empty pages from WikiPageList. But firstly don't forget to load
      /// the pages from site using pageList.LoadEx().</summary>
      public void RemoveEmpty()
      {
         Pages.RemoveAll(page => page.IsEmpty());
      }

      /// <summary>Removes all recurring pages from WikiPageList. Only the first page with
      /// a given title remains in WikiPageList, making all page elements unique.</summary>
      public void RemoveRecurring()
      {
         // HashSet-based single pass replaces the original O(n^2) pairwise title scan.
         // Add() returns false for a title already seen, so every occurrence after the
         // first is removed — same result as the original.
         var seenTitles = new HashSet<string>();
         Pages.RemoveAll(page => !seenTitles.Add(page.Title));
      }

      /// <summary>Removes all redirecting pages from WikiPageList. But firstly don't forget to load
      /// the pages from site using pageList.LoadEx().</summary>
      public void RemoveRedirects()
      {
         Pages.RemoveAll(page => page.IsRedirect());
      }

      /// <summary>For all redirecting pages in this WikiPageList, this function loads the titles and
      /// texts of redirected-to pages.</summary>
      public void ResolveRedirects()
      {
         foreach (WikiPage page in Pages.Where(page => page.IsRedirect()))
         {
            page.Title = page.RedirectsTo();
            page.Load();
         }
      }

      /// <summary>Removes all disambiguation pages from WikiPageList. But firstly don't
      /// forget to load the pages from site using pageList.LoadEx().</summary>
      public void RemoveDisambigs()
      {
         Pages.RemoveAll(page => page.IsDisambig());
      }


      /// <summary>Removes every page from this WikiPageList.</summary>
      public void RemoveAll()
      {
         this.Pages.Clear();
      }

      /// <summary>Empties this WikiPageList, discarding all contained Page objects.</summary>
      public void Clear()
      {
         this.Pages.Clear();
      }

      /// <summary>Function changes default English namespace prefixes to correct local prefixes
      /// (e.g. for German wiki-sites it changes "Category:..." to "Kategorie:...").</summary>
      public void CorrectNsPrefixes()
      {
         Pages.ForEach(page => page.CorrectNsPrefix());
      }

      /// <summary>Shows if there are any Page objects in this WikiPageList.</summary>
      /// <returns>True when the list contains no pages, otherwise false.</returns>
      public bool IsEmpty()
      {
         return !Pages.Any();
      }

      /// <summary>Sends titles of all contained pages to console, one per line.</summary>
      public void ShowTitles()
      {
         Console.WriteLine("\n" + WikiBot.Msg("Pages in this WikiPageList:"));
         Pages.ForEach(page => Console.WriteLine(page.Title));
         Console.WriteLine("\n");
      }

      /// <summary>Sends texts of all contained pages to console, each followed by
      /// a horizontal separator line.</summary>
      public void ShowTexts()
      {
         const string separator = "--------------------------------------------------";
         Console.WriteLine("\n" + WikiBot.Msg("Texts of all pages in this WikiPageList:"));
         Console.WriteLine(separator);
         foreach (WikiPage page in Pages)
         {
            page.ShowText();
            Console.WriteLine(separator);
         }
         Console.WriteLine("\n");
      }
   }
}