﻿using System;
using System.Collections.Specialized;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using RedFoxUtils;
using CsQuery;
using System.Web;
using System.Diagnostics;

namespace RedFoxScraper
{
  class Scraper
  {
    // Root URL of the forum being scraped; every combiner builds on this.
    public static string FORUM_BASE = "http://fox.hanyu.com.tw";
    // Storage backend for scraped boards, post briefs, post bodies and images.
    private IDatabase mDatabase = null;
    // HTTP client used to download pages (Big5-encoded source).
    private IHttpClient mClient = null;
    // Progress/cancellation callback.
    // NOTE(review): declared static but assigned from instance Init(), so the
    // last-constructed Scraper's callback wins process-wide — confirm intended.
    private static IScraperCallback mCallback = null;

    /// <summary>
    /// Creates a scraper that discards progress reports (no-op callback).
    /// </summary>
    /// <param name="client">HTTP client used to fetch pages.</param>
    /// <param name="database">Storage for scraped data.</param>
    public Scraper(IHttpClient client, IDatabase database)
      : this(client, database, new MockScraperCallback())
    {
    }

    /// <summary>
    /// Creates a scraper with an explicit progress/cancellation callback.
    /// </summary>
    /// <param name="client">HTTP client used to fetch pages.</param>
    /// <param name="database">Storage for scraped data.</param>
    /// <param name="callback">Receives progress reports and resume/cancel state.</param>
    public Scraper(IHttpClient client, IDatabase database, IScraperCallback callback)
    {
      Init(client, database, callback);
    }

    // Shared constructor body: wires up the three collaborators.
    // NOTE(review): mCallback is a static field, so constructing a second
    // Scraper replaces the first one's callback — confirm this is intended.
    private void Init(IHttpClient client, IDatabase database, IScraperCallback callback)
    {
      mClient = client;
      mDatabase = database;
      mCallback = callback;
    }

    #region Utility functions
    // Invoked by a content parser after a page is parsed, with the page position
    // (current page, total pages), so the caller can fetch the remaining pages.
    delegate void ContentParserCallback(int currentPage, int totalPage);

    // Progress states reported through IScraperCallback.ReportProgress.
    public enum STATE
    {
      STATE_NONE, STATE_BOARD, STATE_FIND_POSTS, STATE_GET_POST
    };

    /// <summary>
    /// Receives progress reports from the scraper and supplies
    /// cancellation and resume state.
    /// </summary>
    public interface IScraperCallback
    {
      /// <returns>true when the caller wants the current operation to stop.</returns>
      bool isCancellationPending();
      /// <summary>Notifies that work for the given state/id/page completed.</summary>
      void ReportProgress(STATE state, int id, int page);
      // Last reported state. NOTE(review): not read anywhere in this file.
      STATE State { get; }
      // Board/post id to resume from; ids below this are skipped by the scrape loops.
      int Id { get; }
      // Page to resume from (used by scrapePostBrief when > 1).
      int Page { get; }
    }

    /// <summary>
    /// Do-nothing callback used when the caller supplies none:
    /// never cancels, never resumes, and discards all progress reports.
    /// </summary>
    class MockScraperCallback : IScraperCallback
    {
      public STATE State
      {
        get { return STATE.STATE_NONE; }
      }

      public int Id
      {
        get { return 0; }
      }

      public int Page
      {
        get { return 0; }
      }

      public bool isCancellationPending()
      {
        return false;
      }

      public void ReportProgress(STATE state, int id, int page)
      {
        // Intentionally empty: progress is discarded.
      }
    }

    /// <summary>
    /// Builds a URL query string from a name/value collection.
    /// For example: { "a": "111", "b": "222" } becomes "?a=111&amp;b=222".
    /// Keys and values are URL-encoded; insertion order is preserved.
    /// </summary>
    /// <param name="nvc">Collection of parameter names and values.</param>
    /// <returns>The query string, including the leading '?'.</returns>
    private static string ToQueryString(NameValueCollection nvc)
    {
      var pairs = nvc.AllKeys.Select(
        key => HttpUtility.UrlEncode(key) + "=" + HttpUtility.UrlEncode(nvc[key]));
      return "?" + string.Join("&", pairs);
    }

    /// <summary>
    /// Parses the #PageInfo div of a forum page.
    /// The expected text starts with "頁次：" followed by "current/total " up to
    /// the first space, e.g. "頁次：2/17 ...".
    /// </summary>
    /// <param name="cq">Parsed document.</param>
    /// <returns>Tuple (current page, total pages), or null when the page info
    /// is absent or cannot be parsed.</returns>
    private static Tuple<int, int> getPageInfo(CQ cq)
    {
      var pageInfoText = cq.Select("#PageInfo").Text();
      Debug.Print("{0}", pageInfoText);
      if (!pageInfoText.StartsWith("頁次："))
        return null;

      // The indicator runs up to the first space. Guard against a missing
      // space: the old Substring(3, spacePos - 3 + 1) threw
      // ArgumentOutOfRangeException when IndexOf returned -1.
      var spacePos = pageInfoText.IndexOf(' ');
      if (spacePos < 3)
        return null;

      // Keep the trailing space (original behavior); Int32.TryParse tolerates it.
      var pageOf = pageInfoText.Substring(3, spacePos - 3 + 1);
      Debug.Print("{0}", pageOf);
      String[] splited = pageOf.Split(new char[] { '/' });
      int currentPage, totalPage;
      if (splited.Length >= 2
          && Int32.TryParse(splited[0], out currentPage)
          && Int32.TryParse(splited[1], out totalPage))
      {
        return new Tuple<int, int>(currentPage, totalPage);
      }
      // Malformed numbers now yield null instead of a FormatException.
      return null;
    }

    /// <summary>
    /// Quick check for whether another page follows the current one.
    /// </summary>
    /// <param name="cq">Parsed document.</param>
    /// <returns>true when the page info shows current &lt; total; false when
    /// the info is missing or this is the last page.</returns>
    private static bool hasNextPage(CQ cq)
    {
      var info = getPageInfo(cq);
      return info != null && info.Item1 < info.Item2;
    }
    #endregion

    #region Scrape pattern
    /// <summary>
    /// The basic pattern for scraping one web page: build the URL, download
    /// the Big5-encoded source, and hand the HTML to the parser.
    /// </summary>
    /// <param name="urlCombiner">Produces the URL to fetch.</param>
    /// <param name="parser">Consumes the downloaded HTML.</param>
    private void scrape(IURLCombiner urlCombiner, IContentParser parser)
    {
      string msg;
      string content = mClient.GetSrc(urlCombiner.getUrl(), "big5", out msg);
      parser.parse(content);
    }
    #endregion

    #region IURLCombiner implementations
    /// <summary>
    /// Base URL combiner; exposes the forum base URL to subclasses.
    /// </summary>
    class BaseURLCombiner : IURLCombiner
    {
      // Shared root of every generated URL.
      protected static string forumBase = FORUM_BASE;

      /// <returns>The page URL; the base implementation yields an empty string.</returns>
      public virtual string getUrl()
      {
        return string.Empty;
      }
    }

    /// <summary>
    /// URL combiner for the forum home page (used by scrapeBoard()).
    /// </summary>
    class HomePageURLCombiner : BaseURLCombiner
    {
      /// <returns>The forum base URL itself.</returns>
      public override string getUrl()
      {
        return forumBase;
      }
    }

    /// <summary>
    /// Builds the URL of one page of a board's thread list (List.aspx).
    /// </summary>
    class BoardURLCombiner : BaseURLCombiner
    {
      private readonly int mBoardId;
      private readonly int mPage;

      public BoardURLCombiner(int boardId, int page)
      {
        mBoardId = boardId;
        mPage = page;
      }

      /// <returns>e.g. http://fox.hanyu.com.tw/List.aspx?Page=2&amp;fbId=5</returns>
      public override string getUrl()
      {
        // Key order (Page, fbId) is kept stable so generated URLs don't change.
        var query = new NameValueCollection
        {
          { "Page", mPage.ToString() },
          { "fbId", mBoardId.ToString() }
        };
        return forumBase + "/List.aspx" + ToQueryString(query);
      }
    }

    /// <summary>
    /// Builds the URL of one page of a post thread (View.aspx).
    /// </summary>
    class PostURLCombiner : BaseURLCombiner
    {
      private readonly int mBoardId;
      private readonly int mPostId;
      private readonly int mPage;

      public PostURLCombiner(int boardId, int postId, int page)
      {
        mBoardId = boardId;
        mPostId = postId;
        mPage = page;
      }

      /// <returns>e.g. http://fox.hanyu.com.tw/View.aspx?Id=7&amp;fbId=5&amp;Page=2</returns>
      public override string getUrl()
      {
        // Key order (Id, fbId, Page) is kept stable so generated URLs don't change.
        var query = new NameValueCollection
        {
          { "Id", mPostId.ToString() },
          { "fbId", mBoardId.ToString() },
          { "Page", mPage.ToString() }
        };
        return forumBase + "/View.aspx" + ToQueryString(query);
      }
    }
    #endregion

    #region IContentParser implementations.
    /// <summary>
    /// Base content parser; carries the IDatabase collaborator.
    /// </summary>
    class BaseContentParser : IContentParser
    {
      // Storage backend available to subclasses; may be null.
      protected IDatabase mDb;

      public BaseContentParser()
        : this(null)
      {
      }

      public BaseContentParser(IDatabase db)
      {
        mDb = db;
      }

      /// <summary>Parses downloaded HTML; the base implementation does nothing.</summary>
      public virtual void parse(string content)
      {
      }
    }

    /// <summary>
    /// Content parser base that, in addition to the IDatabase member, carries a
    /// paging callback invoked with (current page, total pages) after parsing.
    /// </summary>
    class BaseContentParserWithCallback : BaseContentParser
    {
      // May be null when the caller does not want paging continuation.
      protected ContentParserCallback mCallback;

      public BaseContentParserWithCallback(IDatabase db, ContentParserCallback callback)
        : base(db)
      {
        mCallback = callback;
      }
    }

    /// <summary>
    /// Content parser for scrapeBoard(): extracts every board link
    /// (List.aspx?fbId=...) from the home page and stores board id + title.
    /// </summary>
    class HomePageContentParser : BaseContentParser
    {
      public HomePageContentParser(IDatabase db)
        : base(db)
      {
      }

      /// <param name="content">Raw HTML of the forum home page.</param>
      public override void parse(string content)
      {
        if (String.IsNullOrEmpty(content))
          return;

        var cq = CQ.Create(content);
        var cells = cq.Select("#dlfclist > tbody > tr > td > table > tbody > tr > td > table > tbody > tr > td > a");
        foreach (var cell in cells)
        {
          var href = cell.GetAttribute("href");
          // Guard: anchors without an href attribute would otherwise throw
          // NullReferenceException on StartsWith.
          if (href == null || !href.StartsWith("List.aspx"))
            continue;
          var qs = HttpUtility.ParseQueryString(new Uri(FORUM_BASE + "/" + href).Query);
          // NOTE: insertBorad is the (misspelled) IDatabase method name — kept as-is.
          mDb.insertBorad(Convert.ToInt32(qs["fbId"]), cell.FirstChild.InnerText);
        }
      }
    }

    /// <summary>
    /// Content parser for scrapePostBrief(): extracts id, title, author and
    /// posted date of every thread listed on one page of List.aspx and stores
    /// them via IDatabase.insertPostBrief.
    /// </summary>
    class BoardContentParser : BaseContentParserWithCallback
    {
      public BoardContentParser(IDatabase db, ContentParserCallback callback)
        : base(db, callback)
      {
      }

      /// <param name="content">Raw HTML of one List.aspx page.</param>
      public override void parse(string content)
      {
        if (String.IsNullOrEmpty(content))
          return;

        var cq = CQ.Create(content);
        var cells = cq.Select("#dgfiList > tbody > tr > td > span > a");
        Debug.WriteLine("cells.length = {0}", cells.Length);
        foreach (var cell in cells)
        {
          // Only anchors inside a "_Title" span are thread links.
          if (!cell.ParentNode.Id.EndsWith("_Title"))
            continue;
          var href = cell.GetAttribute("href");
          // Guard: a missing href would otherwise throw NullReferenceException.
          if (href == null || !href.StartsWith("View.aspx"))
            continue;
          var qs = HttpUtility.ParseQueryString(new Uri(FORUM_BASE + "/" + href).Query);
          // Sibling-walk to the table cell holding the posted date.
          // NOTE(review): depends on the exact column layout of the list table.
          var postedString = cell.ParentNode.ParentNode.NextElementSibling.NextElementSibling.NextElementSibling.FirstChild.NextElementSibling.InnerText;
          // NOTE(review): DateTime.Parse uses the current culture — confirm the
          // forum's date format matches the machine locale.
          var dt = DateTime.Parse(postedString);
          Debug.WriteLine(dt.ToString());
          Debug.WriteLine(String.Format("{0} {1} {2} {3} {4}",
              Convert.ToInt32(qs["Id"]),
              Convert.ToInt32(qs["fbId"]),
              cell.InnerText,
              cell.ParentNode.ParentNode.NextElementSibling.FirstChild.InnerText,
              dt)
            );
          // TODO: duplicate ids occasionally appear; cause not yet identified.
          mDb.insertPostBrief(
              Convert.ToInt32(qs["Id"]),
              Convert.ToInt32(qs["fbId"]),
              cell.InnerText,
              cell.ParentNode.ParentNode.NextElementSibling.FirstChild.InnerText,
              dt
            );
        }

        if (mCallback != null)
        {
          var pageInfo = getPageInfo(cq);
          // Page info present: report (current, total) so the caller can fetch
          // the remaining pages; null means nothing to continue.
          if (pageInfo != null)
            mCallback(pageInfo.Item1, pageInfo.Item2);
        }
      }
    }

    /// <summary>
    /// Content parser for scrapePostDetail(): extracts every floor (post body,
    /// title, author, timestamp) found on one View.aspx page and stores it.
    /// </summary>
    class PostContentParser : BaseContentParserWithCallback
    {
      // Board id the post belongs to.
      private int mFbId;
      // Id of the thread's first post (floor 1); other floors found on the page
      // are stored as related posts.
      private int mId;

      public PostContentParser(IDatabase db, ContentParserCallback callback, int fbId, int id)
        : base(db, callback)
      {
        mFbId = fbId;
        mId = id;
      }

      /// <summary>
      /// Parses one page of a post thread and writes each floor to the database.
      /// </summary>
      /// <param name="content">Raw HTML of one View.aspx page.</param>
      public override void parse(string content)
      {
        if (String.IsNullOrEmpty(content))
          return;

        var cq = CQ.Create(content);
        var cells = cq.Select("#dgfiList > tbody > tr > td > table > tbody > tr > td > table > tbody > tr > td > span");
        Debug.WriteLine("cells.length = {0}", cells.Length);
        foreach (var cell in cells)
        {
          // Only spans whose server-generated id ends with "_Body" hold floor content.
          if (!cell.Id.EndsWith("_Body"))
            continue;

          // BODY
          // Walk the surrounding table markup to the element carrying this
          // floor's numeric id in its "name" attribute.
          // NOTE(review): these long ParentNode/Sibling chains depend on the
          // exact table nesting of the forum markup and break if it changes.
          string idStr = cell.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.PreviousElementSibling.FirstChild.FirstChild.FirstChild.FirstChild.FirstChild.GetAttribute("name");
          int id = -1;
          if (!Int32.TryParse(idStr, out id))
          {
            // id stays -1 on failure; processing continues regardless.
            Debug.WriteLine("Parse fail.");
          }
          string body = convertCharacters(cell.InnerHTML);
          string title = cell.PreviousElementSibling.PreviousElementSibling.InnerHTML;
          // Author and timestamp come from neighbouring cells of the same floor row.
          string author = cell.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.PreviousElementSibling.FirstChild.FirstChild.FirstChild.FirstChild.FirstChild.NextElementSibling.FirstChild.FirstChild.FirstChild.FirstChild.FirstChild.InnerText;
          string dateTime = cell.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.ParentNode.NextElementSibling.FirstChild.FirstChild.FirstChild.NextElementSibling.FirstChild.FirstChild.NextElementSibling.FirstChild.NextElementSibling.InnerHTML;
          // NOTE(review): culture-sensitive parse — assumes the machine locale
          // matches the forum's date format; confirm.
          var dt = DateTime.Parse(dateTime);
          Debug.WriteLine(string.Format("{0} / {1} / {2} / {3}", id, title, author, dt));
          Debug.WriteLine(id);

          // Handle embedded images: each img src is passed to mDb.saveImage and
          // replaced with the value it returns (presumably a local/stored path
          // — TODO confirm saveImage's contract).
          var imagesCq = CQ.Create(cell.InnerHTML);
          var imgs = imagesCq.Select("img");
          if (imgs.Length > 0)
          {
            foreach (var img in imgs)
            {
              if (img.HasAttribute("src"))
              {
                // rewrite the image's url.
                img.Attributes["src"] = mDb.saveImage(id, img.Attributes["src"]);
              }
            }
          }

          if (id != mId)
          {
            // Not floor 1: insert this floor as a reply to post mId.
            Debug.WriteLine("Insert related posts.");
            mDb.insertRelatedPost(
                id,
                mId,
                mFbId,
                title,
                body,
                author,
                dt
              );
          }
          else
          {
            // Equal ids: this is floor 1 — only update the stored body.
            Debug.WriteLine("Update post. mId={0} id={1} body={2}", mId, id, body);
            mDb.updatePost(mId, body);
          }
        }

        if (mCallback != null)
        {
          var pageInfo = getPageInfo(cq);
          // Page info present: report (current, total) so the caller can fetch
          // the remaining pages; null means nothing to continue.
          if (pageInfo != null)
            mCallback(pageInfo.Item1, pageInfo.Item2);
        }
      }

      /// <summary>
      /// Converts characters substituted by the forum software back to their
      /// originals: fullwidth '／' and '；' to ASCII, and the &#160; entity to a space.
      /// </summary>
      /// <param name="body">Original string.</param>
      /// <returns>The converted content; "" when the input is null or empty.</returns>
      private string convertCharacters(string body)
      {
        if (String.IsNullOrEmpty(body))
          return "";

        List<Tuple<string, string>> cases = new List<Tuple<string, string>>();
        cases.Add(new Tuple<string, string>("／", "/"));
        cases.Add(new Tuple<string, string>("；", ";"));
        cases.Add(new Tuple<string, string>("&#160;", " "));

        string oldBody;
        string newBody = body;
        foreach (var ccase in cases)
        {
          oldBody = newBody;
          newBody = oldBody.Replace(ccase.Item1, ccase.Item2);
        }

        return newBody;
      }
    }
    #endregion

    #region Exposed methods.
    /// <summary>
    /// Scrapes the list of forum boards from the home page and stores them,
    /// then reports STATE_BOARD progress.
    /// </summary>
    public void scrapeBoard()
    {
      var combiner = new HomePageURLCombiner();
      var parser = new HomePageContentParser(mDatabase);
      scrape(combiner, parser);
      mCallback.ReportProgress(STATE.STATE_BOARD, 0, 0);
    }

    /// <summary>
    /// Scrapes title/date/author of every post in every board (no bodies).
    /// Boards with an id below the callback's resume Id are skipped; stops
    /// early when cancellation is requested.
    /// </summary>
    public void scrapePostsBrief()
    {
      foreach (var board in mDatabase.getBoards())
      {
        if (mCallback.isCancellationPending())
          return;
        if (board.Item1 >= mCallback.Id)
          scrapePostBrief(board);
      }
    }

    /// <summary>
    /// Scrapes all post briefs (title/date/author, no bodies) of one board,
    /// resuming from the callback's saved page when there is one.
    /// </summary>
    /// <param name="board">(board id, board name)</param>
    private void scrapePostBrief(Tuple<int, string> board)
    {
      // Resume from the saved page if a previous run got past page 1;
      // otherwise always scrape from the first page.
      int startPage = (mCallback.Page > 1) ? mCallback.Page : 1;
      scrapePostBrief(board, startPage);
    }

    /// <summary>
    /// Scrapes post briefs (title/date/author) of one board starting at the
    /// given page. Parsing the first fetched page reveals the total page
    /// count; the callback below then fetches every remaining page.
    /// </summary>
    /// <param name="board">(board id, board name)</param>
    /// <param name="page">First page to scrape.</param>
    private void scrapePostBrief(Tuple<int, string> board, int page)
    {
      // Invoked after the first page is parsed with (current, total); fetches
      // all following pages. Those fetches get a null callback so they do not
      // trigger further continuation.
      ContentParserCallback callback = delegate(int currentPage, int totalPage)
      {
        for (int i = currentPage + 1; i <= totalPage; i++)
        {
          scrape(
            new BoardURLCombiner(board.Item1, i),
            new BoardContentParser(mDatabase, null)
            );
          mCallback.ReportProgress(STATE.STATE_FIND_POSTS, board.Item1, i);
        }
      };

      if (mCallback.isCancellationPending())
        return;

      // Scrape the first requested page of the specified board.
      scrape(
        new BoardURLCombiner(board.Item1, page),
        new BoardContentParser(mDatabase, callback)
        );
      // BUG FIX: report the page actually scraped. This was hard-coded to 1,
      // which mis-reported progress whenever we resumed from a later page.
      mCallback.ReportProgress(STATE.STATE_FIND_POSTS, board.Item1, page);
    }

    /// <summary>
    /// Scrapes full post contents for every board in the database.
    /// </summary>
    public void scrapePostDetail()
    {
      foreach (var board in mDatabase.getBoards())
        scrapePostDetail(board);
    }

    /// <summary>
    /// Scrapes the full content of every post in the given board, skipping
    /// post ids below the callback's resume Id; stops on cancellation.
    /// </summary>
    /// <param name="board">(board id, board name)</param>
    public void scrapePostDetail(Tuple<int, string> board)
    {
      foreach (var postId in mDatabase.getPostsByBoard(board.Item1))
      {
        if (mCallback.isCancellationPending())
          return;
        if (postId >= mCallback.Id)
          scrapePostDetail(board.Item1, postId);
      }
    }

    /// <summary>
    /// Scrapes the full content of the listed posts in the given board.
    /// </summary>
    /// <param name="board">(board id, board name)</param>
    /// <param name="postIds">Ids of the posts to fetch.</param>
    public void scrapePostDetail(Tuple<int, string> board, List<int> postIds)
    {
      foreach (var postId in postIds)
      {
        // Consistency fix: honor cancellation like every other scrape loop
        // in this class (previously this overload could not be interrupted).
        if (mCallback.isCancellationPending())
          return;
        scrapePostDetail(board.Item1, postId);
      }
    }

    /// <summary>
    /// Scrapes the full content (all pages) of one post. Requires login.
    /// </summary>
    /// <param name="fbId">Board id.</param>
    /// <param name="id">Post id.</param>
    public void scrapePostDetail(int fbId, int id)
    {
      // After page 1 is parsed we learn the total page count; this delegate
      // fetches the rest. The continuation parsers get a null callback so
      // they do not recurse.
      ContentParserCallback fetchRemaining = delegate(int currentPage, int totalPage)
      {
        if (currentPage == totalPage)
          return;
        for (int p = currentPage + 1; p <= totalPage; p++)
        {
          scrape(
            new PostURLCombiner(fbId, id, p),
            new PostContentParser(mDatabase, null, fbId, id)
            );
          mCallback.ReportProgress(STATE.STATE_GET_POST, id, p);
        }
      };

      if (mCallback.isCancellationPending())
        return;

      scrape(
        new PostURLCombiner(fbId, id, 1),
        new PostContentParser(mDatabase, fetchRemaining, fbId, id)
      );
      mCallback.ReportProgress(STATE.STATE_GET_POST, id, 1);
    }
    #endregion
  }
}
