#include <boost/lexical_cast.hpp>
#include <string>

#include <analyser/AnalyseArticlelist.h>
#include <analyser/AnalyseArticle.h>
#include <analyser/AnalyseContents.h>
#include <analyser/AnalyseChapter.h>
#include <config/Config.h>
#include <config/RuleManager.h>
#include <config/NovelConfig.h>
#include <gather/Gather.h>
#include <dao/Connections.h>
#include <dao/DaoAttach.h>
#include <exception/NpException.h>
#include <utils/XString.h>
#include "RobotGather.h"

#define SIZE_SPLIT_UNIT 1000 //文件分文件夹保存单位
//#define NOT_GET_CHAPTER_CONTENT 1
#define DEBUG 1
//todo:
//1.加入logger接口
//2.分析卷，如果多卷时候会有问题，需要测试修改
//3.robotgather采集后入库过程需要编译，测试，修改
//4.将封面下载后，需要将封面按照配置的文件路径filepath,并根据articleid号，1000个创建一个文件夹保存封面（txt，附件保存也是一样的规则）
//5.分析卷后，需要将附件内容也分析出来，robotgather 需要处理将附件下载并保存的过程
//6.保存txt后，还需要更新article，以保存最新更新的章节ID及章节名称。
//7.单本小说采集完成后，需要将刚才采集的aticleid更新到在内存中的config,程序关闭后退出articlelist循环的时候，将config保存至文件.
//8.迁移mover至另外一个pro,测试mover
//9.怎么分析备选站的章节内容，以最快的速度匹配出与qidian一样名称的章节。
//10.备选站没有最新内容时，查询baidu找出一样章节名称。
using namespace std;
using namespace npexception;
RobotGather::RobotGather()
{
  // Pre-allocate the chapter cache that work() reuses for every article
  // (filled by DaoChapter::getChapterList).
  m_chapterList = new chapterInfo::ChapterInfo[SIZE_CHAPTER_LIST];
  // NOTE(review): the disabled memset below would have used sizeof(pointer),
  // not sizeof(element) — element default construction makes it unnecessary.
  //memset((void*)m_chapterList, 0x00, sizeof(m_chapterList) * SIZE_CHAPTER_LIST);

  // Load configuration first; the connection pool initialized next
  // presumably reads it — keep this order.
  Config::getInstance()-> initializeConfigs();
  Connections::getInstance()->init();
}

RobotGather::~RobotGather()
{
  // Release the chapter cache. delete[] on a null pointer is a defined
  // no-op, so the former explicit NULL guard was redundant.
  delete[] m_chapterList;
  m_chapterList = nullptr;
}

void RobotGather::work()
{
  // Main gather loop: walk the remote novel-list pages, parse each novel's
  // metadata and chapter list, insert/update articles, chapters and
  // attachments in the target database, and download chapter text/images.
  // Runs until Config::getOffon() reports the gatherer is switched off.
  //
  // NOTE(review): the pervasive `static` locals make this method stateful
  // and non-reentrant — confirm work() is only driven from a single thread.
  static DaoArticle daoArticle;
  static DaoChapter daoChapter;
  static DaoAttach daoAttach;
  // All three DAOs share the "<flag>target" connection from the pool.
  daoArticle.setConn(Connections::getInstance()->getConnection(Config::getInstance()->getConfig()->flag + "target"));
  daoChapter.setConn(Connections::getInstance()->getConnection(Config::getInstance()->getConfig()->flag+ "target"));
  daoAttach.setConn(Connections::getInstance()->getConnection(Config::getInstance()->getConfig()->flag+ "target"));
  // Seed the in-memory id counters from the current DB maxima.
  NovelConfig::getInstance()->setMaxArticleID(daoArticle.getArticleMaxID());
  NovelConfig::getInstance()->setMaxChapterID(daoChapter.getChapterMaxID());
  static int articleIndex;
  while(Config::getInstance()->getOffon())
  {
    static AnalyseArticleList analyseArticleList;
    static Rule* rule;
    rule = RuleManager::getInstance()->getRule(Config::getInstance()->getConfig()->ruleflag);
    analyseArticleList.setRule(*rule);
    static string htmlString;
    static string listUrl;
    listUrl = analyseArticleList.getNovelListUrl();
    // NOTE(review): replaceAll below substitutes "(*)" in listUrl in place,
    // so from ipage==2 on the placeholder is gone and the page-1 URL is
    // re-fetched; listUrl likely needs resetting inside this loop. Also,
    // `ipage < maxpage` never requests page maxpage itself — confirm intended.
    for(int ipage =1; ipage< Config::getInstance()->getConfig()->maxpage;++ipage)
    {
      XString::replaceAll(listUrl,"(*)", boost::lexical_cast<string>(ipage));
      htmlString.assign(Gather::getInstance()->collect(listUrl.c_str()));
      analyseArticleList.setContent(htmlString);
      analyseArticleList.analyseUrlList();//parse the article URL list out of the page
      for(articleIndex = 0; articleIndex< analyseArticleList.getSize(); ++articleIndex)
      {
        static AnalyseArticle analyseArticle;
        analyseArticle.setRule(*rule);
        // NOTE(review): hard-coded test URL left in — the real per-article
        // fetch is the commented-out analyseArticleList[articleIndex] line.
        htmlString.assign(Gather::getInstance()->collect("http://www.qidian.com/Book/2315236.aspx"));
        //htmlString.assign(Gather::getInstance()->collect("http://www.qidian.com/Book/24857.aspx"));
        //htmlString.assign(Gather::getInstance()->collect(analyseArticleList[articleIndex].c_str()));
        analyseArticle.setContent(htmlString);
        analyseArticle.analyseNovelInfo();//parse the novel metadata
        static AnalyseContents analyseContents;
        analyseContents.setRule(*rule);
        analyseContents.setArticle(analyseArticle.getArticle());
        htmlString.assign(Gather::getInstance()->collect(analyseArticle.getArticle().contentsLink.c_str()));
        analyseContents.setContent(htmlString);
        analyseContents.analyseVolumeList();//parse the volume/chapter list
        for(int ci = 0 ; ci< analyseContents.getSize(); ++ci)
        {
          debugChapter(analyseContents[ci]);
        }
        //begin stock in
        static AnalyseChapter analyseChapter;
        static bool articleExists = false;
        static articleInfo::ArticleInfo articleinfo;
        // getArticle returns 1 when the novel already exists in the DB
        // (and fills articleinfo with the stored record).
        daoArticle.getArticle(analyseArticle.getArticle().articlename, articleinfo)==1?articleExists=true:articleExists=false;
        if(!articleExists)
        {
          daoArticle.addArticle(analyseArticle.getArticle());  //novel id is memory maxid
          gatherCover(analyseArticle.getArticle().cover, analyseArticle.getArticle().articleid);
        }
        else
        {
          // Known novel: adopt the stored record (ids etc.) for this pass.
          analyseArticle.setArticle(articleinfo);
        }
        static int chapterLen = 0;
        // Number of chapters already stored for this article; the rows are
        // loaded into m_chapterList.
        chapterLen = daoChapter.getChapterList(analyseArticle.getArticle(), m_chapterList);
        for(int ci = 0 ; ci< analyseContents.getSize(); ++ci)
        {
          analyseContents[ci].articleid = analyseArticle.getArticle().articleid;
          if( ci < chapterLen)
          {
            // Chapter already known: decide whether it must be re-gathered.
            if(m_chapterList[ci].ctype == "1")//image chapter
            {
              analyseContents[ci].isnew = '1';//re-gather the txt
            }
            else
            { analyseContents[ci].isnew='0'; }
            analyseContents[ci].chapterid = m_chapterList[ci].chapterid;//carry over the existing chapter id
          }
          else if(ci >= chapterLen)
          {
            // Brand-new chapter: allocate the next id and insert it.
            analyseContents.setChapterID(ci, NovelConfig::getInstance()-> getMaxChapterID());
            daoChapter.addChapter(analyseContents[ci]);
          }
          if(ci == analyseContents.getSize() -1)
          {
            // NOTE(review): when ci >= chapterLen this reads m_chapterList
            // past the entries filled by getChapterList (stale/uninitialized
            // data); the freshly parsed analyseContents[ci] is probably what
            // was intended here.
            analyseArticle.getArticle().lastchapterid = m_chapterList[ci].chapterid;
            analyseArticle.getArticle().lastchapter = m_chapterList[ci].chaptername;
          }
          if(analyseContents[ci].chaptertype == '1')//volume
          {continue; }
          if(boost::lexical_cast<int>(analyseContents[ci].isnew) > 0 )
          {
            //gather not vip chapter content
            if( analyseContents[ci].isvip =='0')
            {
              analyseChapter.setRule(*rule);
              analyseChapter.setContent(Gather::getInstance()->collect(analyseContents[ci].contentsLink.c_str()));
              analyseChapter.analyseChapterInfo(analyseContents[ci]);
              if(analyseChapter.getAttachSize() == 0)
              {
#ifndef NOT_GET_CHAPTER_CONTENT
                gatherChapterContent(analyseContents[ci], analyseChapter.getContentTxt());
#endif
                if(analyseContents[ci].ctype == "1")//was an image chapter, now plain text: update the DB
                {
                  analyseContents[ci].ctype ="0";
                  daoChapter.updateChapterType(analyseContents[ci]);
                  daoAttach.updateAttachInvalid(analyseContents[ci].articleid);
                }
              }
              else if(analyseChapter.getAttachSize() >0 && analyseContents[ci].ctype == "2")//images present and the chapter is new
              {
                analyseContents[ci].ctype ="1";
                daoChapter.updateChapterType(analyseContents[ci]);
                //mark the chapter type as image
                for(int ai = 0 ;ai<analyseChapter.getAttachSize(); ++ai)
                {
                  getAttachImage(analyseChapter[ai]);
                  daoAttach.addAttach(analyseChapter[ai]);
                }
              }
            }
            else//VIP chapter
            {

            }
          }
        }
        //confirm chapter list to mysql
        //throwExIf(!daoChapter.confirmCacheTran(),1003);
      }
    }
  }
}

void RobotGather::gatherCover(const string& coverLink, const string& bookID)
{
  // Download the novel cover and save it as
  //   <filepath>/files/article/image/<id / SIZE_SPLIT_UNIT>/<bookID>/<bookID>.jpg
  // Covers are sharded into one sub-directory per SIZE_SPLIT_UNIT books.
  int len = 0;
  char* cover =  Gather::getInstance()->collectAPic(coverLink.c_str(),len);    //gather the novel cover
  if(cover == NULL || len <= 0)
  {
    return; // download failed — nothing to write
  }
  // NOTE(review): ownership of `cover` is not visible here; confirm whether
  // the caller must free the buffer returned by collectAPic — otherwise it leaks.

  char path[512] = {0};
  // snprintf instead of sprintf/strcat: a long configured filepath could
  // otherwise overflow the fixed-size buffer.
  int dirLen = snprintf(path, sizeof(path), "%s/files/article/image/%d/%s",
                        Config::getInstance()->getConfig()->filepath.c_str(),
                        boost::lexical_cast<int>(bookID) / SIZE_SPLIT_UNIT,
                        bookID.c_str());
  if(dirLen < 0 || dirLen >= static_cast<int>(sizeof(path)))
  {
    return; // path truncated — refuse to write to a mangled location
  }
  m_file.checkDir(path); // ensure the shard directory exists
  int fileLen = snprintf(path + dirLen, sizeof(path) - dirLen, "/%s.jpg", bookID.c_str());
  if(fileLen < 0 || fileLen >= static_cast<int>(sizeof(path)) - dirLen)
  {
    return; // file name truncated
  }
  m_file.writeBinary(cover, len, path);
}

void RobotGather::getAttachImage(attachInfo::AttachInfo& attach)
{
  // TODO: not implemented yet — should download the attachment image and
  // store it under the configured file path (see todo item 5 at the top of
  // this file). Currently a no-op; work() still records the attach row.
}

void RobotGather::gatherChapterContent(const chapterInfo::ChapterInfo& chapter, const string& content)
{
  // Persist one chapter's text as
  //   <filepath>/files/article/txt/<articleid / SIZE_SPLIT_UNIT>/<articleid>/<chapterid>.txt
  // Any existing file for the chapter is replaced.
  char path[512] = {0};
  // snprintf instead of sprintf/strcat: a long configured filepath could
  // otherwise overflow the fixed-size buffer.
  int dirLen = snprintf(path, sizeof(path), "%s/files/article/txt/%d/%s",
                        Config::getInstance()->getConfig()->filepath.c_str(),
                        boost::lexical_cast<int>(chapter.articleid)/SIZE_SPLIT_UNIT,
                        chapter.articleid.c_str());
  if(dirLen < 0 || dirLen >= static_cast<int>(sizeof(path)))
  {
    return; // path truncated — refuse to write to a mangled location
  }
  m_file.checkDir(path); // ensure the shard directory exists
  int fileLen = snprintf(path + dirLen, sizeof(path) - dirLen, "/%s.txt", chapter.chapterid.c_str());
  if(fileLen < 0 || fileLen >= static_cast<int>(sizeof(path)) - dirLen)
  {
    return; // file name truncated
  }
#ifdef DEBUG
  // Trace the target file in debug builds only (was an unconditional print).
  std::cout<<path<<std::endl;
#endif
  if(m_file.isExists(path))
  {m_file.deleteFile(path);}
  m_file.writeFile(content, path);
}

void RobotGather::debugNovelInfo(const articleInfo::ArticleInfo& article)
{
#ifdef DEBUG
  // Dump the parsed novel metadata, one field per line (debug builds only).
  std::cout << article.articleid    << std::endl
            << article.articlename  << std::endl
            << article.author       << std::endl
            << article.sortname     << std::endl
            << article.cover        << std::endl
            << article.contentsLink << std::endl
            << article.intro        << std::endl;
#endif
}

  void RobotGather::debugChapter(const chapterInfo::ChapterInfo& chapter)
  {
#ifdef DEBUG
    // Print a one-line summary of a parsed chapter (debug builds only).
    std::cout << chapter.articlename << " "
              << chapter.volumeid << "  "
              << chapter.chapterid << "  "
              << chapter.chapterorder << " "
              << chapter.chaptername << std::endl;
#endif
  }
