// crawlData.go
package xueqiu

import (
	"time"
	"fmt"
	"io/ioutil"
	"path"
	"strings"

	"gitee.com/Flearning/go_crawl/setting"
	"gitee.com/Flearning/go_crawl/xueqiu/sh01"
	"gitee.com/cnphpbb/data_crawl/mgobson"

	"gopkg.in/go-module/log.v1"
)

// init bootstraps the global setting package before any crawl runs:
// it loads the configuration context and then the log service.
// NOTE(review): order matters here — NewLogService presumably reads the
// config loaded by NewConfigContext; confirm before reordering.
func init() {
	setting.NewConfigContext()
	setting.NewLogService()
}

// RunCrawl is the crawl entry point: it initialises the Mongo connection,
// loads the per-site crawl configuration, then iterates every configured
// parameter set (symbol/page/sort), paging through each one until the
// crawler reports the last page.
func RunCrawl() {
	imgo := readMgoCfg()
	mgobson.NewInit(imgo)

	// readCrawlInfo returns (*sh01.ReqCrawlInfo, error); the original code
	// used `sh01.Req := readCrawlInfo()` which neither compiles (`:=` on a
	// selector) nor handles the error.
	req, err := readCrawlInfo()
	if err != nil {
		log.Fatal("read crawl config error: %v", err)
		return
	}
	sh01.Req = req
	sh01.MgoWSet = true
	sh01.CrawlUri = sh01.Req.Url

	sc := sh01.CrawlInfo{}
	// range avoids the original `i <= len(...)` off-by-one and the inner
	// `i := sc.Page` shadowing bug that indexed Prames by page number.
	for _, prame := range sh01.Req.Prames {
		sc.Page = prame.Page
		sc.Sort = prame.Sort
		sleepTime := time.Duration(prame.Sleep) * time.Millisecond
		sc.UriPath = fmt.Sprintf(sh01.Req.Url, prame.Symbol, sc.Page, sc.Sort)

		sci := sh01.NewCrawlInfo(&sc)
		for {
			log.Info("Currently Crawl Data Page Num:", sc.Page)
			// m = max page, n = next page, s = HTTP status code.
			m, n, s := sci.RunCrawl()
			if s > 200 {
				log.Fatal("resp status code :", s)
			}
			sc.Page = n
			log.Println("Next Crawl Data Page Num:", sc.Page)
			if sc.Page > m {
				break
			}
			time.Sleep(sleepTime)
			// Log the configured millisecond count, not the Duration
			// (whose %d rendering is nanoseconds).
			log.Info("Crawl Data %s--%s Sleep time %d Millisecond.", prame.Symbol, sc.Sort, prame.Sleep)
		}
	}
}

// readMgoCfg exposes the MongoDB connection settings that the setting
// package loaded at init time.
func readMgoCfg() *setting.MgoInfo {
	cfg := setting.InfoMgo
	return cfg
}

// readCrawlInfo loads the first configured site's crawl definition from
// `./conf/<site>` relative to the working directory, substitutes the first
// symbol into the Referer header template, and returns the parsed request
// info. On any failure it returns a nil info together with a non-nil error.
func readCrawlInfo() (*sh01.ReqCrawlInfo, error) {
	siteinfos := setting.Sites
	log.Info("Crawl Site Config Count: %d", len(siteinfos))
	// Guard: indexing siteinfos[0] below would panic on an empty config.
	if len(siteinfos) == 0 {
		return nil, fmt.Errorf("no crawl sites configured")
	}
	pwd, _ := setting.WorkDir()

	cfgPath := path.Join(pwd, "conf", siteinfos[0])
	if !setting.IsFile(cfgPath) {
		return nil, fmt.Errorf("Can not Read config `./conf/%s`", siteinfos[0])
	}

	rb, err := ioutil.ReadFile(cfgPath)
	if err != nil {
		// The original shadowed `err` here and fell through with a nil
		// return error; propagate the failure instead.
		log.Error(4, "ReadFile config `./conf/%s` Error: %v", siteinfos[0], err)
		return nil, err
	}

	reqCrawlInfo := sh01.ReadCrawlSiteInfo(rb)
	// Replace the %symbol% placeholder in the Referer template with the
	// first configured symbol (single substitution, as before).
	reqCrawlInfo.Header["Referer"] = strings.Replace(reqCrawlInfo.Header["Referer"], "%symbol%", reqCrawlInfo.Prames[0].Symbol, 1)
	return reqCrawlInfo, nil
}
