package crawl

import (
	//"fmt"
	"crawl_toutiao/models/toutiao"
	//"crawl_toutiao/utils"
	"github.com/astaxie/beego"
	"log"
	"strconv"
	"time"
)

// configJsonFile names the JSON config file; currently only referenced by the
// commented-out config-loading code in Crawl. NOTE(review): dead until that
// code is restored — consider removing both together.
var configJsonFile string = "config.json"

// Config holds settings loaded from configJsonFile.
// NOTE(review): only consumed by the commented-out utils.Load call in
// Crawl; Keyword is presumably the search term to crawl — confirm
// against config.json before relying on it.
type Config struct {
	Keyword string
}

// CrawlController is the beego controller that exposes the crawl/download
// routine (see Crawl). It embeds beego.Controller for request handling.
type CrawlController struct {
	beego.Controller
}

// Crawl runs the full scrape-then-download pipeline:
//  1. fetches all keywords via toutiao.GetKeywords and crawls the list
//     pages for each one;
//  2. after a short pause, walks every stored URL (toutiao.GetUrlsAll)
//     and saves its image under ./static/images/<keywordID>/.
//
// It has no parameters and no return value; progress is reported through
// the standard logger.
func (c *CrawlController) Crawl() {
	log.Println(">>>开始抓取数据================>>>")

	/*
		var config *Config
		err := utils.Load(configJsonFile, &config)
		if err != nil {
			panic(err)
		}

		fmt.Println(config)
	*/

	kw := toutiao.GetKeywords()
	log.Println(kw)

	for _, k := range kw {
		// NOTE(review): string(k.Name) suggests Name is not a plain
		// string (perhaps []byte) — confirm against the toutiao model.
		toutiao.CrawlList(string(k.Name), k.Id)
	}

	log.Println(">>>抓取数据完毕================>>>")
	// Brief pause before downloading, presumably to let the crawl's
	// writes settle — TODO confirm whether this delay is actually needed.
	time.Sleep(3 * time.Second)

	log.Println(">>>开始下载任务================>>>")

	urls := toutiao.GetUrlsAll()
	path := "./static/images/"
	//log.Println(len(urls))
	for _, u := range urls {
		// Images are grouped into one directory per keyword ID.
		k := strconv.FormatInt(u.Keyword.Id, 10)
		// NOTE(review): DirExists's first result is discarded and a
		// non-nil error silently skips the download — verify whether
		// DirExists creates the directory or merely checks it, and
		// consider logging the skip.
		_, err := toutiao.DirExists(path + k)
		if err == nil {
			toutiao.SaveImage(u.Url, path+k)
		}
	}

	log.Println(">>>下载ok!================>>>")

}
