package main

import (
	"fmt"
	"my-crawler/common"
	"my-crawler/concurrence/dao"
	"my-crawler/concurrence/engine"
	"my-crawler/concurrence/fetcher"
	"my-crawler/concurrence/model"
	"my-crawler/concurrence/parser"
	"my-crawler/database/mysql"
)

//concurrence 并行版
func main() {
	saveChan := dao.SaveChannel()
	//saveChan := dao.SaveChannelByJsonRpc() //分布式存储 通过jsonrpc
	newEngine := engine.ConcurrenceEngine{
		WorkerCount: 10,
		SaverChan:   saveChan,
		Scheduler:   &engine.ConcurrenceScheduler{},
	}
	//searchModels := getModels()
	reqData := map[string]string{
		"url": common.CRAWLERURL,
	}
	newEngine.Initial(engine.CrawlerRequest{
		Data:     reqData,
		HttpFunc: fetcher.FetchByGet,
		Parser:   parser.GetCityListParser,
	})
}

// getModels loads up to ten qymd records whose registration date (DJRQ)
// falls in the first half of 2020. It returns nil when the query fails;
// the error is logged rather than propagated so existing call sites,
// which expect only a slice, remain unchanged.
func getModels() []model.QYMD {
	db := mysql.Db
	var result []model.QYMD
	err := db.Select(&result,
		"select NSRSBH,NSRMC,SHXYDM,DJXH from qymd where DJRQ >= date('2020-01-01') and DJRQ < date('2020-07-01') limit 10")
	if err != nil {
		// Log and return nil explicitly so callers never receive a
		// partially populated slice from a failed query.
		fmt.Println("exec failed, ", err)
		return nil
	}
	return result
}
