package dao

import (
	"log"
	"my-crawler/concurrence/model"
	"my-crawler/database/mysql"
	"net/url"
)

// GetModels loads the first article whose id is greater than the given id
// and prepares the pieces of an HTTP request for it. It returns:
//   - the request parameters: the article link's query string flattened to
//     first-values, plus "id" (the article id) and "is_only_read"="1";
//   - the request headers (a WeChat desktop User-Agent);
//   - the raw article link to use as the request URL.
//
// On any failure (query error, no matching row, unparsable link) the
// process exits via log.Fatal, matching this package's error style.
func GetModels(id string) (map[string]string, map[string]string, string) {
	var DB = mysql.Db
	var result []model.Articles
	// Use a bound parameter instead of string concatenation so a
	// caller-supplied id cannot inject SQL.
	err := DB.Select(&result,
		"select id,link from crawler_articles where id > ? limit 1", id)
	if err != nil {
		log.Fatal("exec failed, ", err)
	}
	// Guard the empty result set; indexing result[0] directly would panic
	// with an opaque index-out-of-range instead of a useful message.
	if len(result) == 0 {
		log.Fatal("no article found with id > ", id)
	}

	reqUrl := result[0].Link
	parse, err := url.Parse(reqUrl)
	if err != nil {
		log.Fatal("parse link failed, ", err)
	}

	// Flatten the link's query parameters, keeping only the first value
	// for each key.
	m := make(map[string]string)
	for k, v := range parse.Query() {
		m[k] = v[0]
	}
	m["id"] = result[0].Id
	m["is_only_read"] = "1"

	// NOTE(review): the Cookie header also needs to vary per request —
	// carried over from the original TODO; not yet implemented.
	header := map[string]string{
		"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36 NetType/WIFI MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x6304051b)",
	}
	return m, header, reqUrl
}
