package spider

import (
	"encoding/json"
	xpath "gopkg.in/xmlpath.v2"
	"io/ioutil"
	"log"
	"net/http"
)

// Result is one seven-cell row scraped from the publish table on the
// Bank of China page. Field order matches the cell order consumed in
// SourcedbSpider (v[0]..v[6]); the struct is serialized to JSON when
// the spider writes result.txt.
type Result struct {
	Name   string `json:"name"`   // cell 0 — presumably the instrument/currency name; verify against the live page
	Code   string `json:"code"`   // cell 1
	Expire string `json:"expire"` // cell 2
	Buy    string `json:"buy"`    // cell 3
	Sell   string `json:"sell"`   // cell 4
	Center string `json:"center"` // cell 5
	Date   string `json:"date"`   // cell 6
}

const url = "https://www.bankofchina.com/sourcedb/ffx/"

// SourcedbSpider downloads the Bank of China page at url, extracts every
// <td> cell of the publish table via XPath, groups the cells into rows of
// seven fields, and writes the rows to ./result.txt as JSON. All failures
// are logged and abort the run; nothing is written on error.
func SourcedbSpider() {
	res, err := http.Get(url)
	if err != nil {
		log.Println("网页", url, "请求失败：", err)
		// res is nil when err != nil; falling through (as the original
		// code did) would panic on res.Body below.
		return
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		log.Println("网页", url, "返回状态异常：", res.Status)
		return
	}
	node, err := xpath.ParseHTML(res.Body)
	if err != nil {
		log.Println("网页解析失败", url, "请求失败：", err)
		return // node is unusable on parse failure
	}
	path, err := xpath.Compile("//div[@class='publish']//tbody//tr/td")
	if err != nil {
		// Previously unchecked: a compile failure left path nil and
		// panicked on path.Iter.
		log.Println("xpath 编译失败：", err)
		return
	}
	var result []string
	it := path.Iter(node)
	for it.Next() {
		result = append(result, it.Node().String())
	}
	var body []Result
	for _, v := range group(result, 7) {
		// A trailing partial row has fewer than 7 cells; indexing v[6]
		// on it would panic, so skip it.
		if len(v) < 7 {
			continue
		}
		body = append(body, Result{
			Name:   v[0],
			Code:   v[1],
			Expire: v[2],
			Buy:    v[3],
			Sell:   v[4],
			Center: v[5],
			Date:   v[6],
		})
	}
	b, err := json.Marshal(&body)
	if err != nil {
		log.Println("JSON 序列化失败：", err)
		return
	}
	// 0644: a plain data file needs no execute bits.
	if err := ioutil.WriteFile("./result.txt", b, 0644); err != nil {
		log.Println("写入文件失败：", err)
	}
}

// 将数组 laxiconid 按指定大小进行分隔
func group(laxiconid []string, subGroupLength int64) [][]string {
	max := int64(len(laxiconid))
	var segmens = make([][]string, 0)
	quantity := max / subGroupLength
	remainder := max % subGroupLength
	i := int64(0)
	for i = int64(0); i < quantity; i++ {
		segmens = append(segmens, laxiconid[i*subGroupLength:(i+1)*subGroupLength])
	}
	if quantity == 0 || remainder != 0 {
		segmens = append(segmens, laxiconid[i*subGroupLength:i*subGroupLength+remainder])
	}
	return segmens
}
