package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/hu17889/go_spider/core/common/page"
	"github.com/hu17889/go_spider/core/common/request"
	"github.com/hu17889/go_spider/core/spider"
)

// processCommaString makes str safe to embed as a single CSV field,
// following RFC 4180: if the value contains a comma, a double quote,
// or a line break, it is wrapped in double quotes and any embedded
// double quotes are doubled. Plain values are returned unchanged.
func processCommaString(str string) string {
	// Previously only commas triggered quoting; an embedded quote or
	// newline would corrupt the output row.
	if strings.ContainsAny(str, ",\"\r\n") {
		return "\"" + strings.ReplaceAll(str, "\"", "\"\"") + "\""
	}

	return str
}

// Processor drives a go_spider crawl over Intel processor detail pages
// and serves as the spider's page-processing callback. One instance is
// built per crawl via NewProcessor.
type Processor struct {
	crawler *spider.Spider     // underlying go_spider instance that runs the crawl
	reqs    []*request.Request // seed requests, one per processor detail page
}

// NewProcessor builds a Processor seeded with reqs and wires it into a
// new spider as the page-processing callback.
func NewProcessor(reqs []*request.Request) *Processor {
	// Register the instance we return as the spider's callback. The
	// original code passed a separate zero-value &Processor{} here, so
	// the callbacks (Process/Finish) ran on an object whose fields were
	// all nil instead of on this one.
	p := &Processor{reqs: reqs}
	p.crawler = spider.NewSpider(p, "Processor")
	return p
}

// Run starts the crawl over all seed requests, with a randomized
// 500-2000ms delay between fetches and two worker threads.
func (t *Processor) Run() {
	log.Print("获取 Intel 各个处理器信息...")

	s := t.crawler.SetSleepTime("rand", 500, 2000)
	s = s.SetThreadnum(2)
	s.GetAllByRequest(t.reqs)
}

// Process is the go_spider callback for one fetched page. It extracts
// every spec field listed in the package-level dict from the page's
// span[data-key=...] elements and appends one CSV row (name, URL, then
// the fields ordered by their Rank) to the package-level output file f.
func (t *Processor) Process(p *page.Page) {
	if !p.IsSucc() {
		log.Printf("Url: %s, Err: %s", p.GetRequest().Url, p.Errormsg())
		return
	}

	q := p.GetHtmlParser()

	// Row layout: [0]=processor name, [1]=source URL, then one slot per
	// dict entry at index Rank+1 (Rank is presumably 1-based and dense
	// across dict — TODO confirm against dict's declaration).
	parts := make([]string, len(dict)+2)
	processorName := p.GetUrlTag()
	refererUrl := p.GetRequest().Url
	log.Printf("%s:", processorName)
	parts[0] = processCommaString(processorName)
	parts[1] = processCommaString(refererUrl)

	for field, info := range dict {
		val := strings.TrimSpace(q.Find(fmt.Sprintf("span[data-key='%s']", field)).Text())
		// Rank+2-1 in the original, folded to Rank+1.
		parts[info.Rank+1] = processCommaString(val)
		log.Printf("%s: %s", info.Chinese, val)
	}

	// The write error was silently discarded before; fail fast like
	// Finish does for f.Close.
	_, err := f.WriteString(strings.Join(parts, ",") + "\n")
	assertErr(err)
}

// Finish is the go_spider completion callback: it logs that the crawl
// is done and closes the package-level output file f, aborting via
// assertErr if the close fails.
func (t *Processor) Finish() {
	log.Printf("Intel 处理器各个处理器信息已解析完毕！！！")

	// Flush-and-close the CSV output; a close error is fatal.
	assertErr(f.Close())
}
