package main

import (
	"fmt"
	"io"
	"net/http"
	"path/filepath"
	"time"

	"james.org/sister/core"
)

// DoubleTasks is the top-level structure decoded from the task JSON
// file; it holds every two-level ("double") scraping task to run.
type DoubleTasks struct {
	Tasks []DoubleTask
}

// DoubleTask describes one two-level scraping task: first-floor pages
// are expanded from Sources via Pagination, prey-source links are
// extracted from those pages, and each prey source is analysed again
// through the SecondFloor configuration.
type DoubleTask struct {
	Log            bool // if true, write analysis results to ./logs/<Dir>.log
	Save           bool // if true, download the analysed prey to ./save/<Dir>
	SetHeader      bool // if true, set User-Agent/Referer headers on download requests
	RequestTimeout int // HTTP client timeout for downloads, in seconds
	Dir            string // directory name used under ./save and as the log file name
	Sources        []string // first-floor source URLs to expand via Pagination
	PreySources    []core.PreySource // rules for extracting prey-source links from pages
	SecondFloor    SecondFloor // second-level pagination and prey-extraction config
	Pagination     core.Pagination // first-floor pagination rule
}

// SecondFloor holds the second-level analysis configuration: how to
// paginate each prey-source URL and how to extract the final prey
// URLs from the resulting pages.
type SecondFloor struct {
	Prey       core.Prey // prey-extraction rule applied to second-floor pages
	Pagination core.Pagination // pagination rule applied to each prey-source URL
}

// RunDoubleTask reads the JSON task file at jsonPath and executes each
// two-level scraping task it describes: it expands the paginated source
// URLs, extracts prey-source links from every page, expands those links
// through the second-floor pagination, analyses the final prey URLs
// concurrently (one goroutine per source), and — when Save is set —
// downloads every prey to ./save/<Dir>/<Title>.
//
// Errors are reported to stdout and the task continues best-effort,
// matching the original behavior.
func RunDoubleTask(jsonPath string) {
	// Decode the task list from the JSON file.
	var doubleTasks DoubleTasks
	if err := core.DecodeJsonFile(jsonPath, &doubleTasks); err != nil {
		fmt.Println(err)
	}
	for _, doubleTask := range doubleTasks.Tasks {
		// Create the per-task save directory.
		if err := core.MkDirAll(filepath.Join("./save", doubleTask.Dir)); err != nil {
			fmt.Println(err)
		}
		// First-floor pagination: expand every source URL into its pages.
		var pages []string
		for _, source := range doubleTask.Sources {
			pagesTemp, err := core.PaginationUrlAnalysis(source, doubleTask.Pagination)
			if err != nil {
				fmt.Println(err)
			}
			pages = append(pages, pagesTemp...)
		}
		fmt.Println("页面分析完毕")
		// Optionally log the pagination result.
		logPath := "./logs/" + doubleTask.Dir + ".log"
		if doubleTask.Log {
			if err := core.WriteLogs(logPath, pages, true); err != nil {
				fmt.Println(err)
			}
		}
		// Extract prey sources from every first-floor page.
		var sources []core.Source
		for _, page := range pages {
			sourcesTemp, err := core.PreySourceAnalysis(page, doubleTask.PreySources)
			if err != nil {
				fmt.Println(err)
			}
			sources = append(sources, sourcesTemp...)
		}
		// Second-floor pagination for each prey source.
		for k := range sources {
			// The original discarded this error with `_`; report it like
			// every other call site.
			sourcePages, err := core.PaginationUrlAnalysis(sources[k].Url, doubleTask.SecondFloor.Pagination)
			if err != nil {
				fmt.Println(err)
			}
			sources[k].Pages = sourcePages
			if doubleTask.Log {
				if err := core.WriteLogs(logPath, sourcePages, true); err != nil {
					fmt.Println(err)
				}
			}
			fmt.Printf("猎物源【%d】-分页分析完毕!\n", k)
		}

		// Analyse prey URLs concurrently, one goroutine per source.
		chs := make([]chan []string, len(sources))
		runSource := func(taskID int, source core.Source, ch chan []string) {
			var srcs []string
			for _, page := range source.Pages {
				srcsTemp, err := core.UrlAnalysis(page, doubleTask.SecondFloor.Prey)
				if err != nil {
					fmt.Println(err)
				}
				srcs = append(srcs, srcsTemp...)
			}
			if doubleTask.Log {
				if err := core.WriteLogs(logPath, srcs, true); err != nil {
					fmt.Println(err)
				}
			}
			ch <- srcs
			fmt.Printf("猎物源【%d】-猎物分析完毕!\n", taskID)
		}
		for taskID, source := range sources {
			chs[taskID] = make(chan []string, 1)
			go runSource(taskID, source, chs[taskID])
		}
		// Wait for all analysis goroutines; the channel receive also
		// synchronizes the write of each srcs slice.
		for k, ch := range chs {
			sources[k].Srcs = <-ch
		}
		fmt.Println("猎物分析完毕")

		// Capture (download) the prey if requested.
		if doubleTask.Save {
			client := http.Client{
				Timeout: time.Second * time.Duration(doubleTask.RequestTimeout),
			}
			phs := make([]chan int, len(sources))
			savePrey := func(pid int, source core.Source, ph chan int) {
				dir := filepath.Join("./save", doubleTask.Dir, source.Title)
				if err := core.MkDirAll(dir); err != nil {
					fmt.Println(err)
				}
				for _, prey := range source.Srcs {
					var resp *http.Response
					// err is local to this goroutine. The original assigned
					// client.Get's error to the function-level err (a data
					// race across goroutines) and shadowed err in the
					// SetHeader branch, silently losing client.Do's error.
					var err error
					if doubleTask.SetHeader {
						var req *http.Request
						req, err = http.NewRequest("GET", prey, nil)
						if err != nil {
							// The original fell through here and dereferenced
							// a nil req, which would panic.
							fmt.Println(err)
							continue
						}
						req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36")
						req.Header.Set("Referer", prey)
						resp, err = client.Do(req)
					} else {
						resp, err = client.Get(prey)
					}
					if err != nil {
						fmt.Println(err)
						continue
					}
					data, readErr := io.ReadAll(resp.Body)
					resp.Body.Close()
					if readErr != nil {
						// The original ignored read errors and saved a
						// truncated file.
						fmt.Println(readErr)
						continue
					}
					path := fmt.Sprintf("%s/pid_%d_%s", dir, pid, filepath.Base(prey))
					if err := core.SaveFile(path, data); err != nil {
						fmt.Println(err)
					} else {
						fmt.Println("已捕捉: " + prey)
					}
				}
				ph <- pid
			}
			for pid, source := range sources {
				phs[pid] = make(chan int, 1)
				go savePrey(pid, source, phs[pid])
			}
			// Wait for every download goroutine to finish.
			for _, ph := range phs {
				<-ph
			}
		}
		// Task done.
		fmt.Println("任务已处理完毕")
	}
}
