package spider

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"os/exec"
	"path/filepath"
	"qbl_spider/share"
	"qbl_spider/slog"
	"qbl_spider/sredis"
	"regexp"
	"strconv"
	"strings"
	"sync"
	"time"
)

/*
* @Author: hjx
* @Email : 1245863260@qq.com g1245863260@gmail.com
* @Date  : 2021/4/5 22:20
 */

// Spider crawls a video index site, downloads the HLS (.ts) segments of each
// video into a per-title folder, and merges them into .mp4 files via ffmpeg.
type Spider struct {
	url    string // base site URL the index/video paths are appended to
	path   string // local working directory for per-title ts folders
	merger string // subdirectory of path where merged .mp4 files are written
	page   int    // number of index pages to crawl
	user   int    // number of concurrent download worker goroutines
	suren  bool   // true selects the "amateur" category, false "japanese"
}

// wg tracks the URL-producer goroutine plus all download workers in SpiderMain.
var wg sync.WaitGroup

// targetChan carries {title: m3u8URL} pairs from the producer (getTargetUrl)
// to the download workers (downloadIndex); closed by the producer when done.
var targetChan = make(chan map[string]string)

// SpiderNew builds a Spider from the given crawl configuration: base url,
// working path, merge-output subdirectory, number of index pages, number of
// worker goroutines, and the category switch (suren).
func SpiderNew(url, path, merger string, page, user int, suren bool) *Spider {
	s := new(Spider)
	s.url = url
	s.path = path
	s.merger = merger
	s.page = page
	s.user = user
	s.suren = suren
	return s
}

// DeployWorkDir creates the working directory (s.path) and the merge output
// subdirectory (s.path/s.merger). It returns the first creation error, if any.
func (s *Spider) DeployWorkDir() error {
	if err := share.CreateDir(s.path); err != nil {
		return err
	}
	// filepath.Join inserts the OS separator itself; the extra "/" element
	// the original passed was redundant (Join cleans it away anyway).
	if err := share.CreateDir(filepath.Join(s.path, s.merger)); err != nil {
		return err
	}
	return nil
}

// SpiderMain runs the crawl: one producer goroutine scrapes index pages and
// feeds targetChan, s.user consumer goroutines drain it and download/merge
// each video. Blocks until the producer and every consumer have finished.
func (s *Spider) SpiderMain() {
	// Producer: sends {title: m3u8URL} pairs and closes targetChan when done.
	wg.Add(1)
	go func() {
		defer wg.Done()
		s.getTargetUrl()
	}()
	// Consumers: range over targetChan until the producer closes it.
	for i := 0; i < s.user; i++ {
		wg.Add(1)
		go func(thread int) {
			defer wg.Done()
			s.downloadIndex(thread)
		}(i)
	}
	// Bug fix: the original called wg.Wait() twice; the second call was dead
	// code (a WaitGroup that reached zero returns immediately on re-Wait).
	wg.Wait()
}

// getTargetUrl scrapes s.page index pages, extracts (title, video-page URL)
// pairs, resolves each video page to its m3u8 <source src> URL, and sends a
// {title: m3u8URL} map into targetChan for each new video. Titles already
// recorded in redis are skipped. Closes targetChan when finished.
func (s *Spider) getTargetUrl() {
	defer close(targetChan)

	base := s.url + "/videos/japanese?page="
	if s.suren {
		base = s.url + "/videos/amateur?page="
	}

	// Compile the patterns once instead of on every page iteration.
	regURL := regexp.MustCompile(`href="/video/\d*/"`)
	regTitle := regexp.MustCompile(`title="[^"]*"`)
	regIndex := regexp.MustCompile(`<source src="[^"]*"`)

	for i := 1; i <= s.page; i++ {
		// Bug fix: the original appended the page number to the SAME url
		// string each iteration (url += Itoa(i+1)), so the second request hit
		// "...page=23", the third "...page=234", and so on. It also used i+1,
		// skipping page 1. Build the page URL fresh and request page i.
		pageURL := base + strconv.Itoa(i)
		indexText, err := share.GetUrlContentString(pageURL)
		if err != nil {
			slog.Slog.Logger.Println(slog.Error, pageURL, "request err")
			continue
		}
		regTargetListURL := regURL.FindAllString(indexText, -1)
		regTargetListTitle := regTitle.FindAllString(indexText, -1)
		// The first two and last two title attributes are page chrome, not
		// videos (assumption carried over from the original slice); guard the
		// slice so a short/unexpected page cannot panic.
		if len(regTargetListTitle) < 4 {
			slog.Slog.Logger.Println(slog.Error, pageURL, "unexpected page layout")
			continue
		}
		regTargetListTitle = regTargetListTitle[2 : len(regTargetListTitle)-2]
		for idx, indexURL := range regTargetListURL {
			// Guard against a URL/title count mismatch (was an index panic).
			if idx >= len(regTargetListTitle) {
				break
			}
			mapValueURL := s.url + strings.TrimSpace(strings.Split(indexURL, "\"")[1])
			mapKey := strings.TrimSpace(strings.Split(regTargetListTitle[idx], "\"")[1])
			// Skip titles already marked as downloaded in redis.
			rec, err := sredis.RedisConn.Do("GET", mapKey)
			if err != nil {
				slog.Slog.Logger.Println(slog.Error, "failed to get", mapKey, "information in redis")
				continue
			}
			if rec != nil {
				slog.Slog.Logger.Println(slog.Error, "redis server exist", mapKey, "continue")
				continue
			}
			indexTarget, err := share.GetUrlContentString(mapValueURL)
			if err != nil {
				slog.Slog.Logger.Println(slog.Error, mapValueURL, "request err")
				continue
			}
			// Extract the m3u8 URL; skip pages without a <source> tag
			// (the original indexed Split(...)[1] and panicked on no match).
			srcParts := strings.Split(regIndex.FindString(indexTarget), "\"")
			if len(srcParts) < 2 {
				slog.Slog.Logger.Println(slog.Error, mapValueURL, "no source url found")
				continue
			}
			targetChan <- map[string]string{
				mapKey: "http:" + strings.TrimSpace(srcParts[1]),
			}
		}
	}
}

// downloadIndex is a worker loop. It consumes {title: m3u8URL} pairs from
// targetChan, writes a local index.m3u8, downloads every ts segment into a
// per-title folder, merges the segments into an mp4 with ffmpeg, and records
// success in redis under the title key.
func (s *Spider) downloadIndex(threadID int) {
	thread := "thread:" + strconv.Itoa(threadID+1)
	for indexMap := range targetChan {
		for title, index := range indexMap {
			downloadTsFolder := filepath.Join(s.path, title)
			if err := share.CreateDir(downloadTsFolder); err != nil {
				slog.Slog.Logger.Println(slog.Error, "make dir", downloadTsFolder, "err")
				continue
			}
			m3u8Text, err := share.GetUrlContentString(index)
			if err != nil {
				slog.Slog.Logger.Println(slog.Error, index, "request err")
				continue
			}
			downTslist, err := s.writeIndexM3u8(downloadTsFolder, m3u8Text)
			if err != nil {
				slog.Slog.Logger.Println(slog.Error, "create index.m3u8 err in the", downloadTsFolder)
				continue
			}
			lostTsnum := 0
			for _, url := range downTslist {
				if len(url) == 0 {
					continue
				}
				segParts := strings.Split(url, "/")
				tsName := segParts[len(segParts)-1]
				if err := s.fetchTs(thread, title, url, filepath.Join(downloadTsFolder, tsName)); err != nil {
					slog.Slog.Logger.Println(slog.Warning, thread, title, tsName, "Download failed, lost current ts,url is", url)
					fmt.Println(slog.Warning, thread, title, tsName, "Download failed, lost current ts,url is", url)
					lostTsnum++
					continue
				}
				// Throttle between segments to go easy on the server.
				time.Sleep(2 * time.Second)
			}
			// Too many missing segments: the merged mp4 would be broken, so
			// skip the merge and leave the title unrecorded for a retry later.
			if lostTsnum > 5 {
				continue
			}
			// NOTE(review): "cmd /C" makes this Windows-only — confirm target OS.
			outFile := filepath.Join(s.path, s.merger, strings.Replace(title, " ", "", -1)+".mp4")
			mergerTs := exec.Command("cmd", "/C", "ffmpeg", "-i", filepath.Join(downloadTsFolder, "index.m3u8"), "-vcodec", "copy", "-acodec", "copy", "-absf", "aac_adtstoasc", outFile)
			if err := mergerTs.Run(); err != nil {
				fmt.Println("Error: ergred mp4 failure", err)
				slog.Slog.Logger.Println(slog.Error, thread, title, "mergred mp4 failure")
			} else {
				slog.Slog.Logger.Println(slog.Info, thread, title, "aready mergred mp4 success")
				if _, err := sredis.RedisConn.Do("SET", title, "downed"); err != nil {
					slog.Slog.Logger.Println(slog.Error, thread, title, "save redis key err")
				}
			}
		}
	}
}

// writeIndexM3u8 writes a local index.m3u8 (tag lines plus bare segment
// names) into folder and returns the list of remote segment URLs to download.
// Ad preroll entries (the fixed "#EXTINF:10.041667," tag and "aaa0.ts"
// segment) are filtered out, mirroring the original behavior.
func (s *Spider) writeIndexM3u8(folder, m3u8Text string) ([]string, error) {
	indexM3u8, err := os.OpenFile(filepath.Join(folder, "index.m3u8"), os.O_CREATE|os.O_RDWR, 0644)
	if err != nil {
		return nil, err
	}
	defer indexM3u8.Close()
	var downTslist []string
	for _, line := range strings.Split(m3u8Text, "\n") {
		if line != "" && strings.HasPrefix(line, "#") {
			// Keep every tag line except the ad preroll EXTINF.
			if line != "#EXTINF:10.041667," {
				indexM3u8.WriteString(line + "\n")
			}
		} else {
			lineSplitList := strings.Split(line, "/")
			tsName := lineSplitList[len(lineSplitList)-1]
			if tsName == "aaa0.ts" {
				continue
			}
			indexM3u8.WriteString(tsName + "\n")
			downTslist = append(downTslist, line)
		}
	}
	return downTslist, nil
}

// fetchTs downloads one ts segment from url to dest, retrying up to 5
// attempts. thread and title are used only for log context. Returns the last
// error when every attempt failed, nil on success.
func (s *Spider) fetchTs(thread, title, url, dest string) error {
	const maxAttempts = 5
	tsName := filepath.Base(dest)
	var lastErr error
	for attempt := 1; attempt <= maxAttempts; attempt++ {
		slog.Slog.Logger.Println(slog.Info, thread, title, "The ts being downloaded is:", tsName, "Current download time:", attempt)
		fmt.Println(slog.Info, thread, title, "The ts being downloaded is:", tsName, "Current download time:", attempt)
		res, err := http.Get(url)
		if err != nil {
			// Bug fix: the original goto-based retry discarded a download
			// that SUCCEEDED on the final attempt (and leaked its body);
			// this loop uses whichever attempt succeeds.
			lastErr = err
			continue
		}
		f, err := os.Create(dest)
		if err != nil {
			res.Body.Close()
			lastErr = err
			continue
		}
		_, copyErr := io.Copy(f, res.Body) // was ignored in the original
		res.Body.Close()
		// Bug fix: f was never closed in the original — one leaked file
		// descriptor per downloaded segment.
		closeErr := f.Close()
		if copyErr != nil {
			lastErr = copyErr
			continue
		}
		if closeErr != nil {
			lastErr = closeErr
			continue
		}
		return nil
	}
	return lastErr
}
