package pixabay

import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strings"
	"time"

	"github.com/gogf/gf/frame/g"
	"github.com/gogf/gf/os/gtime"
	"github.com/gogf/gf/util/gconv"
	"github.com/gomodule/redigo/redis"
	"github.com/tebeka/selenium"
	"github.com/xuri/excelize/v2"

	"pixabay_spider/utils"
)

// Detail is the unit of work persisted to redis for one pixabay video page:
// filled incrementally by GetList (url, title, tags), GetDetail
// (download_urls, dir) and consumed by DownloadVideos / export helpers.
type Detail struct {
	Url          string   `json:"url"`           // detail page url; also used as the redis hash key
	Title        string   `json:"title"`         // derived from the url by utils.GetTitleFromUrl
	Tags         []string `json:"tags"`          // tag texts scraped from the listing cell
	DownloadUrls []string `json:"download_urls"` // per-resolution download urls, deduplicated
	Dir          string   `json:"dir"`           // local directory the files are saved into (utils.WeekTime)
}

// GetList opens a pixabay listing page, scrolls to the bottom so lazily
// loaded result cells render, and extracts one Detail per cell (url, title,
// tags). Each partial Detail is cached in redis keyed by its href (GetDetail
// later fills in the download urls); the hrefs are returned to the caller.
func GetList(url string) ([]string, error) {

	// Acquire one browser port and hand that SAME port to the driver, so the
	// deferred release frees the port actually in use (the old code called
	// RedisGetValidPort twice, leaking one port slot per invocation).
	port := utils.RedisGetValidPort()
	defer utils.RedisReleasePort(port)

	_, webDriver, err := utils.GetWebDriver(port, nil)
	if err != nil {
		// Without a driver nothing below can run; return instead of
		// deferring Quit on a possibly-nil webDriver (which would panic).
		fmt.Println("GetWebDriver err : ", err)
		return nil, err
	}
	defer webDriver.Quit()

	if err = webDriver.Get(url); err != nil {
		fmt.Println("webDriver.Get err : ", err)
	}
	utils.RandomSleep()

	// Scroll to the page bottom to trigger lazy loading of the result grid.
	script := "window.scrollTo(0, document.body.scrollHeight);"
	if _, err = webDriver.ExecuteScript(script, nil); err != nil {
		fmt.Println("滚动到页面底部 err:", err.Error())
	}

	utils.RandomSleep()

	eleItems, err := webDriver.FindElements(selenium.ByXPATH, "//div[@class='cell--B7yKd']")
	if err != nil {
		// Previously this error was silently dropped and an empty list was
		// returned as success; surface it to the caller instead.
		return nil, err
	}
	// Extracts the inner text of a rendered <a>…</a> element.
	re := regexp.MustCompile(`<a[^>]*>(.*?)</a>`)

	var pixList []string
	for i, item := range eleItems {
		var article Detail
		fmt.Println("i is ", i)
		hrefItem, err := item.FindElement(selenium.ByXPATH, ".//a[@class='link--WHWzm']")
		if err != nil {
			fmt.Println("拿链接出错了 ！！！！！！")
			fmt.Println(item.GetAttribute("outerHTML"))
			continue
		}
		href, err := hrefItem.GetAttribute("href")
		if err != nil {
			fmt.Println("hrefItem.GetAttribute err : ", err)
			continue
		}
		article.Url = href
		pixList = append(pixList, href)
		article.Title = utils.GetTitleFromUrl(href)

		// Tags are read from the rendered outerHTML of each tag anchor.
		tagItems, _ := item.FindElements(selenium.ByXPATH, ".//div[@class='tags--lDvZS']//a")
		for _, tagItem := range tagItems {
			tagHtml, _ := tagItem.GetAttribute("outerHTML")
			if match := re.FindStringSubmatch(tagHtml); len(match) > 1 {
				fmt.Println("tag is ", match[1])
				article.Tags = append(article.Tags, match[1])
			}
		}

		// Persist the partial Detail keyed by its detail-page url.
		text, _ := json.Marshal(article)
		utils.SetPixItemToRedis(href, gconv.String(text))
	}

	return pixList, nil
}

// GetDetail visits one video detail page, opens the download panel, scrapes
// the per-resolution download urls and writes the enriched Detail back to
// redis, marking the url CRAWLED. Pages already DONE or CRAWLED are skipped.
// NOTE: several anti-crawler failure paths call log.Fatal, which terminates
// the whole process — presumably relied upon by an external supervisor to
// restart the spider after a cool-down; confirm before changing.
func GetDetail(url string) error {

	status, err := utils.RedisGetItemStatus(url)
	if err != nil {
		fmt.Println("RedisGetItemStatus err : ", err)
	} else if status == utils.URL_STATUS_DONE || status == utils.URL_STATUS_CRAWLED {
		fmt.Println("PIXABAY详情[跳过详情]:" + " url :" + url + " 已经处理或正在处理中")
		return nil
	}

	fmt.Println("PIXABAY详情[开始处理]:" + " url :" + url)
	utils.RedisSetItemStatus(url, utils.URL_STATUS_PROCESSING)

	// Use the SAME port for acquire/driver/release; the old code called
	// RedisGetValidPort a second time and leaked one port slot per call.
	port := utils.RedisGetValidPort()
	defer utils.RedisReleasePort(port)

	_, webDriver, err := utils.GetWebDriver(port, nil)
	if err != nil {
		// Bail out before deferring Quit, so a nil driver cannot panic.
		fmt.Println("GetWebDriver err : ", err)
		return err
	}
	defer webDriver.Quit()

	if err = webDriver.Get(url); err != nil {
		fmt.Println("webDriver.Get err : ", err)
		log.Fatal("反爬虫机制触发,wait 10 minutes")
	}

	// Start from the partial Detail written by GetList (url/title/tags).
	val, err := utils.GetPixItemFromRedis(url)
	if err != nil {
		fmt.Println("GetPixItemFromRedis err : ", err)
	}
	info := &Detail{}
	if err = json.Unmarshal(gconv.Bytes(val), info); err != nil {
		fmt.Println("json.Unmarshal err : ", err)
	}

	utils.RandomSleep()

	downloadBtn, err := webDriver.FindElement(selenium.ByXPATH, "//div[@class='container--YKYLB fullWidthContainer--a8QAe']")
	if err != nil {
		// log.Fatal exits the process; the old `return err` placed after it
		// was unreachable and has been removed.
		log.Fatal("反爬虫机制触发,wait 10 minutes")
	}
	downloadBtn.Click()

	utils.RandomSleep()
	// Each resolution option is a <label> wrapping an <input value="…">.
	items, err := webDriver.FindElements(selenium.ByXPATH, "//label[contains(@class, 'input--b6Wi1') and contains(@class, 'standard--5wnWE')]")
	if err != nil {
		fmt.Println("没找到下载名称 : ", err)
		log.Fatal("反爬虫机制触发,wait 10 minutes，没找到下载名称" + url)
	}
	// Extracts the value attribute of the hidden input.
	re := regexp.MustCompile(`<input[^>]*value="([^"]+)"[^>]*>`)

	for _, item := range items {
		value, _ := item.GetAttribute("outerHTML")
		fmt.Println("value is ", value)
		fmt.Println("=====")

		match := re.FindStringSubmatch(value)
		if len(match) < 2 {
			// No value="…" attribute rendered; skip this option instead of
			// panicking on match[1] as the old code would.
			continue
		}

		info.DownloadUrls = append(info.DownloadUrls, "https://pixabay.com/videos/download/"+match[1]+"?attachment")

		utils.RandomSleep()
	}
	info.DownloadUrls = utils.Unique(info.DownloadUrls)

	info.Dir = utils.WeekTime(info.Title)

	text, _ := json.Marshal(info)
	utils.SetPixItemToRedis(url, gconv.String(text))

	utils.RandomSleep()

	utils.RedisSetItemStatus(url, utils.URL_STATUS_CRAWLED)

	return nil
}

// DownloadVideos logs into pixabay with a randomly chosen account and drives
// the browser through every url in the Detail's DownloadUrls, saving files
// into the Detail's directory. Each file download is awaited by polling the
// directory for a finished (non .crdownload) file, and retried up to
// maxRetries times. On full success the Detail is copied into the result
// hash and the item is marked DONE; otherwise DOWNLOAD_FAIL. Several
// anti-crawler failure paths call log.Fatal, which terminates the process.
func DownloadVideos(url string) error {

	status, err := utils.RedisGetItemStatus(url)
	if err != nil {
		fmt.Println("RedisGetItemStatus err : ", err)
	} else if status == utils.URL_STATUS_DONE {
		fmt.Println("PIXABAY资源[跳过下载]:" + " url :" + url + " 已经处理或正在处理中")
		return nil
	}

	fmt.Println("PIXABAY资源[开始下载]:" + " url :" + url)
	utils.RedisSetItemStatus(url, utils.URL_STATUS_DOWNLOADING)

	// Load the Detail written by GetDetail (download urls + target dir).
	val, err := utils.GetPixItemFromRedis(url)
	if err != nil {
		fmt.Println("GetPixItemFromRedis err : ", err)
	}
	info := &Detail{}
	if err = json.Unmarshal(gconv.Bytes(val), info); err != nil {
		fmt.Println("json.Unmarshal err : ", err)
	}

	// Acquire one port and pass that SAME port to the driver so the deferred
	// release frees the port actually in use (the old code called
	// RedisGetValidPort twice and leaked a slot per call).
	port := utils.RedisGetValidPort()
	defer utils.RedisReleasePort(port)

	_, webDriver, err := utils.GetWebDriverDownload(port, nil, info.Dir)
	if err != nil {
		fmt.Println("GetWebDriver err : ", err)
		log.Fatal("反爬虫机制触发,wait 10 minutes")
	}
	// Deferred only after the error check so a nil driver cannot panic here.
	defer webDriver.Quit()

	loginUrl := "https://pixabay.com/videos/search/?order=ec"

	if err = webDriver.Get(loginUrl); err != nil {
		fmt.Println("webDriver.Get err : ", err)
	}
	utils.RandomSleep()

	account := utils.RandAccount()

	loginButton, err := webDriver.FindElement(selenium.ByXPATH, "//div[@class='loginLinks--l3oiD']")
	if err != nil {
		fmt.Println("loginButton err : ", err)
		log.Fatal("反爬虫机制触发,wait 10 minutes")
	}
	if err = loginButton.Click(); err != nil {
		fmt.Println("loginButton.Click err : ", err)
		log.Fatal("反爬虫机制触发,wait 10 minutes")
	}
	utils.RandomSleep()

	inputEmail, err := webDriver.FindElement(selenium.ByXPATH, "//input[@name='login_user']")
	if err != nil {
		fmt.Println("inputEmail err : ", err)
		log.Fatal("反爬虫机制触发,wait 10 minutes")
	}
	if err = inputEmail.SendKeys(account.Email); err != nil {
		fmt.Println("inputEmail.SendKeys err : ", err)
	}
	utils.RandomSleep()

	inputPassword, _ := webDriver.FindElement(selenium.ByXPATH, "//input[@name='login_pass']")
	inputPassword.SendKeys(account.Password)
	utils.RandomSleep()

	submitButton, _ := webDriver.FindElement(selenium.ByXPATH, "//button[@class='loginButton--cVPDu e2e-auth-login-submit-button base--o-Oap primary--uRlHk']")
	if err = submitButton.Click(); err != nil {
		fmt.Println(err)
	}
	utils.RandomSleep()

	// A visible error banner after submit means login was rejected; on the
	// rate-limit message the account is dropped and the item marked FAIL.
	warningMessage, _ := webDriver.FindElement(selenium.ByXPATH, "//div[@class='message--gkH6b error--kkwhB']")
	if warningMessage != nil {
		warningText, _ := warningMessage.Text()
		if warningText == "Too many login attempts. Please wait a while before trying again" {
			utils.DelAccountToRedis(account.Email)
			fmt.Println("PIXABAY资源[登录失败]:" + " url :" + url)
			utils.RedisSetItemStatus(url, utils.URL_STATUS_FAIL)
			return errors.New("PIXABAY资源[登录失败]:" + " Email :" + account.Email)
		}
	} else {
		fmt.Println("PIXABAY资源[登录成功]:" + " url :" + url)
	}

	utils.RandomSleep()

	dir := info.Dir
	if info.Dir == "" {
		// Older Details were stored without a dir; derive it from the title.
		dir = utils.WeekTime(info.Title)
	}

	isFinish := true

	maxRetries := 3
	retryCount := 0
	timeout := 30 * time.Minute // upper bound for waiting on one file's download

	for _, item := range info.DownloadUrls {
		isFinish = false
		retryCount = 0
		webDriver.Get(item) // navigating to the url triggers the download
		time.Sleep(2 * time.Second)

		// Some downloads redirect; the current address tells us whether we
		// landed on a direct file url that must be fetched out-of-band.
		currentUrl, err := webDriver.CurrentURL()
		if err != nil {
			log.Fatal(err)
		}

		isCurrentUrl, currentFileName := utils.CheckDownloadUrl(currentUrl)

		if isCurrentUrl {
			currentDir := filepath.Join(dir, currentFileName)
			if err = utils.GetVideo(currentFileName, currentDir); err != nil {
				fmt.Println("PIXABAY资源[GetVideo下载失败]:" + " url :" + url)
				isFinish = false
			} else {
				fmt.Println("PIXABAY资源[GetVideo下载成功]:" + " url :" + url)
				isFinish = true
			}
		}

		// A bare <pre>File not found</pre> page means the asset is missing
		// on the site; count it as handled and move on.
		notfoundItem, _ := webDriver.FindElement(selenium.ByXPATH, "//pre")
		if notfoundItem != nil {
			notfoundText, _ := notfoundItem.Text()
			if notfoundText == "File not found" {
				fmt.Println("网站缺少文件")
				isFinish = true
				continue
			}
		}

		for retryCount < maxRetries {

			err = webDriver.WaitWithTimeout(func(wd selenium.WebDriver) (bool, error) {
				utils.RandomSleep()

				// Poll the download directory: when the most recently
				// modified file no longer looks like an in-progress download
				// the browser has finished writing it.
				for i := 0; i < 300; i++ {
					files, err := ioutil.ReadDir(dir)
					if err != nil {
						fmt.Println(err)
					}
					// Sort by modification time so the newest file is first.
					sort.Slice(files, func(i, j int) bool {
						return files[i].ModTime().After(files[j].ModTime())
					})
					utils.RandomSleep()
					if len(files) > 0 {
						lastFile := files[0]
						if !utils.IsFileDownloading(lastFile.Name()) {
							fmt.Println("PIXABAY资源[下载完成]:" + " url :" + info.Url)
							isFinish = true
							return true, nil
						}
					}
					utils.RandomSleep()
				}
				return false, nil
			}, timeout)

			if err == nil {
				break
			}

			fmt.Println("PIXABAY资源[网络失败]:", err)
			utils.RedisSetItemStatus(info.Url, utils.URL_STATUS_FAIL)
			retryCount++
			webDriver.Get(item) // re-trigger the download

			// Remove any partial chrome downloads before the retry.
			files, err := ioutil.ReadDir(dir)
			if err != nil {
				fmt.Println(err)
			}
			for _, file := range files {
				if strings.HasSuffix(file.Name(), ".crdownload") {
					if err = os.Remove(filepath.Join(dir, file.Name())); err != nil {
						fmt.Println(err)
					}
				}
			}

			time.Sleep(15 * time.Second) // back off before the next attempt
		}

		if retryCount >= maxRetries {
			fmt.Println("PIXABAY资源[达到最大重试次数]:", err)
			isFinish = false
		}
	}

	if isFinish {
		text, _ := json.Marshal(info)
		utils.SetPixResultToRedis(info.Url, gconv.String(text))
		utils.RedisSetItemStatus(info.Url, utils.URL_STATUS_DONE)
		fmt.Println("PIXABAY资源[下载完成 全部下载完成]:" + " url :" + url)
	} else {
		fmt.Println("PIXABAY资源[下载未完成]:", err)
		utils.RedisSetItemStatus(info.Url, utils.URL_STATUS_DOWNLOAD_FAIL)
	}

	return nil
}

// RepairResult walks every Detail stored in the result hash and reconciles
// its redis status with what is actually on disk: entries with no download
// urls, or whose on-disk file count does not match, are reset to FAIL (their
// directory cleared) and their urls returned for re-crawling; entries whose
// counts match are marked DONE.
func RepairResult() ([]string, error) {
	conn := g.Redis().Conn()
	defer conn.Close()
	values, err := redis.StringMap(conn.Do("HGETALL", utils.PIXABAY_RESULT_REDIS))
	if err != nil {
		fmt.Println("PIXABAY修复[获取redis数据失败]", err)
		return nil, err
	}

	arData := make([]*Detail, 0, len(values))
	var urls []string

	for _, value := range values {
		article := &Detail{}
		if err = json.Unmarshal(gconv.Bytes(value), article); err != nil {
			fmt.Println("PIXABAY修复导出出错", err)
		}
		arData = append(arData, article)
	}

	// NOTE: the old code opened a second redis connection here; the one above
	// is still open and is all the loop below needs.

	for _, element := range arData {

		count, err := utils.CountFiles(element.Dir)
		if err != nil {
			fmt.Println(err)
			fmt.Println("PIXABAY修复[获取文件数量失败]:" + element.Url)

			// Result discarded — presumably called for its side effect of
			// (re)creating the week directory; confirm against utils.WeekTime.
			utils.WeekTime(element.Title)
		}
		fmt.Println("count is ", count)

		if len(element.DownloadUrls) == 0 {
			// (The old code printed this message twice; once is enough.)
			fmt.Println("PIXABAY修复[下载链接为空]:" + element.Url)
			utils.RedisSetItemStatus(element.Url, utils.URL_STATUS_FAIL)
			utils.ClearDir(element.Dir)
			urls = append(urls, element.Url)
			continue
		}

		if err != nil {
			fmt.Println(err)
			fmt.Println("PIXABAY修复[获取文件数量失败]:" + element.Url)
			continue
		}
		if count != len(element.DownloadUrls) || count == 0 {
			fmt.Println("PIXABAY修复[下载文件数量不匹配]:" + element.Url)
			fmt.Println(count)
			utils.RedisSetItemStatus(element.Url, utils.URL_STATUS_FAIL)
			utils.ClearDir(element.Dir)
			urls = append(urls, element.Url)
		} else {
			// count == len(DownloadUrls) and both non-zero: fully downloaded.
			utils.RedisSetItemStatus(element.Url, utils.URL_STATUS_DONE)
		}
	}

	return urls, nil
}

// ExportExcel dumps the title and tags of every Detail in the result hash
// into a timestamped xlsx file, one row per video (headers on row 1).
func ExportExcel() {
	conn := g.Redis().Conn()
	defer conn.Close()
	values, err := redis.StringMap(conn.Do("HGETALL", utils.PIXABAY_RESULT_REDIS))
	if err != nil {
		// The old code kept going and silently exported an empty sheet;
		// abort instead.
		fmt.Println("PIXABAY[导出出错]", err)
		return
	}

	arData := make([]*Detail, 0, len(values))
	for _, value := range values {
		ninjaInfo := &Detail{}
		if err = json.Unmarshal(gconv.Bytes(value), ninjaInfo); err != nil {
			fmt.Println("PIXABAY导出出错", err)
		}
		arData = append(arData, ninjaInfo)
	}

	f := excelize.NewFile()
	sheetIndex, err := f.NewSheet("PIXABAY")
	if err != nil {
		fmt.Println("PIXABAY[导出出错]:" + err.Error())
		return
	}
	defer f.Close()

	// Header row.
	f.SetCellValue("PIXABAY", "A1", "标题")
	f.SetCellValue("PIXABAY", "B1", "标签")

	// Data rows start at 2. (Loop variable renamed: the old `index` shadowed
	// the sheet index returned by NewSheet above.)
	for row, element := range arData {
		lineNumber := row + 2
		titleCell := fmt.Sprintf("A%d", lineNumber)
		tagCell := fmt.Sprintf("B%d", lineNumber)

		f.SetCellValue("PIXABAY", titleCell, element.Title)
		f.SetCellValue("PIXABAY", tagCell, strings.Join(element.Tags, "、"))
	}

	f.SetActiveSheet(sheetIndex)
	fileName := fmt.Sprintf("PIXABAY_%s.xlsx", gtime.Now().Format("Ymd_His"))
	if err = f.SaveAs(fileName); err != nil {
		fmt.Println("PIXABAY[导出出错]:" + err.Error())
		return
	}

	fmt.Println("PIXABAY[导出成功]:" + fileName)
}

// Check prints the keys that exist in the item hash but not in the result
// hash — i.e. urls that were discovered but never produced a download result.
func Check() {
	conn := g.Redis().Conn()
	defer conn.Close()

	keys1, err := redis.Strings(conn.Do("HKEYS", utils.PIXABAY_ITEM_REDIS))
	if err != nil {
		fmt.Println("HKEYS hash1 err : ", err)
	}

	keys2, err := redis.Strings(conn.Do("HKEYS", utils.PIXABAY_RESULT_REDIS))
	if err != nil {
		fmt.Println("HKEYS hash2 err : ", err)
	}

	// Build a set of result keys so each membership test is O(1) instead of
	// the previous O(n*m) nested scan.
	resultSet := make(map[string]struct{}, len(keys2))
	for _, k := range keys2 {
		resultSet[k] = struct{}{}
	}

	extraKeys := make([]string, 0)
	for _, key := range keys1 {
		if _, ok := resultSet[key]; !ok {
			extraKeys = append(extraKeys, key)
		}
	}

	// extraKeys: present in the item hash, missing from the result hash.
	fmt.Println("extraKeys:", extraKeys)
}

// Test is a development scratch entry point used to verify that a webdriver
// session can be started; the download experiments are left commented out.
func Test() {
	// Acquire one port and pass the SAME port to the driver so the deferred
	// release matches (the old code requested a second port and leaked it).
	port := utils.RedisGetValidPort()
	defer utils.RedisReleasePort(port)

	_, webDriver, err := utils.GetWebDriver(port, nil)
	if err != nil {
		// Return before deferring Quit so a nil driver cannot panic.
		fmt.Println("GetWebDriver err : ", err)
		return
	}
	defer webDriver.Quit()

	//url := "https://s3.eu-west-1.amazonaws.com/eu-west-1.vimeo.com/videos/672/434/672434375.mp4?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAZRUUNWVAWWO32QM7%2F20240117%2Feu-west-1%2Fs3%2Faws4_request&X-Amz-Date=20240117T011348Z&X-Amz-Expires=86399&X-Amz-SignedHeaders=host&X-Amz-Signature=628c3d2dc8b9cda5c1df92ee7b450c967c4d6444d074f900bb123d418337484f"

	//webDriver.Get(url)

	//DownloadFile("F:\\pixabay\\ferris-wheel-amusement-park-fun-27057.mp4", url)
}

// CheckFolder walks the tree rooted at root and prints the path of every
// directory that contains no entries.
func CheckFolder(root string) {
	walkErr := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		// Files are not interesting; only directories can be "empty".
		if !info.IsDir() {
			return nil
		}
		empty, checkErr := isDirEmpty(path)
		if checkErr != nil {
			return checkErr
		}
		if empty {
			fmt.Println("Empty directory:", path)
		}
		return nil
	})

	if walkErr != nil {
		fmt.Println("Error:", walkErr)
	}
}

// 检查文件夹是否为空
func isDirEmpty(dirPath string) (bool, error) {
	dir, err := os.Open(dirPath)
	if err != nil {
		return false, err
	}
	defer dir.Close()

	_, err = dir.Readdirnames(1)
	if err == nil {
		// 文件夹不为空
		return false, nil
	}

	if len(err.Error()) > 0 && err.Error() == "EOF" {
		// 文件夹为空
		return true, nil
	}

	return false, err
}
