package service

import (
	"errors"
	"fmt"
	"gitee.com/gomod/utils/logger"
	"gitee.com/gomod/utils/uuid"
	"gorm.io/gorm"
	"io"
	"learn-land-crawler/app/config"
	"learn-land-crawler/app/model"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"regexp"
	"strconv"
	"strings"
	"sync"
)

// ConverToTProductBy converts the single CProduct identified by id into a
// TProduct (re-hosting its pictures along the way) and then runs the
// cross-schema increment SQL.
func ConverToTProductBy(id int) {
	var cp model.CProduct
	p, err := cp.FirstById(uint64(id))
	if err != nil {
		// The error used to be discarded; a failed lookup then handed a nil
		// product to doConver, which panicked on v.ID. Bail out instead.
		logger.Error.Println(err)
		return
	}

	var wg sync.WaitGroup
	wg.Add(1)
	doConver([]*model.CProduct{p}, 0, &wg)
	wg.Wait()

	logger.Info.Println("ready to execute Increment!!!")
	executeIncrement()
}

// ConverToTProduct converts up to limit downloaded CProducts into TProducts.
// The work is split into batches of batchSize products, each batch handled by
// its own goroutine; the cross-schema increment SQL runs once all batches
// finish.
func ConverToTProduct(limit int) {
	var cp model.CProduct
	arr, err := cp.GetDownloads(limit)
	if err != nil {
		// Previously the error was only logged and executeIncrement still
		// ran on an empty result; with no source rows there is nothing to
		// convert, so return instead.
		logger.Error.Println(err)
		return
	}

	const batchSize = 300
	total := len(arr)
	threads := total / batchSize
	if total%batchSize > 0 {
		threads++ // one extra worker for the final partial batch
	}

	var wg sync.WaitGroup
	for i := 0; i < threads; i++ {
		start := i * batchSize
		end := (i + 1) * batchSize
		if end > total {
			end = total
		}
		wg.Add(1)
		go doConver(arr[start:end], i, &wg)
	}
	wg.Wait()

	logger.Info.Println("ready to execute Increment!!!")
	executeIncrement()
}

// doConver walks one batch of crawled products and mirrors each into the
// t_product table: rows missing there are created, existing ones updated.
// thread identifies the worker and selects the picture download directory.
func doConver(arr []*model.CProduct, thread int, wg *sync.WaitGroup) {
	defer wg.Done()

	var tp model.TProduct
	for _, c := range arr {
		existing, err := tp.FirstById(uint64(c.ID))
		switch {
		case err != nil && !errors.Is(err, gorm.ErrRecordNotFound):
			// Real DB failure (not-found is handled below) — skip this row.
			logger.Error.Println(err)
		case existing == nil || existing.ID == 0:
			createTProduct(c, thread)
		default:
			updateTProduct(c, existing, thread)
		}
	}
}

// createTProduct builds a brand-new TProduct row from a crawled CProduct.
// The main picture and every <img> inside the description are downloaded and
// re-hosted first; if any download fails the product is skipped entirely so
// it can be retried on a later run.
func createTProduct(c *model.CProduct, thread int) {
	p := model.TProduct{
		Name:          c.Name,
		Sold:          c.Sold,
		Description:   strings.TrimPrefix(c.Description, "\n"),
		CommonPrice:   c.CommonPrice,
		MemberPrice:   c.MemberPrice,
		VipPrice:      c.VipPrice,
		DownloadUrl:   c.DownloadUrl,
		Pwd:           c.Pwd,
		DownloadType:  c.DownloadType,
		FinishPicDown: false,
	}
	p.ID = c.ID

	nurl, ok := downloadAndSaveMainPic(c.ID, c.MainPic, thread)
	if !ok {
		return
	}
	p.MainPic = nurl

	// Re-host every image referenced inside the description. The URLs are
	// extracted from the original crawled description; since TrimPrefix only
	// removed a leading newline they are present in p.Description as well.
	for _, src := range findImgSrc(c.Description) {
		newSrc, ok := downloadAndSavePic(c.ID, src, thread)
		if !ok {
			return
		}
		p.Description = strings.Replace(p.Description, src, newSrc, -1)
	}

	// Every picture is on local storage at this point. (The original code
	// assigned a shadow-prone outer `finish` variable here, which the early
	// returns above guaranteed was always true.)
	p.FinishPicDown = true
	p.UpdatedDay = c.UpdatedDay
	p.Create(&p)
}

// downloadAndSaveMainPic re-hosts a product's main picture. The crawled URL
// points at a timthumb.php resizer, so the real image address is read from
// its "src" query parameter. Downloads are cached in ts_pics: a previously
// handled (product, url) pair returns the stored local URL directly.
// It returns the local URL and whether the picture is available locally.
func downloadAndSaveMainPic(id uint, urlAddr string, thread int) (string, bool) {
	var tsp model.TsPics

	cached, err := tsp.FirstBytspicIdAndImageUrl(uint64(id), urlAddr)
	if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) {
		logger.Error.Println(err)
		return "", false
	}
	if cached != nil && cached.ID != 0 {
		// Cache hit — already downloaded on an earlier run.
		return cached.NewImagesUrl, true
	}

	parsed, err := url.Parse(urlAddr)
	if err != nil {
		logger.Error.Println(urlAddr, " Failed to parse URL: ", err)
		return "", false
	}

	src := parsed.Query().Get("src")
	logger.Info.Println("src:", src)

	localURL, err := downAndSaveImage(src, thread)
	if err != nil {
		logger.Error.Println(err)
		return "", false
	}

	// Record the mapping so subsequent runs skip the download.
	tsp.Create(&model.TsPics{
		ProductId:    uint64(id),
		ImagesUrl:    urlAddr,
		NewImagesUrl: localURL,
	})
	return localURL, true
}

// downloadAndSavePic re-hosts a single description image given by its direct
// URL. Downloads are cached in ts_pics: a previously handled (product, url)
// pair returns the stored local URL without re-downloading.
// It returns the local URL and whether the picture is available locally.
func downloadAndSavePic(id uint, urlAddr string, thread int) (string, bool) {
	var tsp model.TsPics

	cached, err := tsp.FirstBytspicIdAndImageUrl(uint64(id), urlAddr)
	if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) {
		logger.Error.Println(err)
		return "", false
	}
	if cached != nil && cached.ID != 0 {
		// Cache hit — already downloaded on an earlier run.
		return cached.NewImagesUrl, true
	}

	logger.Info.Println("urlAddr:", urlAddr)

	localURL, err := downAndSaveImage(urlAddr, thread)
	if err != nil {
		logger.Error.Println(err)
		return "", false
	}

	// Record the mapping so subsequent runs skip the download.
	tsp.Create(&model.TsPics{
		ProductId:    uint64(id),
		ImagesUrl:    urlAddr,
		NewImagesUrl: localURL,
	})
	return localURL, true
}

// downAndSaveImage fetches the image at url (query string stripped) and
// stores it under the thread-specific download directory with a fresh
// UUID-based file name. It returns the public URL
// (config.CrawlerConfig.PicPath + thread + "/" + name) the image will be
// served from, or an error.
func downAndSaveImage(url string, thread int) (string, error) {
	// Drop any query string; only the bare resource path is requested.
	nurl := url
	if i := strings.Index(url, "?"); i >= 0 {
		nurl = url[:i]
	}

	resp, err := http.Get(nurl)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	// Without this check a 404/500 error page would be saved as a broken
	// "image" and the product flagged as fully downloaded.
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("downloading %s: unexpected status %s", nurl, resp.Status)
	}

	// Make sure the per-thread download directory exists.
	savePath := config.CrawlerConfig.DownloadPicPath + strconv.Itoa(thread) + "/"
	if _, err := os.Stat(savePath); os.IsNotExist(err) {
		if err := os.MkdirAll(savePath, 0755); err != nil {
			// Previously only logged; os.Create below would then fail with a
			// less helpful error. Fail fast instead.
			return "", fmt.Errorf("创建目录失败: %w", err)
		}
	}

	filename := newFileNameFromURL(nurl)
	file, err := os.Create(savePath + filename)
	if err != nil {
		return "", err
	}
	defer file.Close()

	// Stream the response body straight into the local file.
	if _, err := io.Copy(file, resp.Body); err != nil {
		return "", err
	}

	return config.CrawlerConfig.PicPath + strconv.Itoa(thread) + "/" + filename, nil
}

// newFileNameFromURL derives a fresh, collision-free file name for a
// downloaded image: a 32-character UUID plus the extension taken from the
// URL's last path segment (e.g. ".png").
//
// The parameter is named rawURL (not url) to avoid shadowing the imported
// net/url package.
func newFileNameFromURL(rawURL string) string {
	fileName := filepath.Base(rawURL) // last path segment of the URL
	fileExt := filepath.Ext(fileName) // keep the original extension
	return uuid.UUID32() + fileExt
}

// updateTProduct refreshes an existing TProduct row from its crawled
// counterpart: pictures are (re-)downloaded through the ts_pics cache and the
// description's image URLs rewritten to their local copies. If any download
// fails the row is left untouched so it can be retried on a later run.
func updateTProduct(c *model.CProduct, p *model.TProduct, thread int) {
	nurl, ok := downloadAndSaveMainPic(c.ID, c.MainPic, thread)
	if !ok {
		return
	}
	p.MainPic = nurl

	// Rebuild the description from the crawled source, then swap every
	// embedded image for its locally hosted copy.
	p.Description = c.Description
	for _, src := range findImgSrc(p.Description) {
		newSrc, ok := downloadAndSavePic(c.ID, src, thread)
		if !ok {
			return
		}
		p.Description = strings.Replace(p.Description, src, newSrc, -1)
	}

	// All downloads succeeded (the early returns above guarantee it).
	p.FinishPicDown = true
	if c.UpdatedDay != nil {
		// Guard the dereference: a crawled row without an update day used to
		// panic here.
		p.UpdatedAt = *c.UpdatedDay
	}
	p.Name = c.Name

	var tp model.TProduct
	tp.Update(p)
}

// imgSrcRe matches the src attribute of <img> tags. Compiled once at package
// init rather than on every findImgSrc call (regexp compilation is not cheap
// and this runs per product).
var imgSrcRe = regexp.MustCompile(`<img[^>]*src="([^"]*)"`)

// findImgSrc returns the src attribute value of every <img> tag in str, in
// document order. Only double-quoted src attributes are recognized; a string
// without images yields nil.
func findImgSrc(str string) []string {
	matches := imgSrcRe.FindAllStringSubmatch(str, -1)
	var srcs []string
	for _, match := range matches {
		srcs = append(srcs, match[1])
	}
	return srcs
}

// executeIncrement copies newly finished rows from the crawler schema into
// the application schema ("learnland", or "learnland-prod" under the prod
// profile). Every INSERT ... SELECT is guarded by NOT EXISTS, so the routine
// is idempotent: re-running it only moves rows that are not there yet.
func executeIncrement() {
	schema := "learnland"
	if config.Profile == "prod" {
		schema += "-prod"
	}

	statements := []string{
		// Tags: mirrored as-is, created disabled (available = 0), type 1.
		fmt.Sprintf("INSERT INTO `%s`.`tag` (`id`, `code`,`name`, `sort`,  `available`, `type`) "+
			"SELECT id, `code`, `name`, 0, 0, 1 "+
			"FROM tag t WHERE NOT EXISTS (SELECT 1 FROM `%s`.`tag` WHERE t.id = id)", schema, schema),

		// Articles: one per fully downloaded product; `code` carries the
		// crawler-side product id for later joins.
		fmt.Sprintf("INSERT INTO `%s`.`article` (`created_at`, `updated_at`, `code`, `user_id`, `title`,`cover_image`,`category_id`,`is_publish`, `type_code`) "+
			"SELECT created_at, updated_day, `id`, 0, `name`, `main_pic`, 1, 1, 'course' "+
			"FROM t_product t WHERE finish_pic_down = 1 AND NOT EXISTS (SELECT 1 FROM `%s`.`article` WHERE t.id = `code`)", schema, schema),

		// Article bodies, joined back through article.code = product id.
		fmt.Sprintf("INSERT INTO `%s`.`article_content` (`id`, `content`) "+
			"SELECT a.id, p.description "+
			"FROM t_product p INNER JOIN `%s`.`article` a ON p.id = a.`code` "+
			"WHERE p.finish_pic_down = 1 AND NOT EXISTS (SELECT 1 FROM `%s`.`article_content` WHERE a.id = `id`)", schema, schema, schema),

		// Tag associations for each migrated article.
		fmt.Sprintf("INSERT INTO `%s`.`article_tag` (`tag_id`, `article_id`) "+
			"SELECT pt.tag_id, a.id "+
			"FROM c_product_tag pt INNER JOIN t_product p ON pt.product_id = p.id "+
			"INNER JOIN `%s`.`article` a ON p.id = a.`code` "+
			"WHERE pt.deleted_at IS NULL AND p.finish_pic_down = 1 AND NOT EXISTS (SELECT 1 FROM `%s`.`article_tag` WHERE a.id = `article_id`)", schema, schema, schema),

		// Product pricing/download rows. NOTE(review): p.sold feeds the
		// `virtual_sold` column while `sold` is seeded with 0 — looks
		// deliberate (display count vs. real count) but worth confirming.
		fmt.Sprintf("INSERT INTO `%s`.`t_product` (`id`, `created_at`, `updated_at`, `code`,`download_url`, `pwd`,`download_type`,`virtual_sold`,`sold`, `price`, `member_price`, `vip_price`) "+
			"SELECT a.id, a.created_at, a.updated_at, a.`code`, p.download_url, p.`pwd`, p.`download_type`, p.`sold`, 0, p.common_price, p.member_price, p.vip_price "+
			"FROM t_product p INNER JOIN `%s`.`article` a ON p.id = a.`code` "+
			"WHERE p.finish_pic_down = 1 AND NOT EXISTS (SELECT 1 FROM `%s`.`t_product` WHERE a.id = `id`)", schema, schema, schema),
	}

	for _, sql := range statements {
		model.Execute(sql)
	}
}
