package da

import (
	"encoding/json"
	"fmt"
	"gallery/crawler"
	"gallery/crawler/utils"
	"io"
	"log"
	"net/http"
	"os"
	"path"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/fatih/color"
)

const (
	// userInfo is the profile-init endpoint returning a user's gallery
	// folders. The hard-coded username/limit query values are placeholders
	// that callers overwrite via url.Values before issuing the request.
	userInfo  = "https://www.deviantart.com/_napi/da-user-profile/api/init/gallery?username=rickgriffin&deviations_limit=24&with_subfolders=true"
	// apiFolder lists the contents of one gallery folder; username,
	// folderid and offset are rewritten per request.
	apiFolder = "https://www.deviantart.com/_napi/da-user-profile/api/gallery/contents?username=RickGriffin&offset=0&limit=24&folderid=510849"
	// apiAll lists a user's whole gallery ("all" view), newest first.
	apiAll    = "https://www.deviantart.com/_napi/da-user-profile/api/gallery/contents?username=RickGriffin&offset=0&limit=24&all_folder=true&mode=newest"
)

// DA is a crawler customized for DeviantArt. The embedded WaitGroup tracks
// in-flight download goroutines and the embedded Bar renders progress.
type DA struct {
	Limit int // page size used to advance the pagination offset
	*sync.WaitGroup
	ch chan struct{} // semaphore bounding concurrent downloads
	*utils.Bar
}

// img is one downloadable image: a sanitized output file name and the
// token-signed URL to fetch it from.
type img struct {
	title string
	uri   string
}

// GetAllFolders returns every gallery folder of user except the synthetic
// "all" folder (identified by FolderID == -1). It rewrites the username
// query parameter of the profile-init endpoint, decodes the JSON response
// and returns pointers into the decoded results slice. On network, decode
// or payload-shape failure it reports the error and exits the process,
// matching the error-handling style of the rest of this package.
func (da *DA) GetAllFolders(user string) []*Folder {
	folders := new(Folders)
	res := make([]*Folder, 0)
	uri, q := utils.MustParseURL(userInfo)
	q.Set("username", user)
	uri.RawQuery = q.Encode()
	resp, err := crawler.MyClient.Do(utils.MustRequest(http.MethodGet, uri.String(), nil))
	if err != nil {
		color.Red("无法获取目录信息")
		os.Exit(2)
	}
	defer resp.Body.Close()
	// BUG FIX: the Decode error used to be silently dropped, so a malformed
	// response was processed as an empty struct.
	if err := json.NewDecoder(resp.Body).Decode(folders); err != nil {
		color.Red("无法获取目录信息: %v", err)
		os.Exit(2)
	}
	// Guard against an unexpected payload shape before indexing Modules[0].
	if len(folders.SectionData.Modules) == 0 {
		color.Red("无法获取目录信息")
		os.Exit(2)
	}
	// Extract the folder list from the first module.
	ori := folders.SectionData.Modules[0].ModuleData.Folders.Results
	for i, v := range ori {
		// FolderID -1 is the synthetic "all" folder; skip it.
		if v.FolderID == -1 {
			continue
		}
		res = append(res, &ori[i])
	}
	return res
}

// DownloadFolderImgs downloads every image in one gallery folder of user,
// paging through the folder-contents API until no more pages remain. Each
// image download runs in its own goroutine, throttled by the da.ch
// semaphore and a crawler.Duration-second sleep between spawns. It wraps
// getLimitImgs for per-page fetching.
func (da *DA) DownloadFolderImgs(user string, fd *Folder) {
	// Reset the progress counter for this folder.
	color.Cyan("\n当前: %s\t共计: %d个项目(包括但不限于图片)", fd.Name, fd.Size)
	da.Reset(int32(fd.Size))

	// Build the folder-contents URL for this user/folder.
	uri, q := utils.MustParseURL(apiFolder)
	q.Set("username", user)
	q.Set("folderid", strconv.FormatInt(fd.FolderID, 10))
	uri.RawQuery = q.Encode()

	out := path.Join(crawler.Dir, user, fd.Name)
	// BUG FIX: os.ModeDir carries no permission bits, so the directory was
	// created as d--------- on Unix; use a normal 0755 and check the error.
	if err := os.MkdirAll(out, 0755); err != nil {
		color.Red("无法创建目录: %v", err)
		os.Exit(2)
	}

	offset := 0
	for {
		pageImgs, next := da.getLimitImgs(uri.String(), 3)
		for _, v := range pageImgs {
			da.Add(1)
			da.ch <- struct{}{} // acquire a download slot
			go da.downloadIMG(v.uri, path.Join(out, v.title))
			time.Sleep(time.Second * time.Duration(crawler.Duration))
		}
		if !next {
			color.Green("\n此文件夹完成")
			break
		}
		// Advance pagination and rebuild the query string for the next page.
		offset += da.Limit
		q.Set("offset", strconv.Itoa(offset))
		uri.RawQuery = q.Encode()
	}
}

// getLimitImgs fetches one page from a standard deviation-list endpoint and
// returns the downloadable images plus whether more pages remain (has_more).
// On a network error it sleeps and retries up to retry times, aborting the
// process once the budget is exhausted. Only use it on URIs that return the
// standard image-list JSON shape.
func (da *DA) getLimitImgs(uri string, retry int) ([]*img, bool) {
	// Abort once the retry budget is exhausted.
	if retry < 0 {
		log.Fatalln("请检查网络")
	}

	client := crawler.MyClient
	resp, err := client.Do(utils.MustRequest(http.MethodGet, uri, nil))
	if err != nil {
		log.Println("网络错误!重试...", strings.Repeat(" ", 20))
		// Back off briefly before retrying.
		time.Sleep(time.Second * 3)
		return da.getLimitImgs(uri, retry-1)
	}
	defer resp.Body.Close()
	// Decode the page and abort on malformed JSON.
	result := new(rest)
	if e := json.NewDecoder(resp.Body).Decode(result); e != nil {
		// BUG FIX: this used to print err (always nil here) instead of the
		// actual decode error e.
		color.Red("%v", e)
		os.Exit(2)
	}
	imgs := make([]*img, 0, len(result.Results))
	for _, v := range result.Results {
		base, tokens := v.BaseURI, v.Token
		var token string
		// Only download images; skip other deviation types and entries
		// without an access token.
		if len(tokens) > 0 && v.Type == "image" {
			token = tokens[0]
		} else {
			continue
		}
		pretty, types := v.Media.PrettyName, v.Media.Types
		itype := types[len(types)-1]
		var c string
		if itype.C != "" {
			// The API supplied a path template; substitute the pretty name.
			c = strings.Replace(itype.C, "<prettyName>", pretty, 1)
		} else {
			// Fall back to composing the full-view path:
			// v1/fill/w_<w>,h_<h>,q_80,strp/<prettyName>-<t>.jpg
			// BUG FIX: pretty and itype.T were swapped, producing
			// "<t>-<prettyName>.jpg" instead of the documented order.
			c = fmt.Sprintf("v1/fill/w_%d,h_%d,q_80,strp/%s-%s.jpg", int64(itype.W), int64(itype.H), pretty, itype.T)
		}

		imgs = append(imgs, &img{
			title: formatTitle(v.Title),
			uri:   fmt.Sprintf("%s/%s?token=%s", base, c, token),
		})
	}
	return imgs, result.HasMore
}

// formatTitle sanitizes a deviation title into a file name: spaces are
// removed, path-hostile characters ('-', '/', '\') become underscores, and
// a ".jpg" extension is appended.
func formatTitle(title string) string {
	var b strings.Builder
	b.Grow(len(title) + len(".jpg"))
	for _, r := range title {
		switch r {
		case ' ':
			// drop spaces entirely
		case '-', '/', '\\':
			b.WriteRune('_')
		default:
			b.WriteRune(r)
		}
	}
	b.WriteString(".jpg")
	return b.String()
}

// downloadIMG fetches src and writes it to out. It always advances the
// progress bar, marks the WaitGroup done and releases its semaphore slot,
// even when the download fails. A partially-written file is removed.
func (da *DA) downloadIMG(src, out string) {
	defer func() {
		fmt.Print(color.MagentaString(da.AddAndShow(1)))
		da.Done()
		<-da.ch // release the download slot
	}()
	resp, err := crawler.MyClient.Get(src)
	if err != nil {
		log.Println("下载失败！")
		return
	}
	defer resp.Body.Close()
	// Don't save error responses (e.g. an expired token returns HTML) as
	// image files.
	if resp.StatusCode != http.StatusOK {
		log.Println("下载失败！", resp.Status)
		return
	}

	// BUG FIX: the Create error used to be discarded, so a failed Create
	// caused a nil-pointer panic in io.Copy, and the file was never closed.
	f, err := os.Create(out)
	if err != nil {
		log.Println("写入失败", err)
		return
	}
	_, err = io.Copy(f, resp.Body)
	// Close before a possible Remove so the delete also works on Windows;
	// a Close failure counts as a write failure too.
	if closeErr := f.Close(); err == nil {
		err = closeErr
	}
	if err != nil {
		log.Println("写入失败", err)
		os.Remove(out)
	}
}
