package main

import (
	"bytes"
	"errors"
	"fmt"
	"html"
	"net/http"
	uri "net/url"
	"regexp"
	"strings"
	"sync"
	"time"

	"golang.org/x/text/encoding"
	"golang.org/x/text/encoding/simplifiedchinese"
	"golang.org/x/text/transform"
)

// weblist stores the parsed index page: parallel slices of content-page
// URLs and their titles, plus a cursor into them. mutex guards all fields
// so entries can be claimed safely from concurrent callers.
type weblist struct {
	urls   []string // content-page URLs, same order as titles
	titles []string // entry titles extracted from the index page
	index  int      // next entry to hand out (see DownloadPageMutex)
	mutex  sync.Mutex
}

// ParseIndex fetches the index page named by g_args.URL, extracts entry
// titles and links with config.ListRegex, and fills list.urls/list.titles
// (reversed when g_args.Reverse is set). The cursor is reset to 0.
// Returns false on URL-parse failure, regex-compile failure, or no matches.
func (list *weblist) ParseIndex(config SiteItem) bool {
	// Parse the index URL; it is also needed to resolve relative links below.
	LogPrintln(9, "解析网址: ", g_args.URL)
	urlobj, urlerr := uri.Parse(g_args.URL)
	if urlerr != nil {
		LogPrintln(0, "网址解析失败: ", urlerr)
		return false
	}

	// Compile the list regex before any network work so a bad pattern
	// fails fast instead of after a (possibly retried) download.
	listregex, lcerr := regexp.Compile(config.ListRegex)
	if lcerr != nil {
		LogPrintln(0, "索引页正则表达式编译失败:", lcerr)
		return false
	}

	// Pick the page character encoding; nil means the page is already UTF-8.
	var code encoding.Encoding
	switch config.Code {
	case "gbk":
		code = simplifiedchinese.GBK
	default:
		code = nil
	}

	// Fetch the index page, retrying every 2 seconds until it succeeds.
	var listinner string
	var ok error
	for {
		listinner, ok = getFromURL(g_args.URL, code)
		if ok == nil {
			break
		}

		LogPrintln(0, "索引页获取失败，2秒后重试:", ok)
		time.Sleep(time.Second * 2)
	}

	// Extract all (title, link) submatches from the page.
	listall := listregex.FindAllStringSubmatch(listinner, -1)
	LogPrintln(0, "获取索引数量:", len(listall))

	if len(listall) < 1 {
		LogPrintln(0, "未解析到索引内容，可能是正则表达式错误")
		return false
	}

	list.urls = make([]string, len(listall))
	list.titles = make([]string, len(listall))
	for i := 0; i < len(listall); i++ {
		// j is the storage slot: reversed order when requested.
		var j int
		if g_args.Reverse {
			j = len(listall) - 1 - i
		} else {
			j = i
		}

		// Save the title.
		list.titles[j] = listall[i][config.ListTitle]

		// Resolve the link according to the configured prefix mode.
		switch config.ListLinkPrefix {
		case "@index@":
			// Link is relative to the index page's directory.
			pathindex := strings.LastIndex(urlobj.Path, "/")
			var path string
			if pathindex < 0 {
				path = "/"
			} else {
				// strings.LastIndex returns a BYTE index, so slice the
				// string directly. (The original converted to []rune
				// first, which mis-slices — or panics — whenever the
				// path contains multi-byte characters.)
				path = urlobj.Path[:pathindex+1]
			}
			list.urls[j] = fmt.Sprint(urlobj.Scheme, "://", urlobj.Host, path, listall[i][config.ListLink])
		case "@top@":
			// Link is relative to the site root.
			list.urls[j] = fmt.Sprint(urlobj.Scheme, "://", urlobj.Host, listall[i][config.ListLink])
		default:
			// Link is already absolute.
			list.urls[j] = listall[i][config.ListLink]
		}

		LogPrintln(3, i, " ", list.titles[j], ": [", list.urls[j], "]")
	}
	list.index = 0

	return true
}

// DownloadPage downloads one content page, extracts the page body with
// config.PageRegex, splits it into lines with config.LineRegex, and returns
// the assembled text prefixed with a "==========" title banner.
// The bool result is false on regex-compile failure or when either regex
// yields no matches.
func DownloadPage(title string, url string, config SiteItem) (string, bool) {
	// Compile both regexes up front so a bad pattern fails fast instead of
	// after a (possibly retried forever) network download.
	pageregex, pcerr := regexp.Compile(config.PageRegex)
	if pcerr != nil {
		LogPrintln(0, "内容页正则表达式编译失败:", pcerr)
		return "", false
	}
	lineregex, lcerr := regexp.Compile(config.LineRegex)
	if lcerr != nil {
		LogPrintln(0, "行正则表达式编译失败: ", lcerr)
		return "", false
	}

	// Pick the page character encoding; nil means the page is already UTF-8.
	var code encoding.Encoding
	switch config.Code {
	case "gbk":
		code = simplifiedchinese.GBK
	default:
		code = nil
	}

	// Fetch the page, retrying every 2 seconds until it succeeds.
	var pageinner string
	var ok error
	for {
		LogPrintf(8, "[%s] 开始获取: %s\n", title, time.Now().String())
		pageinner, ok = getFromURL(url, code)
		LogPrintf(8, "[%s] 完成获取: %s\n", title, time.Now().String())
		if ok == nil {
			break
		}

		LogPrintf(0, "[%s] 获取失败，2秒后重试: %s\n", title, ok.Error())
		time.Sleep(time.Second * 2)
	}

	// Extract the content fragments.
	pageall := pageregex.FindAllStringSubmatch(pageinner, -1)
	LogPrintf(3, "[%s] 获取内容数量: %d\n", title, len(pageall))
	LogPrintln(7, pageall)

	if len(pageall) < 1 {
		LogPrintln(0, "未解析到页面内容，可能是正则表达式错误")
		return "", false
	}

	// Concatenate all fragments, one per line. bytes.Buffer writes are
	// documented to always succeed, so no error checks are needed here.
	var buffer bytes.Buffer
	for i := 0; i < len(pageall); i++ {
		LogPrintf(5, "内容 [%d]: %s\n", i, pageall[i][config.PageGet])
		buffer.WriteString(pageall[i][config.PageGet])
		buffer.WriteString("\n")
	}

	// Decode HTML entities before line extraction.
	lineinner := html.UnescapeString(buffer.String())

	// Extract individual text lines.
	lineall := lineregex.FindAllStringSubmatch(lineinner, -1)
	LogPrintf(3, "[%s] 获取行数量: %d\n", title, len(lineall))
	LogPrintln(7, lineall)

	if len(lineall) < 1 {
		LogPrintln(0, "未解析到行内容，可能是正则表达式错误")
		return "", false
	}

	// Rebuild the output: a banner with the title, then one line per match.
	buffer.Reset()
	buffer.WriteString("==========\n")
	buffer.WriteString(title)
	buffer.WriteString("\n==========\n")

	for i := 0; i < len(lineall); i++ {
		buffer.WriteString(lineall[i][config.LineGet])
		buffer.WriteString("\n")
	}

	LogPrintf(0, "[%s] 成功\n", title)
	return buffer.String(), true
}

// DownloadPageMutex claims the next pending entry under the mutex and
// downloads it via DownloadPage. It returns the claimed index (-1 when
// nothing is left), the downloaded content, and the success flag.
func (list *weblist) DownloadPageMutex(config SiteItem) (int, string, bool) {
	// Claim an entry: take the current cursor position and advance it.
	list.mutex.Lock()
	claimed := -1
	var entryTitle, entryURL string
	if list.titles != nil && list.urls != nil &&
		list.index < len(list.urls) && list.index < len(list.titles) {
		claimed = list.index
		entryTitle = list.titles[claimed]
		entryURL = list.urls[claimed]
		list.index++
	}
	list.mutex.Unlock()

	// Nothing left to download.
	if claimed < 0 {
		return claimed, "", false
	}

	// Download outside the lock so other callers can proceed.
	content, success := DownloadPage(entryTitle, entryURL, config)
	return claimed, content, success
}

// getFromURL fetches the page at url and returns its body as a string,
// decoded through coder (nil means pass the bytes through unchanged).
// A non-2xx response or any transport/read failure yields an error.
func getFromURL(url string, coder encoding.Encoding) (string, error) {
	resp, err := http.Get(url)
	if err != nil {
		return "", err
	}
	// Close the body on every return path. (The original leaked the
	// connection when the server returned a non-2xx status.)
	defer resp.Body.Close()

	// Reject any non-success status class.
	if resp.StatusCode/100 != 2 {
		return "", errors.New("http: server returned non-success status")
	}

	// Wrap the body in a decoding reader; Nop passes UTF-8 through as-is.
	decoder := encoding.Nop.NewDecoder()
	if coder != nil {
		decoder = coder.NewDecoder()
	}
	reader := transform.NewReader(resp.Body, decoder)

	var buf bytes.Buffer
	if _, err := buf.ReadFrom(reader); err != nil {
		return "", err
	}
	return buf.String(), nil
}
