//License: Artistic License v2.0, http://opensource.org/licenses/Artistic-2.0

package main

import (
	"fmt"
	"log"
	"os"
	"sort"
	"strings"
	"time"
)

// Content is a ready-to-serve, pre-gzipped response body (desktop and
// mobile variants) plus the metadata used for HTTP response headers.
type Content struct {
	ContentType, Time string // ContentType: "html" or a static file's extension; Time: formatted generation timestamp
	Gz, MobileGz      []byte // gzip-compressed page bytes for the desktop and mobile layouts
	//Content.Time is generated time stamp for http response header
}

// Copy is a single piece of site content (page or blog post), keyed by Cid
// in the package-level copies map.
type Copy struct {
	Cid, Copy, SEOTags, Title, Lang, Date string   // Copy holds the HTML body; Date format is yyyy-mm-dd
	Tags                                  []string // tags used to build the tag index / tag cloud
	UsesOwnLayout                         bool     //ignore global layout: Copy str == full page static html
	//Copy.Date is for blog usage
}

// readCopy returns the Copy stored under cid. An empty cid (or a missing
// entry) yields the zero-value Copy.
func readCopy(cid string) Copy {
	if cid != "" {
		return copies[cid]
	}
	return Copy{}
}

// cidExists reports whether a Copy with the given cid is stored in copies.
// Presence is detected by a non-empty Cid on the stored value, so a missing
// key (zero value) reads as absent.
func (c *Copy) cidExists(s string) bool {
	// idiom: return the condition directly instead of if/return true/return false
	return copies[s].Cid != ""
}

// save stores this Copy in the package-level copies map under its Cid.
// A Copy without a Cid is silently ignored.
func (c *Copy) save() {
	if c.Cid == "" {
		return
	}
	copies[c.Cid] = *c
}

// del removes this Copy from the package-level copies map (no-op if absent).
func (c *Copy) del() {
	delete(copies, c.Cid)
}

// search scores how well the query str matches this Copy, case-insensitively.
// Whole-phrase hits in Title/Cid/Copy weigh 100 each and SEOTags 50; for
// multi-word queries each individual word adds 1 per occurrence as a
// tie-breaker.
func (c *Copy) search(str string) (score int) {
	str = strings.ToLower(str)
	// lowercase each field once up front instead of repeatedly per pass
	title := strings.ToLower(c.Title)
	cid := strings.ToLower(c.Cid)
	body := strings.ToLower(c.Copy)
	seo := strings.ToLower(c.SEOTags)
	score = strings.Count(title, str) * 100
	score += strings.Count(cid, str) * 100
	score += strings.Count(body, str) * 100
	score += strings.Count(seo, str) * 50
	words := strings.Fields(str)
	if len(words) > 1 {
		for _, w := range words {
			// BUG FIX: the per-word pass previously matched the lowercased
			// words against the ORIGINAL-case fields, so it silently missed
			// most hits; match against the lowercased text instead.
			score += strings.Count(title, w)
			score += strings.Count(cid, w)
			score += strings.Count(body, w)
			score += strings.Count(seo, w)
		}
	}
	return score
}

// searchCopies brute-force scores every Copy in the requested language
// against str and returns the matching Cids, best score first.
func searchCopies(str, lang string) []string {
	//str = strings.TrimSpace(str)
	if len(str) < 2 { //CN,KR,JP: 2 chars == 2 words
		return []string{}
	}
	var scored []string
	for _, c := range copies { //brute searching + scoring
		if c.Lang != lang {
			continue
		}
		if score := c.search(str); score > 0 {
			// zero-padded score prefix makes lexicographic order == numeric order
			scored = append(scored, fmt.Sprintf("%012d%s", score, c.Cid))
		}
	}
	sort.Strings(scored) // ascending; walked backwards below for descending
	var results []string
	for i := len(scored) - 1; i >= 0; i-- {
		results = append(results, scored[i][12:]) // strip the 12-digit score prefix
	}
	return results // slice of cid strings
}

// writeSiteMap writes a plain-text sitemap (one URL per line, in map
// iteration order) to path + "sitemap.txt".
// domain and path must both have a trailing "/".
// Errors are logged rather than returned, matching the rest of this file.
func writeSiteMap(domain, path string) {
	// build the body with a Builder instead of quadratic string concatenation
	var b strings.Builder
	for _, c := range copies {
		b.WriteString(domain)
		b.WriteString(c.Cid)
		b.WriteByte('\n')
	}
	fileName := path + "sitemap.txt"
	f, err := os.Create(fileName)
	if err != nil {
		// BUG FIX: previously Close/Write were still attempted on a nil
		// *os.File after a failed Create; bail out instead.
		log.Println(projName, err)
		return
	}
	defer f.Close()
	if _, err := f.Write([]byte(b.String())); err != nil {
		log.Println(projName, err) // previously the Write error was dropped
	}
}

// setBlogIndex groups blog Cids per language, ordered newest first.
// A Copy counts as a blog post when its Date is a full yyyy-mm-dd string
// (length 10).
func setBlogIndex(allCopies *map[string]Copy) map[string]([]string) {
	var dated []string
	for _, c := range *allCopies {
		if len(c.Date) == 10 { //date format: yyyy-mm-dd; len == 10
			dated = append(dated, c.Date+c.Cid) // date prefix is for sorting only
		}
	}
	if len(dated) == 0 {
		return map[string]([]string){}
	}
	sort.Strings(dated) // oldest first
	cidsMap := make(map[string]([]string))
	for i := len(dated) - 1; i >= 0; i-- { // walk backwards: newest first
		c := readCopy(dated[i][10:]) // strip the date prefix
		cidsMap[c.Lang] = append(cidsMap[c.Lang], c.Cid)
	}
	return cidsMap
}

// setTagIndex builds two indexes over all copies:
//   - tagIndex[tag][lang]: sorted Cids carrying that tag in that language
//   - tagSnippets[lang]:   an HTML "tag cloud" of links for that language
//
// e.g.: tagIndex, tagSnippets := setTagIndex(&copies)
// e.g.: tagIndex[tag]["en"]
// e.g.: tagSnippets["en"]
func setTagIndex(allCopies *map[string]Copy) (map[string](map[string][]string), map[string]string) {
	// pass 1: collect every Cid under each of its tags
	var tmpIndex = map[string][]string{}
	for _, c := range *allCopies {
		for _, tag := range c.Tags {
			tmpIndex[tag] = append(tmpIndex[tag], c.Cid)
		}
	}
	// link template is loop-invariant; hoisted out of the tag loop.
	// not sure about hard coding this; using readSnippet is better but
	// higher risk due to the Sprintf below.
	template := " <a href=\"/tag/%s/1/%s/\">%s(%d)</a> "
	var tmpTagIndex = map[string](map[string][]string){}
	var tagURLs = map[string][]string{}
	for tag, cids := range tmpIndex {
		sort.Strings(cids)
		tmpTagIndex[tag] = map[string][]string{} //reset
		for _, cid := range cids {
			c := readCopy(cid)
			tmpTagIndex[tag][c.Lang] = append(tmpTagIndex[tag][c.Lang], c.Cid)
		}
		for lang := range supportedLang { // idiom: drop the unused blank value
			count := len(tmpTagIndex[tag][lang])
			if count > 0 {
				tagURLs[lang] = append(tagURLs[lang], fmt.Sprintf(template, tag, lang, tag, count))
			}
		}
	}
	var tmpTagSnippets = map[string]string{}
	for lang := range supportedLang {
		// PERF FIX: sort each language's links once here instead of
		// re-sorting every language on every tag iteration; the final
		// (sorted) result is identical.
		sort.Strings(tagURLs[lang])
		tmpTagSnippets[lang] = SliceToString(tagURLs[lang], "") //a.k.a. tag cloud
	}
	//tmpTagIndex and tagSnippets list both blog and non-blog copies by design
	return tmpTagIndex, tmpTagSnippets
}

// setSnippet stores snippet under snippets[lang][tid], lazily creating the
// per-language map so that a write to a nil inner map cannot panic
// (snippetExists shows nil inner maps do occur).
func setSnippet(lang, tid, snippet string) {
	if snippets[lang] == nil {
		snippets[lang] = map[string]string{}
	}
	snippets[lang][tid] = snippet
}

//setPage constructs the page from layout snippets and copy, and returns the HTML for the page
//template name is either "4Mobile" or "default"
func setPage(template, lang, main, title, date, seo string) (p string) {
	//Outside content.go, setPage is also used by "search" in webServer.go
	// mobile snippets share the desktop names plus a "4Mobile" suffix
	suffix := ""
	if template == "4Mobile" {
		suffix = "4Mobile"
	}
	p = readSnippet(lang, "SiteLayout"+suffix)
	// replacements run in this exact order so a substituted snippet can
	// itself contain later placeholders, as before
	replacements := [][2]string{
		{"<!--SiteNav-->", readSnippet(lang, "SiteNav"+suffix)},
		{"<!--SiteHeader-->", readSnippet(lang, "SiteHeader"+suffix)},
		{"<!--SiteFooter-->", readSnippet(lang, "SiteFooter"+suffix)},
		{"<!--SEOTags-->", seo},
		{"<!--Date-->", date},
		{"{Title}", title},
		{"{Lang}", lang},
		{"<!--Copy-->", main},
	}
	for _, r := range replacements {
		p = strings.Replace(p, r[0], r[1], -1)
	}
	return p
}

// setHTMLContent assembles gzipped desktop and mobile HTML variants of a page
// and stamps the generation time used for the HTTP response header.
func setHTMLContent(lang, page, title, date, seo string) (c Content) {
	c.ContentType = "html"
	// BUG FIX: the date argument was previously dropped ("" was passed to
	// setPage), so <!--Date--> was never filled in even though callers
	// supply cp.Date; forward it.
	c.MobileGz = Gz("", setPage("4Mobile", lang, page, title, date, seo))
	c.Gz = Gz("", setPage("default", lang, page, title, date, seo))
	c.Time = time.Now().UTC().Format(time.RFC1123) //e.g. "Mon, 02 Jan 2006 15:04:05 UTC"
	return c
}

// readSnippet returns snippet t for lang, falling back to the configured
// default language when lang has no such snippet.
func readSnippet(lang, t string) string {
	if !snippetExists(lang, t) {
		lang = conf.DefaultLang
	}
	return snippets[lang][t]
}

// readSnippets concatenates the named snippets for lang, in order.
// An empty snips list yields "".
func readSnippets(lang string, snips []string) (s string) {
	var b strings.Builder
	for _, t := range snips {
		b.WriteString(readSnippet(lang, t))
	}
	return b.String()
}

// snippetExists reports whether a non-empty snippet t exists for lang.
func snippetExists(lang, t string) bool {
	// indexing a nil inner map safely yields "", so the previous explicit
	// nil check and the if/return-true/return-false pattern both collapse
	// into one expression.
	return snippets[lang][t] != ""
}

// loadStaticContents gzips every file under the static and theme directories
// and returns them keyed by URL path (the file path with the directory
// prefix stripped).
func loadStaticContents() map[string]Content {
	log.Println(projName, "Loading static files to contents")
	var contents = map[string]Content{}
	load := func(dir string) {
		// BUG FIX (probable): use a fresh list per directory. The previous
		// shared slice meant that, if ListFiles appends rather than resets,
		// the second call re-processed the first directory's entries while
		// stripping the wrong prefix length. NOTE(review): confirm against
		// ListFiles' actual behavior — this is a no-op if it resets.
		var fileList []string
		ListFiles(dir, &fileList)
		for _, fn := range fileList {
			var c Content
			c.ContentType = GetFileExtension(fn)
			c.Gz = Gz(fn, "")
			c.Time = time.Now().UTC().Format(time.RFC1123) //e.g. "Mon, 02 Jan 2006 15:04:05 UTC"
			contents[fn[len(dir):]] = c // URL path = file path minus dir prefix
		}
	}
	load(path("static"))
	load(path("theme"))
	return contents
}

// loadAssembledContents builds the full content map from all copies:
// individual pages, paginated per-tag listing pages, and paginated blog
// listing pages.
func loadAssembledContents(allCopies *map[string]Copy) map[string]Content {
	log.Println(projName, "Loading HTML copies to contents")
	var contents = map[string]Content{}
	for _, cp := range *allCopies {
		if !cp.UsesOwnLayout {
			contents[cp.Cid] = setHTMLContent(cp.Lang, cp.Copy, cp.Title, cp.Date, cp.SEOTags)
		} else { //UsesOwnLayout: Copy already contains the full page HTML
			var c Content
			c.ContentType = "html"
			c.MobileGz = Gz("", cp.Copy)
			c.Gz = Gz("", cp.Copy)
			c.Time = time.Now().Format(time.RFC1123Z) //e.g. "Mon, 02 Jan 2006 15:04:05 -0700"
			contents[cp.Cid] = c
		}
	}
	//
	log.Println(projName, "Loading tag posts to contents")
	tagIndex, tagSnippets := setTagIndex(allCopies) //tagIndex[tag]["en"]; tagSnippets["en"]
	// buildPages emits one listing page per pageNum for a single language's
	// post list; shared by the tag and blog sections below.
	buildPages := func(posts []string, urlPattern, lang, title, seo string) {
		for pageNum := 1; (pageNum-1)*conf.BlogsPerPage < len(posts); pageNum++ {
			page := paginate(posts, urlPattern, lang, "SiteBlogList", pageNum, conf.BlogsPerPage)
			page = strings.Replace(readSnippet(lang, "SiteSearchResultsDiv"), "<!--Results-->", page, -1)
			tagSnippet := strings.Replace(readSnippet(lang, "SiteTagsDiv"), "<!--Tags-->", tagSnippets[lang], -1)
			page = page + tagSnippet
			urlPath := fmt.Sprintf(urlPattern, pageNum, lang) //e.g. tag/{tag}/1/en/
			contents[urlPath] = setHTMLContent(lang, page, title, "", seo)
		}
	}
	for tag, sameLangPosts := range tagIndex {
		urlPattern := "/tag/" + tag + "/%d/%s/" //e.g. tag/news/2/en/
		for lang, posts := range sameLangPosts {
			// BUG FIX: pageNum now restarts at 1 for every language; it was
			// previously reset only once per tag, so the second language of
			// a tag continued numbering where the first left off.
			buildPages(posts, urlPattern, lang, tag, "")
		}
	}
	//
	log.Println(projName, "Loading blog posts to contents")
	for lang, posts := range setBlogIndex(allCopies) {
		// BUG FIX: the blog title/SEO snippets are now looked up per blog
		// language; previously they were fetched once, before this loop,
		// using whatever stale lang value the tag section left behind.
		// pageNum likewise restarts at 1 for every language.
		seo := readSnippet(lang, "SiteBlogSEOTags")
		title := readSnippet(lang, "SiteBlogTitle")
		buildPages(posts, "/blog/%d/%s/", lang, title, seo) //e.g. domain/blog/2/en/
	}
	return contents
}

// SliceToString concatenates all elements of slice, appending separator
// after every element (including the last one). An empty or nil slice
// yields "".
func SliceToString(slice []string, separator string) string {
	if len(slice) == 0 {
		return ""
	}
	// strings.Join is linear (the previous += loop was quadratic) but omits
	// the trailing separator, so add it back to preserve behavior.
	return strings.Join(slice, separator) + separator
}

//Copyright: Slamet Hendry
