package server

import (
  "fmt"
  "http"
	"old/template"
	"strings"
	"search_backend"
	"time"
	"strconv"
	"log"
	"parser"
	"io"
	"io/ioutil"
	"os"
	"regexp"
	"sort"
	"crypto/md5"
)

// alias holds the substitution fields injected into the HTML templates
// (loading.html, admin.html, gui.html, doc.html, ...). The field names are
// referenced by the templates, so they are part of the rendering contract.
type alias struct{
	WebTitle string    // site title shown in the page header (TITLE constant)
	Query string       // the query echoed back on the results page
	Description string // footer/description text (DESCRIPTION constant)
	Items string       // pre-rendered HTML rows of the result table
	Seconds string     // human-readable hit count / timing message
	Document string    // full document body for the document-view page
	Pager string       // pre-rendered HTML paging bar
}

// Site-wide constants.
// TITLE is the site name shown in every page header.
const TITLE string = "Arianna Project"
// DESCRIPTION is the (Spanish) project blurb shown on the search pages.
const DESCRIPTION string = "Arianna Project es un buscador realizado por Sergio García Mondaray y Javier Angulo Lucerón,<br>como práctica para la asignatura \"Almacenamiento y Recuperación de la Información\".<br><br>Escuela Superior de Informática de Ciudad Real<br>Universidad de Castilla-La Mancha"
// ELEMENTS_IN_PAGE is the number of results rendered per page.
const ELEMENTS_IN_PAGE = 10

// ResultItem is one document row in the search results table.
type ResultItem struct {
	position int      // zero-based rank of the document in the full result list
	title string      // display title ("ARI-<hash>_" upload prefix stripped)
	id string 
	relevance float64 // similarity score scaled to percent (sem * 100)
	url string        // path of the document on disk
}

// ResultSet is the outcome of one search: the items of the requested page,
// the elapsed time, the total hit count, and the pre-rendered HTML/XML.
type ResultSet struct {
	time int64                // search duration in nanoseconds
	set map [int] *ResultItem // items of the current page, keyed by global rank
	number int                // total number of matching documents
	html string               // rendered table rows for the current page
	xml string                // the complete result set serialized as XML
}

// resultsCache memoizes backend search results, keyed by the normalized
// (sorted, underscore-joined) query string produced by queryToString.
// Reset via invalidateResultCache when a new document is uploaded.
var resultsCache map[string] map[int] search_backend.Doc

// loading is the temporary handler used while the index is still being
// built: static assets are served straight from disk, every other request
// gets the "loading" page.
func loading (c http.ResponseWriter, req *http.Request) {
	for _, ext := range []string{".gif", ".png", ".jpg", ".css", ".js"} {
		if strings.Contains (req.URL.Path, ext) {
			workDir, _ := os.Getwd ()
			http.ServeFile (c, req, workDir + req.URL.Path)
			return
		}
	}

	data := &alias{WebTitle: TITLE}
	tmpl, _ := template.ParseFile("src/loading.html", nil)
	tmpl.Execute(c, data)
}

// StartServer starts the HTTP server on :8080. Requests are answered by
// the "loading" handler until the background goroutine has finished
// loading the word index; then the real handlers take over. If printTable
// is true, the loaded words map is dumped to the log.
func StartServer(printTable bool){	
	// Temporary catch-all handler shown while the index loads.
	http.Handle ("/", http.HandlerFunc(loading))
	log.Print("Server started at localhost:8080")			
	go func () {
		log.Print("Loading documents index... ")
		search_backend.LoadWordsMap()
		if printTable {
			search_backend.PrintWordsMap()			
		}
		log.Printf("Index of %d words loaded.", search_backend.WordsMapSize())
		log.Printf("Doing some magic to reduce searching time (caching document vectors)...")
		// Warm the backend caches with a single empty search.
		emptySearch := make([] string, 1)
		emptySearch[0] = ""
		search_backend.Search(emptySearch, true)
		log.Print("Server ready")
		// NOTE(review): "/" is registered a second time here. This relies on
		// the pre-Go1 http package tolerating re-registration; the modern
		// ServeMux panics on duplicate patterns — confirm intended behavior.
		http.Handle ("/admin", http.HandlerFunc(admin))
		http.Handle ("/uploaded", http.HandlerFunc(uploaded))
		http.Handle ("/upload", http.HandlerFunc(upload))
		http.Handle ("/", http.HandlerFunc(serve))
	}()
	http.ListenAndServe(":8080", nil)
}

// admin renders the administration (document upload) page.
func admin(c http.ResponseWriter, req *http.Request) {
	data := &alias{WebTitle: TITLE}
	tmpl, _ := template.ParseFile("src/admin.html", nil)
	tmpl.Execute(c, data)
}

// upload handles a document upload from the admin page: the file is spooled
// to a temp file, renamed with an MD5-derived prefix, stored under ./docs/,
// parsed (minus stoplist words) and added to the search index. All result
// caches are invalidated so the new document is immediately searchable.
// NOTE(review): every error in this handler is silently ignored; a failed
// upload still redirects to /uploaded as if it had succeeded.
func upload(c http.ResponseWriter, req *http.Request) {   
	fmt.Printf ("Uploading file...\n")
	f, header, _ := req.FormFile("file")
	if (f == nil) {
		// No file present in the form: back to the admin page.
		http.Redirect (c, req, "/admin", 302)
		return
	}
	defer f.Close()
	// Spool the upload to disk, then read it back for hashing.
	t, _ := ioutil.TempFile("/tmp", "ari-upload")
	defer t.Close()
	io.Copy (t, f)

	contents, _ := ioutil.ReadFile (t.Name())
	hash := md5.New()
	hash.Write([]byte(contents))
	md5sum := hash.Sum ()
	// Build a (content-derived) unique file name from the decimal value of
	// each digest byte: "ARI-<d0><d1>...".
	newFileName := "ARI-"
	for _, number := range (md5sum) {
		chunk := fmt.Sprintf("%d", number)
		newFileName += chunk
	}
	path := "./docs/" + newFileName + "_" + header.Filename
	ioutil.WriteFile(path, [] byte (contents), 0644)

	// Index the stored copy: every non-stoplist word goes into the map.
	stoplist := parser.ParseFile("res/stoplist.txt", nil)
	words := parser.ParseFile(path, stoplist)
	search_backend.AddDocument(path)
	for _, word := range words {
		search_backend.AddWord(path, word)
	}
	search_backend.SaveWordsMap()
	search_backend.InvalidateCaches()
	invalidateResultCache()
	http.Redirect (c, req, "/uploaded", 302)
}

// uploaded renders the post-upload confirmation page.
func uploaded(c http.ResponseWriter, req *http.Request) {
	data := &alias{WebTitle: TITLE}
	tmpl, _ := template.ParseFile("src/uploaded.html", nil)
	tmpl.Execute(c, data)
}

// similarController answers "?similar=<docKey>" requests: the referenced
// document itself is parsed into keywords (minus stoplist words) and used
// as the query, so the hits are documents similar to it.
func similarController (similarParam string, c http.ResponseWriter, req *http.Request, page int) {
	if (len(similarParam) == 0) {
		return		
	}
	docsim := search_backend.GetDocFromKey(similarParam)
	log.Printf("Searching similar documents to \"%s\"", docsim)
	stoplist := parser.ParseFile("res/stoplist.txt", nil)
	keywords := parser.ParseFile(docsim, stoplist)
	results := search (keywords, page)
	numberOfResults := results.number
	// Default (Spanish) UI message when there are no hits.
	timeString := "Su búsqueda no produjo resultados"
	if numberOfResults > 0 {
		// results.time is in nanoseconds; displayed in seconds.
		timeString = fmt.Sprintf("%d resultados en %f segundos", numberOfResults, float64(results.time)/1000000000)
	}
	itemsStr := ""
	if (results.set != nil) {
		itemsStr = results.html
	}
	// Persist the full result set as XML alongside the HTML answer.
	search_backend.WriteResultsToXML ("xml/Resultados.xml", results.xml)
	pager := getPager (page, numberOfResults, req.RawURL)

	f := &alias{WebTitle: TITLE, Query: fmt.Sprintf("\"%s\"", "Documentos similares a '" + docsim + "'"), Items: itemsStr, Description: DESCRIPTION, Seconds: timeString, Pager: pager} 
	var t, _ = template.ParseFile("src/gui.html", nil)
	t.Execute(c, f)
}

// viewController serves the stored document referenced by "?view=<docKey>",
// with newlines rendered as <br> tags.
func viewController (viewParam string, c http.ResponseWriter, req *http.Request) {
	if viewParam == "" {
		return
	}

	docPath := search_backend.GetDocFromKey (viewParam)
	log.Printf("Showing document \"%s\"", docPath)

	body := ""
	raw, err := ioutil.ReadFile (docPath)
	if err != nil {
		println("ERROR: ", err)
	} else {
		// Turn up to 1000 newlines into HTML line breaks.
		body = strings.Replace (string(raw), "\n", "<br>", 1000)
	}

	data := &alias{WebTitle: TITLE, Document: body}
	tmpl, _ := template.ParseFile("src/doc.html", nil)
	tmpl.Execute(c, data)
}

// searchController answers "?search=<query>" requests: the query is split
// into (possibly weighted) keywords, the search is run, and the results
// page is rendered with the HTML item list, timing info and a pager.
func searchController(searchParam string, c http.ResponseWriter, req *http.Request, page int) {
	if (len(searchParam) == 0) {
		return
	}

	query := string(searchParam)
	keywords := getKeywordsFromInput(query)	
	results := search (keywords, page)
	numberOfResults := results.number
	// Default (Spanish) UI message when there are no hits.
	timeString := "Su búsqueda no produjo resultados"
	if numberOfResults > 0 {
		// results.time is in nanoseconds; displayed in seconds.
		timeString = fmt.Sprintf("%d resultados en %f segundos", numberOfResults, float64(results.time)/1000000000)
	}
	log.Print(timeString)
	itemsStr := ""
	if (results.set != nil) {
		itemsStr = results.html
	}

	// Persist the full result set as XML alongside the HTML answer.
	search_backend.WriteResultsToXML ("xml/Resultados.xml", results.xml)
	pager := getPager (page, numberOfResults, req.RawURL)

	f := &alias{WebTitle: TITLE, Query: fmt.Sprintf("\"%s\"", query), Items: itemsStr, Description: "<font size=1.5>" + DESCRIPTION + "</font>", Seconds: timeString, Pager: pager} 
	var t, _ = template.ParseFile("src/gui.html", nil)
	t.Execute(c, f)
}

// fileController serves a static file, resolved relative to the process
// working directory.
func fileController (url string, c http.ResponseWriter, req *http.Request) {
	workDir, _ := os.Getwd ()
	http.ServeFile (c, req, workDir + url)
}

// mainController renders the initial search page with an empty query.
func mainController (c http.ResponseWriter, req *http.Request) {
	data := &alias{WebTitle: TITLE, Query: "\"\""}
	tmpl, _ := template.ParseFile("src/gui0.html", nil)
	tmpl.Execute(c, data)
}

// getPager builds the HTML paging bar for the results page: an optional
// "Anterior" link, numbered page links in a window around the current page
// (with " ... " markers when pages are elided) and a "Siguiente" link.
// url is the raw request URL; everything from the first '&' onwards is
// dropped so the generated links keep only the original query parameter.
func getPager (page int, numberOfResults int, url string) string {
	pager := ""
	// Round up so a final partial page is still reachable. Plain integer
	// division hid the last page: e.g. 15 results gave numberOfPages == 1,
	// leaving results 11-15 with no link to reach them.
	numberOfPages := (numberOfResults + ELEMENTS_IN_PAGE - 1) / ELEMENTS_IN_PAGE
	pagerPos := strings.Index(url, "&")
	if (pagerPos != -1) {
		url = url[0:pagerPos]
	}

	// Window of numbered links: up to 5 pages before the current one...
	firstPage := page - 5

	if (firstPage < 1) {
		firstPage = 1
	} else {
		pager = " ... "
	}

	// ...and up to 9 pages after it.
	lastPage := page + 9
	if (lastPage > numberOfPages) {
		lastPage = numberOfPages	
	} 

	for i := firstPage; i <= lastPage; i++ {
		if (i == page) {
			pager += fmt.Sprintf (" <strong> %d </strong> ", page)
		} else {
			pager += fmt.Sprintf (" <a href=%s&page=%d>%d</a> ", url, i, i)
		}
	}
	
	if (lastPage < numberOfPages) {
		pager += " ... "
	} 

	if (page < numberOfPages) {
		pager += fmt.Sprintf ("    <a href=%s&page=%d>Siguiente</a> ", url, page + 1)
	}
	if (page > 1) {
		pager = fmt.Sprintf ("<a href=%s&page=%d>Anterior</a>     ", url, page - 1) + pager
	}

	return pager
}

// serve is the main request dispatcher once the index is loaded: static
// resources are served from disk, otherwise the request is routed to the
// search / similar / view / main controller based on its form values.
func serve (c http.ResponseWriter, req *http.Request) {
	urlPath := req.URL.Path
	if strings.Contains (urlPath, ".png") || strings.Contains (urlPath, ".jpg") ||
		strings.Contains (urlPath, ".css") || strings.Contains (urlPath, ".ico") {
		fileController (urlPath, c, req)
		return
	}

	searchParam := req.FormValue("search")
	pageParam := req.FormValue("page")
	similarParam := req.FormValue("similar")
	viewParam := req.FormValue("view")

	// Requested result page; defaults to the first page. A malformed
	// "page" value falls back to 0/1 exactly as before (Atoi error ignored).
	page := 1
	if len(pageParam) > 0 {
		page, _ = strconv.Atoi(pageParam)
	}

	switch {
	case len(searchParam) > 0:
		searchController (searchParam, c, req, page)
	case len(similarParam) > 0:
		similarController (similarParam, c, req, page)
	case len(viewParam) > 0:
		viewController (viewParam, c, req)
	default:
		mainController (c, req)
	}
}

// pieceOf returns a short HTML extract of the document at url, centred on
// the first query word found in it; matched words are highlighted. For
// long queries (more than 10 words, i.e. "similar document" searches) it
// simply returns the first 200 bytes of the document.
//
// Fixes over the previous version:
//   - a read error no longer calls log.Fatal, which killed the whole
//     server for a single unreadable document (and left dead code after it);
//   - a query word at offset 0 of the document is now found (the old
//     check was "foundPos > 0");
//   - the last byte of the document is no longer dropped when the extract
//     window reaches the end of the file, and the trailing " [...]" marker
//     is no longer emitted before the extract text.
func pieceOf(url string, query [] string) string{
	content, err := ioutil.ReadFile (url)

	// "Similar documents" queries: just show the head of the document.
	if (len(query) > 10) {
		aux := string(content)
		if len(aux) > 200 {
			aux = aux[0:200]
		}
		return strings.Replace(aux, "�", "", 100)
	}

	extract := ""
	if (err != nil) {
		// Best effort: log and return an empty extract.
		log.Print (err)
		return extract
	}

	if (content != nil && len(query) > 0) {
		lowerDoc := strings.ToLower(string(content))
		for _, queryWord := range query {
			foundPos := strings.Index(lowerDoc, strings.ToLower(queryWord))
			if foundPos < 0 {
				continue
			}
			// Window of roughly 180 bytes around the first occurrence.
			endPos := foundPos + len(queryWord) + 90
			if endPos > len(content) {
				endPos = len(content)
			}
			initialPos := endPos - 180 - len(queryWord)
			if initialPos < 0 {
				initialPos = 0
			}
			if initialPos > 0 {
				extract = "[...] "
			}
			extract += string(content[initialPos:endPos])
			if endPos < len(content) {
				extract += " [...]"
			}
			break
		}
	}

	extract = strings.ToLower(extract)
	extract = strings.Replace(extract, "�", "", 100)
	// Wrap each (lowercase) query word as <font ...><strong>word</strong></font>;
	// the second Replace nests <strong> inside the <font> inserted first.
	for i := range query {
		if len(query[i]) > 0 {
			extract = strings.Replace(extract, query[i], "<font color=#333333>" + query[i] + "</font>", 100)
			extract = strings.Replace(extract, query[i], "<strong>" + query[i] + "</strong>", 100)
		}
	}

	return extract
}

// getKeywordsFromInput splits a raw query string into lowercase keywords.
// A token of the form "xN_word" (lowercase x, N > 0) is a weighted keyword:
// "word" is appended N times so it counts N-fold in the relevance ranking.
func getKeywordsFromInput (query string) [] string {
	var keywords [] string
	for _, rawToken := range strings.Split(query, " ") {
		token := strings.Trim (rawToken, " \t")
		if token == "" {
			continue
		}
		weight := 1
		if sep := strings.Index(token, "_"); sep != -1 {
			// Does the token start with a weight prefix like "x3_"?
			matched, _ := regexp.MatchString(`x[0-9]+\_`, token[0:sep + 1])
			if matched {
				weight, _ = strconv.Atoi(token[1:sep])
				if weight <= 0 {
					// Unparseable or zero weight: keep the token untouched.
					weight = 1
				} else {
					token = token[sep + 1:]
				}
			}
		}
		lowered := strings.ToLower(token)
		for i := 0; i < weight; i++ {
			keywords = append (keywords, lowered)
		}
	}
	return keywords
}


// search runs the keyword query against the backend (memoized in
// resultsCache), then renders the requested page of hits as HTML table
// rows and the complete result set as XML. results.time is the elapsed
// search time in nanoseconds.
func search(keywords []string, page int) ResultSet {
	initialTime := time.Nanoseconds()
	if (resultsCache == nil) {
		resultsCache = make (map[string] map[int] search_backend.Doc)
	}
	// Keep a copy of the query in user order: queryToString sorts short
	// queries in place, and the XML echo / extract code below needs the
	// words as typed. (Fixed: the copy used make([]string, len(keywords))
	// before appending, which prepended empty strings and inflated the
	// slice length — that could wrongly trigger pieceOf's >10-words path.)
	unorderedQuery := make ([] string, 0, len(keywords))
	if (len(keywords) < 10) {
		for _, word := range keywords {
			unorderedQuery = append (unorderedQuery, word)
		}
	} else {
		unorderedQuery = keywords
	}
	cachedQuery := queryToString(keywords)
	result := resultsCache[cachedQuery]
	if result == nil {
		result = search_backend.Search(keywords, false)
		resultsCache[cachedQuery] = result
	}

	var results ResultSet
	results.set = make(map[int] *ResultItem)
	results.time = time.Nanoseconds() - initialTime
	results.html += " <tbody>"
	results.xml  = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "\n" + "<!DOCTYPE Resultado SYSTEM \"Resultados.dtd\">" + "\n" + "<?xml-stylesheet type=\"text/xsl\" href=\"Resultados.xsl\"?>" + "\n" + "<Resultado>" + "\n"

	// Echo the query itself into the XML.
	results.xml += "\t<Pregunta>" + "\n"
	for _, queryItem := range unorderedQuery {
		if len(queryItem) > 0 {
			results.xml += "\t\t<Item>" + queryItem + "</Item>" + "\n"
		}
	}
	results.xml += "\t</Pregunta>" + "\n"

	orderedHtml := make ([] string, len (result))
	orderedXML := make ([] string, len (result))

	// Bounds (inclusive) of the requested page.
	lowerBound := (page - 1) * ELEMENTS_IN_PAGE	
	upperBound := lowerBound + ELEMENTS_IN_PAGE - 1

	for i, item := range result {
		title := item.Title
		sem := item.Sem
		url := item.Url
		id := search_backend.GetDocKey (url)

		// Uploaded files are stored as "ARI-<hash>_<original name>";
		// show only the original name.
		parsedTitle := ""
		if (len(title) >= 4 && title[0:4] == "ARI-") {
			parsedTitle = title[strings.Index(title, "_") + 1:]
		} else {
			parsedTitle = title
		}

		html := ""
		xml := ""

		// Only documents inside the requested page get an HTML row.
		if (i >= lowerBound && i <= upperBound) {
			item := &ResultItem{position: i, title: parsedTitle, url: url, relevance: sem * 100, id: id}
			results.set[i] = item
			html = "<tr>" + "<td>" + fmt.Sprintf("%d", i + 1) + "</td>" + "<td><a href=\"?view=" + id + "\"><strong>" + title + "</strong></a> (<a href=\"?similar=" + id + "\">buscar similares</a>)<br><font color=#31A72F size=2>" + url + "</font><br>" + pieceOf(url, unorderedQuery) + "</td>" + /*"<td>" + id + "</td>" +  */ "<td width=70px>" + "<img height=13px src=\""

			html += "res/img/ball-"
			
			// Pick the relevance "ball" icon: scores above 40% get a
			// digit prefix (ball-40.png ... ball-90.png), else ball-0.png.
			if sem * 100 > 40 {
				html += fmt.Sprintf("%d", (int)(sem * 10))
			}
			html += "0.png\"> "+fmt.Sprintf("%.2f", sem*100) +" %</td></tr>		</tbody>"
			orderedHtml[i] = html
		}

		// Every document, paged or not, is written to the XML.
		xml = "\t<Documento ID=\"" + id + "\">" + "\n" + "\t\t<Titulo>" + title + "</Titulo>" + "\n" + "\t\t<Relevancia>" + fmt.Sprintf("%.2f", sem*100) + "%</Relevancia>" + "\n" + "\t\t<Texto>" + url + "</Texto>" + "\n"
		xml += "\t</Documento>" + "\n"
		orderedXML[i] = xml
	}

	// Concatenate rows in rank order.
	for i := 0; i < len(orderedHtml); i++ {
		results.html += orderedHtml[i]
		results.xml += orderedXML[i]
	}
	results.xml += "</Resultado>"

	results.number = len(result)
	return results
}

// queryToString flattens a keyword list into a cache key of the form
// "_w1_w2...". Queries of fewer than 10 words are sorted first — in place,
// mutating the caller's slice — so differently-ordered queries share one
// cache entry.
func queryToString (query [] string) string {
	if len (query) < 10 {
		sort.Strings(query)
	}
	key := ""
	for _, word := range query {
		key += "_" + word
	}
	return key
}

// invalidateResultCache drops every cached query result; called after a
// document upload so stale result pages are not served.
func invalidateResultCache () {
	resultsCache = nil	
}


