package cats
import (
	"www2/sp"
	"github.com/jbowtie/gokogiri"
	"net/url"
	"www2/cfg"
	//"gopkg.in/mgo.v2/bson"
	"github.com/jbowtie/gokogiri/html"
	"log"
)

// GetEsWebSpider builds and initializes a Spider configured to crawl the
// Elasticsearch 2.1 reference guide, following only sub-pages of the start
// URL, with the es sub-domain category.
func GetEsWebSpider() *sp.Spider {
	// startURL := "https://www.elastic.co/guide/en/elasticsearch/reference/current/toc.html"
	startURL := "https://www.elastic.co/guide/en/elasticsearch/reference/2.1/index.html"
	spider := &sp.Spider{
		Url:       startURL,
		OnlySub:   true, // restrict crawl to pages under the start URL
		SubDomain: sp.CD_es,
	}
	spider.Init()
	return spider
}
// ConvertEsTempalte runs the static template conversion for the
// Elasticsearch spider, rendering with the es.tmpl template file.
// NOTE(review): the "Tempalte"/"Staic" misspellings are kept because both
// names are exported and renaming would break external callers; the template
// path is hard-coded to a developer machine — consider moving it to cfg.
func ConvertEsTempalte() {
	ConvertTempalteStaic(GetEsWebSpider(),"/home/wxf/go/src/www2/templates/es.tmpl")
}
// GetHtmlTitleAndDes extracts the <title> text and the value of the
// <meta name="description"> content attribute from a parsed HTML document.
// Either result is the empty string when the corresponding node is absent.
func GetHtmlTitleAndDes(htmldoc *html.HtmlDocument) (title string, des string) {
	titleXpath := "/html/head/title"
	descriptionXpath := `/html/head/meta[@name="description"]/@content`

	nodes, _ := htmldoc.Search(titleXpath)
	if len(nodes) > 0 {
		title = nodes[0].Content()
	}
	nodes, _ = htmldoc.Search(descriptionXpath)
	if len(nodes) > 0 {
		// Use Content() (the attribute's text value) — String() serializes
		// the attribute node itself (` content="..."`), which is not the
		// description text. Matches the title branch above.
		des = nodes[0].Content()
	}
	return
}
// ESToDocDest converts a fetched Elasticsearch-guide HTML page into a sp.Doc:
// it extracts the title/description, selects the main content node, rewrites
// hrefs the spider will not fetch to absolute URLs, and mirrors images onto
// the configured image host.
//
// Parameters:
//   str    - raw HTML of the fetched page.
//   fc     - the spider; ShouldFetched decides which links stay relative.
//   refurl - URL the page was fetched from; base for resolving relative refs.
//
// Returns the assembled Doc, or an error if refurl or the HTML cannot be
// parsed. When the content xpath matches nothing, Doc.Content falls back to
// the raw page.
func ESToDocDest(str string, fc *sp.Spider, refurl string) (*sp.Doc, error) {
	// A nil base URL would make ResolveReference panic below, so the parse
	// error (silently dropped before) must be surfaced.
	refURL, err := url.Parse(refurl)
	if err != nil {
		return nil, err
	}

	contentXpath := `//*[@id="guide"]/div/div/div[1]`

	// Sub-trees to strip from the content node; currently none for this site.
	excludeXpath := []string{}

	htmldoc, err := gokogiri.ParseHtml([]byte(str))
	if err != nil {
		return nil, err
	}
	defer htmldoc.Free()

	doct := new(sp.Doc)
	doct.Title, doct.Des = GetHtmlTitleAndDes(htmldoc)

	nodes, _ := htmldoc.Search(contentXpath)
	if len(nodes) == 0 {
		log.Println("content xpath fail")
		doct.Content = str
		return doct, nil
	}
	contentNode := nodes[0]

	for _, exclude := range excludeXpath {
		if subs, serr := contentNode.Search(exclude); serr == nil {
			for _, nod := range subs {
				nod.Remove()
			}
		}
	}

	// Rewrite to absolute URLs the hrefs the spider will NOT fetch (beyond
	// plain cross-domain filtering), so they still work in the mirror.
	links, _ := contentNode.Search(contentNode.Path() + "//a/@href")
	for _, link := range links {
		// Check the per-link parse error: the old code tested the (always
		// nil) outer err and would panic on a malformed href.
		linkURL, perr := url.Parse(link.String())
		if perr != nil {
			continue // malformed href — leave it untouched
		}
		iURL := refURL.ResolveReference(linkURL)
		ok, _, etype := fc.ShouldFetched(iURL)
		if !ok && etype > sp.FT_CrossDomain {
			link.SetContent(iURL.String())
		}
	}

	// Mirror images: download each (if not already cached) and point the
	// src at our image host.
	imgs, _ := contentNode.Search(contentNode.Path() + "//img/@src")
	log.Println("len imgs:", len(imgs))
	for _, img := range imgs {
		srcURL, perr := url.Parse(img.String())
		if perr != nil {
			continue // malformed src — leave it untouched
		}
		iURL := refURL.ResolveReference(srcURL)
		imgsrc := iURL.String()
		sp.DownLoadImgWithCheck(imgsrc)
		img.SetContent("http://" + *cfg.ImageDomain + "/" + iURL.String())
	}

	doct.Content = contentNode.String()
	return doct, nil
}
