package cats
import (
	"fmt"
	"log"
	"net/url"
	"regexp"
	"strings"

	"github.com/jbowtie/gokogiri"
	//"github.com/jbowtie/gokogiri/xml"
	//"gopkg.in/mgo.v2/bson"

	"www2/cfg"
	"www2/sp"
)

// Getw3WebSpider builds and initializes the Spider configured to crawl
// www.w3schools.com. OnlySub restricts the crawl to sub-pages of the seed
// and SubDomain tags fetched pages with the w3 content domain.
func Getw3WebSpider() *sp.Spider {
	// Named seedURL (not "url") so the net/url package is not shadowed.
	//	seedURL := "https://www.elastic.co/guide/en/elasticsearch/reference/current/toc.html"
	seedURL := "http://www.w3schools.com/"
	fc := &sp.Spider{
		Url:     seedURL,
		OnlySub: true,
		//LinksTest:true,
		//ExcludeUrls:[]string{"$locales","$edit","$history","$translate"},
		SubDomain: sp.CD_w3,
	}
	fc.Init()
	return fc
}

// Convertw3Template regenerates the static w3 page template by running the
// w3schools spider through ConvertTempalteStaic.
//
// NOTE(review): the template path is a hard-coded absolute user directory —
// consider moving it into cfg so the tool runs outside this machine.
func Convertw3Template() {
	ConvertTempalteStaic(Getw3WebSpider(), "/home/wxf/go/src/www2/templates/w3.tmpl.src")
}

// tryitTextareaRe captures the source listing embedded in a w3schools
// "tryit.asp" editor page. Compiled once at package scope instead of on
// every call.
var tryitTextareaRe = regexp.MustCompile(`(?s:\<textarea[^>]*\>(.*)\</textarea\>)`)

// W3ToDocDest converts the raw HTML of a w3schools page (str, fetched from
// refurl) into a sp.Doc. For "tryit.asp" editor pages only the code inside
// the first <textarea> is kept; for regular pages the main content node is
// extracted, its links rewritten to absolute URLs when the spider will not
// fetch them, and its images downloaded and re-pointed at the local image
// domain. fc is consulted via ShouldFetched to decide link rewriting.
//
// Returns an error when refurl cannot be parsed, when the HTML cannot be
// parsed, or when a tryit page contains no <textarea>.
func W3ToDocDest(str string, fc *sp.Spider, refurl string) (*sp.Doc, error) {
	refURL, err := url.Parse(refurl)
	if err != nil {
		// Previously this error was discarded and refURL dereferenced,
		// panicking on a malformed referrer URL.
		return nil, fmt.Errorf("parsing referrer url %q: %v", refurl, err)
	}

	doct := new(sp.Doc)

	// Tryit editor pages carry their payload in a <textarea>; no DOM parse
	// is needed for them.
	if strings.HasSuffix(refURL.Path, "tryit.asp") {
		cs := tryitTextareaRe.FindStringSubmatch(str)
		if len(cs) < 2 {
			// Previously cs[1] was indexed unconditionally and panicked
			// when the page had no <textarea>.
			return nil, fmt.Errorf("tryit page %q: no <textarea> found", refurl)
		}
		doct.Content = cs[1]
		return doct, nil
	}

	contentXpath := `//*[@id="belowtopnav"]`
	if refURL.Path == "/" {
		// The front page uses a different content container.
		contentXpath = `//*[@class="w3-main"]`
	}

	htmldoc, err := gokogiri.ParseHtml([]byte(str))
	if err != nil {
		return nil, err
	}
	defer htmldoc.Free()

	doct.Title, doct.Des = GetHtmlTitleAndDes(htmldoc)

	nodes, _ := htmldoc.Search(contentXpath)
	if len(nodes) == 0 {
		// Fall back to the raw page so the caller still gets something.
		log.Println("content xpath fail")
		doct.Content = str
		return doct, nil
	}
	contentNode := nodes[0]

	// Rewrite hrefs the spider will not fetch itself to absolute URLs so
	// they keep working from the mirrored copy.
	links, _ := contentNode.Search(contentNode.Path() + "//a/@href")
	for _, link := range links {
		linkURL, perr := url.Parse(link.String())
		if perr != nil {
			// Malformed href: leave it untouched. (The old code tested a
			// stale err from ParseHtml here, which was always nil.)
			continue
		}
		iURL := refURL.ResolveReference(linkURL)
		ok, _, etype := fc.ShouldFetched(iURL)
		if !ok && etype > sp.FT_CrossDomain {
			link.SetContent(iURL.String())
		}
	}

	// Mirror images locally and point the src attributes at our image domain.
	imgs, _ := contentNode.Search(contentNode.Path() + "//img/@src")
	log.Println("len imgs:", len(imgs))
	for _, img := range imgs {
		linkURL, perr := url.Parse(img.String())
		if perr != nil {
			continue // malformed src: leave it untouched
		}
		iURL := refURL.ResolveReference(linkURL)

		// Download if not already cached locally.
		imgsrc := iURL.String()
		sp.DownLoadImgWithCheck(imgsrc)
		img.SetContent("http://" + *cfg.ImageDomain + "/" + imgsrc)
	}

	doct.Content = contentNode.String()
	return doct, nil
}
