package crawler

import (
	"bytes"

	"buss"
	"common"
	"encoding/json"
	"fmt"
	log "github.com/cihub/seelog"
	"github.com/disintegration/imaging"
	"io"
	"io/ioutil"
	"net/http"
	"os"
	"strconv"
	"time"
)

// Meta mirrors the pagination metadata block of a minapp.com listing
// response (limit/offset style paging with next/previous page links).
type Meta struct {
	Limit       int    `json:"limit"`       // page size requested
	Next        string `json:"next"`        // link to the next page, empty on the last page
	Offset      int    `json:"offset"`      // index of the first item on this page
	Previous    string `json:"previous"`    // link to the previous page, empty on the first page
	Total_count int    `json:"total_count"` // total number of objects across all pages
}
// Icon is an image record returned by the API; it is reused for app
// icons, QR codes, and screenshots (see Object).
type Icon struct {
	Created_at string `json:"created_at"`
	Id         int    `json:"id"`
	Image      string `json:"image"` // absolute URL of the image file
}
// Tag is a category label attached to a mini app.
type Tag struct {
	Id   int    `json:"id"`
	Name string `json:"name"`
}
// Object is one mini-app entry from the listing API.
type Object struct {
	Created_at     int     `json:"created_at"`
	Created_by     string  `json:"created_by"`
	Description    string  `json:"description"`
	Icon           Icon    `json:"icon"` // app icon image
	Id             int     `json:"id"`
	Mp_qrcode      string  `json:"mp_qrcode"`
	Name           string  `json:"name"`
	Overall_rating float32 `json:"overall_rating"`
	Qrcode         Icon    `json:"qrcode"` // QR code used to open the app
	//Rating         []int   `json:"rating"`
	Resource_uri string `json:"resource_uri"`
	Screenshot   []Icon `json:"screenshot"` // zero or more screenshot images
	Tag          []Tag  `json:"tag"`
	Url          string `json:"url"`
}

// Response is the top-level shape of one listing API page.
type Response struct {
	Meta    Meta     `json:"meta"`
	Objects []Object `json:"objects"`
}

// MiniAppCrawler downloads mini-app metadata and images from
// minapp.com and stores them through the buss layer.
type MiniAppCrawler struct {
	DownLoadPath string // local root directory where images are saved
}

// dao is the shared persistence handle; initialized in Start.
var dao buss.Dao

// Start crawls the minapp.com listing page by page and hands each page
// of JSON to parse for persistence. The total size and page size are
// hard-coded to match the remote listing at the time of writing.
func (crawler MiniAppCrawler) Start() {
	dao.Init()

	size := 762    // total apps reported by the site
	pageSize := 21 // apps fetched per request
	// Round up so a partial last page is still fetched.
	batch := size / pageSize
	if size%pageSize != 0 {
		batch++
	}
	fmt.Printf(" batch: %d ", batch)

	baseUrl := "https://minapp.com/api/v3/trochili/miniapp/?tag=&limit=%d&offset=%d"
	// NOTE(review): the loop starts at 24, presumably to resume an earlier
	// run — confirm before running from a clean state.
	for i := 24; i < batch; i++ {
		url := fmt.Sprintf(baseUrl, pageSize, i*pageSize)
		resp, err := http.Get(url)
		if err != nil {
			// Previously the error was ignored and resp.Body was
			// dereferenced, which panics on a failed request.
			log.Errorf("fetching %s: %v", url, err)
			continue
		}
		body, err := ioutil.ReadAll(resp.Body)
		// Close immediately rather than defer: a defer inside the loop
		// keeps every response body open until Start returns.
		resp.Body.Close()
		if err != nil {
			log.Errorf("reading %s: %v", url, err)
			continue
		}
		crawler.parse("20170205", body)
	}
}

// parse decodes one page of listing JSON and persists each app: it
// resolves tag names to local type ids, downloads the icon, QR code and
// screenshots under DownLoadPath/curDate, and saves the record via dao.
func (crawler MiniAppCrawler) parse(curDate string, body []byte) {
	var r Response
	if err := json.Unmarshal(body, &r); err != nil {
		// Previously the error was ignored, silently skipping bad pages.
		log.Errorf("decoding listing page: %v", err)
		return
	}

	// All files for this run share the same date-stamped directories.
	targetDir := crawler.DownLoadPath + "/" + curDate
	downDir := "/upload" + "/" + curDate

	for _, obj := range r.Objects {
		var app buss.AppExtInfo
		app.Name = obj.Name
		app.Introduce = obj.Description

		// Resolve each remote tag name to a locally stored type id.
		tag := make(map[int]string, len(obj.Tag))
		for _, t := range obj.Tag {
			appType := dao.QueryForTypeByName(t.Name)
			fmt.Println(appType)
			tag[appType.Id] = t.Name
		}
		tagv, err := json.Marshal(tag)
		if err != nil {
			log.Errorf("encoding tags for %q: %v", obj.Name, err)
		}
		app.Tags = string(tagv)

		// Icon: downloaded then resized to 100x100.
		fileName := common.Md5(obj.Name+"ico") + ".jpg"
		targetName := downDir + "/" + fileName
		filePath := targetDir + "/" + fileName
		crawler.downLoadFile(targetDir, targetName, filePath, obj.Icon.Image, ImageSize{width: 100, height: 100})
		app.Ico = targetName

		// QR code: downloaded then resized to 150x150.
		fileName = common.Md5(obj.Name+"ercode") + ".jpg"
		targetName = downDir + "/" + fileName
		filePath = targetDir + "/" + fileName
		crawler.downLoadFile(targetDir, targetName, filePath, obj.Qrcode.Image, ImageSize{width: 150, height: 150})
		app.ErCode = targetName

		// Screenshots: stored as a JSON map of index -> served path.
		// (Named `shot` rather than reusing `v`, which shadowed the
		// outer loop variable in the original.)
		screenSize := ImageSize{width: 640, height: 1120}
		downScreenShots := make(map[int]string, len(obj.Screenshot))
		for i, shot := range obj.Screenshot {
			fileName = common.Md5(obj.Name+"screenshot"+strconv.Itoa(i)) + ".jpg"
			targetName = downDir + "/" + fileName
			filePath = targetDir + "/" + fileName
			crawler.downLoadFile(targetDir, targetName, filePath, shot.Image, screenSize)
			downScreenShots[i] = targetName
		}
		js, err := json.Marshal(downScreenShots)
		if err != nil {
			log.Errorf("encoding screenshots for %q: %v", obj.Name, err)
		}
		app.Screenshots = string(js)

		app.Recommend = 0
		app.View = 0
		app.Url = ""
		app.Status = -1 // newly crawled apps start unpublished
		app.CreateDate = time.Now().Format("2006-01-02 15:04:05")
		dao.SaveApp(app)
	}
}

// downLoadFile fetches url into filePath (creating targetDir first if
// necessary) and then resizes the saved image in place to size.
// filename is used only for logging.
func (crawler MiniAppCrawler) downLoadFile(targetDir, filename, filePath, url string, size ImageSize) {
	log.Debugf(" targetDir :%s", targetDir)

	// MkdirAll is a no-op when the directory already exists; the
	// original probed with os.Open (leaking a handle and closing a
	// possibly-nil *File) before calling Mkdir.
	if err := os.MkdirAll(targetDir, os.ModePerm); err != nil {
		log.Error(err)
		return
	}

	resp, err := http.Get(url)
	if err != nil {
		// Must check before touching resp: the original deferred
		// resp.Body.Close() on a possibly-nil response.
		log.Errorf("fetching %s: %v", url, err)
		return
	}
	defer resp.Body.Close()

	pix, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Error(err)
		return
	}

	out, err := os.Create(filePath)
	if err != nil {
		log.Error(err)
		return
	}
	defer out.Close()

	written, err := io.Copy(out, bytes.NewReader(pix))
	if err != nil {
		// The original swallowed this error with an empty branch.
		log.Error(err)
		return
	}
	log.Debugf(filename + "  下载完成  size:" + strconv.FormatInt(written, 10))

	// Re-open the stored file and resize it in place.
	srcImage, err := imaging.Open(filePath)
	if err != nil {
		log.Error(err)
		return
	}
	newImage := imaging.Resize(srcImage, size.width, size.height, imaging.Lanczos)
	if err := imaging.Save(newImage, filePath); err != nil {
		log.Error(err)
	}
}
