package crawler

import (
	"crawlies/src/base"
	"crawlies/src/util/fileutil"
)

// save writes b to a file named file inside the directory base.Path+path,
// creating the directory if needed. It refuses to overwrite an existing
// file. All failures are logged via base.Logger; the return value reports
// whether the full payload was written successfully.
func save(b []byte, path string, file string) bool {
	path = base.Path + path
	exists, err := fileutil.PathExists(path)
	if err != nil {
		base.Logger.Err().Println(err.Error())
		return false
	}
	// Create the target directory if it does not exist yet.
	if !exists {
		if err := fileutil.Mkdir(path); err != nil {
			base.Logger.Err().Println(err.Error())
			return false
		}
	}
	totalPath := path + "/" + file
	// Never overwrite: bail out if the target file already exists.
	pathExists, err := fileutil.PathExists(totalPath)
	if err != nil {
		base.Logger.Err().Println(err.Error())
		return false
	}
	if pathExists {
		base.Logger.Err().Println(" file already exists :", totalPath)
		return false
	}
	// Create the file and write the payload.
	fileW, err := fileutil.MkFile(totalPath)
	if err != nil {
		base.Logger.Err().Println(err.Error())
		return false
	}
	// Close the handle so the descriptor is released and buffered data is
	// flushed even on early error returns.
	// NOTE(review): assumes MkFile returns an io.WriteCloser such as
	// *os.File — confirm against fileutil.MkFile.
	defer fileW.Close()
	n, err := fileW.Write(b)
	if err != nil {
		// Log the write failure instead of silently returning false.
		base.Logger.Err().Println(err.Error())
		return false
	}
	// A short write is a failure; an empty payload (n == len(b) == 0) is not.
	if n != len(b) {
		base.Logger.Err().Println("short write:", n, "of", len(b), "bytes to", totalPath)
		return false
	}
	return true
}
