 
const fs = require("fs");
const path = require("path");
const scrapy = require("node-scrapy");
 
 const savedir="./save";
if(!fs.existsSync(savedir))
{
    fs.mkdirSync(savedir);
}


// --- Utility helpers ---
// Stack of sanitized directory names below `savedir`; getpath() joins
// them to form the current output directory. Mutated by enter_dir/leave_dir.
let paths=[];
 function getpath()
{
    return path.join(savedir,...paths);
}
 function getfilename(fname)
{
    return path.join(getpath(),fname);
}
/**
 * Make sure the directory described by the `paths` stack exists on disk,
 * creating it when missing.
 */
function ensure_paths()
{
    const dir = getpath();
    if (fs.existsSync(dir)) {
        return;
    }
    fs.mkdirSync(dir);
}
/**
 * Make sure a subdirectory of the current output directory exists,
 * creating it when missing.
 * @param {string} dir - subdirectory name relative to getpath()
 */
function ensure_dir(dir)
{
    const target = path.join(getpath(), dir);
    if (fs.existsSync(target)) {
        return;
    }
    fs.mkdirSync(target);
}
 function enter_dir(name)
{
    //防止错误字符
    name=name.replace(/[\/"\\:*?><|]/g," ");

    paths.push(name);
    ensure_paths();

}

 function leave_dir()
{
    paths.pop();
}

// --- Download section ---
// NOTE(review): the `request` package is deprecated on npm; consider
// migrating to the built-in https/fetch APIs.
const req=require("request");
// Running 1-based count of failed downloads, used in log messages.
let fail_num=1;
 function download(url,fpath)
{
    try
    {
        req.get(url,(err,res,body)=>{
            if(err==null);
                //res.pipe(fs.createWriteStream(fpath));
            else
            {
                console.log(`${fail_num++}个文件下载失败!`);
            }
        }).pipe(fs.createWriteStream(fpath));
    }
    catch(e)
    {
        console.log(`${fail_num++}个文件下载失败!`);
    }
    
}

// Promisified wrapper around the callback-style scrapy API.
/**
 * Scrape `url` against `model` and resolve with the extracted data.
 *
 * Fix vs. original: scrape errors were silently swallowed and the promise
 * resolved with `undefined`; they are now surfaced as a rejection so
 * callers can detect failure.
 *
 * @param {string} url - page to scrape
 * @param {object} model - node-scrapy extraction model
 * @returns {Promise<*>} resolves with scraped data, rejects on scrape error
 */
async function scrape_async(url, model)
{
    return new Promise((resolve, reject) => {
        scrapy.scrape(url, model, (err, data) => {
            if (err != null)
            {
                reject(err);
            }
            else
            {
                resolve(data);
            }
        });
    });
}

 function writefile(file,lst)
{
    let str="";
        lst.forEach(element => {
           str+=`${element}\n`; 
        });
    fs.writeFileSync(file,str);
}

 function sleep(milliSeconds) { 
    var startTime = new Date().getTime(); 
    while (new Date().getTime() < startTime + milliSeconds);
 };

