#!/usr/bin/env node
const fs = require('fs');

console.log(process.argv);
console.log(process.cwd());

// Parse CLI arguments; expected invocation: `<tool> create <spider-name>`.
let instruct = process.argv.slice(2, 4);
let spider_name = instruct[1];

// Guard against missing arguments — calling toLowerCase() on undefined
// would otherwise throw a TypeError before any useful message is shown.
if (instruct[0] === undefined || spider_name === undefined) {
    console.log('Usage: create <spider-name>');
} else if (instruct[0].toLowerCase() === 'create') {
    // Copy the template files into a freshly created project directory.
    if (!fs.existsSync(spider_name)) {
        fs.mkdirSync(spider_name);
        // The template directory ships next to lib/ in the package.
        copyFile(__dirname.replace('lib', 'template'), spider_name);
    } else {
        console.log("项目已经创建!");
    }
}

// Create the project from the template: copy every regular file in `src`
// into `<cwd>/<dst>/`. Template `.js` files carry an `xxx` placeholder in
// their file name, which is renamed to the project name. Afterwards the
// project's config.json is patched via edit_config().
function copyFile(src, dst) {
    const destDir = process.cwd() + '/' + dst;
    let files = fs.readdirSync(src);
    for (let filePath of files) {
        let _src = src + '/' + filePath;
        // Rename the `xxx` placeholder in the file NAME only. The previous
        // code replaced the first `xxx` anywhere in the full destination
        // path, which corrupted the path whenever cwd contained `xxx`.
        let destName = filePath;
        if (filePath.endsWith('.js')) {
            destName = filePath.replace('xxx', dst);
        }
        let _dst = destDir + '/' + destName;
        let stat = fs.statSync(_src);
        if (stat.isFile()) { // subdirectories in the template are skipped
            fs.copyFileSync(_src, _dst);
        }
    }
    edit_config();
}

// Update the generated project's config.json: point its `pipeline` entry
// at the newly created spider's pipeline module.
function edit_config() {
    // `spider_name` is the module-level variable parsed from the CLI args.
    const configPath = process.cwd() + '/' + spider_name + '/config.json';
    // Read the file once and parse it (the old code read it twice and
    // left the first read in an unused variable).
    const config = JSON.parse(fs.readFileSync(configPath));
    config.pipeline = spider_name + '-pipeline';
    // writeFileSync truncates/overwrites by default; `{flag:'w'}` was redundant.
    fs.writeFileSync(configPath, JSON.stringify(config));
    console.log("爬虫创建完毕");
}
// var start = Date.now();
// new Scheduler().run();
// process.on('exit',()=>{console.log('退出程序耗时:'+(Date.now()-start))})