// require modules
const fs = require("fs");
const archiver = require("archiver");
const { name:packageName } = require('./package.json')

// Build output location: everything lands in <projectRoot>/dist.
const distDir = `${__dirname}/dist`;
// Directories and files to package into the zip archive.
const includeDirs = ['node_modules', 'src'];
const outZipFileName = `${packageName}.zip`;
const outPath = `${distDir}/${outZipFileName}`;

// Start from a clean dist directory on every build.
fs.rmSync(distDir, { recursive: true, force: true });
fs.mkdirSync(distDir);

// Stream the archive data straight into the output zip file.
const output = fs.createWriteStream(outPath);
const archive = archiver("zip", {
  zlib: { level: 9 }, // maximum compression
});

// listen for all archive data to be written
// 'close' event is fired only when a file descriptor is involved
// 'close' fires once the file descriptor is released, i.e. the zip is
// fully written to disk — report its final size and location.
output.on("close", () => {
  const sizeMb = (archive.pointer() / 1024 / 1024).toFixed(2);
  console.log("zip包大小:" + sizeMb + " M ");
  console.log("构建的zip :" + outPath);
});

// 'end' comes from the Node stream API itself (not archiver): it signals
// that the data source has been drained.
// @see: https://nodejs.org/api/stream.html#stream_event_end
output.on("end", () => {
  console.log("Data has been drained");
});

// good practice to catch warnings (ie stat failures and other non-blocking errors)
// Non-blocking issues (e.g. stat failures) arrive as 'warning' events.
// ENOENT (missing entry) is tolerated but logged so it is visible in the
// build output; any other warning is treated as fatal.
// Fix: the original left the ENOENT branch empty ("// log warning")
// and silently swallowed the error.
archive.on("warning", function (err) {
  if (err.code === "ENOENT") {
    console.warn("archiver warning (missing entry):", err.message);
  } else {
    throw err;
  }
});

// Hard errors are always fatal — rethrow so the build fails loudly.
archive.on("error", function (err) {
  throw err;
});

// pipe archive data to the file
// Pipe archive data to the output file.
archive.pipe(output);

// Append every configured entry under a top-level <packageName>/ folder
// inside the archive: directories recursively, plain files individually.
// NOTE(review): entries are resolved relative to the current working
// directory (not __dirname, unlike distDir) — run from the project root.
for (const entry of includeDirs) {
    let stats;
    try {
        stats = fs.statSync(entry);
    } catch (err) {
        // A missing entry is skipped with a warning, matching the
        // ENOENT-tolerant 'warning' handler, instead of crashing the build.
        if (err.code === 'ENOENT') {
            console.warn('skip missing entry:', entry);
            continue;
        }
        throw err;
    }
    if (stats.isDirectory()) {
        console.log('includeDir ', entry);
        archive.directory(entry, packageName + '/' + entry);
    } else {
        console.log('includeFile ', entry);
        archive.file(entry, { name: packageName + '/' + entry });
    }
}


// append a file from stream
/* const file1 = __dirname + "/file1.txt";
archive.append(fs.createReadStream(file1), { name: "file1.txt" });

// append a file from string
archive.append("string cheese!", { name: "file2.txt" });

// append a file from buffer
const buffer3 = Buffer.from("buff it!");
archive.append(buffer3, { name: "file3.txt" });

// append a file
archive.file("file1.txt", { name: "file4.txt" });

// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory("subdir/", "new-subdir");

// append files from a sub-directory, putting its contents at the root of archive
archive.directory("subdir/", false);

// append files from a glob pattern
archive.glob("file*.txt", { cwd: __dirname }); */

// Finalize the archive: no more entries may be appended, but the
// underlying streams still need to flush. 'close', 'end' or 'finish' may
// fire immediately after this call, which is why all listeners were
// registered earlier in the script.
archive.finalize();