// // const rm = require('rimraf')
// // const compressing = require('compressing')
// // const config = require('../config')
// var fs = require('fs');
// var archiver = require('archiver');
// var path = require('path')

// const filePath = path.resolve(__dirname, '../businessPlatformTest')
// // rm(`${filePath}.zip`, function (error) {
// //   if (error) throw error
  
// // })

// var output = fs.createWriteStream(`${filePath}.zip`);
// var archive = archiver('zip', {
//   zlib: { level: 9 } // Sets the compression level.
// });

// output.on('close', function() {
//   console.log(archive.pointer() + ' total bytes');
//   console.log('archiver has been finalized and the output file descriptor has closed.');
// });

// // This event is fired when the data source is drained no matter what was the data source.
// // It is not part of this library but rather from the NodeJS Stream API.
// // @see: https://nodejs.org/api/stream.html#stream_event_end
// output.on('end', function() {
//   console.log('Data has been drained');
// });

// // good practice to catch warnings (ie stat failures and other non-blocking errors)
// archive.on('warning', function(err) {
//   if (err.code === 'ENOENT') {
//     // log warning
//   } else {
//     // throw error
//     throw err;
//   }
// });


// archive.directory(filePath, false);
// archive.on('error', function(err) {
//   throw err;
// });
// archive.pipe(output);
// archive.finalize();

const path = require('path');
const compressing = require('compressing');

// Directory to compress. Was only defined in the commented-out archiver
// code above, leaving the live code with an undefined `filePath`.
const filePath = path.resolve(__dirname, '../businessPlatformTest');

// Zip the directory to a sibling `<dir>.zip` file.
// Bug fix: original wrote `${fileName}.zip` — `fileName` was never
// defined anywhere, so this script threw a ReferenceError on launch.
compressing.zip
  .compressDir(filePath, `${filePath}.zip`)
  .then(() => {
    console.log('zip success');
  })
  .catch((err) => {
    // Surface failures (e.g. missing source directory) instead of an
    // unhandled promise rejection.
    console.error(err);
  });