diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml index d11701dd..ea4260e5 100644 --- a/.github/workflows/nodejs.yml +++ b/.github/workflows/nodejs.yml @@ -17,14 +17,14 @@ jobs: node-version: [14.x, 16.x, 18.x, 20.x] steps: - - uses: actions/checkout@v4.2.1 - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v4.0.4 - with: - node-version: ${{ matrix.node-version }} - - name: npm install and test - run: | - npm ci - npm test - env: - CI: true + - uses: actions/checkout@v4.2.1 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4.0.4 + with: + node-version: ${{ matrix.node-version }} + - name: npm install and test + run: | + npm ci + npm test + env: + CI: true diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..1b8ac889 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,3 @@ +# Ignore artifacts: +build +coverage diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index eca68a45..629a8f29 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,13 +2,13 @@ #### Code Style Guide -* code should be indented with 2 spaces -* single quotes should be used where feasible -* commas should be followed by a single space (function params, etc) -* variable declaration should include `var`, [no multiple declarations](http://benalman.com/news/2012/05/multiple-var-statements-javascript/) +- code should be indented with 2 spaces +- single quotes should be used where feasible +- commas should be followed by a single space (function params, etc) +- variable declaration should include `var`, [no multiple declarations](http://benalman.com/news/2012/05/multiple-var-statements-javascript/) #### Tests -* tests should be added to the nodeunit configs in `tests/` -* tests can be run with `npm test` -* see existing tests for guidance \ No newline at end of file +- tests should be added to the nodeunit configs in `tests/` +- tests can be run with `npm test` +- see existing tests for guidance diff 
--git a/README.md b/README.md index 2ee7c171..20f8c4e1 100644 --- a/README.md +++ b/README.md @@ -14,32 +14,34 @@ npm install archiver --save ```js // require modules -const fs = require('fs'); -const archiver = require('archiver'); +const fs = require("fs"); +const archiver = require("archiver"); // create a file to stream archive data to. -const output = fs.createWriteStream(__dirname + '/example.zip'); -const archive = archiver('zip', { - zlib: { level: 9 } // Sets the compression level. +const output = fs.createWriteStream(__dirname + "/example.zip"); +const archive = archiver("zip", { + zlib: { level: 9 }, // Sets the compression level. }); // listen for all archive data to be written // 'close' event is fired only when a file descriptor is involved -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); +output.on("close", function () { + console.log(archive.pointer() + " total bytes"); + console.log( + "archiver has been finalized and the output file descriptor has closed.", + ); }); // This event is fired when the data source is drained no matter what was the data source. // It is not part of this library but rather from the NodeJS Stream API. 
// @see: https://nodejs.org/api/stream.html#stream_event_end -output.on('end', function() { - console.log('Data has been drained'); +output.on("end", function () { + console.log("Data has been drained"); }); // good practice to catch warnings (ie stat failures and other non-blocking errors) -archive.on('warning', function(err) { - if (err.code === 'ENOENT') { +archive.on("warning", function (err) { + if (err.code === "ENOENT") { // log warning } else { // throw error @@ -48,7 +50,7 @@ archive.on('warning', function(err) { }); // good practice to catch this error explicitly -archive.on('error', function(err) { +archive.on("error", function (err) { throw err; }); @@ -56,27 +58,27 @@ archive.on('error', function(err) { archive.pipe(output); // append a file from stream -const file1 = __dirname + '/file1.txt'; -archive.append(fs.createReadStream(file1), { name: 'file1.txt' }); +const file1 = __dirname + "/file1.txt"; +archive.append(fs.createReadStream(file1), { name: "file1.txt" }); // append a file from string -archive.append('string cheese!', { name: 'file2.txt' }); +archive.append("string cheese!", { name: "file2.txt" }); // append a file from buffer -const buffer3 = Buffer.from('buff it!'); -archive.append(buffer3, { name: 'file3.txt' }); +const buffer3 = Buffer.from("buff it!"); +archive.append(buffer3, { name: "file3.txt" }); // append a file -archive.file('file1.txt', { name: 'file4.txt' }); +archive.file("file1.txt", { name: "file4.txt" }); // append files from a sub-directory and naming it `new-subdir` within the archive -archive.directory('subdir/', 'new-subdir'); +archive.directory("subdir/", "new-subdir"); // append files from a sub-directory, putting its contents at the root of archive -archive.directory('subdir/', false); +archive.directory("subdir/", false); // append files from a glob pattern -archive.glob('file*.txt', {cwd:__dirname}); +archive.glob("file*.txt", { cwd: __dirname }); // finalize the archive (ie we are done appending files but streams 
have to finish yet) // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand diff --git a/benchmark/common.js b/benchmark/common.js index 667c741c..a3809f34 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -2,10 +2,10 @@ function binaryBuffer(n) { var buffer = Buffer.alloc(n); for (var i = 0; i < n; i++) { - buffer.writeUInt8(i&255, i); + buffer.writeUInt8(i & 255, i); } return buffer; } -module.exports.binaryBuffer = binaryBuffer; \ No newline at end of file +module.exports.binaryBuffer = binaryBuffer; diff --git a/benchmark/simple/pack-zip.js b/benchmark/simple/pack-zip.js index a6c76705..38561bb7 100644 --- a/benchmark/simple/pack-zip.js +++ b/benchmark/simple/pack-zip.js @@ -1,10 +1,10 @@ -var fs = require('fs'); +var fs = require("fs"); -var mkdir = require('mkdirp'); -var streamBench = require('stream-bench'); +var mkdir = require("mkdirp"); +var streamBench = require("stream-bench"); -var archiver = require('../../'); -var common = require('../common'); +var archiver = require("../../"); +var common = require("../common"); var binaryBuffer = common.binaryBuffer; @@ -30,29 +30,27 @@ if (process.argv[2]) { } } -var archive = archiver('zip', { +var archive = archiver("zip", { zlib: { - level: level - } + level: level, + }, }); if (file === false) { - mkdir.sync('tmp'); + mkdir.sync("tmp"); - file = 'tmp/20mb.dat'; + file = "tmp/20mb.dat"; fs.writeFileSync(file, binaryBuffer(BITS_IN_MBYTE * 20)); } -console.log('zlib level: ' + level); +console.log("zlib level: " + level); var bench = streamBench({ logReport: true, interval: 500, - dump: true + dump: true, }); archive.pipe(bench); -archive - .file(file, { name: 'large file' }) - .finalize(); +archive.file(file, { name: "large file" }).finalize(); diff --git a/examples/express.js b/examples/express.js index 73531515..e5531317 100644 --- a/examples/express.js +++ b/examples/express.js @@ -1,40 +1,44 @@ -var app = require('express')(); -var archiver = 
require('archiver'); -var p = require('path'); +var app = require("express")(); +var archiver = require("archiver"); +var p = require("path"); -app.get('/', function(req, res) { +app.get("/", function (req, res) { + var archive = archiver("zip"); - var archive = archiver('zip'); - - archive.on('error', function(err) { - res.status(500).send({error: err.message}); + archive.on("error", function (err) { + res.status(500).send({ error: err.message }); }); //on stream closed we can end the request - archive.on('end', function() { - console.log('Archive wrote %d bytes', archive.pointer()); + archive.on("end", function () { + console.log("Archive wrote %d bytes", archive.pointer()); }); //set the archive name - res.attachment('archive-name.zip'); + res.attachment("archive-name.zip"); //this is the streaming magic archive.pipe(res); - var files = [__dirname + '/fixtures/file1.txt', __dirname + '/fixtures/file2.txt']; + var files = [ + __dirname + "/fixtures/file1.txt", + __dirname + "/fixtures/file2.txt", + ]; - for(var i in files) { + for (var i in files) { archive.file(files[i], { name: p.basename(files[i]) }); } - var directories = [__dirname + '/fixtures/somedir'] + var directories = [__dirname + "/fixtures/somedir"]; - for(var i in directories) { - archive.directory(directories[i], directories[i].replace(__dirname + '/fixtures', '')); + for (var i in directories) { + archive.directory( + directories[i], + directories[i].replace(__dirname + "/fixtures", ""), + ); } archive.finalize(); - }); app.listen(3000); diff --git a/examples/pack-tar.js b/examples/pack-tar.js index ac846482..1d6b4b30 100644 --- a/examples/pack-tar.js +++ b/examples/pack-tar.js @@ -1,25 +1,27 @@ -var fs = require('fs'); +var fs = require("fs"); -var archiver = require('archiver'); +var archiver = require("archiver"); -var output = fs.createWriteStream(__dirname + '/example-output.tar'); -var archive = archiver('tar'); +var output = fs.createWriteStream(__dirname + "/example-output.tar"); +var 
archive = archiver("tar"); -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); +output.on("close", function () { + console.log(archive.pointer() + " total bytes"); + console.log( + "archiver has been finalized and the output file descriptor has closed.", + ); }); -archive.on('error', function(err) { +archive.on("error", function (err) { throw err; }); archive.pipe(output); -var file1 = __dirname + '/fixtures/file1.txt'; -var file2 = __dirname + '/fixtures/file2.txt'; +var file1 = __dirname + "/fixtures/file1.txt"; +var file2 = __dirname + "/fixtures/file2.txt"; archive - .append(fs.createReadStream(file1), { name: 'file1.txt' }) - .append(fs.createReadStream(file2), { name: 'file2.txt' }) - .finalize(); \ No newline at end of file + .append(fs.createReadStream(file1), { name: "file1.txt" }) + .append(fs.createReadStream(file2), { name: "file2.txt" }) + .finalize(); diff --git a/examples/pack-tgz.js b/examples/pack-tgz.js index c6c2dc2c..58f6aa45 100644 --- a/examples/pack-tgz.js +++ b/examples/pack-tgz.js @@ -1,29 +1,31 @@ -var fs = require('fs'); -var archiver = require('archiver'); +var fs = require("fs"); +var archiver = require("archiver"); -var output = fs.createWriteStream(__dirname + '/example-output.tar.gz'); -var archive = archiver('tar', { +var output = fs.createWriteStream(__dirname + "/example-output.tar.gz"); +var archive = archiver("tar", { gzip: true, gzipOptions: { - level: 1 - } + level: 1, + }, }); -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); +output.on("close", function () { + console.log(archive.pointer() + " total bytes"); + console.log( + "archiver has been finalized and the output file descriptor has closed.", + ); }); -archive.on('error', function(err) { +archive.on("error", function (err) { throw 
err; }); archive.pipe(output); -var file1 = __dirname + '/fixtures/file1.txt'; -var file2 = __dirname + '/fixtures/file2.txt'; +var file1 = __dirname + "/fixtures/file1.txt"; +var file2 = __dirname + "/fixtures/file2.txt"; archive - .append(fs.createReadStream(file1), { name: 'file1.txt' }) - .append(fs.createReadStream(file2), { name: 'file2.txt' }) - .finalize(); \ No newline at end of file + .append(fs.createReadStream(file1), { name: "file1.txt" }) + .append(fs.createReadStream(file2), { name: "file2.txt" }) + .finalize(); diff --git a/examples/pack-zip.js b/examples/pack-zip.js index 07e85c13..e47c7b1c 100644 --- a/examples/pack-zip.js +++ b/examples/pack-zip.js @@ -1,25 +1,27 @@ -var fs = require('fs'); +var fs = require("fs"); -var archiver = require('archiver'); +var archiver = require("archiver"); -var output = fs.createWriteStream(__dirname + '/example-output.zip'); -var archive = archiver('zip'); +var output = fs.createWriteStream(__dirname + "/example-output.zip"); +var archive = archiver("zip"); -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); +output.on("close", function () { + console.log(archive.pointer() + " total bytes"); + console.log( + "archiver has been finalized and the output file descriptor has closed.", + ); }); -archive.on('error', function(err) { +archive.on("error", function (err) { throw err; }); archive.pipe(output); -var file1 = __dirname + '/fixtures/file1.txt'; -var file2 = __dirname + '/fixtures/file2.txt'; +var file1 = __dirname + "/fixtures/file1.txt"; +var file2 = __dirname + "/fixtures/file2.txt"; archive - .append(fs.createReadStream(file1), { name: 'file1.txt' }) - .append(fs.createReadStream(file2), { name: 'file2.txt' }) - .finalize(); \ No newline at end of file + .append(fs.createReadStream(file1), { name: "file1.txt" }) + .append(fs.createReadStream(file2), { name: "file2.txt" }) + .finalize(); 
diff --git a/examples/progress.js b/examples/progress.js index a3b52940..3c6beb11 100644 --- a/examples/progress.js +++ b/examples/progress.js @@ -1,41 +1,46 @@ -var archiver = require('../'); -var tmp = require('os').tmpdir(); -var async = require('async'); -var fs = require('fs'); +var archiver = require("../"); +var tmp = require("os").tmpdir(); +var async = require("async"); +var fs = require("fs"); // You can change this by something bigger! -var directory = __dirname + '/fixtures'; -var destination = tmp + '/' + Date.now() + '.zip'; +var directory = __dirname + "/fixtures"; +var destination = tmp + "/" + Date.now() + ".zip"; var destinationStream = fs.createWriteStream(destination); -console.log('Zipping %s to %s', directory, destination); +console.log("Zipping %s to %s", directory, destination); // To find out the progression, we may prefer to first calculate the size of the zip's future content // For this, we need to recursivly `readDir` and get the size from a `stat` call on every file. 
// Note that Archiver is also computing the total size, but it's done asynchronously and may not be accurate -directorySize(directory, function(err, totalSize) { - var prettyTotalSize = bytesToSize(totalSize) - var archive = archiver('zip'); +directorySize(directory, function (err, totalSize) { + var prettyTotalSize = bytesToSize(totalSize); + var archive = archiver("zip"); - archive.on('error', function(err) { - console.error('Error while zipping', err); + archive.on("error", function (err) { + console.error("Error while zipping", err); }); - archive.on('progress', function(progress) { - var percent = progress.fs.processedBytes / totalSize * 100; + archive.on("progress", function (progress) { + var percent = (progress.fs.processedBytes / totalSize) * 100; - console.log('%s / %s (%d %)', bytesToSize(progress.fs.processedBytes), prettyTotalSize, percent); - }) + console.log( + "%s / %s (%d %)", + bytesToSize(progress.fs.processedBytes), + prettyTotalSize, + percent, + ); + }); //on stream closed we can end the request - archive.on('end', function() { - console.log('%s / %s (%d %)', prettyTotalSize, prettyTotalSize, 100); + archive.on("end", function () { + console.log("%s / %s (%d %)", prettyTotalSize, prettyTotalSize, 100); var archiveSize = archive.pointer(); - console.log('Archiver wrote %s bytes', bytesToSize(archiveSize)); - console.log('Compression ratio: %d:1', Math.round(totalSize / archiveSize)); - console.log('Space savings: %d %', (1 - (archiveSize / totalSize)) * 100); + console.log("Archiver wrote %s bytes", bytesToSize(archiveSize)); + console.log("Compression ratio: %d:1", Math.round(totalSize / archiveSize)); + console.log("Space savings: %d %", (1 - archiveSize / totalSize) * 100); }); archive.pipe(destinationStream); @@ -43,8 +48,7 @@ directorySize(directory, function(err, totalSize) { archive.directory(directory); archive.finalize(); -}) - +}); /** * You can use a nodejs module to do this, this function is really straightforward and will fail on 
error @@ -56,7 +60,7 @@ function directorySize(path, cb, size) { size = 0; } - fs.stat(path, function(err, stat) { + fs.stat(path, function (err, stat) { if (err) { cb(err); return; @@ -69,26 +73,34 @@ function directorySize(path, cb, size) { return; } - fs.readdir(path, function(err, paths) { + fs.readdir(path, function (err, paths) { if (err) { cb(err); return; } - async.map(paths.map(function(p) { return path + '/' + p }), directorySize, function(err, sizes) { - size += sizes.reduce(function(a, b) { return a + b }, 0); - cb(err, size); - }) - }) - }) + async.map( + paths.map(function (p) { + return path + "/" + p; + }), + directorySize, + function (err, sizes) { + size += sizes.reduce(function (a, b) { + return a + b; + }, 0); + cb(err, size); + }, + ); + }); + }); } /** * https://stackoverflow.com/questions/15900485/correct-way-to-convert-size-in-bytes-to-kb-mb-gb-in-javascript#18650828 */ function bytesToSize(bytes) { - var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']; - if (bytes == 0) return '0 Byte'; - var i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024))); - return Math.round(bytes / Math.pow(1024, i), 2) + ' ' + sizes[i]; -}; + var sizes = ["Bytes", "KB", "MB", "GB", "TB"]; + if (bytes == 0) return "0 Byte"; + var i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024))); + return Math.round(bytes / Math.pow(1024, i), 2) + " " + sizes[i]; +} diff --git a/index.js b/index.js index 0996daef..dd25bac1 100644 --- a/index.js +++ b/index.js @@ -5,7 +5,7 @@ * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} * @copyright (c) 2012-2014 Chris Talkington, contributors. 
*/ -var Archiver = require('./lib/core'); +var Archiver = require("./lib/core"); var formats = {}; @@ -17,7 +17,7 @@ var formats = {}; * @param {Object} options See [Archiver]{@link Archiver} * @return {Archiver} */ -var vending = function(format, options) { +var vending = function (format, options) { return vending.create(format, options); }; @@ -28,7 +28,7 @@ var vending = function(format, options) { * @param {Object} options See [Archiver]{@link Archiver} * @return {Archiver} */ -vending.create = function(format, options) { +vending.create = function (format, options) { if (formats[format]) { var instance = new Archiver(format, options); instance.setFormat(format); @@ -36,7 +36,7 @@ vending.create = function(format, options) { return instance; } else { - throw new Error('create(' + format + '): format not registered'); + throw new Error("create(" + format + "): format not registered"); } }; @@ -47,17 +47,20 @@ vending.create = function(format, options) { * @param {Function} module The function for archiver to interact with. 
* @return void */ -vending.registerFormat = function(format, module) { +vending.registerFormat = function (format, module) { if (formats[format]) { - throw new Error('register(' + format + '): format already registered'); + throw new Error("register(" + format + "): format already registered"); } - if (typeof module !== 'function') { - throw new Error('register(' + format + '): format module invalid'); + if (typeof module !== "function") { + throw new Error("register(" + format + "): format module invalid"); } - if (typeof module.prototype.append !== 'function' || typeof module.prototype.finalize !== 'function') { - throw new Error('register(' + format + '): format module missing methods'); + if ( + typeof module.prototype.append !== "function" || + typeof module.prototype.finalize !== "function" + ) { + throw new Error("register(" + format + "): format module missing methods"); } formats[format] = module; @@ -65,7 +68,7 @@ vending.registerFormat = function(format, module) { /** * Check if the format is already registered. - * + * * @param {String} format the name of the format. 
* @return boolean */ @@ -73,12 +76,12 @@ vending.isRegisteredFormat = function (format) { if (formats[format]) { return true; } - + return false; }; -vending.registerFormat('zip', require('./lib/plugins/zip')); -vending.registerFormat('tar', require('./lib/plugins/tar')); -vending.registerFormat('json', require('./lib/plugins/json')); +vending.registerFormat("zip", require("./lib/plugins/zip")); +vending.registerFormat("tar", require("./lib/plugins/tar")); +vending.registerFormat("json", require("./lib/plugins/json")); -module.exports = vending; \ No newline at end of file +module.exports = vending; diff --git a/lib/core.js b/lib/core.js index 7c0a74d7..fb487456 100644 --- a/lib/core.js +++ b/lib/core.js @@ -1,89 +1,71 @@ -/** - * Archiver Core - * - * @ignore - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. - */ -var fs = require('fs'); -var glob = require('readdir-glob'); -var async = require('async'); -var path = require('path'); -var util = require('archiver-utils'); - -var inherits = require('util').inherits; -var ArchiverError = require('./error'); -var Transform = require('readable-stream').Transform; - -var win32 = process.platform === 'win32'; - +import fs from "fs"; +import * as glob from "readdir-glob"; +import * as async from "async"; +import path from "path"; +import util from "archiver-utils"; +import { inherits as inherits$0 } from "util"; +import * as ArchiverError from "./error.js"; +import { Transform as Transform$0 } from "readable-stream"; +var inherits = { inherits: inherits$0 }.inherits; +var Transform = { Transform: Transform$0 }.Transform; +var win32 = process.platform === "win32"; /** * @constructor * @param {String} format The archive format to use. * @param {(CoreOptions|TransformOptions)} options See also {@link ZipOptions} and {@link TarOptions}. 
*/ -var Archiver = function(format, options) { +var Archiver = function (format, options) { if (!(this instanceof Archiver)) { return new Archiver(format, options); } - - if (typeof format !== 'string') { + if (typeof format !== "string") { options = format; - format = 'zip'; + format = "zip"; } - options = this.options = util.defaults(options, { highWaterMark: 1024 * 1024, - statConcurrency: 4 + statConcurrency: 4, }); - Transform.call(this, options); - this._format = false; this._module = false; this._pending = 0; this._pointer = 0; - this._entriesCount = 0; this._entriesProcessedCount = 0; this._fsEntriesTotalBytes = 0; this._fsEntriesProcessedBytes = 0; - this._queue = async.queue(this._onQueueTask.bind(this), 1); this._queue.drain(this._onQueueDrain.bind(this)); - - this._statQueue = async.queue(this._onStatQueueTask.bind(this), options.statConcurrency); + this._statQueue = async.queue( + this._onStatQueueTask.bind(this), + options.statConcurrency, + ); this._statQueue.drain(this._onQueueDrain.bind(this)); - this._state = { aborted: false, finalize: false, finalizing: false, finalized: false, - modulePiped: false + modulePiped: false, }; - this._streams = []; }; - inherits(Archiver, Transform); - /** * Internal logic for `abort`. * * @private * @return void */ -Archiver.prototype._abort = function() { +Archiver.prototype._abort = function () { this._state.aborted = true; this._queue.kill(); this._statQueue.kill(); - if (this._queue.idle()) { this._shutdown(); } }; - /** * Internal helper for appending files. * @@ -92,74 +74,66 @@ Archiver.prototype._abort = function() { * @param {EntryData} data The entry data. 
* @return void */ -Archiver.prototype._append = function(filepath, data) { +Archiver.prototype._append = function (filepath, data) { data = data || {}; - var task = { source: null, - filepath: filepath + filepath: filepath, }; - if (!data.name) { data.name = filepath; } - data.sourcePath = filepath; task.data = data; this._entriesCount++; - if (data.stats && data.stats instanceof fs.Stats) { task = this._updateQueueTaskWithStats(task, data.stats); if (task) { if (data.stats.size) { this._fsEntriesTotalBytes += data.stats.size; } - this._queue.push(task); } } else { this._statQueue.push(task); } }; - /** * Internal logic for `finalize`. * * @private * @return void */ -Archiver.prototype._finalize = function() { +Archiver.prototype._finalize = function () { if (this._state.finalizing || this._state.finalized || this._state.aborted) { return; } - this._state.finalizing = true; - this._moduleFinalize(); - this._state.finalizing = false; this._state.finalized = true; }; - /** * Checks the various state variables to determine if we can `finalize`. * * @private * @return {Boolean} */ -Archiver.prototype._maybeFinalize = function() { +Archiver.prototype._maybeFinalize = function () { if (this._state.finalizing || this._state.finalized || this._state.aborted) { return false; } - - if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { + if ( + this._state.finalize && + this._pending === 0 && + this._queue.idle() && + this._statQueue.idle() + ) { this._finalize(); return true; } - return false; }; - /** * Appends an entry to the module. 
* @@ -170,86 +144,80 @@ Archiver.prototype._maybeFinalize = function() { * @param {Function} callback * @return void */ -Archiver.prototype._moduleAppend = function(source, data, callback) { +Archiver.prototype._moduleAppend = function (source, data, callback) { if (this._state.aborted) { callback(); return; } - - this._module.append(source, data, function(err) { - this._task = null; - - if (this._state.aborted) { - this._shutdown(); - return; - } - - if (err) { - this.emit('error', err); - setImmediate(callback); - return; - } - - /** - * Fires when the entry's input has been processed and appended to the archive. - * - * @event Archiver#entry - * @type {EntryData} - */ - this.emit('entry', data); - this._entriesProcessedCount++; - - if (data.stats && data.stats.size) { - this._fsEntriesProcessedBytes += data.stats.size; - } - - /** - * @event Archiver#progress - * @type {ProgressData} - */ - this.emit('progress', { - entries: { - total: this._entriesCount, - processed: this._entriesProcessedCount - }, - fs: { - totalBytes: this._fsEntriesTotalBytes, - processedBytes: this._fsEntriesProcessedBytes + this._module.append( + source, + data, + function (err) { + this._task = null; + if (this._state.aborted) { + this._shutdown(); + return; } - }); - - setImmediate(callback); - }.bind(this)); + if (err) { + this.emit("error", err); + setImmediate(callback); + return; + } + /** + * Fires when the entry's input has been processed and appended to the archive. 
+ * + * @event Archiver#entry + * @type {EntryData} + */ + this.emit("entry", data); + this._entriesProcessedCount++; + if (data.stats && data.stats.size) { + this._fsEntriesProcessedBytes += data.stats.size; + } + /** + * @event Archiver#progress + * @type {ProgressData} + */ + this.emit("progress", { + entries: { + total: this._entriesCount, + processed: this._entriesProcessedCount, + }, + fs: { + totalBytes: this._fsEntriesTotalBytes, + processedBytes: this._fsEntriesProcessedBytes, + }, + }); + setImmediate(callback); + }.bind(this), + ); }; - /** * Finalizes the module. * * @private * @return void */ -Archiver.prototype._moduleFinalize = function() { - if (typeof this._module.finalize === 'function') { +Archiver.prototype._moduleFinalize = function () { + if (typeof this._module.finalize === "function") { this._module.finalize(); - } else if (typeof this._module.end === 'function') { + } else if (typeof this._module.end === "function") { this._module.end(); } else { - this.emit('error', new ArchiverError('NOENDMETHOD')); + this.emit("error", new ArchiverError("NOENDMETHOD")); } }; - /** * Pipes the module to our internal stream with error bubbling. * * @private * @return void */ -Archiver.prototype._modulePipe = function() { - this._module.on('error', this._onModuleError.bind(this)); +Archiver.prototype._modulePipe = function () { + this._module.on("error", this._onModuleError.bind(this)); this._module.pipe(this); this._state.modulePiped = true; }; - /** * Determines if the current module supports a defined feature. * @@ -257,25 +225,22 @@ Archiver.prototype._modulePipe = function() { * @param {String} key * @return {Boolean} */ -Archiver.prototype._moduleSupports = function(key) { +Archiver.prototype._moduleSupports = function (key) { if (!this._module.supports || !this._module.supports[key]) { return false; } - return this._module.supports[key]; }; - /** * Unpipes the module from our internal stream. 
* * @private * @return void */ -Archiver.prototype._moduleUnpipe = function() { +Archiver.prototype._moduleUnpipe = function () { this._module.unpipe(this); this._state.modulePiped = false; }; - /** * Normalizes entry data with fallbacks for key properties. * @@ -284,45 +249,39 @@ Archiver.prototype._moduleUnpipe = function() { * @param {fs.Stats} stats * @return {Object} */ -Archiver.prototype._normalizeEntryData = function(data, stats) { +Archiver.prototype._normalizeEntryData = function (data, stats) { data = util.defaults(data, { - type: 'file', + type: "file", name: null, date: null, mode: null, prefix: null, sourcePath: null, - stats: false + stats: false, }); - if (stats && data.stats === false) { data.stats = stats; } - - var isDir = data.type === 'directory'; - + var isDir = data.type === "directory"; if (data.name) { - if (typeof data.prefix === 'string' && '' !== data.prefix) { - data.name = data.prefix + '/' + data.name; + if (typeof data.prefix === "string" && "" !== data.prefix) { + data.name = data.prefix + "/" + data.name; data.prefix = null; } - data.name = util.sanitizePath(data.name); - - if (data.type !== 'symlink' && data.name.slice(-1) === '/') { + if (data.type !== "symlink" && data.name.slice(-1) === "/") { isDir = true; - data.type = 'directory'; + data.type = "directory"; } else if (isDir) { - data.name += '/'; + data.name += "/"; } } - // 511 === 0777; 493 === 0755; 438 === 0666; 420 === 0644 - if (typeof data.mode === 'number') { + if (typeof data.mode === "number") { if (win32) { data.mode &= 511; } else { - data.mode &= 4095 + data.mode &= 4095; } } else if (data.stats && data.mode === null) { if (win32) { @@ -330,7 +289,6 @@ Archiver.prototype._normalizeEntryData = function(data, stats) { } else { data.mode = data.stats.mode & 4095; } - // stat isn't reliable on windows; force 0755 for dir if (win32 && isDir) { data.mode = 493; @@ -338,16 +296,13 @@ Archiver.prototype._normalizeEntryData = function(data, stats) { } else if (data.mode 
=== null) { data.mode = isDir ? 493 : 420; } - if (data.stats && data.date === null) { data.date = data.stats.mtime; } else { data.date = util.dateify(data.date); } - return data; }; - /** * Error listener that re-emits error on to our internal stream. * @@ -355,14 +310,13 @@ Archiver.prototype._normalizeEntryData = function(data, stats) { * @param {Error} err * @return void */ -Archiver.prototype._onModuleError = function(err) { +Archiver.prototype._onModuleError = function (err) { /** * @event Archiver#error * @type {ErrorData} */ - this.emit('error', err); + this.emit("error", err); }; - /** * Checks the various state variables after queue has drained to determine if * we need to `finalize`. @@ -370,16 +324,19 @@ Archiver.prototype._onModuleError = function(err) { * @private * @return void */ -Archiver.prototype._onQueueDrain = function() { +Archiver.prototype._onQueueDrain = function () { if (this._state.finalizing || this._state.finalized || this._state.aborted) { return; } - - if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { + if ( + this._state.finalize && + this._pending === 0 && + this._queue.idle() && + this._statQueue.idle() + ) { this._finalize(); } }; - /** * Appends each queue task to the module. * @@ -388,23 +345,20 @@ Archiver.prototype._onQueueDrain = function() { * @param {Function} callback * @return void */ -Archiver.prototype._onQueueTask = function(task, callback) { +Archiver.prototype._onQueueTask = function (task, callback) { var fullCallback = () => { - if(task.data.callback) { + if (task.data.callback) { task.data.callback(); } callback(); - } - + }; if (this._state.finalizing || this._state.finalized || this._state.aborted) { fullCallback(); return; } - this._task = task; this._moduleAppend(task.source, task.data, fullCallback); }; - /** * Performs a file stat and reinjects the task back into the queue. 
* @@ -413,55 +367,49 @@ Archiver.prototype._onQueueTask = function(task, callback) { * @param {Function} callback * @return void */ -Archiver.prototype._onStatQueueTask = function(task, callback) { +Archiver.prototype._onStatQueueTask = function (task, callback) { if (this._state.finalizing || this._state.finalized || this._state.aborted) { callback(); return; } - - fs.lstat(task.filepath, function(err, stats) { - if (this._state.aborted) { - setImmediate(callback); - return; - } - - if (err) { - this._entriesCount--; - - /** - * @event Archiver#warning - * @type {ErrorData} - */ - this.emit('warning', err); - setImmediate(callback); - return; - } - - task = this._updateQueueTaskWithStats(task, stats); - - if (task) { - if (stats.size) { - this._fsEntriesTotalBytes += stats.size; + fs.lstat( + task.filepath, + function (err, stats) { + if (this._state.aborted) { + setImmediate(callback); + return; } - - this._queue.push(task); - } - - setImmediate(callback); - }.bind(this)); + if (err) { + this._entriesCount--; + /** + * @event Archiver#warning + * @type {ErrorData} + */ + this.emit("warning", err); + setImmediate(callback); + return; + } + task = this._updateQueueTaskWithStats(task, stats); + if (task) { + if (stats.size) { + this._fsEntriesTotalBytes += stats.size; + } + this._queue.push(task); + } + setImmediate(callback); + }.bind(this), + ); }; - /** * Unpipes the module and ends our internal stream. * * @private * @return void */ -Archiver.prototype._shutdown = function() { +Archiver.prototype._shutdown = function () { this._moduleUnpipe(); this.end(); }; - /** * Tracks the bytes emitted by our internal stream. 
* @@ -471,14 +419,12 @@ Archiver.prototype._shutdown = function() { * @param {Function} callback * @return void */ -Archiver.prototype._transform = function(chunk, encoding, callback) { +Archiver.prototype._transform = function (chunk, encoding, callback) { if (chunk) { this._pointer += chunk.length; } - callback(null, chunk); }; - /** * Updates and normalizes a queue task using stats data. * @@ -487,41 +433,43 @@ Archiver.prototype._transform = function(chunk, encoding, callback) { * @param {fs.Stats} stats * @return {Object} */ -Archiver.prototype._updateQueueTaskWithStats = function(task, stats) { +Archiver.prototype._updateQueueTaskWithStats = function (task, stats) { if (stats.isFile()) { - task.data.type = 'file'; - task.data.sourceType = 'stream'; + task.data.type = "file"; + task.data.sourceType = "stream"; task.source = util.lazyReadStream(task.filepath); - } else if (stats.isDirectory() && this._moduleSupports('directory')) { + } else if (stats.isDirectory() && this._moduleSupports("directory")) { task.data.name = util.trailingSlashIt(task.data.name); - task.data.type = 'directory'; + task.data.type = "directory"; task.data.sourcePath = util.trailingSlashIt(task.filepath); - task.data.sourceType = 'buffer'; + task.data.sourceType = "buffer"; task.source = Buffer.concat([]); - } else if (stats.isSymbolicLink() && this._moduleSupports('symlink')) { + } else if (stats.isSymbolicLink() && this._moduleSupports("symlink")) { var linkPath = fs.readlinkSync(task.filepath); var dirName = path.dirname(task.filepath); - task.data.type = 'symlink'; - task.data.linkname = path.relative(dirName, path.resolve(dirName, linkPath)); - task.data.sourceType = 'buffer'; + task.data.type = "symlink"; + task.data.linkname = path.relative( + dirName, + path.resolve(dirName, linkPath), + ); + task.data.sourceType = "buffer"; task.source = Buffer.concat([]); } else { if (stats.isDirectory()) { - this.emit('warning', new ArchiverError('DIRECTORYNOTSUPPORTED', task.data)); + 
this.emit( + "warning", + new ArchiverError("DIRECTORYNOTSUPPORTED", task.data), + ); } else if (stats.isSymbolicLink()) { - this.emit('warning', new ArchiverError('SYMLINKNOTSUPPORTED', task.data)); + this.emit("warning", new ArchiverError("SYMLINKNOTSUPPORTED", task.data)); } else { - this.emit('warning', new ArchiverError('ENTRYNOTSUPPORTED', task.data)); + this.emit("warning", new ArchiverError("ENTRYNOTSUPPORTED", task.data)); } - return null; } - task.data = this._normalizeEntryData(task.data, stats); - return task; }; - /** * Aborts the archiving process, taking a best-effort approach, by: * @@ -534,16 +482,13 @@ Archiver.prototype._updateQueueTaskWithStats = function(task, stats) { * * @return {this} */ -Archiver.prototype.abort = function() { +Archiver.prototype.abort = function () { if (this._state.aborted || this._state.finalized) { return this; } - this._abort(); - return this; }; - /** * Appends an input source (text string, buffer, or stream) to the instance. * @@ -555,44 +500,42 @@ Archiver.prototype.abort = function() { * @param {EntryData} data See also {@link ZipEntryData} and {@link TarEntryData}. 
* @return {this} */ -Archiver.prototype.append = function(source, data) { +Archiver.prototype.append = function (source, data) { if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); + this.emit("error", new ArchiverError("QUEUECLOSED")); return this; } - data = this._normalizeEntryData(data); - - if (typeof data.name !== 'string' || data.name.length === 0) { - this.emit('error', new ArchiverError('ENTRYNAMEREQUIRED')); + if (typeof data.name !== "string" || data.name.length === 0) { + this.emit("error", new ArchiverError("ENTRYNAMEREQUIRED")); return this; } - - if (data.type === 'directory' && !this._moduleSupports('directory')) { - this.emit('error', new ArchiverError('DIRECTORYNOTSUPPORTED', { name: data.name })); + if (data.type === "directory" && !this._moduleSupports("directory")) { + this.emit( + "error", + new ArchiverError("DIRECTORYNOTSUPPORTED", { name: data.name }), + ); return this; } - source = util.normalizeInputSource(source); - if (Buffer.isBuffer(source)) { - data.sourceType = 'buffer'; + data.sourceType = "buffer"; } else if (util.isStream(source)) { - data.sourceType = 'stream'; + data.sourceType = "stream"; } else { - this.emit('error', new ArchiverError('INPUTSTEAMBUFFERREQUIRED', { name: data.name })); + this.emit( + "error", + new ArchiverError("INPUTSTEAMBUFFERREQUIRED", { name: data.name }), + ); return this; } - this._entriesCount++; this._queue.push({ data: data, - source: source + source: source, }); - return this; }; - /** * Appends a directory and its files, recursively, given its dirpath. * @@ -602,88 +545,74 @@ Archiver.prototype.append = function(source, data) { * [TarEntryData]{@link TarEntryData}. 
* @return {this} */ -Archiver.prototype.directory = function(dirpath, destpath, data) { +Archiver.prototype.directory = function (dirpath, destpath, data) { if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); + this.emit("error", new ArchiverError("QUEUECLOSED")); return this; } - - if (typeof dirpath !== 'string' || dirpath.length === 0) { - this.emit('error', new ArchiverError('DIRECTORYDIRPATHREQUIRED')); + if (typeof dirpath !== "string" || dirpath.length === 0) { + this.emit("error", new ArchiverError("DIRECTORYDIRPATHREQUIRED")); return this; } - this._pending++; - if (destpath === false) { - destpath = ''; - } else if (typeof destpath !== 'string'){ + destpath = ""; + } else if (typeof destpath !== "string") { destpath = dirpath; } - var dataFunction = false; - if (typeof data === 'function') { + if (typeof data === "function") { dataFunction = data; data = {}; - } else if (typeof data !== 'object') { + } else if (typeof data !== "object") { data = {}; } - var globOptions = { stat: true, - dot: true + dot: true, }; - function onGlobEnd() { this._pending--; this._maybeFinalize(); } - function onGlobError(err) { - this.emit('error', err); + this.emit("error", err); } - - function onGlobMatch(match){ + function onGlobMatch(match) { globber.pause(); - var ignoreMatch = false; var entryData = Object.assign({}, data); entryData.name = match.relative; entryData.prefix = destpath; entryData.stats = match.stat; entryData.callback = globber.resume.bind(globber); - try { if (dataFunction) { entryData = dataFunction(entryData); - if (entryData === false) { ignoreMatch = true; - } else if (typeof entryData !== 'object') { - throw new ArchiverError('DIRECTORYFUNCTIONINVALIDDATA', { dirpath: dirpath }); + } else if (typeof entryData !== "object") { + throw new ArchiverError("DIRECTORYFUNCTIONINVALIDDATA", { + dirpath: dirpath, + }); } } - } catch(e) { - this.emit('error', e); + } catch (e) { + this.emit("error", e); 
return; } - if (ignoreMatch) { globber.resume(); return; } - this._append(match.absolute, entryData); } - var globber = glob(dirpath, globOptions); - globber.on('error', onGlobError.bind(this)); - globber.on('match', onGlobMatch.bind(this)); - globber.on('end', onGlobEnd.bind(this)); - + globber.on("error", onGlobError.bind(this)); + globber.on("match", onGlobMatch.bind(this)); + globber.on("end", onGlobEnd.bind(this)); return this; }; - /** * Appends a file given its filepath using a * [lazystream]{@link https://github.com/jpommerening/node-lazystream} wrapper to @@ -697,22 +626,18 @@ Archiver.prototype.directory = function(dirpath, destpath, data) { * [TarEntryData]{@link TarEntryData}. * @return {this} */ -Archiver.prototype.file = function(filepath, data) { +Archiver.prototype.file = function (filepath, data) { if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); + this.emit("error", new ArchiverError("QUEUECLOSED")); return this; } - - if (typeof filepath !== 'string' || filepath.length === 0) { - this.emit('error', new ArchiverError('FILEFILEPATHREQUIRED')); + if (typeof filepath !== "string" || filepath.length === 0) { + this.emit("error", new ArchiverError("FILEFILEPATHREQUIRED")); return this; } - this._append(filepath, data); - return this; }; - /** * Appends multiple files that match a glob pattern. * @@ -722,41 +647,33 @@ Archiver.prototype.file = function(filepath, data) { * [TarEntryData]{@link TarEntryData}. 
* @return {this} */ -Archiver.prototype.glob = function(pattern, options, data) { +Archiver.prototype.glob = function (pattern, options, data) { this._pending++; - options = util.defaults(options, { stat: true, - pattern: pattern + pattern: pattern, }); - function onGlobEnd() { this._pending--; this._maybeFinalize(); } - function onGlobError(err) { - this.emit('error', err); + this.emit("error", err); } - - function onGlobMatch(match){ + function onGlobMatch(match) { globber.pause(); var entryData = Object.assign({}, data); entryData.callback = globber.resume.bind(globber); entryData.stats = match.stat; entryData.name = match.relative; - this._append(match.absolute, entryData); } - - var globber = glob(options.cwd || '.', options); - globber.on('error', onGlobError.bind(this)); - globber.on('match', onGlobMatch.bind(this)); - globber.on('end', onGlobEnd.bind(this)); - + var globber = glob(options.cwd || ".", options); + globber.on("error", onGlobError.bind(this)); + globber.on("match", onGlobMatch.bind(this)); + globber.on("end", onGlobEnd.bind(this)); return this; }; - /** * Finalizes the instance and prevents further appending to the archive * structure (queue will continue til drained). 
@@ -767,83 +684,68 @@ Archiver.prototype.glob = function(pattern, options, data) { * * @return {Promise} */ -Archiver.prototype.finalize = function() { +Archiver.prototype.finalize = function () { if (this._state.aborted) { - var abortedError = new ArchiverError('ABORTED'); - this.emit('error', abortedError); + var abortedError = new ArchiverError("ABORTED"); + this.emit("error", abortedError); return Promise.reject(abortedError); } - if (this._state.finalize) { - var finalizingError = new ArchiverError('FINALIZING'); - this.emit('error', finalizingError); + var finalizingError = new ArchiverError("FINALIZING"); + this.emit("error", finalizingError); return Promise.reject(finalizingError); } - this._state.finalize = true; - if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { this._finalize(); } - var self = this; - - return new Promise(function(resolve, reject) { + return new Promise(function (resolve, reject) { var errored; - - self._module.on('end', function() { + self._module.on("end", function () { if (!errored) { resolve(); } - }) - - self._module.on('error', function(err) { + }); + self._module.on("error", function (err) { errored = true; reject(err); - }) - }) + }); + }); }; - /** * Sets the module format name used for archiving. * * @param {String} format The name of the format. * @return {this} */ -Archiver.prototype.setFormat = function(format) { +Archiver.prototype.setFormat = function (format) { if (this._format) { - this.emit('error', new ArchiverError('FORMATSET')); + this.emit("error", new ArchiverError("FORMATSET")); return this; } - this._format = format; - return this; }; - /** * Sets the module used for archiving. * * @param {Function} module The function for archiver to interact with. 
* @return {this} */ -Archiver.prototype.setModule = function(module) { +Archiver.prototype.setModule = function (module) { if (this._state.aborted) { - this.emit('error', new ArchiverError('ABORTED')); + this.emit("error", new ArchiverError("ABORTED")); return this; } - if (this._state.module) { - this.emit('error', new ArchiverError('MODULESET')); + this.emit("error", new ArchiverError("MODULESET")); return this; } - this._module = module; this._modulePipe(); - return this; }; - /** * Appends a symlink to the instance. * @@ -854,55 +756,52 @@ Archiver.prototype.setModule = function(module) { * @param {Number} mode Sets the entry permissions. * @return {this} */ -Archiver.prototype.symlink = function(filepath, target, mode) { +Archiver.prototype.symlink = function (filepath, target, mode) { if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); + this.emit("error", new ArchiverError("QUEUECLOSED")); return this; } - - if (typeof filepath !== 'string' || filepath.length === 0) { - this.emit('error', new ArchiverError('SYMLINKFILEPATHREQUIRED')); + if (typeof filepath !== "string" || filepath.length === 0) { + this.emit("error", new ArchiverError("SYMLINKFILEPATHREQUIRED")); return this; } - - if (typeof target !== 'string' || target.length === 0) { - this.emit('error', new ArchiverError('SYMLINKTARGETREQUIRED', { filepath: filepath })); + if (typeof target !== "string" || target.length === 0) { + this.emit( + "error", + new ArchiverError("SYMLINKTARGETREQUIRED", { filepath: filepath }), + ); return this; } - - if (!this._moduleSupports('symlink')) { - this.emit('error', new ArchiverError('SYMLINKNOTSUPPORTED', { filepath: filepath })); + if (!this._moduleSupports("symlink")) { + this.emit( + "error", + new ArchiverError("SYMLINKNOTSUPPORTED", { filepath: filepath }), + ); return this; } - var data = {}; - data.type = 'symlink'; - data.name = filepath.replace(/\\/g, '/'); - data.linkname = target.replace(/\\/g, 
'/'); - data.sourceType = 'buffer'; - + data.type = "symlink"; + data.name = filepath.replace(/\\/g, "/"); + data.linkname = target.replace(/\\/g, "/"); + data.sourceType = "buffer"; if (typeof mode === "number") { data.mode = mode; } - this._entriesCount++; this._queue.push({ data: data, - source: Buffer.concat([]) + source: Buffer.concat([]), }); - return this; }; - /** * Returns the current length (in bytes) that has been emitted. * * @return {Number} */ -Archiver.prototype.pointer = function() { +Archiver.prototype.pointer = function () { return this._pointer; }; - /** * Middleware-like helper that has yet to be fully implemented. * @@ -910,65 +809,8 @@ Archiver.prototype.pointer = function() { * @param {Function} plugin * @return {this} */ -Archiver.prototype.use = function(plugin) { +Archiver.prototype.use = function (plugin) { this._streams.push(plugin); return this; }; - -module.exports = Archiver; - -/** - * @typedef {Object} CoreOptions - * @global - * @property {Number} [statConcurrency=4] Sets the number of workers used to - * process the internal fs stat queue. - */ - -/** - * @typedef {Object} TransformOptions - * @property {Boolean} [allowHalfOpen=true] If set to false, then the stream - * will automatically end the readable side when the writable side ends and vice - * versa. - * @property {Boolean} [readableObjectMode=false] Sets objectMode for readable - * side of the stream. Has no effect if objectMode is true. - * @property {Boolean} [writableObjectMode=false] Sets objectMode for writable - * side of the stream. Has no effect if objectMode is true. - * @property {Boolean} [decodeStrings=true] Whether or not to decode strings - * into Buffers before passing them to _write(). `Writable` - * @property {String} [encoding=NULL] If specified, then buffers will be decoded - * to strings using the specified encoding. 
`Readable` - * @property {Number} [highWaterMark=16kb] The maximum number of bytes to store - * in the internal buffer before ceasing to read from the underlying resource. - * `Readable` `Writable` - * @property {Boolean} [objectMode=false] Whether this stream should behave as a - * stream of objects. Meaning that stream.read(n) returns a single value instead - * of a Buffer of size n. `Readable` `Writable` - */ - -/** - * @typedef {Object} EntryData - * @property {String} name Sets the entry name including internal path. - * @property {(String|Date)} [date=NOW()] Sets the entry date. - * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. - * @property {String} [prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - */ - -/** - * @typedef {Object} ErrorData - * @property {String} message The message of the error. - * @property {String} code The error code assigned to this error. - * @property {String} data Additional data provided for reporting or debugging (where available). - */ - -/** - * @typedef {Object} ProgressData - * @property {Object} entries - * @property {Number} entries.total Number of entries that have been appended. - * @property {Number} entries.processed Number of entries that have been processed. - * @property {Object} fs - * @property {Number} fs.totalBytes Number of bytes that have been appended. Calculated asynchronously and might not be accurate: it growth while entries are added. (based on fs.Stats) - * @property {Number} fs.processedBytes Number of bytes that have been processed. 
(based on fs.Stats) - */ +export default Archiver; diff --git a/lib/error.js b/lib/error.js index 6bcb0ae1..1b51295b 100644 --- a/lib/error.js +++ b/lib/error.js @@ -1,32 +1,28 @@ -/** - * Archiver Core - * - * @ignore - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. - */ - -var util = require('util'); - +import util from "util"; const ERROR_CODES = { - 'ABORTED': 'archive was aborted', - 'DIRECTORYDIRPATHREQUIRED': 'diretory dirpath argument must be a non-empty string value', - 'DIRECTORYFUNCTIONINVALIDDATA': 'invalid data returned by directory custom data function', - 'ENTRYNAMEREQUIRED': 'entry name must be a non-empty string value', - 'FILEFILEPATHREQUIRED': 'file filepath argument must be a non-empty string value', - 'FINALIZING': 'archive already finalizing', - 'QUEUECLOSED': 'queue closed', - 'NOENDMETHOD': 'no suitable finalize/end method defined by module', - 'DIRECTORYNOTSUPPORTED': 'support for directory entries not defined by module', - 'FORMATSET': 'archive format already set', - 'INPUTSTEAMBUFFERREQUIRED': 'input source must be valid Stream or Buffer instance', - 'MODULESET': 'module already set', - 'SYMLINKNOTSUPPORTED': 'support for symlink entries not defined by module', - 'SYMLINKFILEPATHREQUIRED': 'symlink filepath argument must be a non-empty string value', - 'SYMLINKTARGETREQUIRED': 'symlink target argument must be a non-empty string value', - 'ENTRYNOTSUPPORTED': 'entry not supported' + ABORTED: "archive was aborted", + DIRECTORYDIRPATHREQUIRED: + "diretory dirpath argument must be a non-empty string value", + DIRECTORYFUNCTIONINVALIDDATA: + "invalid data returned by directory custom data function", + ENTRYNAMEREQUIRED: "entry name must be a non-empty string value", + FILEFILEPATHREQUIRED: + "file filepath argument must be a non-empty string value", + FINALIZING: "archive already finalizing", + QUEUECLOSED: "queue closed", + NOENDMETHOD: "no 
suitable finalize/end method defined by module", + DIRECTORYNOTSUPPORTED: "support for directory entries not defined by module", + FORMATSET: "archive format already set", + INPUTSTEAMBUFFERREQUIRED: + "input source must be valid Stream or Buffer instance", + MODULESET: "module already set", + SYMLINKNOTSUPPORTED: "support for symlink entries not defined by module", + SYMLINKFILEPATHREQUIRED: + "symlink filepath argument must be a non-empty string value", + SYMLINKTARGETREQUIRED: + "symlink target argument must be a non-empty string value", + ENTRYNOTSUPPORTED: "entry not supported", }; - function ArchiverError(code, data) { Error.captureStackTrace(this, this.constructor); //this.name = this.constructor.name; @@ -34,7 +30,4 @@ function ArchiverError(code, data) { this.code = code; this.data = data; } - util.inherits(ArchiverError, Error); - -exports = module.exports = ArchiverError; \ No newline at end of file diff --git a/lib/plugins/json.js b/lib/plugins/json.js index caf63de9..220f2352 100644 --- a/lib/plugins/json.js +++ b/lib/plugins/json.js @@ -5,17 +5,17 @@ * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} * @copyright (c) 2012-2014 Chris Talkington, contributors. 
*/ -var inherits = require('util').inherits; -var Transform = require('readable-stream').Transform; +var inherits = require("util").inherits; +var Transform = require("readable-stream").Transform; -var crc32 = require('buffer-crc32'); -var util = require('archiver-utils'); +var crc32 = require("buffer-crc32"); +var util = require("archiver-utils"); /** * @constructor * @param {(JsonOptions|TransformOptions)} options */ -var Json = function(options) { +var Json = function (options) { if (!(this instanceof Json)) { return new Json(options); } @@ -26,7 +26,7 @@ var Json = function(options) { this.supports = { directory: true, - symlink: true + symlink: true, }; this.files = []; @@ -43,7 +43,7 @@ inherits(Json, Transform); * @param {Function} callback * @return void */ -Json.prototype._transform = function(chunk, encoding, callback) { +Json.prototype._transform = function (chunk, encoding, callback) { callback(null, chunk); }; @@ -53,7 +53,7 @@ Json.prototype._transform = function(chunk, encoding, callback) { * @private * @return void */ -Json.prototype._writeStringified = function() { +Json.prototype._writeStringified = function () { var fileString = JSON.stringify(this.files); this.write(fileString); }; @@ -66,7 +66,7 @@ Json.prototype._writeStringified = function() { * @param {Function} callback * @return void */ -Json.prototype.append = function(source, data, callback) { +Json.prototype.append = function (source, data, callback) { var self = this; data.crc32 = 0; @@ -85,9 +85,9 @@ Json.prototype.append = function(source, data, callback) { callback(null, data); } - if (data.sourceType === 'buffer') { + if (data.sourceType === "buffer") { onend(null, source); - } else if (data.sourceType === 'stream') { + } else if (data.sourceType === "stream") { util.collectStream(source, onend); } }; @@ -97,7 +97,7 @@ Json.prototype.append = function(source, data, callback) { * * @return void */ -Json.prototype.finalize = function() { +Json.prototype.finalize = function () { 
this._writeStringified(); this.end(); }; diff --git a/lib/plugins/tar.js b/lib/plugins/tar.js index 3a170090..ccd8ae8a 100644 --- a/lib/plugins/tar.js +++ b/lib/plugins/tar.js @@ -5,31 +5,31 @@ * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} * @copyright (c) 2012-2014 Chris Talkington, contributors. */ -var zlib = require('zlib'); +var zlib = require("zlib"); -var engine = require('tar-stream'); -var util = require('archiver-utils'); +var engine = require("tar-stream"); +var util = require("archiver-utils"); /** * @constructor * @param {TarOptions} options */ -var Tar = function(options) { +var Tar = function (options) { if (!(this instanceof Tar)) { return new Tar(options); } options = this.options = util.defaults(options, { - gzip: false + gzip: false, }); - if (typeof options.gzipOptions !== 'object') { + if (typeof options.gzipOptions !== "object") { options.gzipOptions = {}; } this.supports = { directory: true, - symlink: true + symlink: true, }; this.engine = engine.pack(options); @@ -37,7 +37,7 @@ var Tar = function(options) { if (options.gzip) { this.compressor = zlib.createGzip(options.gzipOptions); - this.compressor.on('error', this._onCompressorError.bind(this)); + this.compressor.on("error", this._onCompressorError.bind(this)); } }; @@ -48,8 +48,8 @@ var Tar = function(options) { * @param {Error} err * @return void */ -Tar.prototype._onCompressorError = function(err) { - this.engine.emit('error', err); +Tar.prototype._onCompressorError = function (err) { + this.engine.emit("error", err); }; /** @@ -60,7 +60,7 @@ Tar.prototype._onCompressorError = function(err) { * @param {Function} callback * @return void */ -Tar.prototype.append = function(source, data, callback) { +Tar.prototype.append = function (source, data, callback) { var self = this; data.mtime = data.date; @@ -71,22 +71,22 @@ Tar.prototype.append = function(source, data, callback) { return; } - self.engine.entry(data, sourceBuffer, function(err) { + 
self.engine.entry(data, sourceBuffer, function (err) { callback(err, data); }); } - if (data.sourceType === 'buffer') { + if (data.sourceType === "buffer") { append(null, source); - } else if (data.sourceType === 'stream' && data.stats) { + } else if (data.sourceType === "stream" && data.stats) { data.size = data.stats.size; - var entry = self.engine.entry(data, function(err) { + var entry = self.engine.entry(data, function (err) { callback(err, data); }); source.pipe(entry); - } else if (data.sourceType === 'stream') { + } else if (data.sourceType === "stream") { util.collectStream(source, append); } }; @@ -96,7 +96,7 @@ Tar.prototype.append = function(source, data, callback) { * * @return void */ -Tar.prototype.finalize = function() { +Tar.prototype.finalize = function () { this.engine.finalize(); }; @@ -105,7 +105,7 @@ Tar.prototype.finalize = function() { * * @return this.engine */ -Tar.prototype.on = function() { +Tar.prototype.on = function () { return this.engine.on.apply(this.engine, arguments); }; @@ -116,9 +116,11 @@ Tar.prototype.on = function() { * @param {Object} options * @return this.engine */ -Tar.prototype.pipe = function(destination, options) { +Tar.prototype.pipe = function (destination, options) { if (this.compressor) { - return this.engine.pipe.apply(this.engine, [this.compressor]).pipe(destination, options); + return this.engine.pipe + .apply(this.engine, [this.compressor]) + .pipe(destination, options); } else { return this.engine.pipe.apply(this.engine, arguments); } @@ -129,7 +131,7 @@ Tar.prototype.pipe = function(destination, options) { * * @return this.engine */ -Tar.prototype.unpipe = function() { +Tar.prototype.unpipe = function () { if (this.compressor) { return this.compressor.unpipe.apply(this.compressor, arguments); } else { diff --git a/lib/plugins/zip.js b/lib/plugins/zip.js index df6f0743..eea5c79a 100644 --- a/lib/plugins/zip.js +++ b/lib/plugins/zip.js @@ -5,8 +5,8 @@ * @license [MIT]{@link 
https://github.com/archiverjs/node-archiver/blob/master/LICENSE} * @copyright (c) 2012-2014 Chris Talkington, contributors. */ -var engine = require('zip-stream'); -var util = require('archiver-utils'); +var engine = require("zip-stream"); +var util = require("archiver-utils"); /** * @constructor @@ -18,21 +18,21 @@ var util = require('archiver-utils'); * @param {Boolean} [options.store=false] Sets the compression method to STORE. * @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} */ -var Zip = function(options) { +var Zip = function (options) { if (!(this instanceof Zip)) { return new Zip(options); } options = this.options = util.defaults(options, { - comment: '', + comment: "", forceUTC: false, namePrependSlash: false, - store: false + store: false, }); this.supports = { directory: true, - symlink: true + symlink: true, }; this.engine = new engine(options); @@ -52,35 +52,35 @@ var Zip = function(options) { * @param {Function} callback * @return void */ -Zip.prototype.append = function(source, data, callback) { +Zip.prototype.append = function (source, data, callback) { this.engine.entry(source, data, callback); }; /** * @return void */ -Zip.prototype.finalize = function() { +Zip.prototype.finalize = function () { this.engine.finalize(); }; /** * @return this.engine */ -Zip.prototype.on = function() { +Zip.prototype.on = function () { return this.engine.on.apply(this.engine, arguments); }; /** * @return this.engine */ -Zip.prototype.pipe = function() { +Zip.prototype.pipe = function () { return this.engine.pipe.apply(this.engine, arguments); }; /** * @return this.engine */ -Zip.prototype.unpipe = function() { +Zip.prototype.unpipe = function () { return this.engine.unpipe.apply(this.engine, arguments); }; diff --git a/package-lock.json b/package-lock.json index ee089e24..86f99618 100644 --- a/package-lock.json +++ b/package-lock.json @@ -23,6 +23,7 @@ "jsdoc": "4.0.3", "mkdirp": "3.0.1", "mocha": "10.7.3", 
+ "prettier": "3.3.3", "rimraf": "5.0.10", "stream-bench": "0.1.2", "tar": "6.2.1", @@ -1499,6 +1500,21 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/prettier": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, "node_modules/process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -3235,6 +3251,12 @@ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true }, + "prettier": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", + "dev": true + }, "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", diff --git a/package.json b/package.json index c98d54a9..120a44ca 100644 --- a/package.json +++ b/package.json @@ -42,6 +42,7 @@ "jsdoc": "4.0.3", "mkdirp": "3.0.1", "mocha": "10.7.3", + "prettier": "3.3.3", "rimraf": "5.0.10", "stream-bench": "0.1.2", "tar": "6.2.1", diff --git a/test/archiver.js b/test/archiver.js index 21ec91fa..a2cae1a9 100644 --- a/test/archiver.js +++ b/test/archiver.js @@ -1,450 +1,448 @@ -/*global before,describe,it */ -var fs = require('fs'); -var PassThrough = require('readable-stream').PassThrough; -var Readable = require('readable-stream').Readable; +import fs from "fs"; +import { PassThrough as PassThrough$0 } from "readable-stream"; +import { Readable as Readable$0 } from "readable-stream"; +import { assert as assert$0 } from "chai"; +import * as 
mkdir from "mkdirp"; +import * as helpers from "./helpers/index.js"; +import archiver from "../index.js"; +var PassThrough = { PassThrough: PassThrough$0 }.PassThrough; +var Readable = { Readable: Readable$0 }.Readable; var WriteStream = fs.createWriteStream; - -var assert = require('chai').assert; -var mkdir = require('mkdirp'); - -var helpers = require('./helpers'); +var assert = { assert: assert$0 }.assert; var HashStream = helpers.HashStream; var UnBufferedStream = helpers.UnBufferedStream; var WriteHashStream = helpers.WriteHashStream; var binaryBuffer = helpers.binaryBuffer; - -var archiver = require('../'); - var testBuffer = binaryBuffer(1024 * 16); - -var testDate = new Date('Jan 03 2013 14:26:38 GMT'); -var testDate2 = new Date('Feb 10 2013 10:24:42 GMT'); - -var win32 = process.platform === 'win32'; - -describe('archiver', function() { - before(function() { - mkdir.sync('tmp'); - +var testDate = new Date("Jan 03 2013 14:26:38 GMT"); +var testDate2 = new Date("Feb 10 2013 10:24:42 GMT"); +var win32 = process.platform === "win32"; +describe("archiver", function () { + before(function () { + mkdir.sync("tmp"); if (!win32) { - fs.chmodSync('test/fixtures/executable.sh', 0777); - fs.chmodSync('test/fixtures/directory/subdir/', 0755); - fs.symlinkSync('test/fixtures/directory/level0.txt', 'test/fixtures/directory/subdir/level0link.txt'); - fs.symlinkSync('test/fixtures/directory/subdir/subsub/', 'test/fixtures/directory/subdir/subsublink'); + fs.chmodSync("test/fixtures/executable.sh", 0777); + fs.chmodSync("test/fixtures/directory/subdir/", 0755); + fs.symlinkSync( + "test/fixtures/directory/level0.txt", + "test/fixtures/directory/subdir/level0link.txt", + ); + fs.symlinkSync( + "test/fixtures/directory/subdir/subsub/", + "test/fixtures/directory/subdir/subsublink", + ); } else { - fs.writeFileSync('test/fixtures/directory/subdir/level0link.txt', '../level0.txt'); - fs.writeFileSync('test/fixtures/directory/subdir/subsublink', 'subsub'); + fs.writeFileSync( + 
"test/fixtures/directory/subdir/level0link.txt", + "../level0.txt", + ); + fs.writeFileSync("test/fixtures/directory/subdir/subsublink", "subsub"); } }); - - after(function() { - fs.unlinkSync('test/fixtures/directory/subdir/level0link.txt'); - fs.unlinkSync('test/fixtures/directory/subdir/subsublink'); + after(function () { + fs.unlinkSync("test/fixtures/directory/subdir/level0link.txt"); + fs.unlinkSync("test/fixtures/directory/subdir/subsublink"); }); - - describe('core', function() { - var archive = archiver('json'); - - describe('#_normalizeEntryData', function() { - it('should support prefix of the entry name', function() { - var prefix1 = archive._normalizeEntryData({ name: 'entry.txt', prefix: 'prefix/' }); - assert.propertyVal(prefix1, 'name', 'prefix/entry.txt'); - - var prefix2 = archive._normalizeEntryData({ name: 'entry.txt', prefix: '' }); - assert.propertyVal(prefix2, 'name', 'entry.txt'); + describe("core", function () { + var archive = archiver("json"); + describe("#_normalizeEntryData", function () { + it("should support prefix of the entry name", function () { + var prefix1 = archive._normalizeEntryData({ + name: "entry.txt", + prefix: "prefix/", + }); + assert.propertyVal(prefix1, "name", "prefix/entry.txt"); + var prefix2 = archive._normalizeEntryData({ + name: "entry.txt", + prefix: "", + }); + assert.propertyVal(prefix2, "name", "entry.txt"); }); - - it('should support special bits on unix', function () { + it("should support special bits on unix", function () { if (!win32) { - var mode = archive._normalizeEntryData({ name: 'executable.sh', mode: fs.statSync('test/fixtures/executable.sh').mode }); - assert.propertyVal(mode, 'mode', 511); + var mode = archive._normalizeEntryData({ + name: "executable.sh", + mode: fs.statSync("test/fixtures/executable.sh").mode, + }); + assert.propertyVal(mode, "mode", 511); } }); }); }); - - describe('api', function() { - describe('#abort', function() { + describe("api", function () { + describe("#abort", 
function () { var archive; - - before(function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/abort.json'); - - testStream.on('close', function() { + before(function (done) { + archive = archiver("json"); + var testStream = new WriteStream("tmp/abort.json"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - archive - .append(testBuffer, { name: 'buffer.txt', date: testDate }) - .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }) - .file('test/fixtures/test.txt') + .append(testBuffer, { name: "buffer.txt", date: testDate }) + .append(fs.createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, + }) + .file("test/fixtures/test.txt") .abort(); }); - - it('should have a state of aborted', function() { - assert.property(archive, '_state'); - assert.propertyVal(archive._state, 'aborted', true); + it("should have a state of aborted", function () { + assert.property(archive, "_state"); + assert.propertyVal(archive._state, "aborted", true); }); }); - - describe('#append', function() { + describe("#append", function () { var actual; var archive; var entries = {}; - - before(function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/append.json'); - - testStream.on('close', function() { - actual = helpers.readJSON('tmp/append.json'); - - actual.forEach(function(entry) { + before(function (done) { + archive = archiver("json"); + var testStream = new WriteStream("tmp/append.json"); + testStream.on("close", function () { + actual = helpers.readJSON("tmp/append.json"); + actual.forEach(function (entry) { entries[entry.name] = entry; }); - done(); }); - archive.pipe(testStream); - archive - .append(testBuffer, { name: 'buffer.txt', date: testDate }) - .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }) - .append(Readable.from(['test']), { name: 'stream-like.txt', date: testDate }) 
- .append(null, { name: 'directory/', date: testDate }) + .append(testBuffer, { name: "buffer.txt", date: testDate }) + .append(fs.createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, + }) + .append(Readable.from(["test"]), { + name: "stream-like.txt", + date: testDate, + }) + .append(null, { name: "directory/", date: testDate }) .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); assert.lengthOf(actual, 4); }); - - it('should append buffer', function() { - assert.property(entries, 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'name', 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'type', 'file'); - assert.propertyVal(entries['buffer.txt'], 'date', '2013-01-03T14:26:38.000Z'); - assert.propertyVal(entries['buffer.txt'], 'mode', 420); - assert.propertyVal(entries['buffer.txt'], 'crc32', 3893830384); - assert.propertyVal(entries['buffer.txt'], 'size', 16384); + it("should append buffer", function () { + assert.property(entries, "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "name", "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "type", "file"); + assert.propertyVal( + entries["buffer.txt"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal(entries["buffer.txt"], "mode", 420); + assert.propertyVal(entries["buffer.txt"], "crc32", 3893830384); + assert.propertyVal(entries["buffer.txt"], "size", 16384); }); - - it('should append stream', function() { - assert.property(entries, 'stream.txt'); - assert.propertyVal(entries['stream.txt'], 'name', 'stream.txt'); - assert.propertyVal(entries['stream.txt'], 'type', 'file'); - assert.propertyVal(entries['stream.txt'], 'date', '2013-01-03T14:26:38.000Z'); - assert.propertyVal(entries['stream.txt'], 'mode', 420); - assert.propertyVal(entries['stream.txt'], 'crc32', 585446183); - assert.propertyVal(entries['stream.txt'], 'size', 19); + 
it("should append stream", function () { + assert.property(entries, "stream.txt"); + assert.propertyVal(entries["stream.txt"], "name", "stream.txt"); + assert.propertyVal(entries["stream.txt"], "type", "file"); + assert.propertyVal( + entries["stream.txt"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal(entries["stream.txt"], "mode", 420); + assert.propertyVal(entries["stream.txt"], "crc32", 585446183); + assert.propertyVal(entries["stream.txt"], "size", 19); }); - - it('should append stream-like source', function() { - assert.property(entries, 'stream-like.txt'); - assert.propertyVal(entries['stream-like.txt'], 'name', 'stream-like.txt'); - assert.propertyVal(entries['stream-like.txt'], 'type', 'file'); - assert.propertyVal(entries['stream-like.txt'], 'date', '2013-01-03T14:26:38.000Z'); - assert.propertyVal(entries['stream-like.txt'], 'mode', 420); - assert.propertyVal(entries['stream-like.txt'], 'crc32', 3632233996); - assert.propertyVal(entries['stream-like.txt'], 'size', 4); + it("should append stream-like source", function () { + assert.property(entries, "stream-like.txt"); + assert.propertyVal( + entries["stream-like.txt"], + "name", + "stream-like.txt", + ); + assert.propertyVal(entries["stream-like.txt"], "type", "file"); + assert.propertyVal( + entries["stream-like.txt"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal(entries["stream-like.txt"], "mode", 420); + assert.propertyVal(entries["stream-like.txt"], "crc32", 3632233996); + assert.propertyVal(entries["stream-like.txt"], "size", 4); }); - - it('should append directory', function() { - assert.property(entries, 'directory/'); - assert.propertyVal(entries['directory/'], 'name', 'directory/'); - assert.propertyVal(entries['directory/'], 'type', 'directory'); - assert.propertyVal(entries['directory/'], 'date', '2013-01-03T14:26:38.000Z'); - assert.propertyVal(entries['directory/'], 'mode', 493); - assert.propertyVal(entries['directory/'], 'crc32', 0); - 
assert.propertyVal(entries['directory/'], 'size', 0); + it("should append directory", function () { + assert.property(entries, "directory/"); + assert.propertyVal(entries["directory/"], "name", "directory/"); + assert.propertyVal(entries["directory/"], "type", "directory"); + assert.propertyVal( + entries["directory/"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal(entries["directory/"], "mode", 493); + assert.propertyVal(entries["directory/"], "crc32", 0); + assert.propertyVal(entries["directory/"], "size", 0); }); }); - - describe('#directory', function() { + describe("#directory", function () { var actual; var archive; var entries = {}; - - before(function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/directory.json'); - - testStream.on('close', function() { - actual = helpers.readJSON('tmp/directory.json'); - - actual.forEach(function(entry) { + before(function (done) { + archive = archiver("json"); + var testStream = new WriteStream("tmp/directory.json"); + testStream.on("close", function () { + actual = helpers.readJSON("tmp/directory.json"); + actual.forEach(function (entry) { entries[entry.name] = entry; }); - done(); }); - archive.pipe(testStream); - archive - .directory('test/fixtures/directory', null, { date: testDate }) - .directory('test/fixtures/directory', 'Win\\DS\\', { date: testDate }) - .directory('test/fixtures/directory', 'directory', function(data) { - if (data.name === 'ignore.txt') { + .directory("test/fixtures/directory", null, { date: testDate }) + .directory("test/fixtures/directory", "Win\\DS\\", { date: testDate }) + .directory("test/fixtures/directory", "directory", function (data) { + if (data.name === "ignore.txt") { return false; } - data.funcProp = true; return data; }) .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); - - assert.property(entries, 
'test/fixtures/directory/level0.txt'); - assert.property(entries, 'test/fixtures/directory/subdir/'); - assert.property(entries, 'test/fixtures/directory/subdir/level1.txt'); - assert.property(entries, 'test/fixtures/directory/subdir/subsub/'); - assert.property(entries, 'test/fixtures/directory/subdir/subsub/level2.txt'); - assert.propertyVal(entries['test/fixtures/directory/level0.txt'], 'date', '2013-01-03T14:26:38.000Z'); - assert.propertyVal(entries['test/fixtures/directory/subdir/'], 'date', '2013-01-03T14:26:38.000Z'); - - assert.property(entries, 'directory/level0.txt'); - assert.property(entries, 'directory/subdir/'); - assert.property(entries, 'directory/subdir/level1.txt'); - assert.property(entries, 'directory/subdir/subsub/'); - assert.property(entries, 'directory/subdir/subsub/level2.txt'); + assert.property(entries, "test/fixtures/directory/level0.txt"); + assert.property(entries, "test/fixtures/directory/subdir/"); + assert.property(entries, "test/fixtures/directory/subdir/level1.txt"); + assert.property(entries, "test/fixtures/directory/subdir/subsub/"); + assert.property( + entries, + "test/fixtures/directory/subdir/subsub/level2.txt", + ); + assert.propertyVal( + entries["test/fixtures/directory/level0.txt"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal( + entries["test/fixtures/directory/subdir/"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.property(entries, "directory/level0.txt"); + assert.property(entries, "directory/subdir/"); + assert.property(entries, "directory/subdir/level1.txt"); + assert.property(entries, "directory/subdir/subsub/"); + assert.property(entries, "directory/subdir/subsub/level2.txt"); }); - - it('should support setting data properties via function', function() { - assert.property(entries, 'directory/level0.txt'); - assert.propertyVal(entries['directory/level0.txt'], 'funcProp', true); + it("should support setting data properties via function", function () { + assert.property(entries, 
"directory/level0.txt"); + assert.propertyVal(entries["directory/level0.txt"], "funcProp", true); }); - - it('should support ignoring matches via function', function() { - assert.notProperty(entries, 'directory/ignore.txt'); + it("should support ignoring matches via function", function () { + assert.notProperty(entries, "directory/ignore.txt"); }); - - it('should find dot files', function() { - assert.property(entries, 'directory/.dotfile'); + it("should find dot files", function () { + assert.property(entries, "directory/.dotfile"); }); - - it('should retain symlinks', function() { - assert.property(entries, 'test/fixtures/directory/subdir/level0link.txt'); - assert.property(entries, 'directory/subdir/level0link.txt'); + it("should retain symlinks", function () { + assert.property( + entries, + "test/fixtures/directory/subdir/level0link.txt", + ); + assert.property(entries, "directory/subdir/level0link.txt"); }); - - it('should retain directory symlink', function() { - assert.property(entries, 'test/fixtures/directory/subdir/subsublink'); - assert.property(entries, 'directory/subdir/subsublink'); + it("should retain directory symlink", function () { + assert.property(entries, "test/fixtures/directory/subdir/subsublink"); + assert.property(entries, "directory/subdir/subsublink"); }); - - it('should handle windows path separators in prefix', function() { - assert.property(entries, 'Win/DS/level0.txt'); + it("should handle windows path separators in prefix", function () { + assert.property(entries, "Win/DS/level0.txt"); }); }); - - describe('#file', function() { + describe("#file", function () { var actual; var archive; var entries = {}; - - before(function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/file.json'); - - testStream.on('close', function() { - actual = helpers.readJSON('tmp/file.json'); - - actual.forEach(function(entry) { + before(function (done) { + archive = archiver("json"); + var testStream = new 
WriteStream("tmp/file.json"); + testStream.on("close", function () { + actual = helpers.readJSON("tmp/file.json"); + actual.forEach(function (entry) { entries[entry.name] = entry; }); - done(); }); - archive.pipe(testStream); - archive - .file('test/fixtures/test.txt', { name: 'test.txt', date: testDate }) - .file('test/fixtures/test.txt') - .file('test/fixtures/executable.sh', { mode: win32 ? 0777 : null }) + .file("test/fixtures/test.txt", { name: "test.txt", date: testDate }) + .file("test/fixtures/test.txt") + .file("test/fixtures/executable.sh", { mode: win32 ? 0o777 : null }) + .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); assert.lengthOf(actual, 3); }); - - it('should append filepath', function() { - assert.property(entries, 'test.txt'); - assert.propertyVal(entries['test.txt'], 'name', 'test.txt'); - assert.propertyVal(entries['test.txt'], 'date', '2013-01-03T14:26:38.000Z'); - assert.propertyVal(entries['test.txt'], 'crc32', 585446183); - assert.propertyVal(entries['test.txt'], 'size', 19); + it("should append filepath", function () { + assert.property(entries, "test.txt"); + assert.propertyVal(entries["test.txt"], "name", "test.txt"); + assert.propertyVal( + entries["test.txt"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal(entries["test.txt"], "crc32", 585446183); + assert.propertyVal(entries["test.txt"], "size", 19); }); - - it('should fallback to filepath when no name is set', function() { - assert.property(entries, 'test/fixtures/test.txt'); + it("should fallback to filepath when no name is set", function () { + assert.property(entries, "test/fixtures/test.txt"); }); - - it('should fallback to file stats when applicable', function() { - assert.property(entries, 'test/fixtures/executable.sh'); - assert.propertyVal(entries['test/fixtures/executable.sh'], 'name', 'test/fixtures/executable.sh'); - 
assert.propertyVal(entries['test/fixtures/executable.sh'], 'mode', 511); - assert.propertyVal(entries['test/fixtures/executable.sh'], 'crc32', 3957348457); - assert.propertyVal(entries['test/fixtures/executable.sh'], 'size', 11); + it("should fallback to file stats when applicable", function () { + assert.property(entries, "test/fixtures/executable.sh"); + assert.propertyVal( + entries["test/fixtures/executable.sh"], + "name", + "test/fixtures/executable.sh", + ); + assert.propertyVal(entries["test/fixtures/executable.sh"], "mode", 511); + assert.propertyVal( + entries["test/fixtures/executable.sh"], + "crc32", + 3957348457, + ); + assert.propertyVal(entries["test/fixtures/executable.sh"], "size", 11); }); }); - - describe('#glob', function() { + describe("#glob", function () { var actual; var archive; var entries = {}; - - before(function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/glob.json'); - - testStream.on('close', function() { - actual = helpers.readJSON('tmp/glob.json'); - - actual.forEach(function(entry) { + before(function (done) { + archive = archiver("json"); + var testStream = new WriteStream("tmp/glob.json"); + testStream.on("close", function () { + actual = helpers.readJSON("tmp/glob.json"); + actual.forEach(function (entry) { entries[entry.name] = entry; }); - done(); }); - archive.pipe(testStream); - archive - .glob('test/fixtures/test.txt', null ) - .glob('test/fixtures/empty.txt', null ) - .glob('test/fixtures/executable.sh', null ) - .glob('test/fixtures/directory/**/*', { ignore: 'test/fixtures/directory/subdir/**/*', nodir: true }) - .glob('**/*', { cwd: 'test/fixtures/directory/subdir/' }) + .glob("test/fixtures/test.txt", null) + .glob("test/fixtures/empty.txt", null) + .glob("test/fixtures/executable.sh", null) + .glob("test/fixtures/directory/**/*", { + ignore: "test/fixtures/directory/subdir/**/*", + nodir: true, + }) + .glob("**/*", { cwd: "test/fixtures/directory/subdir/" }) .finalize(); }); - - 
it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); - - assert.property(entries, 'test/fixtures/test.txt'); - assert.property(entries, 'test/fixtures/executable.sh'); - assert.property(entries, 'test/fixtures/empty.txt'); - - assert.property(entries, 'test/fixtures/directory/level0.txt'); - - assert.property(entries, 'level1.txt'); - assert.property(entries, 'subsub/level2.txt'); + assert.property(entries, "test/fixtures/test.txt"); + assert.property(entries, "test/fixtures/executable.sh"); + assert.property(entries, "test/fixtures/empty.txt"); + assert.property(entries, "test/fixtures/directory/level0.txt"); + assert.property(entries, "level1.txt"); + assert.property(entries, "subsub/level2.txt"); }); }); - - describe('#promise', function() { + describe("#promise", function () { var archive; - - it('should use a promise', function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/promise.json'); - + it("should use a promise", function (done) { + archive = archiver("json"); + var testStream = new WriteStream("tmp/promise.json"); archive.pipe(testStream); - archive - .append(testBuffer, { name: 'buffer.txt', date: testDate }) - .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }) - .append(null, { name: 'directory/', date: testDate }) - .finalize() - .then(function() { - done() + .append(testBuffer, { name: "buffer.txt", date: testDate }) + .append(fs.createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, }) + .append(null, { name: "directory/", date: testDate }) + .finalize() + .then(function () { + done(); + }); }); }); - - describe('#errors', function() { - var archive; - - it('should allow continue on stat failing', function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/errors-stat.json'); - - testStream.on('close', function() { - done(); - }); - - 
archive.pipe(testStream); - - archive - .file('test/fixtures/test.txt') - .file('test/fixtures/test-missing.txt') - .file('test/fixtures/empty.txt') - .finalize() + describe("#errors", function () { + var archive; + it("should allow continue on stat failing", function (done) { + archive = archiver("json"); + var testStream = new WriteStream("tmp/errors-stat.json"); + testStream.on("close", function () { + done(); }); - - it('should allow continue on with several stat failings', function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/errors-stat.json'); - - testStream.on('close', function() { - done(); - }); - - archive.pipe(testStream); - - archive.file('test/fixtures/test.txt'); - for (var i = 1; i <= 20; i++) - archive.file('test/fixtures/test-missing.txt'); - - archive.finalize() - }); + archive.pipe(testStream); + archive + .file("test/fixtures/test.txt") + .file("test/fixtures/test-missing.txt") + .file("test/fixtures/empty.txt") + .finalize(); + }); + it("should allow continue on with several stat failings", function (done) { + archive = archiver("json"); + var testStream = new WriteStream("tmp/errors-stat.json"); + testStream.on("close", function () { + done(); + }); + archive.pipe(testStream); + archive.file("test/fixtures/test.txt"); + for (var i = 1; i <= 20; i++) + archive.file("test/fixtures/test-missing.txt"); + archive.finalize(); + }); }); - - describe('#isRegisteredFormat', function () { - var isRegisteredFormat = archiver.isRegisteredFormat('zip'); - it('should return true when the value is present', function () { + describe("#isRegisteredFormat", function () { + var isRegisteredFormat = archiver.isRegisteredFormat("zip"); + it("should return true when the value is present", function () { assert.equal(true, isRegisteredFormat); }); }); - }); - - describe('#symlink', function() { + describe("#symlink", function () { var actual; var archive; var entries = {}; - - before(function(done) { - archive = archiver('json'); - 
var testStream = new WriteStream('tmp/symlink.json'); - - testStream.on('close', function() { - actual = helpers.readJSON('tmp/symlink.json'); - - actual.forEach(function(entry) { + before(function (done) { + archive = archiver("json"); + var testStream = new WriteStream("tmp/symlink.json"); + testStream.on("close", function () { + actual = helpers.readJSON("tmp/symlink.json"); + actual.forEach(function (entry) { entries[entry.name] = entry; }); - done(); }); - archive.pipe(testStream); - archive .append("file-a", { name: "file-a" }) .symlink("directory-a/symlink-to-file-a", "../file-a") - .symlink("directory-b/directory-c/symlink-to-directory-a", "../../directory-a", 493) + .symlink( + "directory-b/directory-c/symlink-to-directory-a", + "../../directory-a", + 493, + ) .finalize(); }); - - it('should append multiple entries', () => { + it("should append multiple entries", () => { assert.isArray(actual); - assert.property(entries, 'file-a'); - assert.property(entries, 'directory-a/symlink-to-file-a'); - assert.property(entries, 'directory-b/directory-c/symlink-to-directory-a'); - assert.propertyVal(entries['directory-b/directory-c/symlink-to-directory-a'], 'mode', 493); + assert.property(entries, "file-a"); + assert.property(entries, "directory-a/symlink-to-file-a"); + assert.property( + entries, + "directory-b/directory-c/symlink-to-directory-a", + ); + assert.propertyVal( + entries["directory-b/directory-c/symlink-to-directory-a"], + "mode", + 493, + ); }); }); }); diff --git a/test/helpers/index.js b/test/helpers/index.js index 05cb597a..d6fe04c7 100644 --- a/test/helpers/index.js +++ b/test/helpers/index.js @@ -1,110 +1,80 @@ -var crypto = require('crypto'); -var fs = require('fs'); -var inherits = require('util').inherits; - -var Stream = require('stream').Stream; -var Readable = require('readable-stream').Readable; -var Writable = require('readable-stream').Writable; - +import crypto from "crypto"; +import fs from "fs"; +import { inherits as inherits$0 } from 
"util"; +import stream from "stream"; +import { Readable as Readable$0 } from "readable-stream"; +import { Writable as Writable$0 } from "readable-stream"; +var inherits = { inherits: inherits$0 }.inherits; +var Stream = stream.Stream; +var Readable = { Readable: Readable$0 }.Readable; +var Writable = { Writable: Writable$0 }.Writable; function adjustDateByOffset(d, offset) { - d = (d instanceof Date) ? d : new Date(); - + d = d instanceof Date ? d : new Date(); if (offset >= 1) { d.setMinutes(d.getMinutes() - offset); } else { d.setMinutes(d.getMinutes() + Math.abs(offset)); } - return d; } - -module.exports.adjustDateByOffset = adjustDateByOffset; - function binaryBuffer(n) { var buffer = Buffer.alloc(n); - for (var i = 0; i < n; i++) { - buffer.writeUInt8(i&255, i); + buffer.writeUInt8(i & 255, i); } - return buffer; } - -module.exports.binaryBuffer = binaryBuffer; - function BinaryStream(size, options) { Readable.call(this, options); - var buf = Buffer.alloc(size); - for (var i = 0; i < size; i++) { - buf.writeUInt8(i&255, i); + buf.writeUInt8(i & 255, i); } - this.push(buf); this.push(null); } - inherits(BinaryStream, Readable); - -BinaryStream.prototype._read = function(size) {}; - -module.exports.BinaryStream = BinaryStream; - +BinaryStream.prototype._read = function (size) {}; function DeadEndStream(options) { Writable.call(this, options); } - inherits(DeadEndStream, Writable); - -DeadEndStream.prototype._write = function(chuck, encoding, callback) { +DeadEndStream.prototype._write = function (chuck, encoding, callback) { callback(); }; - -module.exports.DeadEndStream = DeadEndStream; - function readJSON(filepath) { var contents; - try { contents = fs.readFileSync(String(filepath)); contents = JSON.parse(contents); - } catch(e) { + } catch (e) { contents = null; } - return contents; } - -module.exports.readJSON = readJSON; - function UnBufferedStream() { this.readable = true; } - inherits(UnBufferedStream, Stream); - -module.exports.UnBufferedStream = 
UnBufferedStream; - function WriteHashStream(path, options) { fs.WriteStream.call(this, path, options); - - this.hash = crypto.createHash('sha1'); + this.hash = crypto.createHash("sha1"); this.digest = null; - - this.on('close', function() { - this.digest = this.hash.digest('hex'); + this.on("close", function () { + this.digest = this.hash.digest("hex"); }); } - inherits(WriteHashStream, fs.WriteStream); - -WriteHashStream.prototype.write = function(chunk) { +WriteHashStream.prototype.write = function (chunk) { if (chunk) { this.hash.update(chunk); } - return fs.WriteStream.prototype.write.call(this, chunk); }; - -module.exports.WriteHashStream = WriteHashStream; \ No newline at end of file +export { adjustDateByOffset }; +export { binaryBuffer }; +export { BinaryStream }; +export { DeadEndStream }; +export { readJSON }; +export { UnBufferedStream }; +export { WriteHashStream }; diff --git a/test/plugins.js b/test/plugins.js index ba128a8c..5486acfe 100644 --- a/test/plugins.js +++ b/test/plugins.js @@ -1,51 +1,48 @@ -/*global before,describe,it */ -var fs = require('fs'); -var assert = require('chai').assert; -var mkdir = require('mkdirp'); -var tar = require('tar'); -var yauzl = require('yauzl'); +import fs from "fs"; +import { assert as assert$0 } from "chai"; +import * as mkdir from "mkdirp"; +import * as tar from "tar"; +import * as yauzl from "yauzl"; +import archiver from "../index.js"; +import * as helpers from "./helpers/index.js"; +var assert = { assert: assert$0 }.assert; var WriteStream = fs.createWriteStream; - -var archiver = require('../'); -var helpers = require('./helpers'); var binaryBuffer = helpers.binaryBuffer; - var testBuffer = binaryBuffer(1024 * 16); -var testDate = new Date('Jan 03 2013 14:26:38 GMT'); -var testDate2 = new Date('Feb 10 2013 10:24:42 GMT'); - -var win32 = process.platform === 'win32'; - -describe('plugins', function() { - before(function() { - mkdir.sync('tmp'); - +var testDate = new Date("Jan 03 2013 14:26:38 GMT"); +var 
testDate2 = new Date("Feb 10 2013 10:24:42 GMT"); +var win32 = process.platform === "win32"; +describe("plugins", function () { + before(function () { + mkdir.sync("tmp"); if (!win32) { - fs.chmodSync('test/fixtures/executable.sh', 0777); - fs.chmodSync('test/fixtures/directory/subdir/', 0755); - fs.symlinkSync('../level0.txt', 'test/fixtures/directory/subdir/level0link.txt'); - fs.symlinkSync('subsub/', 'test/fixtures/directory/subdir/subsublink'); + fs.chmodSync("test/fixtures/executable.sh", 0o777); + fs.chmodSync("test/fixtures/directory/subdir/", 0o755); + fs.symlinkSync( + "../level0.txt", + "test/fixtures/directory/subdir/level0link.txt", + ); + fs.symlinkSync("subsub/", "test/fixtures/directory/subdir/subsublink"); } else { - fs.writeFileSync('test/fixtures/directory/subdir/level0link.txt', '../level0.txt'); - fs.writeFileSync('test/fixtures/directory/subdir/subsublink', 'subsub'); + fs.writeFileSync( + "test/fixtures/directory/subdir/level0link.txt", + "../level0.txt", + ); + fs.writeFileSync("test/fixtures/directory/subdir/subsublink", "subsub"); } }); - - after(function() { - fs.unlinkSync('test/fixtures/directory/subdir/level0link.txt'); - fs.unlinkSync('test/fixtures/directory/subdir/subsublink'); + after(function () { + fs.unlinkSync("test/fixtures/directory/subdir/level0link.txt"); + fs.unlinkSync("test/fixtures/directory/subdir/subsublink"); }); - - describe('tar', function() { + describe("tar", function () { var actual = []; var archive; var entries = {}; - - before(function(done) { - archive = archiver('tar'); + before(function (done) { + archive = archiver("tar"); var testStream = new tar.Parse(); - - testStream.on('entry', function(entry) { + testStream.on("entry", function (entry) { actual.push(entry.path); entries[entry.path] = { type: entry.type, @@ -59,169 +56,199 @@ describe('plugins', function() { mtime: entry.mtime, atime: entry.atime, ctime: entry.ctime, - linkpath: entry.linkpath + linkpath: entry.linkpath, }; entry.resume(); }); - - 
testStream.on('end', function() { + testStream.on("end", function () { done(); }); - archive.pipe(testStream); - archive - .append(testBuffer, { name: 'buffer.txt', date: testDate }) - .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }) - .append(null, { name: 'folder/', date: testDate }) - .directory('test/fixtures/directory', 'directory') - .symlink('manual-link.txt', 'manual-link-target.txt') + .append(testBuffer, { name: "buffer.txt", date: testDate }) + .append(fs.createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, + }) + .append(null, { name: "folder/", date: testDate }) + .directory("test/fixtures/directory", "directory") + .symlink("manual-link.txt", "manual-link-target.txt") .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); assert.isAbove(actual.length, 10); }); - - it('should append buffer', function() { - assert.property(entries, 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'path', 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'type', 'File'); - assert.propertyVal(entries['buffer.txt'], 'mode', 420); - assert.propertyVal(entries['buffer.txt'], 'size', 16384); - }); - - it('should append stream', function() { - assert.property(entries, 'stream.txt'); - assert.propertyVal(entries['stream.txt'], 'path', 'stream.txt'); - assert.propertyVal(entries['stream.txt'], 'type', 'File'); - assert.propertyVal(entries['stream.txt'], 'mode', 420); - assert.propertyVal(entries['stream.txt'], 'size', 19); - }); - - it('should append folder', function() { - assert.property(entries, 'folder/'); - assert.propertyVal(entries['folder/'], 'path', 'folder/'); - assert.propertyVal(entries['folder/'], 'type', 'Directory'); - assert.propertyVal(entries['folder/'], 'mode', 493); - assert.propertyVal(entries['folder/'], 'size', 0); - }); - - it('should append manual symlink', 
function() { - assert.property(entries, 'manual-link.txt'); - assert.propertyVal(entries['manual-link.txt'], 'type', 'SymbolicLink'); - assert.propertyVal(entries['manual-link.txt'], 'linkpath', 'manual-link-target.txt'); - }); - - it('should append via directory', function() { - assert.property(entries, 'directory/subdir/level1.txt'); - assert.property(entries, 'directory/subdir/level0link.txt'); - }); - - it('should retain symlinks via directory', function() { + it("should append buffer", function () { + assert.property(entries, "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "path", "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "type", "File"); + assert.propertyVal(entries["buffer.txt"], "mode", 420); + assert.propertyVal(entries["buffer.txt"], "size", 16384); + }); + it("should append stream", function () { + assert.property(entries, "stream.txt"); + assert.propertyVal(entries["stream.txt"], "path", "stream.txt"); + assert.propertyVal(entries["stream.txt"], "type", "File"); + assert.propertyVal(entries["stream.txt"], "mode", 420); + assert.propertyVal(entries["stream.txt"], "size", 19); + }); + it("should append folder", function () { + assert.property(entries, "folder/"); + assert.propertyVal(entries["folder/"], "path", "folder/"); + assert.propertyVal(entries["folder/"], "type", "Directory"); + assert.propertyVal(entries["folder/"], "mode", 493); + assert.propertyVal(entries["folder/"], "size", 0); + }); + it("should append manual symlink", function () { + assert.property(entries, "manual-link.txt"); + assert.propertyVal(entries["manual-link.txt"], "type", "SymbolicLink"); + assert.propertyVal( + entries["manual-link.txt"], + "linkpath", + "manual-link-target.txt", + ); + }); + it("should append via directory", function () { + assert.property(entries, "directory/subdir/level1.txt"); + assert.property(entries, "directory/subdir/level0link.txt"); + }); + it("should retain symlinks via directory", function () { if (win32) { this.skip(); } 
- - assert.property(entries, 'directory/subdir/level0link.txt'); - assert.propertyVal(entries['directory/subdir/level0link.txt'], 'type', 'SymbolicLink'); - assert.propertyVal(entries['directory/subdir/level0link.txt'], 'linkpath', '../level0.txt'); - - assert.property(entries, 'directory/subdir/subsublink'); - assert.propertyVal(entries['directory/subdir/subsublink'], 'type', 'SymbolicLink'); - assert.propertyVal(entries['directory/subdir/subsublink'], 'linkpath', 'subsub'); + assert.property(entries, "directory/subdir/level0link.txt"); + assert.propertyVal( + entries["directory/subdir/level0link.txt"], + "type", + "SymbolicLink", + ); + assert.propertyVal( + entries["directory/subdir/level0link.txt"], + "linkpath", + "../level0.txt", + ); + assert.property(entries, "directory/subdir/subsublink"); + assert.propertyVal( + entries["directory/subdir/subsublink"], + "type", + "SymbolicLink", + ); + assert.propertyVal( + entries["directory/subdir/subsublink"], + "linkpath", + "subsub", + ); }); }); - - describe('zip', function() { + describe("zip", function () { var actual = []; var archive; var entries = {}; - var zipComment = ''; - - before(function(done) { - archive = archiver('zip', { comment: 'archive comment' }); - var testStream = new WriteStream('tmp/plugin.zip'); - - testStream.on('close', function(entry) { - yauzl.open('tmp/plugin.zip', function(err, zip) { - zip.on('entry', function(entry) { + var zipComment = ""; + before(function (done) { + archive = archiver("zip", { comment: "archive comment" }); + var testStream = new WriteStream("tmp/plugin.zip"); + testStream.on("close", function (entry) { + yauzl.open("tmp/plugin.zip", function (err, zip) { + zip.on("entry", function (entry) { actual.push(entry.fileName); entries[entry.fileName] = entry; }); - - zip.on('close', function() { + zip.on("close", function () { done(); }); - zipComment = zip.comment; }); }); - archive.pipe(testStream); - archive - .append(testBuffer, { name: 'buffer.txt', date: testDate, 
comment: 'entry comment' }) - .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }) - .file('test/fixtures/executable.sh', { name: 'executable.sh', mode: win32 ? 0777 : null }) - .directory('test/fixtures/directory', 'directory') - .symlink('manual-link.txt', 'manual-link-target.txt') + .append(testBuffer, { + name: "buffer.txt", + date: testDate, + comment: "entry comment", + }) + .append(fs.createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, + }) + .file("test/fixtures/executable.sh", { + name: "executable.sh", + mode: win32 ? 0777 : null, + }) + .directory("test/fixtures/directory", "directory") + .symlink("manual-link.txt", "manual-link-target.txt") .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); assert.isAbove(actual.length, 10); }); - - it('should append buffer', function() { - assert.property(entries, 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'uncompressedSize', 16384); - assert.propertyVal(entries['buffer.txt'], 'crc32', 3893830384); - }); - - it('should append stream', function() { - assert.property(entries, 'stream.txt'); - assert.propertyVal(entries['stream.txt'], 'uncompressedSize', 19); - assert.propertyVal(entries['stream.txt'], 'crc32', 585446183); - }); - - it('should append via file', function() { - assert.property(entries, 'executable.sh'); - assert.propertyVal(entries['executable.sh'], 'uncompressedSize', 11); - assert.propertyVal(entries['executable.sh'], 'crc32', 3957348457); - }); - - it('should append via directory', function() { - assert.property(entries, 'directory/subdir/level1.txt'); - assert.propertyVal(entries['directory/subdir/level1.txt'], 'uncompressedSize', 6); - assert.propertyVal(entries['directory/subdir/level1.txt'], 'crc32', 133711013); - }); - - it('should append manual symlink', function() { - assert.property(entries, 'manual-link.txt'); 
- assert.propertyVal(entries['manual-link.txt'], 'crc32', 1121667014); - assert.propertyVal(entries['manual-link.txt'], 'externalFileAttributes', 2684354592); - }); - - it('should allow for custom unix mode', function() { - assert.property(entries, 'executable.sh'); - assert.propertyVal(entries['executable.sh'], 'externalFileAttributes', 2180972576); - assert.equal((entries['executable.sh'].externalFileAttributes >>> 16) & 0xFFF, 511); - - assert.property(entries, 'directory/subdir/'); - assert.propertyVal(entries['directory/subdir/'], 'externalFileAttributes', 1106051088); - assert.equal((entries['directory/subdir/'].externalFileAttributes >>> 16) & 0xFFF, 493); - }); - - it('should allow for entry comments', function() { - assert.property(entries, 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'fileComment', 'entry comment'); - }); - - it('should allow for archive comment', function() { - assert.equal('archive comment', zipComment); + it("should append buffer", function () { + assert.property(entries, "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "uncompressedSize", 16384); + assert.propertyVal(entries["buffer.txt"], "crc32", 3893830384); + }); + it("should append stream", function () { + assert.property(entries, "stream.txt"); + assert.propertyVal(entries["stream.txt"], "uncompressedSize", 19); + assert.propertyVal(entries["stream.txt"], "crc32", 585446183); + }); + it("should append via file", function () { + assert.property(entries, "executable.sh"); + assert.propertyVal(entries["executable.sh"], "uncompressedSize", 11); + assert.propertyVal(entries["executable.sh"], "crc32", 3957348457); + }); + it("should append via directory", function () { + assert.property(entries, "directory/subdir/level1.txt"); + assert.propertyVal( + entries["directory/subdir/level1.txt"], + "uncompressedSize", + 6, + ); + assert.propertyVal( + entries["directory/subdir/level1.txt"], + "crc32", + 133711013, + ); + }); + it("should append manual symlink", 
function () { + assert.property(entries, "manual-link.txt"); + assert.propertyVal(entries["manual-link.txt"], "crc32", 1121667014); + assert.propertyVal( + entries["manual-link.txt"], + "externalFileAttributes", + 2684354592, + ); + }); + it("should allow for custom unix mode", function () { + assert.property(entries, "executable.sh"); + assert.propertyVal( + entries["executable.sh"], + "externalFileAttributes", + 2180972576, + ); + assert.equal( + (entries["executable.sh"].externalFileAttributes >>> 16) & 0xfff, + 511, + ); + assert.property(entries, "directory/subdir/"); + assert.propertyVal( + entries["directory/subdir/"], + "externalFileAttributes", + 1106051088, + ); + assert.equal( + (entries["directory/subdir/"].externalFileAttributes >>> 16) & 0xfff, + 493, + ); + }); + it("should allow for entry comments", function () { + assert.property(entries, "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "fileComment", "entry comment"); + }); + it("should allow for archive comment", function () { + assert.equal("archive comment", zipComment); }); }); }); diff --git a/website/babel.config.js b/website/babel.config.js index e00595da..bfd75dbd 100644 --- a/website/babel.config.js +++ b/website/babel.config.js @@ -1,3 +1,3 @@ module.exports = { - presets: [require.resolve('@docusaurus/core/lib/babel/preset')], + presets: [require.resolve("@docusaurus/core/lib/babel/preset")], }; diff --git a/website/docs/archiver_api.md b/website/docs/archiver_api.md index 4651596a..17828be4 100644 --- a/website/docs/archiver_api.md +++ b/website/docs/archiver_api.md @@ -7,37 +7,37 @@ sidebar_label: "Archiver" ## Archiver Class ```js -new Archiver(format, options) +new Archiver(format, options); ``` ### constructor ##### Parameters -- `format` - *String* - The archive format to use. -- `options` - *Object* +- `format` - _String_ - The archive format to use. 
+- `options` - _Object_ #### Options -The `options` object may include the following properties as well as all [Stream.duplex options](https://nodejs.org/api/stream.html#stream_new_stream_duplex_options): +The `options` object may include the following properties as well as all [Stream.duplex options](https://nodejs.org/api/stream.html#stream_new_stream_duplex_options): ##### Core Options -- `statConcurrency` - *Number* (default 4) - Sets the number of workers used to process the internal fs stat queue. +- `statConcurrency` - _Number_ (default 4) - Sets the number of workers used to process the internal fs stat queue. ##### ZIP Options -- `comment` - *String* - Sets the zip archive comment. -- `forceLocalTime` - *Boolean* - Forces the archive to contain local file times instead of UTC. -- `forceZip64` - *Boolean* - Forces the archive to contain ZIP64 headers. -- `namePrependSlash` - *Boolean* - Prepends a forward slash to archive file paths. -- `store` - *Boolean* - Sets the compression method to STORE. -- `zlib` - *Object* - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression. +- `comment` - _String_ - Sets the zip archive comment. +- `forceLocalTime` - _Boolean_ - Forces the archive to contain local file times instead of UTC. +- `forceZip64` - _Boolean_ - Forces the archive to contain ZIP64 headers. +- `namePrependSlash` - _Boolean_ - Prepends a forward slash to archive file paths. +- `store` - _Boolean_ - Sets the compression method to STORE. +- `zlib` - _Object_ - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression. ##### TAR Options -- `gzip` - *Boolean* - Compress the tar archive using gzip. -- `gzipOptions` - *Object* - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression. +- `gzip` - _Boolean_ - Compress the tar archive using gzip. 
+- `gzipOptions` - _Object_ - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression. See [tar-stream](https://www.npmjs.com/package/tar-stream) documentation for additional properties. @@ -51,10 +51,10 @@ abort() → {this} Aborts the archiving process, taking a best-effort approach, by: -* removing any pending queue tasks -* allowing any active queue workers to finish -* detaching internal module pipes -* ending both sides of the Transform stream +- removing any pending queue tasks +- allowing any active queue workers to finish +- detaching internal module pipes +- ending both sides of the Transform stream It will NOT drain any remaining sources. @@ -76,8 +76,8 @@ When the instance has received, processed, and emitted the input, the entry even ##### Parameters -- `source` - *Buffer | Stream | String* - The input source. -- `data` - *Object* - [The entry data](#entry-data). +- `source` - _Buffer | Stream | String_ - The input source. +- `data` - _Object_ - [The entry data](#entry-data). --- @@ -91,9 +91,9 @@ Appends a directory and its files, recursively, given its dirpath. ##### Parameters -- `dirpath` - *String* - The source directory path. -- `destpath` - *String* - The destination path within the archive. -- `data` - *Object* - [The entry data](#entry-data). +- `dirpath` - _String_ - The source directory path. +- `destpath` - _String_ - The destination path within the archive. +- `data` - _Object_ - [The entry data](#entry-data). --- @@ -109,8 +109,8 @@ When the instance has received, processed, and emitted the file, the entry event ##### Parameters -- `filepath` - *String* - The source filepath. -- `data` - *Object* - [The entry data](#entry-data). +- `filepath` - _String_ - The source filepath. +- `data` - _Object_ - [The entry data](#entry-data). 
--- @@ -124,7 +124,6 @@ Finalizes the instance and prevents further appending to the archive structure ( The `end`, `close` or `finish` events on the destination stream may fire right after calling this method so you should set listeners beforehand to properly detect stream completion. - ##### Parameters None @@ -141,9 +140,9 @@ Appends multiple files that match a glob pattern. ##### Parameters -- `pattern` - *String* - The [glob pattern](https://github.com/isaacs/minimatch) to match. -- `options` - *Object* - Options passed to [node-readdir-glob](https://github.com/yqnn/node-readdir-glob#options), plus an optional `cwd` property that sets the directory to read (defaults to `'.'`). -- `data` - *Object* - [The entry data](#entry-data). +- `pattern` - _String_ - The [glob pattern](https://github.com/isaacs/minimatch) to match. +- `options` - _Object_ - Options passed to [node-readdir-glob](https://github.com/yqnn/node-readdir-glob#options), plus an optional `cwd` property that sets the directory to read (defaults to `'.'`). +- `data` - _Object_ - [The entry data](#entry-data). --- @@ -171,7 +170,7 @@ Sets the module format name used for archiving. ##### Parameters -- `format` - *String* - The name of the format. +- `format` - _String_ - The name of the format. --- @@ -185,7 +184,7 @@ Sets the module used for archiving. ##### Parameters -- `module` - *Function* - The function for archiver to interact with. +- `module` - _Function_ - The function for archiver to interact with. --- @@ -201,9 +200,9 @@ This does NOT interact with filesystem and is used for programmatically creating ##### Parameters -- `filepath` - *String* - The symlink path (within archive). -- `target` - *String* - The target path (within archive). -- `mode` - *Number* - The entry permissions. +- `filepath` - _String_ - The symlink path (within archive). +- `target` - _String_ - The target path (within archive). +- `mode` - _Number_ - The entry permissions. 
## Events @@ -219,28 +218,28 @@ The `entry` event object contains the following properties: The `progress` event object contains the following properties: -- `entries` - *Object* - An object containing the following properties: - - `total` - *Number* - The number of entries that have been appended. - - `processed` - *Number* - The number of entries that have been processed. +- `entries` - _Object_ - An object containing the following properties: + - `total` - _Number_ - The number of entries that have been appended. + - `processed` - _Number_ - The number of entries that have been processed. - `fs` - Object - An object containing the following properties: - - `totalBytes` - *Number* - The number of bytes that have been appended. Calculated asynchronously and might not be accurate: it growth while entries are added. (based on fs.Stats) - - `processedBytes` - *Number* - The number of bytes that have been processed. (based on fs.Stats) + - `totalBytes` - _Number_ - The number of bytes that have been appended. Calculated asynchronously and might not be accurate: it grows while entries are added. (based on fs.Stats) + - `processedBytes` - _Number_ - The number of bytes that have been processed. (based on fs.Stats) #### Event: error The `error` event object contains the following properties: -- `message` - *String* - The message of the error. -- `code` - *String* - The error code assigned to this error. -- `data` - *Object* - Additional data provided for reporting or debugging (where available). +- `message` - _String_ - The message of the error. +- `code` - _String_ - The error code assigned to this error. +- `data` - _Object_ - Additional data provided for reporting or debugging (where available). #### Event: warning The `warning` event object contains the following properties: -- `message` - *String* - The message of the error. -- `code` - *String* - The error code assigned to this error. 
-- `data` - *Object* - Additional data provided for reporting or debugging (where available). +- `message` - _String_ - The message of the error. +- `code` - _String_ - The error code assigned to this error. +- `data` - _Object_ - Additional data provided for reporting or debugging (where available). ## Entry Data @@ -248,36 +247,36 @@ The entry data object may contain the following properties: #### Core Entry Properties -- `name` - *String* - Sets the entry name including internal path. -- `date` - *String | Date* - Sets the entry date. -- `mode` - *Number* - Sets the entry permissions. -- `prefix` - *String* - Sets a path prefix for the entry name. Useful when working with methods like [directory](#directory) or [glob](#glob). -- `stats` - *fs.Stats* - Sets the stat data for this entry allowing for reduction of fs.stat calls. +- `name` - _String_ - Sets the entry name including internal path. +- `date` - _String | Date_ - Sets the entry date. +- `mode` - _Number_ - Sets the entry permissions. +- `prefix` - _String_ - Sets a path prefix for the entry name. Useful when working with methods like [directory](#directory) or [glob](#glob). +- `stats` - _fs.Stats_ - Sets the stat data for this entry allowing for reduction of fs.stat calls. #### ZIP Entry Properties -- `namePrependSlash` - *Boolean* - Prepends a forward slash to archive file paths. -- `store` - *Boolean* - Sets the compression method to STORE. +- `namePrependSlash` - _Boolean_ - Prepends a forward slash to archive file paths. +- `store` - _Boolean_ - Sets the compression method to STORE. ## Format Registration ### registerFormat ```js -registerFormat(format, module) +registerFormat(format, module); ``` Registers a format for use with archiver. ##### Parameters -- `format` - *String* - The name of the format. -- `module` - *Function* - The function for archiver to interact with. +- `format` - _String_ - The name of the format. +- `module` - _Function_ - The function for archiver to interact with. 
#### module ```js -module(options) +module(options); ``` The `module` function should consist of the following: @@ -308,11 +307,11 @@ module.prototype.finalize() {} ### isFormatRegistered ```js -isRegisteredFormat(format) +isRegisteredFormat(format); ``` Check if the format is already registered. ##### Parameters -- `format` - *String* - The name of the format. +- `format` - _String_ - The name of the format. diff --git a/website/docs/quickstart.md b/website/docs/quickstart.md index 2c88f421..ff366e2b 100644 --- a/website/docs/quickstart.md +++ b/website/docs/quickstart.md @@ -13,32 +13,34 @@ Archiver is available on [npm](https://www.npmjs.com/package/archiver). ```js // require modules -const fs = require('fs'); -const archiver = require('archiver'); +const fs = require("fs"); +const archiver = require("archiver"); // create a file to stream archive data to. -const output = fs.createWriteStream(__dirname + '/example.zip'); -const archive = archiver('zip', { - zlib: { level: 9 } // Sets the compression level. +const output = fs.createWriteStream(__dirname + "/example.zip"); +const archive = archiver("zip", { + zlib: { level: 9 }, // Sets the compression level. }); // listen for all archive data to be written // 'close' event is fired only when a file descriptor is involved -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); +output.on("close", function () { + console.log(archive.pointer() + " total bytes"); + console.log( + "archiver has been finalized and the output file descriptor has closed.", + ); }); // This event is fired when the data source is drained no matter what was the data source. // It is not part of this library but rather from the NodeJS Stream API. 
// @see: https://nodejs.org/api/stream.html#stream_event_end -output.on('end', function() { - console.log('Data has been drained'); +output.on("end", function () { + console.log("Data has been drained"); }); // good practice to catch warnings (ie stat failures and other non-blocking errors) -archive.on('warning', function(err) { - if (err.code === 'ENOENT') { +archive.on("warning", function (err) { + if (err.code === "ENOENT") { // log warning } else { // throw error @@ -47,7 +49,7 @@ archive.on('warning', function(err) { }); // good practice to catch this error explicitly -archive.on('error', function(err) { +archive.on("error", function (err) { throw err; }); @@ -55,27 +57,27 @@ archive.on('error', function(err) { archive.pipe(output); // append a file from stream -const file1 = __dirname + '/file1.txt'; -archive.append(fs.createReadStream(file1), { name: 'file1.txt' }); +const file1 = __dirname + "/file1.txt"; +archive.append(fs.createReadStream(file1), { name: "file1.txt" }); // append a file from string -archive.append('string cheese!', { name: 'file2.txt' }); +archive.append("string cheese!", { name: "file2.txt" }); // append a file from buffer -const buffer3 = Buffer.from('buff it!'); -archive.append(buffer3, { name: 'file3.txt' }); +const buffer3 = Buffer.from("buff it!"); +archive.append(buffer3, { name: "file3.txt" }); // append a file -archive.file('file1.txt', { name: 'file4.txt' }); +archive.file("file1.txt", { name: "file4.txt" }); // append files from a sub-directory and naming it `new-subdir` within the archive -archive.directory('subdir/', 'new-subdir'); +archive.directory("subdir/", "new-subdir"); // append files from a sub-directory, putting its contents at the root of archive -archive.directory('subdir/', false); +archive.directory("subdir/", false); // append files from a glob pattern -archive.glob('file*.txt', {cwd:__dirname}); +archive.glob("file*.txt", { cwd: __dirname }); // finalize the archive (ie we are done appending files but streams 
have to finish yet) // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index 186bf925..b6bdd004 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -1,25 +1,25 @@ module.exports = { - title: 'Archiver', - tagline: 'A streaming interface for archive generation.', - url: 'https://www.archiverjs.com', - baseUrl: '/', - onBrokenLinks: 'throw', - onBrokenMarkdownLinks: 'warn', - favicon: 'img/favicon.ico', - organizationName: 'archiverjs', - projectName: 'node-archiver', - themeConfig: { + title: "Archiver", + tagline: "A streaming interface for archive generation.", + url: "https://www.archiverjs.com", + baseUrl: "/", + onBrokenLinks: "throw", + onBrokenMarkdownLinks: "warn", + favicon: "img/favicon.ico", + organizationName: "archiverjs", + projectName: "node-archiver", + themeConfig: { navbar: { - title: 'Archiver', + title: "Archiver", logo: { - alt: 'Archiver Logo', - src: 'img/logo.svg', + alt: "Archiver Logo", + src: "img/logo.svg", }, items: [ { - to: 'docs/quickstart', - label: 'Docs', - position: 'left', + to: "docs/quickstart", + label: "Docs", + position: "left", }, { to: "docs/archiver", @@ -27,42 +27,42 @@ module.exports = { position: "left", }, { - href: 'https://github.com/archiverjs/node-archiver/', - label: 'GitHub', - position: 'right', + href: "https://github.com/archiverjs/node-archiver/", + label: "GitHub", + position: "right", }, ], }, footer: { - style: 'dark', + style: "dark", links: [ { - title: 'Docs', + title: "Docs", items: [ { - label: 'Get Started', - to: 'docs/quickstart', + label: "Get Started", + to: "docs/quickstart", }, { - label: 'Archive Formats', - to: 'docs/archive-formats', + label: "Archive Formats", + to: "docs/archive-formats", }, { - label: 'API Reference', - to: 'docs/archiver', + label: "API Reference", + to: "docs/archiver", }, ], }, { - title: 'More', + title: "More", 
items: [ { - label: 'ZipStream', - to: 'zipstream', + label: "ZipStream", + to: "zipstream", }, { - label: 'GitHub', - href: 'https://github.com/archiverjs/', + label: "GitHub", + href: "https://github.com/archiverjs/", }, ], }, @@ -72,21 +72,23 @@ module.exports = { }, presets: [ [ - '@docusaurus/preset-classic', + "@docusaurus/preset-classic", { docs: { - sidebarPath: require.resolve('./sidebars.js'), - editUrl: 'https://github.com/archiverjs/node-archiver/edit/master/website/', + sidebarPath: require.resolve("./sidebars.js"), + editUrl: + "https://github.com/archiverjs/node-archiver/edit/master/website/", }, blog: { showReadingTime: true, - editUrl: 'https://github.com/archiverjs/node-archiver/edit/master/website/blog/', + editUrl: + "https://github.com/archiverjs/node-archiver/edit/master/website/blog/", }, theme: { - customCss: require.resolve('./src/css/custom.css'), + customCss: require.resolve("./src/css/custom.css"), }, gtag: { - trackingID: 'UA-75847652-4', + trackingID: "UA-75847652-4", anonymizeIP: true, }, }, diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 5bf5a5e9..1c9e37e2 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -1,14 +1,14 @@ -import React from 'react'; -import clsx from 'clsx'; -import Layout from '@theme/Layout'; -import Link from '@docusaurus/Link'; -import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; -import useBaseUrl from '@docusaurus/useBaseUrl'; -import styles from './styles.module.css'; +import React from "react"; +import clsx from "clsx"; +import Layout from "@theme/Layout"; +import Link from "@docusaurus/Link"; +import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import styles from "./styles.module.css"; const features = [ { - title: 'Streaming', + title: "Streaming", description: ( <> Archiver was designed to use native node streams as its data transport. 
@@ -16,20 +16,20 @@ const features = [ ), }, { - title: 'Extendable', + title: "Extendable", description: ( <> - Archiver can be extended to support different archive formats - while reusing the same composition API. + Archiver can be extended to support different archive formats while + reusing the same composition API. ), }, ]; -function Feature({imageUrl, title, description}) { +function Feature({ imageUrl, title, description }) { const imgUrl = useBaseUrl(imageUrl); return ( -
+
{imgUrl && (
{title} @@ -43,22 +43,21 @@ function Feature({imageUrl, title, description}) { export default function Home() { const context = useDocusaurusContext(); - const {siteConfig = {}} = context; + const { siteConfig = {} } = context; return ( - -
+ +

{siteConfig.title}

{siteConfig.tagline}

+ to={useBaseUrl("docs/quickstart")} + > Get Started
diff --git a/website/src/pages/zipstream.md b/website/src/pages/zipstream.md index 2500b9b4..6920cb8c 100644 --- a/website/src/pages/zipstream.md +++ b/website/src/pages/zipstream.md @@ -19,7 +19,7 @@ ZipStream is available on [npm](https://www.npmjs.com/package/zip-stream). ## ZipStream Class ```js -new ZipStream(options) +new ZipStream(options); ``` ### constructor @@ -30,12 +30,12 @@ new ZipStream(options) The `options` object may contain the following properties: -- `comment` - *String* - Sets the zip archive comment. -- `forceLocalTime` - *Boolean* - Forces the archive to contain local file times instead of UTC. -- `forceZip64` - *Boolean* - Forces the archive to contain ZIP64 headers. -- `namePrependSlash` - *Boolean* - Prepends a forward slash to archive file paths. -- `store` - *Boolean* - Sets the compression method to STORE. -- `zlib` - *Object* - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression +- `comment` - _String_ - Sets the zip archive comment. +- `forceLocalTime` - _Boolean_ - Forces the archive to contain local file times instead of UTC. +- `forceZip64` - _Boolean_ - Forces the archive to contain ZIP64 headers. +- `namePrependSlash` - _Boolean_ - Prepends a forward slash to archive file paths. +- `store` - _Boolean_ - Sets the compression method to STORE. +- `zlib` - _Object_ - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression --- @@ -49,19 +49,19 @@ entry(source, data, callback) → {this} ##### Parameters -- `source` - *Buffer | Stream | String* - The input source. -- `data` - *Object* - The entry data. -- `callback` - *Function* +- `source` - _Buffer | Stream | String_ - The input source. +- `data` - _Object_ - The entry data. +- `callback` - _Function_ The `data` object may contain the following properties: -- `name` - *String* - The entry name including internal path. -- `comment` - *String* - The entry comment. -- `date` - *String | Date* - The entry date. 
-- `mode` - *Number* - The entry permissions. -- `namePrependSlash` - *Boolean* - Prepends a forward slash to archive file paths. -- `store` - *Boolean* - The compression method to STORE. -- `type` - *String* - The entry type. Defaults to `directory` if name ends with trailing slash. +- `name` - _String_ - The entry name including internal path. +- `comment` - _String_ - The entry comment. +- `date` - _String | Date_ - The entry date. +- `mode` - _Number_ - The entry permissions. +- `namePrependSlash` - _Boolean_ - Prepends a forward slash to archive file paths. +- `store` - _Boolean_ - The compression method to STORE. +- `type` - _String_ - The entry type. Defaults to `directory` if name ends with trailing slash. ---