Deployed the page to GitHub Pages.
parent 1d79754e93
commit 2c89899458
62797 changed files with 6551425 additions and 15279 deletions
41 node_modules/streamroller/lib/DateRollingFileStream.js (generated, vendored, normal file)
@@ -0,0 +1,41 @@
const RollingFileWriteStream = require('./RollingFileWriteStream');

// just to adapt the previous version
class DateRollingFileStream extends RollingFileWriteStream {
  constructor(filename, pattern, options) {
    if (pattern && typeof(pattern) === 'object') {
      options = pattern;
      pattern = null;
    }
    if (!options) {
      options = {};
    }
    if (!pattern) {
      pattern = 'yyyy-MM-dd';
    }
    options.pattern = pattern;
    if (!options.numBackups && options.numBackups !== 0) {
      if (!options.daysToKeep && options.daysToKeep !== 0) {
        options.daysToKeep = 1;
      } else {
        process.emitWarning(
          "options.daysToKeep is deprecated due to the confusion it causes when used " +
            "together with file size rolling. Please use options.numBackups instead.",
          "DeprecationWarning", "streamroller-DEP0001"
        );
      }
      options.numBackups = options.daysToKeep;
    } else {
      options.daysToKeep = options.numBackups;
    }
    super(filename, options);
    this.mode = this.options.mode;
  }

  get theStream() {
    return this.currentFileStream;
  }

}

module.exports = DateRollingFileStream;
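For orientation, a minimal usage sketch of this date-rolling adapter. The path and message are illustrative assumptions, not part of the commit; the class export comes from the package's index.js shown further below.

const { DateRollingFileStream } = require('streamroller');

// Rolls logs/app.log daily using the default 'yyyy-MM-dd' pattern;
// daysToKeep defaults to 1 and is copied into numBackups.
const stream = new DateRollingFileStream('logs/app.log');
stream.write('application started\n', 'utf8');
stream.end();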
29 node_modules/streamroller/lib/RollingFileStream.js (generated, vendored, normal file)
@@ -0,0 +1,29 @@
const RollingFileWriteStream = require('./RollingFileWriteStream');

// just to adapt the previous version
class RollingFileStream extends RollingFileWriteStream {
  constructor(filename, size, backups, options) {
    if (!options) {
      options = {};
    }
    if (size) {
      options.maxSize = size;
    }
    if (!options.numBackups && options.numBackups !== 0) {
      if (!backups && backups !== 0) {
        backups = 1;
      }
      options.numBackups = backups;
    }
    super(filename, options);
    this.backups = options.numBackups;
    this.size = this.options.maxSize;
  }

  get theStream() {
    return this.currentFileStream;
  }

}

module.exports = RollingFileStream;
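Likewise, a hedged sketch of size-based rolling with this adapter; the size and backup count below are assumptions for illustration.

const { RollingFileStream } = require('streamroller');

// Rolls logs/app.log once it reaches ~1 MB, keeping 3 backups
// (app.log.1 through app.log.3 under the default naming scheme).
const stream = new RollingFileStream('logs/app.log', 1024 * 1024, 3);
stream.write('a log line\n', 'utf8');
stream.end();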
338 node_modules/streamroller/lib/RollingFileWriteStream.js (generated, vendored, normal file)
@@ -0,0 +1,338 @@
const debug = require("debug")("streamroller:RollingFileWriteStream");
const fs = require("fs-extra");
const path = require("path");
const os = require("os");
const newNow = require("./now");
const format = require("date-format");
const { Writable } = require("stream");
const fileNameFormatter = require("./fileNameFormatter");
const fileNameParser = require("./fileNameParser");
const moveAndMaybeCompressFile = require("./moveAndMaybeCompressFile");

const deleteFiles = fileNames => {
  debug(`deleteFiles: files to delete: ${fileNames}`);
  return Promise.all(fileNames.map(f => fs.unlink(f).catch((e) => {
    debug(`deleteFiles: error when unlinking ${f}, ignoring. Error was ${e}`);
  })));
};

/**
 * RollingFileWriteStream is mainly used when writing to a file rolling by date or size.
 * RollingFileWriteStream inherits from stream.Writable
 */
class RollingFileWriteStream extends Writable {
  /**
   * Create a RollingFileWriteStream
   * @constructor
   * @param {string} filePath - The file path to write.
   * @param {object} options - The extra options
   * @param {number} options.numToKeep - The max numbers of files to keep.
   * @param {number} options.maxSize - The maxSize one file can reach. Unit is Byte.
   *                                   This should be more than 1024. The default is 0.
   *                                   If not specified or 0, then no log rolling will happen.
   * @param {string} options.mode - The mode of the files. The default is '0600'. Refer to stream.writable for more.
   * @param {string} options.flags - The default is 'a'. Refer to stream.flags for more.
   * @param {boolean} options.compress - Whether to compress backup files.
   * @param {boolean} options.keepFileExt - Whether to keep the file extension.
   * @param {string} options.pattern - The date string pattern in the file name.
   * @param {boolean} options.alwaysIncludePattern - Whether to add date to the name of the first file.
   */
  constructor(filePath, options) {
    debug(`constructor: creating RollingFileWriteStream. path=${filePath}`);
    if (typeof filePath !== "string" || filePath.length === 0) {
      throw new Error(`Invalid filename: ${filePath}`);
    } else if (filePath.endsWith(path.sep)) {
      throw new Error(`Filename is a directory: ${filePath}`);
    } else if (filePath.indexOf(`~${path.sep}`) === 0) {
      // handle ~ expansion: https://github.com/nodejs/node/issues/684
      // exclude ~ and ~filename as these can be valid files
      filePath = filePath.replace("~", os.homedir());
    }
    super(options);
    this.options = this._parseOption(options);
    this.fileObject = path.parse(filePath);
    if (this.fileObject.dir === "") {
      this.fileObject = path.parse(path.join(process.cwd(), filePath));
    }
    this.fileFormatter = fileNameFormatter({
      file: this.fileObject,
      alwaysIncludeDate: this.options.alwaysIncludePattern,
      needsIndex: this.options.maxSize < Number.MAX_SAFE_INTEGER,
      compress: this.options.compress,
      keepFileExt: this.options.keepFileExt,
      fileNameSep: this.options.fileNameSep
    });

    this.fileNameParser = fileNameParser({
      file: this.fileObject,
      keepFileExt: this.options.keepFileExt,
      pattern: this.options.pattern,
      fileNameSep: this.options.fileNameSep
    });

    this.state = {
      currentSize: 0
    };

    if (this.options.pattern) {
      this.state.currentDate = format(this.options.pattern, newNow());
    }

    this.filename = this.fileFormatter({
      index: 0,
      date: this.state.currentDate
    });
    if (["a", "a+", "as", "as+"].includes(this.options.flags)) {
      this._setExistingSizeAndDate();
    }

    debug(
      `constructor: create new file ${this.filename}, state=${JSON.stringify(
        this.state
      )}`
    );
    this._renewWriteStream();
  }

  _setExistingSizeAndDate() {
    try {
      const stats = fs.statSync(this.filename);
      this.state.currentSize = stats.size;
      if (this.options.pattern) {
        this.state.currentDate = format(this.options.pattern, stats.mtime);
      }
    } catch (e) {
      //file does not exist, that's fine - move along
      return;
    }
  }

  _parseOption(rawOptions) {
    const defaultOptions = {
      maxSize: 0,
      numToKeep: Number.MAX_SAFE_INTEGER,
      encoding: "utf8",
      mode: parseInt("0600", 8),
      flags: "a",
      compress: false,
      keepFileExt: false,
      alwaysIncludePattern: false
    };
    const options = Object.assign({}, defaultOptions, rawOptions);
    if (!options.maxSize) {
      delete options.maxSize;
    } else if (options.maxSize <= 0) {
      throw new Error(`options.maxSize (${options.maxSize}) should be > 0`);
    }
    // options.numBackups will supercede options.numToKeep
    if (options.numBackups || options.numBackups === 0) {
      if (options.numBackups < 0) {
        throw new Error(`options.numBackups (${options.numBackups}) should be >= 0`);
      } else if (options.numBackups >= Number.MAX_SAFE_INTEGER) {
        // to cater for numToKeep (include the hot file) at Number.MAX_SAFE_INTEGER
        throw new Error(`options.numBackups (${options.numBackups}) should be < Number.MAX_SAFE_INTEGER`);
      } else {
        options.numToKeep = options.numBackups + 1;
      }
    } else if (options.numToKeep <= 0) {
      throw new Error(`options.numToKeep (${options.numToKeep}) should be > 0`);
    }
    debug(
      `_parseOption: creating stream with option=${JSON.stringify(options)}`
    );
    return options;
  }

  _final(callback) {
    this.currentFileStream.end("", this.options.encoding, callback);
  }

  _write(chunk, encoding, callback) {
    this._shouldRoll().then(() => {
      debug(
        `_write: writing chunk. ` +
          `file=${this.currentFileStream.path} ` +
          `state=${JSON.stringify(this.state)} ` +
          `chunk=${chunk}`
      );
      this.currentFileStream.write(chunk, encoding, e => {
        this.state.currentSize += chunk.length;
        callback(e);
      });
    });
  }

  async _shouldRoll() {
    if (this._dateChanged() || this._tooBig()) {
      debug(
        `_shouldRoll: rolling because dateChanged? ${this._dateChanged()} or tooBig? ${this._tooBig()}`
      );
      await this._roll();
    }
  }

  _dateChanged() {
    return (
      this.state.currentDate &&
      this.state.currentDate !== format(this.options.pattern, newNow())
    );
  }

  _tooBig() {
    return this.state.currentSize >= this.options.maxSize;
  }

  _roll() {
    debug(`_roll: closing the current stream`);
    return new Promise((resolve, reject) => {
      this.currentFileStream.end("", this.options.encoding, () => {
        this._moveOldFiles()
          .then(resolve)
          .catch(reject);
      });
    });
  }

  async _moveOldFiles() {
    const files = await this._getExistingFiles();
    const todaysFiles = this.state.currentDate
      ? files.filter(f => f.date === this.state.currentDate)
      : files;
    for (let i = todaysFiles.length; i >= 0; i--) {
      debug(`_moveOldFiles: i = ${i}`);
      const sourceFilePath = this.fileFormatter({
        date: this.state.currentDate,
        index: i
      });
      const targetFilePath = this.fileFormatter({
        date: this.state.currentDate,
        index: i + 1
      });

      const moveAndCompressOptions = {
        compress: this.options.compress && i === 0,
        mode: this.options.mode
      };
      await moveAndMaybeCompressFile(
        sourceFilePath,
        targetFilePath,
        moveAndCompressOptions
      );
    }

    this.state.currentSize = 0;
    this.state.currentDate = this.state.currentDate
      ? format(this.options.pattern, newNow())
      : null;
    debug(
      `_moveOldFiles: finished rolling files. state=${JSON.stringify(
        this.state
      )}`
    );
    this._renewWriteStream();
    // wait for the file to be open before cleaning up old ones,
    // otherwise the daysToKeep calculations can be off
    await new Promise((resolve, reject) => {
      this.currentFileStream.write("", "utf8", () => {
        this._clean()
          .then(resolve)
          .catch(reject);
      });
    });
  }

  // Sorted from the oldest to the latest
  async _getExistingFiles() {
    const files = await fs.readdir(this.fileObject.dir)
      .catch( /* istanbul ignore next: will not happen on windows */ () => []);

    debug(`_getExistingFiles: files=${files}`);
    const existingFileDetails = files
      .map(n => this.fileNameParser(n))
      .filter(n => n);

    const getKey = n =>
      (n.timestamp ? n.timestamp : newNow().getTime()) - n.index;
    existingFileDetails.sort((a, b) => getKey(a) - getKey(b));

    return existingFileDetails;
  }

  _renewWriteStream() {
    const filePath = this.fileFormatter({
      date: this.state.currentDate,
      index: 0
    });

    // attempt to create the directory
    const mkdir = (dir) => {
      try {
        return fs.mkdirSync(dir, { recursive: true });
      }
      // backward-compatible fs.mkdirSync for nodejs pre-10.12.0 (without recursive option)
      catch (e) {
        // recursive creation of parent first
        if (e.code === "ENOENT") {
          mkdir(path.dirname(dir));
          return mkdir(dir);
        }

        // throw error for all except EEXIST and EROFS (read-only filesystem)
        if (e.code !== "EEXIST" && e.code !== "EROFS") {
          throw e;
        }

        // EEXIST: throw if file and not directory
        // EROFS : throw if directory not found
        else {
          try {
            if (fs.statSync(dir).isDirectory()) {
              return dir;
            }
            throw e;
          } catch (err) {
            throw e;
          }
        }
      }
    };
    mkdir(this.fileObject.dir);

    const ops = {
      flags: this.options.flags,
      encoding: this.options.encoding,
      mode: this.options.mode
    };
    const renameKey = function(obj, oldKey, newKey) {
      obj[newKey] = obj[oldKey];
      delete obj[oldKey];
      return obj;
    };
    // try to throw EISDIR, EROFS, EACCES
    fs.appendFileSync(filePath, "", renameKey({ ...ops }, "flags", "flag"));
    this.currentFileStream = fs.createWriteStream(filePath, ops);
    this.currentFileStream.on("error", e => {
      this.emit("error", e);
    });
  }

  async _clean() {
    const existingFileDetails = await this._getExistingFiles();
    debug(
      `_clean: numToKeep = ${this.options.numToKeep}, existingFiles = ${existingFileDetails.length}`
    );
    debug("_clean: existing files are: ", existingFileDetails);
    if (this._tooManyFiles(existingFileDetails.length)) {
      const fileNamesToRemove = existingFileDetails
        .slice(0, existingFileDetails.length - this.options.numToKeep)
        .map(f => path.format({ dir: this.fileObject.dir, base: f.filename }));
      await deleteFiles(fileNamesToRemove);
    }
  }

  _tooManyFiles(numFiles) {
    return this.options.numToKeep > 0 && numFiles > this.options.numToKeep;
  }
}

module.exports = RollingFileWriteStream;
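Since the JSDoc above documents the options object, here is a sketch combining date and size rolling; every value below is an assumption for illustration, not part of the commit.

const { RollingFileWriteStream } = require('streamroller');

// Date pattern plus maxSize: the stream rolls on whichever triggers first.
// compress gzips backups; keepFileExt keeps ".log" at the end of backup names.
const stream = new RollingFileWriteStream('logs/app.log', {
  pattern: 'yyyy-MM-dd',
  maxSize: 10 * 1024 * 1024,
  numBackups: 5,
  compress: true,
  keepFileExt: true
});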
39 node_modules/streamroller/lib/fileNameFormatter.js (generated, vendored, normal file)
@@ -0,0 +1,39 @@
const debug = require("debug")("streamroller:fileNameFormatter");
const path = require("path");
const ZIP_EXT = ".gz";
const DEFAULT_FILENAME_SEP = ".";

module.exports = ({
  file,
  keepFileExt,
  needsIndex,
  alwaysIncludeDate,
  compress,
  fileNameSep
}) => {
  let FILENAME_SEP = fileNameSep || DEFAULT_FILENAME_SEP;
  const dirAndName = path.join(file.dir, file.name);

  const ext = f => f + file.ext;

  const index = (f, i, d) =>
    (needsIndex || !d) && i ? f + FILENAME_SEP + i : f;

  const date = (f, i, d) => {
    return (i > 0 || alwaysIncludeDate) && d ? f + FILENAME_SEP + d : f;
  };

  const gzip = (f, i) => (i && compress ? f + ZIP_EXT : f);

  const parts = keepFileExt
    ? [date, index, ext, gzip]
    : [ext, date, index, gzip];

  return ({ date, index }) => {
    debug(`_formatFileName: date=${date}, index=${index}`);
    return parts.reduce(
      (filename, part) => part(filename, index, date),
      dirAndName
    );
  };
};
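To make the part-function pipeline concrete, a small sketch of the formatter's output under assumed inputs; the expected strings in the comments are hand-traced from the code above.

const fileNameFormatter = require('streamroller/lib/fileNameFormatter');
const path = require('path');

const formatName = fileNameFormatter({
  file: path.parse('/var/log/app.log'),
  keepFileExt: false,   // ext goes before date and index in backup names
  needsIndex: true,
  alwaysIncludeDate: false,
  compress: true
});

formatName({ index: 0 });                     // "/var/log/app.log"
formatName({ index: 2, date: '2024-01-05' }); // "/var/log/app.log.2024-01-05.2.gz"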
96 node_modules/streamroller/lib/fileNameParser.js (generated, vendored, normal file)
@@ -0,0 +1,96 @@
const debug = require("debug")("streamroller:fileNameParser");
const ZIP_EXT = ".gz";
const format = require("date-format");
const DEFAULT_FILENAME_SEP = ".";

module.exports = ({ file, keepFileExt, pattern, fileNameSep }) => {
  let FILENAME_SEP = fileNameSep || DEFAULT_FILENAME_SEP;
  // All these functions take two arguments: f, the filename, and p, the result placeholder
  // They return the filename with any matching parts removed.
  // The "zip" function, for instance, removes the ".gz" part of the filename (if present)
  const zip = (f, p) => {
    if (f.endsWith(ZIP_EXT)) {
      debug("it is gzipped");
      p.isCompressed = true;
      return f.slice(0, -1 * ZIP_EXT.length);
    }
    return f;
  };

  const __NOT_MATCHING__ = "__NOT_MATCHING__";

  const extAtEnd = f => {
    if (f.startsWith(file.name) && f.endsWith(file.ext)) {
      debug("it starts and ends with the right things");
      return f.slice(file.name.length + 1, -1 * file.ext.length);
    }
    return __NOT_MATCHING__;
  };

  const extInMiddle = f => {
    if (f.startsWith(file.base)) {
      debug("it starts with the right things");
      return f.slice(file.base.length + 1);
    }
    return __NOT_MATCHING__;
  };

  const dateAndIndex = (f, p) => {
    const items = f.split(FILENAME_SEP);
    let indexStr = items[items.length - 1];
    debug("items: ", items, ", indexStr: ", indexStr);
    let dateStr = f;
    if (indexStr !== undefined && indexStr.match(/^\d+$/)) {
      dateStr = f.slice(0, -1 * (indexStr.length + 1));
      debug(`dateStr is ${dateStr}`);
      if (pattern && !dateStr) {
        dateStr = indexStr;
        indexStr = "0";
      }
    } else {
      indexStr = "0";
    }

    try {
      // Two arguments for new Date() are intentional. This will set other date
      // components to minimal values in the current timezone instead of UTC,
      // as new Date(0) will do.
      const date = format.parse(pattern, dateStr, new Date(0, 0));
      if (format.asString(pattern, date) !== dateStr) return f;
      p.index = parseInt(indexStr, 10);
      p.date = dateStr;
      p.timestamp = date.getTime();
      return "";
    } catch (e) {
      //not a valid date, don't panic.
      debug(`Problem parsing ${dateStr} as ${pattern}, error was: `, e);
      return f;
    }
  };

  const index = (f, p) => {
    if (f.match(/^\d+$/)) {
      debug("it has an index");
      p.index = parseInt(f, 10);
      return "";
    }
    return f;
  };

  let parts = [
    zip,
    keepFileExt ? extAtEnd : extInMiddle,
    pattern ? dateAndIndex : index
  ];

  return filename => {
    let result = { filename, index: 0, isCompressed: false };
    // pass the filename through each of the file part parsers
    let whatsLeftOver = parts.reduce(
      (remains, part) => part(remains, result),
      filename
    );
    // if there's anything left after parsing, then it wasn't a valid filename
    return whatsLeftOver ? null : result;
  };
};
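A matching sketch of the parser, hand-traced against the code above with assumed inputs; note that it returns null for filenames it does not recognize as its own backups.

const fileNameParser = require('streamroller/lib/fileNameParser');
const path = require('path');

const parse = fileNameParser({
  file: path.parse('/var/log/app.log'),
  keepFileExt: false,
  pattern: 'yyyy-MM-dd'
});

parse('app.log.2024-01-05.2.gz');
// => { filename: 'app.log.2024-01-05.2.gz', index: 2, isCompressed: true,
//      date: '2024-01-05', timestamp: <parsed local time> }
parse('unrelated.txt'); // => null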
5 node_modules/streamroller/lib/index.js (generated, vendored, normal file)
@@ -0,0 +1,5 @@
module.exports = {
  RollingFileWriteStream: require('./RollingFileWriteStream'),
  RollingFileStream: require('./RollingFileStream'),
  DateRollingFileStream: require('./DateRollingFileStream')
};
106 node_modules/streamroller/lib/moveAndMaybeCompressFile.js (generated, vendored, normal file)
@@ -0,0 +1,106 @@
const debug = require('debug')('streamroller:moveAndMaybeCompressFile');
const fs = require('fs-extra');
const zlib = require('zlib');

const _parseOption = function(rawOptions){
  const defaultOptions = {
    mode: parseInt("0600", 8),
    compress: false,
  };
  const options = Object.assign({}, defaultOptions, rawOptions);
  debug(`_parseOption: moveAndMaybeCompressFile called with option=${JSON.stringify(options)}`);
  return options;
};

const moveAndMaybeCompressFile = async (
  sourceFilePath,
  targetFilePath,
  options
) => {
  options = _parseOption(options);

  if (sourceFilePath === targetFilePath) {
    debug(`moveAndMaybeCompressFile: source and target are the same, not doing anything`);
    return;
  }

  if (await fs.pathExists(sourceFilePath)) {
    debug(
      `moveAndMaybeCompressFile: moving file from ${sourceFilePath} to ${targetFilePath} ${
        options.compress ? "with" : "without"
      } compress`
    );
    if (options.compress) {
      await new Promise((resolve, reject) => {
        let isCreated = false;
        // to avoid concurrency, the forked process which can create the file will proceed (using flags wx)
        const writeStream = fs.createWriteStream(targetFilePath, { mode: options.mode, flags: "wx" })
          // wait until writable stream is valid before proceeding to read
          .on("open", () => {
            isCreated = true;
            const readStream = fs.createReadStream(sourceFilePath)
              // wait until readable stream is valid before piping
              .on("open", () => {
                readStream.pipe(zlib.createGzip()).pipe(writeStream);
              })
              .on("error", (e) => {
                debug(`moveAndMaybeCompressFile: error reading ${sourceFilePath}`, e);
                // manually close writable: https://nodejs.org/api/stream.html#readablepipedestination-options
                writeStream.destroy(e);
              });
          })
          .on("finish", () => {
            debug(`moveAndMaybeCompressFile: finished compressing ${targetFilePath}, deleting ${sourceFilePath}`);
            // delete sourceFilePath
            fs.unlink(sourceFilePath)
              .then(resolve)
              .catch((e) => {
                debug(`moveAndMaybeCompressFile: error deleting ${sourceFilePath}, truncating instead`, e);
                // fallback to truncate
                fs.truncate(sourceFilePath)
                  .then(resolve)
                  .catch((e) => {
                    debug(`moveAndMaybeCompressFile: error truncating ${sourceFilePath}`, e);
                    reject(e);
                  });
              });
          })
          .on("error", (e) => {
            if (!isCreated) {
              debug(`moveAndMaybeCompressFile: error creating ${targetFilePath}`, e);
              // do not do anything if handled by another forked process
              reject(e);
            } else {
              debug(`moveAndMaybeCompressFile: error writing ${targetFilePath}, deleting`, e);
              // delete targetFilePath (taking as nothing happened)
              fs.unlink(targetFilePath)
                .then(() => { reject(e); })
                .catch((e) => {
                  debug(`moveAndMaybeCompressFile: error deleting ${targetFilePath}`, e);
                  reject(e);
                });
            }
          });
      }).catch(() => {});
    } else {
      debug(`moveAndMaybeCompressFile: renaming ${sourceFilePath} to ${targetFilePath}`);
      try {
        await fs.move(sourceFilePath, targetFilePath, { overwrite: true });
      } catch (e) {
        debug(`moveAndMaybeCompressFile: error renaming ${sourceFilePath} to ${targetFilePath}`, e);
        /* istanbul ignore else: no need to do anything if file does not exist */
        if (e.code !== "ENOENT") {
          debug(`moveAndMaybeCompressFile: trying copy+truncate instead`);
          try {
            await fs.copy(sourceFilePath, targetFilePath, { overwrite: true });
            await fs.truncate(sourceFilePath);
          } catch (e) {
            debug(`moveAndMaybeCompressFile: error copy+truncate`, e);
          }
        }
      }
    }
  }
};

module.exports = moveAndMaybeCompressFile;
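A brief sketch of this internal helper called in isolation; the paths are assumptions, and requiring the module by its lib path is only for illustration.

const moveAndMaybeCompressFile = require('streamroller/lib/moveAndMaybeCompressFile');

(async () => {
  // Gzips logs/app.log.1 into logs/app.log.2.gz with mode 0600 (the default),
  // deleting the source file on success.
  await moveAndMaybeCompressFile('logs/app.log.1', 'logs/app.log.2.gz', {
    compress: true
  });
})();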
2 node_modules/streamroller/lib/now.js (generated, vendored, normal file)
@@ -0,0 +1,2 @@
// allows us to inject a mock date in tests
module.exports = () => new Date();