Deployed the page to GitHub Pages.
parent 1d79754e93
commit 2c89899458
62,797 changed files with 6,551,425 additions and 15,279 deletions
142  node_modules/adm-zip/util/constants.js  generated  vendored  Normal file
@@ -0,0 +1,142 @@
module.exports = {
    /* The local file header */
    LOCHDR : 30, // LOC header size
    LOCSIG : 0x04034b50, // "PK\003\004"
    LOCVER : 4, // version needed to extract
    LOCFLG : 6, // general purpose bit flag
    LOCHOW : 8, // compression method
    LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)
    LOCCRC : 14, // uncompressed file crc-32 value
    LOCSIZ : 18, // compressed size
    LOCLEN : 22, // uncompressed size
    LOCNAM : 26, // filename length
    LOCEXT : 28, // extra field length

    /* The Data descriptor */
    EXTSIG : 0x08074b50, // "PK\007\008"
    EXTHDR : 16, // EXT header size
    EXTCRC : 4, // uncompressed file crc-32 value
    EXTSIZ : 8, // compressed size
    EXTLEN : 12, // uncompressed size

    /* The central directory file header */
    CENHDR : 46, // CEN header size
    CENSIG : 0x02014b50, // "PK\001\002"
    CENVEM : 4, // version made by
    CENVER : 6, // version needed to extract
    CENFLG : 8, // encrypt, decrypt flags
    CENHOW : 10, // compression method
    CENTIM : 12, // modification time (2 bytes time, 2 bytes date)
    CENCRC : 16, // uncompressed file crc-32 value
    CENSIZ : 20, // compressed size
    CENLEN : 24, // uncompressed size
    CENNAM : 28, // filename length
    CENEXT : 30, // extra field length
    CENCOM : 32, // file comment length
    CENDSK : 34, // volume number start
    CENATT : 36, // internal file attributes
    CENATX : 38, // external file attributes (host system dependent)
    CENOFF : 42, // LOC header offset

    /* The entries in the end of central directory */
    ENDHDR : 22, // END header size
    ENDSIG : 0x06054b50, // "PK\005\006"
    ENDSUB : 8, // number of entries on this disk
    ENDTOT : 10, // total number of entries
    ENDSIZ : 12, // central directory size in bytes
    ENDOFF : 16, // offset of first CEN header
    ENDCOM : 20, // zip file comment length

    END64HDR : 20, // zip64 END header size
    END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007"
    END64START : 4, // number of the disk with the start of the zip64
    END64OFF : 8, // relative offset of the zip64 end of central directory
    END64NUMDISKS : 16, // total number of disks

    ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006"
    ZIP64HDR : 56, // zip64 record minimum size
    ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE
    ZIP64SIZE : 4, // zip64 size of the central directory record
    ZIP64VEM : 12, // zip64 version made by
    ZIP64VER : 14, // zip64 version needed to extract
    ZIP64DSK : 16, // zip64 number of this disk
    ZIP64DSKDIR : 20, // number of the disk with the start of the record directory
    ZIP64SUB : 24, // number of entries on this disk
    ZIP64TOT : 32, // total number of entries
    ZIP64SIZB : 40, // zip64 central directory size in bytes
    ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number
    ZIP64EXTRA : 56, // extensible data sector

    /* Compression methods */
    STORED : 0, // no compression
    SHRUNK : 1, // shrunk
    REDUCED1 : 2, // reduced with compression factor 1
    REDUCED2 : 3, // reduced with compression factor 2
    REDUCED3 : 4, // reduced with compression factor 3
    REDUCED4 : 5, // reduced with compression factor 4
    IMPLODED : 6, // imploded
    // 7 reserved for Tokenizing compression algorithm
    DEFLATED : 8, // deflated
    ENHANCED_DEFLATED: 9, // enhanced deflated
    PKWARE : 10, // PKWare DCL imploded
    // 11 reserved by PKWARE
    BZIP2 : 12, // compressed using BZIP2
    // 13 reserved by PKWARE
    LZMA : 14, // LZMA
    // 15-17 reserved by PKWARE
    IBM_TERSE : 18, // compressed using IBM TERSE
    IBM_LZ77 : 19, // IBM LZ77 z
    AES_ENCRYPT : 99, // WinZIP AES encryption method

    /* General purpose bit flag */
    // values can obtained with expression 2**bitnr
    FLG_ENC : 1, // Bit 0: encrypted file
    FLG_COMP1 : 2, // Bit 1, compression option
    FLG_COMP2 : 4, // Bit 2, compression option
    FLG_DESC : 8, // Bit 3, data descriptor
    FLG_ENH : 16, // Bit 4, enhanced deflating
    FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data.
    FLG_STR : 64, // Bit 6, strong encryption (patented)
    // Bits 7-10: Currently unused.
    FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS)
    // Bit 12: Reserved by PKWARE for enhanced compression.
    // Bit 13: encrypted the Central Directory (patented).
    // Bits 14-15: Reserved by PKWARE.
    FLG_MSK : 4096, // mask header values

    /* Load type */
    FILE : 2,
    BUFFER : 1,
    NONE : 0,

    /* 4.5 Extensible data fields */
    EF_ID : 0,
    EF_SIZE : 2,

    /* Header IDs */
    ID_ZIP64 : 0x0001,
    ID_AVINFO : 0x0007,
    ID_PFS : 0x0008,
    ID_OS2 : 0x0009,
    ID_NTFS : 0x000a,
    ID_OPENVMS : 0x000c,
    ID_UNIX : 0x000d,
    ID_FORK : 0x000e,
    ID_PATCH : 0x000f,
    ID_X509_PKCS7 : 0x0014,
    ID_X509_CERTID_F : 0x0015,
    ID_X509_CERTID_C : 0x0016,
    ID_STRONGENC : 0x0017,
    ID_RECORD_MGT : 0x0018,
    ID_X509_PKCS7_RL : 0x0019,
    ID_IBM1 : 0x0065,
    ID_IBM2 : 0x0066,
    ID_POSZIP : 0x4690,

    EF_ZIP64_OR_32 : 0xffffffff,
    EF_ZIP64_OR_16 : 0xffff,
    EF_ZIP64_SUNCOMP : 0,
    EF_ZIP64_SCOMP : 8,
    EF_ZIP64_RHO : 16,
    EF_ZIP64_DSN : 24
};
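Reader's note (not part of the diff): the constants above are byte offsets into the fixed-size ZIP headers, so parsing is plain arithmetic on a Buffer. The sketch below is illustrative only; the buffer, the offset, and the require path are assumptions, not taken from this commit.

// Minimal sketch: reading a local file header using the offsets defined above.
const C = require("adm-zip/util/constants"); // illustrative require path

function readLocalHeader(buf, offset) {
    // Check the "PK\003\004" signature before trusting the remaining fields.
    if (buf.readUInt32LE(offset) !== C.LOCSIG) throw new Error("not a LOC header");
    return {
        method: buf.readUInt16LE(offset + C.LOCHOW),           // e.g. C.DEFLATED
        compressedSize: buf.readUInt32LE(offset + C.LOCSIZ),
        uncompressedSize: buf.readUInt32LE(offset + C.LOCLEN),
        nameLength: buf.readUInt16LE(offset + C.LOCNAM),
        extraLength: buf.readUInt16LE(offset + C.LOCEXT)
    };
}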
63  node_modules/adm-zip/util/errors.js  generated  vendored  Normal file
@@ -0,0 +1,63 @@
const errors = {
    /* Header error messages */
    INVALID_LOC: "Invalid LOC header (bad signature)",
    INVALID_CEN: "Invalid CEN header (bad signature)",
    INVALID_END: "Invalid END header (bad signature)",

    /* Descriptor */
    DESCRIPTOR_NOT_EXIST: "No descriptor present",
    DESCRIPTOR_UNKNOWN: "Unknown descriptor format",
    DESCRIPTOR_FAULTY: "Descriptor data is malformed",

    /* ZipEntry error messages*/
    NO_DATA: "Nothing to decompress",
    BAD_CRC: "CRC32 checksum failed {0}",
    FILE_IN_THE_WAY: "There is a file in the way: {0}",
    UNKNOWN_METHOD: "Invalid/unsupported compression method",

    /* Inflater error messages */
    AVAIL_DATA: "inflate::Available inflate data did not terminate",
    INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block",
    TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes",
    INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths",
    INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length",
    INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete",
    INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths",
    INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths",
    INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement",
    INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)",

    /* ADM-ZIP error messages */
    CANT_EXTRACT_FILE: "Could not extract the file",
    CANT_OVERRIDE: "Target file already exists",
    DISK_ENTRY_TOO_LARGE: "Number of disk entries is too large",
    NO_ZIP: "No zip file was loaded",
    NO_ENTRY: "Entry doesn't exist",
    DIRECTORY_CONTENT_ERROR: "A directory cannot have content",
    FILE_NOT_FOUND: 'File not found: "{0}"',
    NOT_IMPLEMENTED: "Not implemented",
    INVALID_FILENAME: "Invalid filename",
    INVALID_FORMAT: "Invalid or unsupported zip format. No END header found",
    INVALID_PASS_PARAM: "Incompatible password parameter",
    WRONG_PASSWORD: "Wrong Password",

    /* ADM-ZIP */
    COMMENT_TOO_LONG: "Comment is too long", // Comment can be max 65535 bytes long (NOTE: some non-US characters may take more space)
    EXTRA_FIELD_PARSE_ERROR: "Extra field parsing error"
};

// template
function E(message) {
    return function (...args) {
        if (args.length) { // Allow {0} .. {9} arguments in error message, based on argument number
            message = message.replace(/\{(\d)\}/g, (_, n) => args[n] || '');
        }

        return new Error('ADM-ZIP: ' + message);
    };
}

// Init errors with template
for (const msg of Object.keys(errors)) {
    exports[msg] = E(errors[msg]);
}
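Reader's note (not part of the diff): every key on the errors map is exported as a factory produced by E(), so callers create Error objects rather than throwing raw strings, with {0}..{9} placeholders filled from the arguments. A small usage sketch, with an assumed require path:

// Minimal sketch: consuming the generated error factories.
const Errors = require("./errors"); // path assumed relative to adm-zip/util

const err = Errors.FILE_NOT_FOUND("backup.zip");
console.log(err instanceof Error); // true
console.log(err.message);          // ADM-ZIP: File not found: "backup.zip"

const plain = Errors.NO_ZIP();     // messages without placeholders are used as-is
console.log(plain.message);        // ADM-ZIP: No zip file was loaded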
76  node_modules/adm-zip/util/fattr.js  generated  vendored  Normal file
@@ -0,0 +1,76 @@
const pth = require("path");

module.exports = function (/*String*/ path, /*Utils object*/ { fs }) {
    var _path = path || "",
        _obj = newAttr(),
        _stat = null;

    function newAttr() {
        return {
            directory: false,
            readonly: false,
            hidden: false,
            executable: false,
            mtime: 0,
            atime: 0
        };
    }

    if (_path && fs.existsSync(_path)) {
        _stat = fs.statSync(_path);
        _obj.directory = _stat.isDirectory();
        _obj.mtime = _stat.mtime;
        _obj.atime = _stat.atime;
        _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner
        _obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right
        _obj.hidden = pth.basename(_path)[0] === ".";
    } else {
        console.warn("Invalid path: " + _path);
    }

    return {
        get directory() {
            return _obj.directory;
        },

        get readOnly() {
            return _obj.readonly;
        },

        get hidden() {
            return _obj.hidden;
        },

        get mtime() {
            return _obj.mtime;
        },

        get atime() {
            return _obj.atime;
        },

        get executable() {
            return _obj.executable;
        },

        decodeAttributes: function () {},

        encodeAttributes: function () {},

        toJSON: function () {
            return {
                path: _path,
                isDirectory: _obj.directory,
                isReadOnly: _obj.readonly,
                isHidden: _obj.hidden,
                isExecutable: _obj.executable,
                mTime: _obj.mtime,
                aTime: _obj.atime
            };
        },

        toString: function () {
            return JSON.stringify(this.toJSON(), null, "\t");
        }
    };
};
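Reader's note (not part of the diff): fattr.js exports a factory that snapshots a path's filesystem attributes at construction time; the fs module is injected via the destructured second argument, which is how adm-zip supports custom filesystems. A brief sketch under that assumption, with an illustrative require path:

// Minimal sketch: reading attributes for an existing file.
const fs = require("fs");
const FileAttr = require("./fattr"); // path assumed relative to adm-zip/util

const attr = FileAttr(__filename, { fs });
console.log(attr.directory);  // false for a regular file
console.log(attr.readOnly);   // true only if the owner write bit is cleared
console.log(attr.toString()); // JSON snapshot produced by toJSON()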
5  node_modules/adm-zip/util/index.js  generated  vendored  Normal file
@@ -0,0 +1,5 @@
module.exports = require("./utils");
module.exports.Constants = require("./constants");
module.exports.Errors = require("./errors");
module.exports.FileAttr = require("./fattr");
module.exports.decoder = require("./decoder");
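Reader's note (not part of the diff): index.js simply attaches the sibling util modules to the Utils constructor exported by utils.js, so one require exposes the whole util surface; ./decoder is added by another file in this commit. Illustrative only:

// Minimal sketch of the aggregated exports.
const Utils = require("./index");              // constructor from utils.js
const { Constants, Errors, FileAttr } = Utils; // sub-modules attached above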
336  node_modules/adm-zip/util/utils.js  generated  vendored  Normal file
@@ -0,0 +1,336 @@
const fsystem = require("fs");
const pth = require("path");
const Constants = require("./constants");
const Errors = require("./errors");
const isWin = typeof process === "object" && "win32" === process.platform;

const is_Obj = (obj) => typeof obj === "object" && obj !== null;

// generate CRC32 lookup table
const crcTable = new Uint32Array(256).map((t, c) => {
    for (let k = 0; k < 8; k++) {
        if ((c & 1) !== 0) {
            c = 0xedb88320 ^ (c >>> 1);
        } else {
            c >>>= 1;
        }
    }
    return c >>> 0;
});

// UTILS functions

function Utils(opts) {
    this.sep = pth.sep;
    this.fs = fsystem;

    if (is_Obj(opts)) {
        // custom filesystem
        if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") {
            this.fs = opts.fs;
        }
    }
}

module.exports = Utils;

// INSTANTIABLE functions

Utils.prototype.makeDir = function (/*String*/ folder) {
    const self = this;

    // Sync - make directories tree
    function mkdirSync(/*String*/ fpath) {
        let resolvedPath = fpath.split(self.sep)[0];
        fpath.split(self.sep).forEach(function (name) {
            if (!name || name.substr(-1, 1) === ":") return;
            resolvedPath += self.sep + name;
            var stat;
            try {
                stat = self.fs.statSync(resolvedPath);
            } catch (e) {
                self.fs.mkdirSync(resolvedPath);
            }
            if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY(`"${resolvedPath}"`);
        });
    }

    mkdirSync(folder);
};

Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) {
    const self = this;
    if (self.fs.existsSync(path)) {
        if (!overwrite) return false; // cannot overwrite

        var stat = self.fs.statSync(path);
        if (stat.isDirectory()) {
            return false;
        }
    }
    var folder = pth.dirname(path);
    if (!self.fs.existsSync(folder)) {
        self.makeDir(folder);
    }

    var fd;
    try {
        fd = self.fs.openSync(path, "w", 0o666); // 0666
    } catch (e) {
        self.fs.chmodSync(path, 0o666);
        fd = self.fs.openSync(path, "w", 0o666);
    }
    if (fd) {
        try {
            self.fs.writeSync(fd, content, 0, content.length, 0);
        } finally {
            self.fs.closeSync(fd);
        }
    }
    self.fs.chmodSync(path, attr || 0o666);
    return true;
};

Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) {
    if (typeof attr === "function") {
        callback = attr;
        attr = undefined;
    }

    const self = this;

    self.fs.exists(path, function (exist) {
        if (exist && !overwrite) return callback(false);

        self.fs.stat(path, function (err, stat) {
            if (exist && stat.isDirectory()) {
                return callback(false);
            }

            var folder = pth.dirname(path);
            self.fs.exists(folder, function (exists) {
                if (!exists) self.makeDir(folder);

                self.fs.open(path, "w", 0o666, function (err, fd) {
                    if (err) {
                        self.fs.chmod(path, 0o666, function () {
                            self.fs.open(path, "w", 0o666, function (err, fd) {
                                self.fs.write(fd, content, 0, content.length, 0, function () {
                                    self.fs.close(fd, function () {
                                        self.fs.chmod(path, attr || 0o666, function () {
                                            callback(true);
                                        });
                                    });
                                });
                            });
                        });
                    } else if (fd) {
                        self.fs.write(fd, content, 0, content.length, 0, function () {
                            self.fs.close(fd, function () {
                                self.fs.chmod(path, attr || 0o666, function () {
                                    callback(true);
                                });
                            });
                        });
                    } else {
                        self.fs.chmod(path, attr || 0o666, function () {
                            callback(true);
                        });
                    }
                });
            });
        });
    });
};

Utils.prototype.findFiles = function (/*String*/ path) {
    const self = this;

    function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) {
        if (typeof pattern === "boolean") {
            recursive = pattern;
            pattern = undefined;
        }
        let files = [];
        self.fs.readdirSync(dir).forEach(function (file) {
            const path = pth.join(dir, file);
            const stat = self.fs.statSync(path);

            if (!pattern || pattern.test(path)) {
                files.push(pth.normalize(path) + (stat.isDirectory() ? self.sep : ""));
            }

            if (stat.isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive));
        });
        return files;
    }

    return findSync(path, undefined, true);
};

/**
 * Callback for showing if everything was done.
 *
 * @callback filelistCallback
 * @param {Error} err - Error object
 * @param {string[]} list - was request fully completed
 */

/**
 *
 * @param {string} dir
 * @param {filelistCallback} cb
 */
Utils.prototype.findFilesAsync = function (dir, cb) {
    const self = this;
    let results = [];
    self.fs.readdir(dir, function (err, list) {
        if (err) return cb(err);
        let list_length = list.length;
        if (!list_length) return cb(null, results);
        list.forEach(function (file) {
            file = pth.join(dir, file);
            self.fs.stat(file, function (err, stat) {
                if (err) return cb(err);
                if (stat) {
                    results.push(pth.normalize(file) + (stat.isDirectory() ? self.sep : ""));
                    if (stat.isDirectory()) {
                        self.findFilesAsync(file, function (err, res) {
                            if (err) return cb(err);
                            results = results.concat(res);
                            if (!--list_length) cb(null, results);
                        });
                    } else {
                        if (!--list_length) cb(null, results);
                    }
                }
            });
        });
    });
};

Utils.prototype.getAttributes = function () {};

Utils.prototype.setAttributes = function () {};

// STATIC functions

// crc32 single update (it is part of crc32)
Utils.crc32update = function (crc, byte) {
    return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8);
};

Utils.crc32 = function (buf) {
    if (typeof buf === "string") {
        buf = Buffer.from(buf, "utf8");
    }

    let len = buf.length;
    let crc = ~0;
    for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]);
    // xor and cast as uint32 number
    return ~crc >>> 0;
};

Utils.methodToString = function (/*Number*/ method) {
    switch (method) {
        case Constants.STORED:
            return "STORED (" + method + ")";
        case Constants.DEFLATED:
            return "DEFLATED (" + method + ")";
        default:
            return "UNSUPPORTED (" + method + ")";
    }
};

/**
 * removes ".." style path elements
 * @param {string} path - fixable path
 * @returns string - fixed filepath
 */
Utils.canonical = function (/*string*/ path) {
    if (!path) return "";
    // trick normalize think path is absolute
    const safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/"));
    return pth.join(".", safeSuffix);
};

/**
 * fix file names in achive
 * @param {string} path - fixable path
 * @returns string - fixed filepath
 */

Utils.zipnamefix = function (path) {
    if (!path) return "";
    // trick normalize think path is absolute
    const safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/"));
    return pth.posix.join(".", safeSuffix);
};

/**
 *
 * @param {Array} arr
 * @param {function} callback
 * @returns
 */
Utils.findLast = function (arr, callback) {
    if (!Array.isArray(arr)) throw new TypeError("arr is not array");

    const len = arr.length >>> 0;
    for (let i = len - 1; i >= 0; i--) {
        if (callback(arr[i], i, arr)) {
            return arr[i];
        }
    }
    return void 0;
};

// make abolute paths taking prefix as root folder
Utils.sanitize = function (/*string*/ prefix, /*string*/ name) {
    prefix = pth.resolve(pth.normalize(prefix));
    var parts = name.split("/");
    for (var i = 0, l = parts.length; i < l; i++) {
        var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep)));
        if (path.indexOf(prefix) === 0) {
            return path;
        }
    }
    return pth.normalize(pth.join(prefix, pth.basename(name)));
};

// converts buffer, Uint8Array, string types to buffer
Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input, /* function */ encoder) {
    if (Buffer.isBuffer(input)) {
        return input;
    } else if (input instanceof Uint8Array) {
        return Buffer.from(input);
    } else {
        // expect string all other values are invalid and return empty buffer
        return typeof input === "string" ? encoder(input) : Buffer.alloc(0);
    }
};

Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) {
    var slice = Buffer.from(buffer.slice(index, index + 8));
    slice.swap64();

    return parseInt(`0x${slice.toString("hex")}`);
};

Utils.fromDOS2Date = function (val) {
    return new Date(((val >> 25) & 0x7f) + 1980, Math.max(((val >> 21) & 0x0f) - 1, 0), Math.max((val >> 16) & 0x1f, 1), (val >> 11) & 0x1f, (val >> 5) & 0x3f, (val & 0x1f) << 1);
};

Utils.fromDate2DOS = function (val) {
    let date = 0;
    let time = 0;
    if (val.getFullYear() > 1979) {
        date = (((val.getFullYear() - 1980) & 0x7f) << 9) | ((val.getMonth() + 1) << 5) | val.getDate();
        time = (val.getHours() << 11) | (val.getMinutes() << 5) | (val.getSeconds() >> 1);
    }
    return (date << 16) | time;
};

Utils.isWin = isWin; // Do we have windows system
Utils.crcTable = crcTable;
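Reader's note (not part of the diff): the static helpers on Utils cover the checksum, path-safety, and DOS timestamp work the rest of adm-zip relies on. A short sketch of how they might be exercised; the require path and sample values are illustrative, and the sanitize output shown assumes a POSIX filesystem.

// Minimal sketch: exercising the static helpers.
const Utils = require("./utils"); // path assumed relative to adm-zip/util

// CRC-32 of a string (Buffers and Uint8Arrays are accepted too)
console.log(Utils.crc32("hello").toString(16)); // 3610a686

// Keep extraction targets inside a root folder even if the entry name tries to escape
console.log(Utils.sanitize("/tmp/out", "../../etc/passwd")); // /tmp/out/etc/passwd

// DOS date/time round trip used for ZIP header timestamps (2-second resolution)
const dos = Utils.fromDate2DOS(new Date());
console.log(Utils.fromDOS2Date(dos));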