initial upload

This commit is contained in:
jackbeeby
2025-05-15 13:35:49 +10:00
commit 8c53ff1000
9092 changed files with 1833300 additions and 0 deletions

3
node_modules/tar/dist/commonjs/create.d.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
import { Pack, PackSync } from './pack.js';
/** The tar `create` command, built from sync/async × file/stream implementations (see create.js). */
export declare const create: import("./make-command.js").TarCommand<Pack, PackSync>;
//# sourceMappingURL=create.d.ts.map

1
node_modules/tar/dist/commonjs/create.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"create.d.ts","sourceRoot":"","sources":["../../src/create.ts"],"names":[],"mappings":"AAWA,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AA8E1C,eAAO,MAAM,MAAM,wDAUlB,CAAA"}

83
node_modules/tar/dist/commonjs/create.js generated vendored Normal file
View File

@@ -0,0 +1,83 @@
"use strict";
// TypeScript CommonJS emit helper: wrap a non-ES module as { default: mod }.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.create = void 0;
const fs_minipass_1 = require("@isaacs/fs-minipass");
const node_path_1 = __importDefault(require("node:path"));
const list_js_1 = require("./list.js");
const make_command_js_1 = require("./make-command.js");
const pack_js_1 = require("./pack.js");
// Create an archive synchronously, writing the pack output to opt.file.
const createFileSync = (opt, files) => {
    const p = new pack_js_1.PackSync(opt);
    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
        // default file mode 0o666 (further restricted by the process umask)
        mode: opt.mode || 0o666,
    });
    p.pipe(stream);
    addFilesSync(p, files);
};
// Create an archive asynchronously, writing to opt.file.
// Resolves when the write stream closes; rejects on a pack or stream error.
const createFile = (opt, files) => {
    const p = new pack_js_1.Pack(opt);
    const stream = new fs_minipass_1.WriteStream(opt.file, {
        // default file mode 0o666 (further restricted by the process umask)
        mode: opt.mode || 0o666,
    });
    p.pipe(stream);
    const promise = new Promise((res, rej) => {
        stream.on('error', rej);
        stream.on('close', res);
        p.on('error', rej);
    });
    // addFilesAsync is deliberately not awaited; failures surface through
    // the 'error' listeners wired up above.
    addFilesAsync(p, files);
    return promise;
};
// Add each entry to the pack stream synchronously, then end the stream.
// A name starting with '@' refers to another tar archive whose entries are
// re-added to this one (resolved relative to the pack's cwd).
const addFilesSync = (p, files) => {
    files.forEach(file => {
        if (file.charAt(0) === '@') {
            (0, list_js_1.list)({
                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
                sync: true,
                noResume: true,
                onReadEntry: entry => p.add(entry),
            });
        }
        else {
            p.add(file);
        }
    });
    p.end();
};
// Async counterpart of addFilesSync: entries are added strictly in order,
// awaiting each '@archive' expansion before moving on.
const addFilesAsync = async (p, files) => {
    for (let i = 0; i < files.length; i++) {
        const file = String(files[i]);
        if (file.charAt(0) === '@') {
            await (0, list_js_1.list)({
                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
                noResume: true,
                onReadEntry: entry => {
                    p.add(entry);
                },
            });
        }
        else {
            p.add(file);
        }
    }
    p.end();
};
// Streaming sync mode (no file target): return the PackSync stream itself.
const createSync = (opt, files) => {
    const p = new pack_js_1.PackSync(opt);
    addFilesSync(p, files);
    return p;
};
// Streaming async mode: return the Pack stream; entries are added in the
// background by addFilesAsync.
const createAsync = (opt, files) => {
    const p = new pack_js_1.Pack(opt);
    addFilesAsync(p, files);
    return p;
};
// Assemble the four mode implementations into the single `create` command,
// validating up front that at least one path was given.
exports.create = (0, make_command_js_1.makeCommand)(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
    if (!files?.length) {
        throw new TypeError('no paths specified to add to archive');
    }
});
//# sourceMappingURL=create.js.map

1
node_modules/tar/dist/commonjs/create.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

8
node_modules/tar/dist/commonjs/cwd-error.d.ts generated vendored Normal file
View File

@@ -0,0 +1,8 @@
/** Error thrown when tar cannot chdir into a target directory. */
export declare class CwdError extends Error {
    /** the directory that could not be entered */
    path: string;
    /** the error code string passed by the caller (used in the message) */
    code: string;
    syscall: 'chdir';
    constructor(path: string, code: string);
    /** always 'CwdError' rather than the default 'Error' */
    get name(): string;
}
//# sourceMappingURL=cwd-error.d.ts.map

1
node_modules/tar/dist/commonjs/cwd-error.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"cwd-error.d.ts","sourceRoot":"","sources":["../../src/cwd-error.ts"],"names":[],"mappings":"AAAA,qBAAa,QAAS,SAAQ,KAAK;IACjC,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,OAAO,EAAE,OAAO,CAAU;gBAEd,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM;IAMtC,IAAI,IAAI,WAEP;CACF"}

18
node_modules/tar/dist/commonjs/cwd-error.js generated vendored Normal file
View File

@@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CwdError = void 0;
// Error raised when a chdir into the requested directory fails.
class CwdError extends Error {
    syscall = 'chdir';
    path;
    code;
    constructor(path, code) {
        super(`${code}: Cannot cd into '${path}'`);
        this.code = code;
        this.path = path;
    }
    // Report a stable name instead of the default 'Error'.
    get name() {
        return 'CwdError';
    }
}
exports.CwdError = CwdError;
//# sourceMappingURL=cwd-error.js.map

1
node_modules/tar/dist/commonjs/cwd-error.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"cwd-error.js","sourceRoot":"","sources":["../../src/cwd-error.ts"],"names":[],"mappings":";;;AAAA,MAAa,QAAS,SAAQ,KAAK;IACjC,IAAI,CAAQ;IACZ,IAAI,CAAQ;IACZ,OAAO,GAAY,OAAO,CAAA;IAE1B,YAAY,IAAY,EAAE,IAAY;QACpC,KAAK,CAAC,GAAG,IAAI,qBAAqB,IAAI,GAAG,CAAC,CAAA;QAC1C,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;IAClB,CAAC;IAED,IAAI,IAAI;QACN,OAAO,UAAU,CAAA;IACnB,CAAC;CACF;AAdD,4BAcC","sourcesContent":["export class CwdError extends Error {\n path: string\n code: string\n syscall: 'chdir' = 'chdir'\n\n constructor(path: string, code: string) {\n super(`${code}: Cannot cd into '${path}'`)\n this.path = path\n this.code = code\n }\n\n get name() {\n return 'CwdError'\n }\n}\n"]}

3
node_modules/tar/dist/commonjs/extract.d.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
import { Unpack, UnpackSync } from './unpack.js';
/** The tar `extract` (tar -x) command, built from sync/async × file/stream implementations (see extract.js). */
export declare const extract: import("./make-command.js").TarCommand<Unpack, UnpackSync>;
//# sourceMappingURL=extract.d.ts.map

1
node_modules/tar/dist/commonjs/extract.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"extract.d.ts","sourceRoot":"","sources":["../../src/extract.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AA2ChD,eAAO,MAAM,OAAO,4DAQnB,CAAA"}

78
node_modules/tar/dist/commonjs/extract.js generated vendored Normal file
View File

@@ -0,0 +1,78 @@
"use strict";
// TypeScript CommonJS emit helpers: re-create ES-module import semantics
// (namespace imports, default interop) on top of require().
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.extract = void 0;
// tar -x
const fsm = __importStar(require("@isaacs/fs-minipass"));
const node_fs_1 = __importDefault(require("node:fs"));
const list_js_1 = require("./list.js");
const make_command_js_1 = require("./make-command.js");
const unpack_js_1 = require("./unpack.js");
// Extract synchronously from opt.file: stat first so the read stream
// knows the exact size, then pipe it into the unpacker.
const extractFileSync = (opt) => {
    const u = new unpack_js_1.UnpackSync(opt);
    const file = opt.file;
    const stat = node_fs_1.default.statSync(file);
    // This trades a zero-byte read() syscall for a stat
    // However, it will usually result in less memory allocation
    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
    const stream = new fsm.ReadStreamSync(file, {
        readSize: readSize,
        size: stat.size,
    });
    stream.pipe(u);
};
// Extract asynchronously from opt.file.  Resolves when the unpacker
// closes; rejects on a stat, stream, or unpack error.
const extractFile = (opt, _) => {
    const u = new unpack_js_1.Unpack(opt);
    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
    const file = opt.file;
    const p = new Promise((resolve, reject) => {
        u.on('error', reject);
        u.on('close', resolve);
        // This trades a zero-byte read() syscall for a stat
        // However, it will usually result in less memory allocation
        node_fs_1.default.stat(file, (er, stat) => {
            if (er) {
                reject(er);
            }
            else {
                const stream = new fsm.ReadStream(file, {
                    readSize: readSize,
                    size: stat.size,
                });
                stream.on('error', reject);
                stream.pipe(u);
            }
        });
    });
    return p;
};
// Assemble the four extract modes.  Unlike create, a file list is
// optional here: when given, it becomes an entry filter.
exports.extract = (0, make_command_js_1.makeCommand)(extractFileSync, extractFile, opt => new unpack_js_1.UnpackSync(opt), opt => new unpack_js_1.Unpack(opt), (opt, files) => {
    if (files?.length)
        (0, list_js_1.filesFilter)(opt, files);
});
//# sourceMappingURL=extract.js.map

1
node_modules/tar/dist/commonjs/extract.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"extract.js","sourceRoot":"","sources":["../../src/extract.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS;AACT,yDAA0C;AAC1C,sDAAwB;AACxB,uCAAuC;AACvC,uDAA+C;AAE/C,2CAAgD;AAEhD,MAAM,eAAe,GAAG,CAAC,GAAuB,EAAE,EAAE;IAClD,MAAM,CAAC,GAAG,IAAI,sBAAU,CAAC,GAAG,CAAC,CAAA;IAC7B,MAAM,IAAI,GAAG,GAAG,CAAC,IAAI,CAAA;IACrB,MAAM,IAAI,GAAG,iBAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA;IAC9B,oDAAoD;IACpD,4DAA4D;IAC5D,MAAM,QAAQ,GAAG,GAAG,CAAC,WAAW,IAAI,EAAE,GAAG,IAAI,GAAG,IAAI,CAAA;IACpD,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE;QAC1C,QAAQ,EAAE,QAAQ;QAClB,IAAI,EAAE,IAAI,CAAC,IAAI;KAChB,CAAC,CAAA;IACF,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;AAChB,CAAC,CAAA;AAED,MAAM,WAAW,GAAG,CAAC,GAAmB,EAAE,CAAY,EAAE,EAAE;IACxD,MAAM,CAAC,GAAG,IAAI,kBAAM,CAAC,GAAG,CAAC,CAAA;IACzB,MAAM,QAAQ,GAAG,GAAG,CAAC,WAAW,IAAI,EAAE,GAAG,IAAI,GAAG,IAAI,CAAA;IAEpD,MAAM,IAAI,GAAG,GAAG,CAAC,IAAI,CAAA;IACrB,MAAM,CAAC,GAAG,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC9C,CAAC,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;QACrB,CAAC,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;QAEtB,oDAAoD;QACpD,4DAA4D;QAC5D,iBAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE;YACzB,IAAI,EAAE,EAAE,CAAC;gBACP,MAAM,CAAC,EAAE,CAAC,CAAA;YACZ,CAAC;iBAAM,CAAC;gBACN,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE;oBACtC,QAAQ,EAAE,QAAQ;oBAClB,IAAI,EAAE,IAAI,CAAC,IAAI;iBAChB,CAAC,CAAA;gBACF,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;gBAC1B,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;YAChB,CAAC;QACH,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IACF,OAAO,CAAC,CAAA;AACV,CAAC,CAAA;AAEY,QAAA,OAAO,GAAG,IAAA,6BAAW,EAChC,eAAe,EACf,WAAW,EACX,GAAG,CAAC,EAAE,CAAC,IAAI,sBAAU,CAAC,GAAG,CAAC,EAC1B,GAAG,CAAC,EAAE,CAAC,IAAI,kBAAM,CAAC,GAAG,CAAC,EACtB,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE;IACb,IAAI,KAAK,EAAE,MAAM;QAAE,IAAA,qBAAW,EAAC,GAAG,EAAE,KAAK,CAAC,CAAA;AAC5C,CAAC,CACF,CAAA","sourcesContent":["// tar -x\nimport * as fsm from '@isaacs/fs-minipass'\nimport fs from 'node:fs'\nimport { filesFilter } from './list.js'\nimport { makeCommand } from 
'./make-command.js'\nimport { TarOptionsFile, TarOptionsSyncFile } from './options.js'\nimport { Unpack, UnpackSync } from './unpack.js'\n\nconst extractFileSync = (opt: TarOptionsSyncFile) => {\n const u = new UnpackSync(opt)\n const file = opt.file\n const stat = fs.statSync(file)\n // This trades a zero-byte read() syscall for a stat\n // However, it will usually result in less memory allocation\n const readSize = opt.maxReadSize || 16 * 1024 * 1024\n const stream = new fsm.ReadStreamSync(file, {\n readSize: readSize,\n size: stat.size,\n })\n stream.pipe(u)\n}\n\nconst extractFile = (opt: TarOptionsFile, _?: string[]) => {\n const u = new Unpack(opt)\n const readSize = opt.maxReadSize || 16 * 1024 * 1024\n\n const file = opt.file\n const p = new Promise<void>((resolve, reject) => {\n u.on('error', reject)\n u.on('close', resolve)\n\n // This trades a zero-byte read() syscall for a stat\n // However, it will usually result in less memory allocation\n fs.stat(file, (er, stat) => {\n if (er) {\n reject(er)\n } else {\n const stream = new fsm.ReadStream(file, {\n readSize: readSize,\n size: stat.size,\n })\n stream.on('error', reject)\n stream.pipe(u)\n }\n })\n })\n return p\n}\n\nexport const extract = makeCommand<Unpack, UnpackSync>(\n extractFileSync,\n extractFile,\n opt => new UnpackSync(opt),\n opt => new Unpack(opt),\n (opt, files) => {\n if (files?.length) filesFilter(opt, files)\n },\n)\n"]}

2
node_modules/tar/dist/commonjs/get-write-flag.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
/** fs open flag for creating files; on platforms where FILEMAP applies it is chosen per file size (see get-write-flag.js). */
export declare const getWriteFlag: (() => string) | ((size: number) => number | "w");
//# sourceMappingURL=get-write-flag.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"get-write-flag.d.ts","sourceRoot":"","sources":["../../src/get-write-flag.ts"],"names":[],"mappings":"AAwBA,eAAO,MAAM,YAAY,2BAGd,MAAM,kBAAwC,CAAA"}

29
node_modules/tar/dist/commonjs/get-write-flag.js generated vendored Normal file
View File

@@ -0,0 +1,29 @@
"use strict";
// Get the appropriate flag to use for creating files
// We use fmap on Windows platforms for files less than
// 512kb. This is a fairly low limit, but avoids making
// things slower in some cases. Since most of what this
// library is used for is extracting tarballs of many
// relatively small files in npm packages and the like,
// it can be a big boost on Windows platforms.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getWriteFlag = void 0;
const fs_1 = __importDefault(require("fs"));
// __FAKE_PLATFORM__ lets tests exercise the Windows branch elsewhere.
const platform = process.env.__FAKE_PLATFORM__ || process.platform;
const isWindows = platform === 'win32';
/* c8 ignore start */
const { O_CREAT, O_TRUNC, O_WRONLY } = fs_1.default.constants;
// NOTE(review): the test-override env var is spelled __FAKE_FS_O_FILENAME__,
// not ...FILEMAP... — appears deliberate upstream, but worth confirming.
const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
    fs_1.default.constants.UV_FS_O_FILEMAP ||
    0;
/* c8 ignore stop */
const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
const fMapLimit = 512 * 1024;
const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
// Plain 'w' everywhere, except small files where FILEMAP is enabled.
exports.getWriteFlag = !fMapEnabled ?
    () => 'w'
    : (size) => (size < fMapLimit ? fMapFlag : 'w');
//# sourceMappingURL=get-write-flag.js.map

1
node_modules/tar/dist/commonjs/get-write-flag.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"get-write-flag.js","sourceRoot":"","sources":["../../src/get-write-flag.ts"],"names":[],"mappings":";AAAA,qDAAqD;AACrD,uDAAuD;AACvD,wDAAwD;AACxD,wDAAwD;AACxD,qDAAqD;AACrD,uDAAuD;AACvD,8CAA8C;;;;;;AAE9C,4CAAmB;AAEnB,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,IAAI,OAAO,CAAC,QAAQ,CAAA;AAClE,MAAM,SAAS,GAAG,QAAQ,KAAK,OAAO,CAAA;AAEtC,qBAAqB;AACrB,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,YAAE,CAAC,SAAS,CAAA;AACnD,MAAM,eAAe,GACnB,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC;IAC1C,YAAE,CAAC,SAAS,CAAC,eAAe;IAC5B,CAAC,CAAA;AACH,oBAAoB;AAEpB,MAAM,WAAW,GAAG,SAAS,IAAI,CAAC,CAAC,eAAe,CAAA;AAClD,MAAM,SAAS,GAAG,GAAG,GAAG,IAAI,CAAA;AAC5B,MAAM,QAAQ,GAAG,eAAe,GAAG,OAAO,GAAG,OAAO,GAAG,QAAQ,CAAA;AAClD,QAAA,YAAY,GACvB,CAAC,WAAW,CAAC,CAAC;IACZ,GAAG,EAAE,CAAC,GAAG;IACX,CAAC,CAAC,CAAC,IAAY,EAAE,EAAE,CAAC,CAAC,IAAI,GAAG,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA","sourcesContent":["// Get the appropriate flag to use for creating files\n// We use fmap on Windows platforms for files less than\n// 512kb. This is a fairly low limit, but avoids making\n// things slower in some cases. Since most of what this\n// library is used for is extracting tarballs of many\n// relatively small files in npm packages and the like,\n// it can be a big boost on Windows platforms.\n\nimport fs from 'fs'\n\nconst platform = process.env.__FAKE_PLATFORM__ || process.platform\nconst isWindows = platform === 'win32'\n\n/* c8 ignore start */\nconst { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants\nconst UV_FS_O_FILEMAP =\n Number(process.env.__FAKE_FS_O_FILENAME__) ||\n fs.constants.UV_FS_O_FILEMAP ||\n 0\n/* c8 ignore stop */\n\nconst fMapEnabled = isWindows && !!UV_FS_O_FILEMAP\nconst fMapLimit = 512 * 1024\nconst fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY\nexport const getWriteFlag =\n !fMapEnabled ?\n () => 'w'\n : (size: number) => (size < fMapLimit ? fMapFlag : 'w')\n"]}

54
node_modules/tar/dist/commonjs/header.d.ts generated vendored Normal file
View File

@@ -0,0 +1,54 @@
/// <reference types="node" />
import type { EntryTypeCode, EntryTypeName } from './types.js';
/**
 * Plain-object form of the fields a tar header (or pax extended header)
 * may carry.  All fields are optional; absent means "not set".
 */
export type HeaderData = {
    path?: string;
    mode?: number;
    uid?: number;
    gid?: number;
    size?: number;
    cksum?: number;
    type?: EntryTypeName | 'Unsupported';
    linkpath?: string;
    uname?: string;
    gname?: string;
    devmaj?: number;
    devmin?: number;
    atime?: Date;
    ctime?: Date;
    mtime?: Date;
    charset?: string;
    comment?: string;
    // the following have no corresponding field on the Header class itself
    // (presumably pax-extended-header-only attributes — see header.js)
    dev?: number;
    ino?: number;
    nlink?: number;
};
/**
 * Decodes a 512-byte tar header block into fields, or encodes fields
 * back into a block (see header.js for the wire layout).
 */
export declare class Header implements HeaderData {
    #private;
    /** true when the decoded checksum matched the computed one */
    cksumValid: boolean;
    /** true when the data could not be fully encoded in a plain ustar header */
    needPax: boolean;
    /** true when the decoded block was entirely NUL bytes */
    nullBlock: boolean;
    /** the buffer allocated by encode() when none was supplied */
    block?: Buffer;
    path?: string;
    mode?: number;
    uid?: number;
    gid?: number;
    size?: number;
    cksum?: number;
    linkpath?: string;
    uname?: string;
    gname?: string;
    devmaj: number;
    devmin: number;
    atime?: Date;
    ctime?: Date;
    mtime?: Date;
    charset?: string;
    comment?: string;
    constructor(data?: Buffer | HeaderData, off?: number, ex?: HeaderData, gex?: HeaderData);
    decode(buf: Buffer, off: number, ex?: HeaderData, gex?: HeaderData): void;
    /** returns true when a pax extended header is also required */
    encode(buf?: Buffer, off?: number): boolean;
    get type(): EntryTypeName;
    get typeKey(): EntryTypeCode | 'Unsupported';
    set type(type: EntryTypeCode | EntryTypeName | 'Unsupported');
}
//# sourceMappingURL=header.d.ts.map

1
node_modules/tar/dist/commonjs/header.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"header.d.ts","sourceRoot":"","sources":["../../src/header.ts"],"names":[],"mappings":";AAOA,OAAO,KAAK,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAG9D,MAAM,MAAM,UAAU,GAAG;IACvB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,IAAI,CAAC,EAAE,aAAa,GAAG,aAAa,CAAA;IACpC,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,MAAM,CAAC,EAAE,MAAM,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,CAAA;IACf,KAAK,CAAC,EAAE,IAAI,CAAA;IACZ,KAAK,CAAC,EAAE,IAAI,CAAA;IACZ,KAAK,CAAC,EAAE,IAAI,CAAA;IAIZ,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,qBAAa,MAAO,YAAW,UAAU;;IACvC,UAAU,EAAE,OAAO,CAAQ;IAC3B,OAAO,EAAE,OAAO,CAAQ;IACxB,SAAS,EAAE,OAAO,CAAQ;IAE1B,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,KAAK,CAAC,EAAE,MAAM,CAAA;IAEd,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,MAAM,CAAI;IAClB,MAAM,EAAE,MAAM,CAAI;IAClB,KAAK,CAAC,EAAE,IAAI,CAAA;IACZ,KAAK,CAAC,EAAE,IAAI,CAAA;IACZ,KAAK,CAAC,EAAE,IAAI,CAAA;IAEZ,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,OAAO,CAAC,EAAE,MAAM,CAAA;gBAGd,IAAI,CAAC,EAAE,MAAM,GAAG,UAAU,EAC1B,GAAG,GAAE,MAAU,EACf,EAAE,CAAC,EAAE,UAAU,EACf,GAAG,CAAC,EAAE,UAAU;IASlB,MAAM,CACJ,GAAG,EAAE,MAAM,EACX,GAAG,EAAE,MAAM,EACX,EAAE,CAAC,EAAE,UAAU,EACf,GAAG,CAAC,EAAE,UAAU;IAsGlB,MAAM,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,GAAE,MAAU;IAwEpC,IAAI,IAAI,IAAI,aAAa,CAKxB;IAED,IAAI,OAAO,IAAI,aAAa,GAAG,aAAa,CAE3C;IAED,IAAI,IAAI,CAAC,IAAI,EAAE,aAAa,GAAG,aAAa,GAAG,aAAa,EAS3D;CACF"}

306
node_modules/tar/dist/commonjs/header.js generated vendored Normal file
View File

@@ -0,0 +1,306 @@
"use strict";
// parse a 512-byte header block to a data object, or vice-versa
// encode returns `true` if a pax extended header is needed, because
// the data could not be faithfully encoded in a simple header.
// (Also, check header.needPax to see if it needs a pax header.)
// TypeScript CommonJS emit helpers for namespace imports follow.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Header = void 0;
const node_path_1 = require("node:path");
const large = __importStar(require("./large-numbers.js"));
const types = __importStar(require("./types.js"));
// One 512-byte ustar header block, decoded to/encoded from fields.
class Header {
    cksumValid = false;
    needPax = false;
    nullBlock = false;
    block;
    path;
    mode;
    uid;
    gid;
    size;
    cksum;
    // internal type code ('0', '5', …); exposed via the type/typeKey accessors
    #type = 'Unsupported';
    linkpath;
    uname;
    gname;
    devmaj = 0;
    devmin = 0;
    atime;
    ctime;
    mtime;
    charset;
    comment;
    // Accepts either a raw 512-byte block (decoded at `off`) or a plain
    // HeaderData object (fields copied in via #slurp).
    constructor(data, off = 0, ex, gex) {
        if (Buffer.isBuffer(data)) {
            this.decode(data, off || 0, ex, gex);
        }
        else if (data) {
            this.#slurp(data);
        }
    }
    // Decode the 512-byte block at buf[off..off+512), merging in any
    // extended (ex) and global-extended (gex) header attributes.
    decode(buf, off, ex, gex) {
        if (!off) {
            off = 0;
        }
        if (!buf || !(buf.length >= off + 512)) {
            throw new Error('need 512 bytes for header');
        }
        // fixed ustar field offsets/widths
        this.path = decString(buf, off, 100);
        this.mode = decNumber(buf, off + 100, 8);
        this.uid = decNumber(buf, off + 108, 8);
        this.gid = decNumber(buf, off + 116, 8);
        this.size = decNumber(buf, off + 124, 12);
        this.mtime = decDate(buf, off + 136, 12);
        this.cksum = decNumber(buf, off + 148, 12);
        // if we have extended or global extended headers, apply them now
        // See https://github.com/npm/node-tar/pull/187
        // Apply global before local, so it overrides
        if (gex)
            this.#slurp(gex, true);
        if (ex)
            this.#slurp(ex);
        // old tar versions marked dirs as a file with a trailing /
        const t = decString(buf, off + 156, 1);
        if (types.isCode(t)) {
            this.#type = t || '0';
        }
        if (this.#type === '0' && this.path.slice(-1) === '/') {
            this.#type = '5';
        }
        // tar implementations sometimes incorrectly put the stat(dir).size
        // as the size in the tarball, even though Directory entries are
        // not able to have any body at all. In the very rare chance that
        // it actually DOES have a body, we weren't going to do anything with
        // it anyway, and it'll just be a warning about an invalid header.
        if (this.#type === '5') {
            this.size = 0;
        }
        this.linkpath = decString(buf, off + 157, 100);
        // ustar magic + version at offset 257 gates the extended fields
        if (buf.subarray(off + 257, off + 265).toString() ===
            'ustar\u000000') {
            this.uname = decString(buf, off + 265, 32);
            this.gname = decString(buf, off + 297, 32);
            /* c8 ignore start */
            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
            /* c8 ignore stop */
            if (buf[off + 475] !== 0) {
                // definitely a prefix, definitely >130 chars.
                const prefix = decString(buf, off + 345, 155);
                this.path = prefix + '/' + this.path;
            }
            else {
                const prefix = decString(buf, off + 345, 130);
                if (prefix) {
                    this.path = prefix + '/' + this.path;
                }
                this.atime = decDate(buf, off + 476, 12);
                this.ctime = decDate(buf, off + 488, 12);
            }
        }
        // checksum is computed with the cksum field itself read as spaces
        // (8 * 0x20), skipping bytes 148..156
        let sum = 8 * 0x20;
        for (let i = off; i < off + 148; i++) {
            sum += buf[i];
        }
        for (let i = off + 156; i < off + 512; i++) {
            sum += buf[i];
        }
        this.cksumValid = sum === this.cksum;
        if (this.cksum === undefined && sum === 8 * 0x20) {
            // no checksum and an all-NUL block: archive terminator
            this.nullBlock = true;
        }
    }
    // Copy fields from an extended header object onto this header.
    #slurp(ex, gex = false) {
        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
            // we slurp in everything except for the path attribute in
            // a global extended header, because that's weird. Also, any
            // null/undefined values are ignored.
            return !(v === null ||
                v === undefined ||
                (k === 'path' && gex) ||
                (k === 'linkpath' && gex) ||
                k === 'global');
        })));
    }
    // Encode this header into buf at off (allocating this.block if no
    // buffer given).  Returns true when a pax header is also needed.
    encode(buf, off = 0) {
        if (!buf) {
            buf = this.block = Buffer.alloc(512);
        }
        if (this.#type === 'Unsupported') {
            this.#type = '0';
        }
        if (!(buf.length >= off + 512)) {
            throw new Error('need 512 bytes for header');
        }
        // atime/ctime occupy the tail of the prefix field, shrinking it
        const prefixSize = this.ctime || this.atime ? 130 : 155;
        const split = splitPrefix(this.path || '', prefixSize);
        const path = split[0];
        const prefix = split[1];
        this.needPax = !!split[2];
        this.needPax = encString(buf, off, 100, path) || this.needPax;
        this.needPax =
            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
        this.needPax =
            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
        this.needPax =
            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
        this.needPax =
            encNumber(buf, off + 124, 12, this.size) || this.needPax;
        this.needPax =
            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
        buf[off + 156] = this.#type.charCodeAt(0);
        this.needPax =
            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
        buf.write('ustar\u000000', off + 257, 8);
        this.needPax =
            encString(buf, off + 265, 32, this.uname) || this.needPax;
        this.needPax =
            encString(buf, off + 297, 32, this.gname) || this.needPax;
        this.needPax =
            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
        this.needPax =
            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
        this.needPax =
            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
        // NOTE(review): the prefix is written a second time below with the
        // branch-specific width — this mirrors the generating source exactly.
        if (buf[off + 475] !== 0) {
            this.needPax =
                encString(buf, off + 345, 155, prefix) || this.needPax;
        }
        else {
            this.needPax =
                encString(buf, off + 345, 130, prefix) || this.needPax;
            this.needPax =
                encDate(buf, off + 476, 12, this.atime) || this.needPax;
            this.needPax =
                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
        }
        // compute and store the checksum (cksum bytes counted as spaces)
        let sum = 8 * 0x20;
        for (let i = off; i < off + 148; i++) {
            sum += buf[i];
        }
        for (let i = off + 156; i < off + 512; i++) {
            sum += buf[i];
        }
        this.cksum = sum;
        encNumber(buf, off + 148, 8, this.cksum);
        this.cksumValid = true;
        return this.needPax;
    }
    // Human-readable type name ('File', 'Directory', …) via types.name.
    get type() {
        return (this.#type === 'Unsupported' ?
            this.#type
            : types.name.get(this.#type));
    }
    // Raw single-character type code.
    get typeKey() {
        return this.#type;
    }
    // Accepts either a type name or a code; throws on anything unknown.
    set type(type) {
        const c = String(types.code.get(type));
        if (types.isCode(c) || c === 'Unsupported') {
            this.#type = c;
        }
        else if (types.isCode(type)) {
            this.#type = type;
        }
        else {
            throw new TypeError('invalid entry type: ' + type);
        }
    }
}
exports.Header = Header;
// Split a long path into [path, prefix, needPax] for the ustar prefix
// field.  prefixSize is 130 when atime/ctime are stored in the tail of
// the field, 155 otherwise (see Header#encode).
const splitPrefix = (p, prefixSize) => {
    const pathSize = 100;
    let pp = p;
    let prefix = '';
    let ret = undefined;
    const root = node_path_1.posix.parse(p).root || '.';
    if (Buffer.byteLength(pp) < pathSize) {
        // fits entirely in the 100-byte path field; no prefix needed
        ret = [pp, prefix, false];
    }
    else {
        // first set prefix to the dir, and path to the base
        prefix = node_path_1.posix.dirname(pp);
        pp = node_path_1.posix.basename(pp);
        do {
            if (Buffer.byteLength(pp) <= pathSize &&
                Buffer.byteLength(prefix) <= prefixSize) {
                // both fit!
                ret = [pp, prefix, false];
            }
            else if (Buffer.byteLength(pp) > pathSize &&
                Buffer.byteLength(prefix) <= prefixSize) {
                // prefix fits in prefix, but path doesn't fit in path
                ret = [pp.slice(0, pathSize - 1), prefix, true];
            }
            else {
                // make path take a bit from prefix
                pp = node_path_1.posix.join(node_path_1.posix.basename(prefix), pp);
                prefix = node_path_1.posix.dirname(prefix);
            }
        } while (prefix !== root && ret === undefined);
        // at this point, found no resolution, just truncate
        if (!ret) {
            ret = [p.slice(0, pathSize - 1), '', true];
        }
    }
    return ret;
};
// Read a fixed-width utf8 field, discarding everything from the first
// NUL byte onward.  (The regex is kept verbatim from the original: with
// no /s flag, `.` stops at a newline.)
const decString = (buf, off, size) => {
    const text = buf.subarray(off, off + size).toString('utf8');
    return text.replace(/\0.*/, '');
};
// Decode a numeric field and interpret it as seconds-since-epoch.
const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
// High bit set on the first byte means base-256 encoding (handled by
// large-numbers.js); otherwise the field is plain octal ASCII.
const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
    large.parse(buf.subarray(off, off + size))
    : decSmallNumber(buf, off, size);
// Map NaN to undefined so unparseable numeric fields read as "not set".
const nanUndef = (value) => (isNaN(value) ? undefined : value);
// Parse an octal ASCII field: truncate at the first NUL (regex kept
// verbatim), trim surrounding whitespace, then parse base-8.
const decSmallNumber = (buf, off, size) => {
    const text = buf.subarray(off, off + size).toString('utf8');
    return nanUndef(parseInt(text.replace(/\0.*$/, '').trim(), 8));
};
// the maximum encodable as a null-terminated octal, by field size
const MAXNUM = {
    12: 0o77777777777,
    8: 0o7777777,
};
// Encode as octal when the value fits; fall back to base-256
// (large-numbers.js) for oversized or negative values, returning true
// in that case since such values also warrant a pax header.
const encNumber = (buf, off, size, num) => num === undefined ? false
    : num > MAXNUM[size] || num < 0 ?
        (large.encode(num, buf.subarray(off, off + size)), true)
        : (encSmallNumber(buf, off, size, num), false);
// Write `num` into the field as padded octal ASCII.
const encSmallNumber = (buf, off, size, num) => {
    return buf.write(octalString(num, size), off, size, 'ascii');
};
// Render a number as octal text padded to exactly `size` bytes.
const octalString = (num, size) => {
    return padOctal(Math.floor(num).toString(8), size);
};
// Zero-pad to size-1 characters (digits plus a trailing space when
// padding was needed), always ending with a NUL terminator.
const padOctal = (str, size) => {
    const body = str.length === size - 1 ?
        str
        : new Array(size - str.length - 1).join('0') + str + ' ';
    return body + '\0';
};
// Dates are encoded as seconds-since-epoch through the numeric encoder.
const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
// 155 NUL bytes of padding — longer than any string field written here.
const NULLS = new Array(156).join('\0');
// Write `str` (NUL-padded) into the field.  Returns true when the value
// still needs a pax header: non-ascii content (utf8 byte length differs
// from the character count) or a string longer than the field.
const encString = (buf, off, size, str) => {
    if (str === undefined) {
        return false;
    }
    buf.write(str + NULLS, off, size, 'utf8');
    return str.length !== Buffer.byteLength(str) || str.length > size;
};
//# sourceMappingURL=header.js.map

1
node_modules/tar/dist/commonjs/header.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

20
node_modules/tar/dist/commonjs/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,20 @@
// Option type aliases for each calling convention (sync/async × file/stream).
export { type TarOptionsWithAliasesAsync, type TarOptionsWithAliasesAsyncFile, type TarOptionsWithAliasesAsyncNoFile, type TarOptionsWithAliasesSyncNoFile, type TarOptionsWithAliases, type TarOptionsWithAliasesFile, type TarOptionsWithAliasesSync, type TarOptionsWithAliasesSyncFile, } from './options.js';
export * from './create.js';
// single-letter aliases mirror the classic tar flag letters (c/x/t/r/u)
export { create as c } from './create.js';
export * from './extract.js';
export { extract as x } from './extract.js';
export * from './header.js';
export * from './list.js';
export { list as t } from './list.js';
export * from './pack.js';
export * from './parse.js';
export * from './pax.js';
export * from './read-entry.js';
export * from './replace.js';
export { replace as r } from './replace.js';
export * as types from './types.js';
export * from './unpack.js';
export * from './update.js';
export { update as u } from './update.js';
export * from './write-entry.js';
//# sourceMappingURL=index.d.ts.map

1
node_modules/tar/dist/commonjs/index.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,0BAA0B,EAC/B,KAAK,8BAA8B,EACnC,KAAK,gCAAgC,EACrC,KAAK,+BAA+B,EACpC,KAAK,qBAAqB,EAC1B,KAAK,yBAAyB,EAC9B,KAAK,yBAAyB,EAC9B,KAAK,6BAA6B,GACnC,MAAM,cAAc,CAAA;AAErB,cAAc,aAAa,CAAA;AAC3B,OAAO,EAAE,MAAM,IAAI,CAAC,EAAE,MAAM,aAAa,CAAA;AACzC,cAAc,cAAc,CAAA;AAC5B,OAAO,EAAE,OAAO,IAAI,CAAC,EAAE,MAAM,cAAc,CAAA;AAC3C,cAAc,aAAa,CAAA;AAC3B,cAAc,WAAW,CAAA;AACzB,OAAO,EAAE,IAAI,IAAI,CAAC,EAAE,MAAM,WAAW,CAAA;AAErC,cAAc,WAAW,CAAA;AACzB,cAAc,YAAY,CAAA;AAC1B,cAAc,UAAU,CAAA;AACxB,cAAc,iBAAiB,CAAA;AAC/B,cAAc,cAAc,CAAA;AAC5B,OAAO,EAAE,OAAO,IAAI,CAAC,EAAE,MAAM,cAAc,CAAA;AAC3C,OAAO,KAAK,KAAK,MAAM,YAAY,CAAA;AACnC,cAAc,aAAa,CAAA;AAC3B,cAAc,aAAa,CAAA;AAC3B,OAAO,EAAE,MAAM,IAAI,CAAC,EAAE,MAAM,aAAa,CAAA;AACzC,cAAc,kBAAkB,CAAA"}

54
node_modules/tar/dist/commonjs/index.js generated vendored Normal file
View File

@@ -0,0 +1,54 @@
"use strict";
// TypeScript CommonJS emit helpers: re-export and namespace-import
// interop on top of require().
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.u = exports.types = exports.r = exports.t = exports.x = exports.c = void 0;
__exportStar(require("./create.js"), exports);
var create_js_1 = require("./create.js");
Object.defineProperty(exports, "c", { enumerable: true, get: function () { return create_js_1.create; } });
__exportStar(require("./extract.js"), exports);
var extract_js_1 = require("./extract.js");
Object.defineProperty(exports, "x", { enumerable: true, get: function () { return extract_js_1.extract; } });
__exportStar(require("./header.js"), exports);
__exportStar(require("./list.js"), exports);
var list_js_1 = require("./list.js");
Object.defineProperty(exports, "t", { enumerable: true, get: function () { return list_js_1.list; } });
// classes
__exportStar(require("./pack.js"), exports);
__exportStar(require("./parse.js"), exports);
__exportStar(require("./pax.js"), exports);
__exportStar(require("./read-entry.js"), exports);
__exportStar(require("./replace.js"), exports);
var replace_js_1 = require("./replace.js");
Object.defineProperty(exports, "r", { enumerable: true, get: function () { return replace_js_1.replace; } });
exports.types = __importStar(require("./types.js"));
__exportStar(require("./unpack.js"), exports);
__exportStar(require("./update.js"), exports);
var update_js_1 = require("./update.js");
Object.defineProperty(exports, "u", { enumerable: true, get: function () { return update_js_1.update; } });
__exportStar(require("./write-entry.js"), exports);
//# sourceMappingURL=index.js.map

1
node_modules/tar/dist/commonjs/index.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAWA,8CAA2B;AAC3B,yCAAyC;AAAhC,8FAAA,MAAM,OAAK;AACpB,+CAA4B;AAC5B,2CAA2C;AAAlC,+FAAA,OAAO,OAAK;AACrB,8CAA2B;AAC3B,4CAAyB;AACzB,qCAAqC;AAA5B,4FAAA,IAAI,OAAK;AAClB,UAAU;AACV,4CAAyB;AACzB,6CAA0B;AAC1B,2CAAwB;AACxB,kDAA+B;AAC/B,+CAA4B;AAC5B,2CAA2C;AAAlC,+FAAA,OAAO,OAAK;AACrB,oDAAmC;AACnC,8CAA2B;AAC3B,8CAA2B;AAC3B,yCAAyC;AAAhC,8FAAA,MAAM,OAAK;AACpB,mDAAgC","sourcesContent":["export {\n type TarOptionsWithAliasesAsync,\n type TarOptionsWithAliasesAsyncFile,\n type TarOptionsWithAliasesAsyncNoFile,\n type TarOptionsWithAliasesSyncNoFile,\n type TarOptionsWithAliases,\n type TarOptionsWithAliasesFile,\n type TarOptionsWithAliasesSync,\n type TarOptionsWithAliasesSyncFile,\n} from './options.js'\n\nexport * from './create.js'\nexport { create as c } from './create.js'\nexport * from './extract.js'\nexport { extract as x } from './extract.js'\nexport * from './header.js'\nexport * from './list.js'\nexport { list as t } from './list.js'\n// classes\nexport * from './pack.js'\nexport * from './parse.js'\nexport * from './pax.js'\nexport * from './read-entry.js'\nexport * from './replace.js'\nexport { replace as r } from './replace.js'\nexport * as types from './types.js'\nexport * from './unpack.js'\nexport * from './update.js'\nexport { update as u } from './update.js'\nexport * from './write-entry.js'\n"]}

4
node_modules/tar/dist/commonjs/large-numbers.d.ts generated vendored Normal file
View File

@@ -0,0 +1,4 @@
/// <reference types="node" />
export declare const encode: (num: number, buf: Buffer) => Buffer;
export declare const parse: (buf: Buffer) => number;
//# sourceMappingURL=large-numbers.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"large-numbers.d.ts","sourceRoot":"","sources":["../../src/large-numbers.ts"],"names":[],"mappings":";AAGA,eAAO,MAAM,MAAM,QAAS,MAAM,OAAO,MAAM,WAa9C,CAAA;AA6BD,eAAO,MAAM,KAAK,QAAS,MAAM,WAmBhC,CAAA"}

99
node_modules/tar/dist/commonjs/large-numbers.js generated vendored Normal file
View File

@@ -0,0 +1,99 @@
"use strict";
// Tar can encode large and negative numbers using a leading byte of
// 0xff for negative, and 0x80 for positive.
Object.defineProperty(exports, "__esModule", { value: true });
exports.parse = exports.encode = void 0;
// Encode `num` into `buf` using tar's base-256 extension: a leading
// 0x80 byte marks a positive value, 0xff a negative (two's complement)
// one. Returns the same buffer that was passed in.
const encode = (num, buf) => {
    if (!Number.isSafeInteger(num)) {
        // JavaScript numbers lose integer precision beyond 2^53 - 1,
        // so refuse rather than write a silently-wrong value.
        throw Error('cannot encode number outside of javascript safe integer range');
    }
    const write = num < 0 ? encodeNegative : encodePositive;
    write(num, buf);
    return buf;
};
exports.encode = encode;
// Write a non-negative integer into buf, big-endian, behind a 0x80
// marker byte. Bytes are filled from the least significant end upward.
const encodePositive = (num, buf) => {
    buf[0] = 0x80;
    let remaining = num;
    let i = buf.length - 1;
    while (i > 0) {
        buf[i] = remaining & 0xff;
        remaining = Math.floor(remaining / 0x100);
        i -= 1;
    }
};
// Write a negative integer as a 0xff-prefixed two's complement value.
// Working from the least significant byte upward: the lowest non-zero
// byte gets two's complement, every byte above it ones' complement,
// and trailing zero bytes stay zero.
const encodeNegative = (num, buf) => {
    buf[0] = 0xff;
    let magnitude = num * -1;
    let borrowed = false;
    for (let i = buf.length - 1; i >= 1; i--) {
        const byte = magnitude & 0xff;
        magnitude = Math.floor(magnitude / 0x100);
        if (borrowed) {
            buf[i] = onesComp(byte);
        }
        else if (byte === 0) {
            buf[i] = 0;
        }
        else {
            borrowed = true;
            buf[i] = twosComp(byte);
        }
    }
};
// Decode a base-256 buffer produced by `encode`: 0x80 marks an
// unsigned big-endian value, 0xff a negative two's complement one.
// Throws on any other marker byte or on values that cannot be
// represented exactly as a JavaScript number.
const parse = (buf) => {
    const marker = buf[0];
    let value;
    if (marker === 0x80) {
        value = pos(buf.subarray(1, buf.length));
    }
    else if (marker === 0xff) {
        value = twos(buf);
    }
    else {
        value = null;
    }
    if (value === null) {
        throw Error('invalid base256 encoding');
    }
    if (!Number.isSafeInteger(value)) {
        // beyond 2^53 - 1 the decoded value would be approximate
        throw Error('parsed number outside of javascript safe integer range');
    }
    return value;
};
exports.parse = parse;
// Interpret the whole buffer (marker byte included) as a negative
// two's complement value. Scans from the least significant byte: the
// lowest non-zero byte is two's complemented, all higher bytes ones'
// complemented, and each decoded byte subtracts from the total at its
// place value.
const twos = (buf) => {
    const len = buf.length;
    let flipped = false;
    let sum = 0;
    let place = 1; // 256^(distance from the low byte), always exact
    for (let i = len - 1; i >= 0; i--) {
        const byte = Number(buf[i]);
        let decoded;
        if (flipped) {
            decoded = onesComp(byte);
        }
        else if (byte === 0) {
            decoded = 0;
        }
        else {
            flipped = true;
            decoded = twosComp(byte);
        }
        if (decoded !== 0) {
            sum -= decoded * place;
        }
        place *= 256;
    }
    return sum;
};
// Interpret buf as an unsigned big-endian integer, summing each byte
// at its place value from the least significant end upward.
const pos = (buf) => {
    const len = buf.length;
    let sum = 0;
    let place = 1; // 256^(distance from the low byte), always exact
    for (let i = len - 1; i >= 0; i--) {
        const byte = Number(buf[i]);
        if (byte !== 0) {
            sum += byte * place;
        }
        place *= 256;
    }
    return sum;
};
// Ones' complement of a single byte (flip the low 8 bits).
const onesComp = (byte) => (byte ^ 0xff) & 0xff;
// Two's complement of a single byte (flip and add one, wrapping at 256).
const twosComp = (byte) => ((byte ^ 0xff) + 1) & 0xff;
//# sourceMappingURL=large-numbers.js.map

1
node_modules/tar/dist/commonjs/large-numbers.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

7
node_modules/tar/dist/commonjs/list.d.ts generated vendored Normal file
View File

@@ -0,0 +1,7 @@
import { TarOptions } from './options.js';
import { Parser } from './parse.js';
export declare const filesFilter: (opt: TarOptions, files: string[]) => void;
export declare const list: import("./make-command.js").TarCommand<Parser, Parser & {
sync: true;
}>;
//# sourceMappingURL=list.d.ts.map

1
node_modules/tar/dist/commonjs/list.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"list.d.ts","sourceRoot":"","sources":["../../src/list.ts"],"names":[],"mappings":"AAKA,OAAO,EACL,UAAU,EAGX,MAAM,cAAc,CAAA;AACrB,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAgBnC,eAAO,MAAM,WAAW,QAAS,UAAU,SAAS,MAAM,EAAE,SA4B3D,CAAA;AA4DD,eAAO,MAAM,IAAI;UAG4B,IAAI;EAMhD,CAAA"}

136
node_modules/tar/dist/commonjs/list.js generated vendored Normal file
View File

@@ -0,0 +1,136 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.list = exports.filesFilter = void 0;
// tar -t
const fsm = __importStar(require("@isaacs/fs-minipass"));
const node_fs_1 = __importDefault(require("node:fs"));
const path_1 = require("path");
const make_command_js_1 = require("./make-command.js");
const parse_js_1 = require("./parse.js");
const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
const onReadEntryFunction = (opt) => {
const onReadEntry = opt.onReadEntry;
opt.onReadEntry =
onReadEntry ?
e => {
onReadEntry(e);
e.resume();
}
: e => e.resume();
};
// construct a filter that limits the file entries listed
// include child entries if a dir is included
// Replace (or wrap) opt.filter so that only the paths listed in
// `files` -- and anything inside a listed directory -- pass.
const filesFilter = (opt, files) => {
    // map: normalized path -> whether it (or an ancestor) was requested.
    // Seeded with `true` for each explicitly listed path; also serves as
    // a memo for ancestor lookups (negative answers are cached too).
    const map = new Map(files.map(f => [(0, strip_trailing_slashes_js_1.stripTrailingSlashes)(f), true]));
    const filter = opt.filter;
    // true if `file` or any ancestor (up to, but not including, the
    // filesystem root / '.') is in the requested set
    const mapHas = (file, r = '') => {
        const root = r || (0, path_1.parse)(file).root || '.';
        let ret;
        if (file === root)
            ret = false;
        else {
            const m = map.get(file);
            if (m !== undefined) {
                ret = m;
            }
            else {
                // not seen yet: inherit the answer from the parent dir
                ret = mapHas((0, path_1.dirname)(file), root);
            }
        }
        // memoize for subsequent entries under the same directory
        map.set(file, ret);
        return ret;
    };
    // compose with any pre-existing user filter (user filter runs first)
    opt.filter =
        filter ?
            (file, entry) => filter(file, entry) && mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file))
            : file => mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file));
};
exports.filesFilter = filesFilter;
// Synchronously stream the tar archive at opt.file into a Parser.
// Files smaller than maxReadSize are read with one readFileSync call;
// larger files are read in fixed-size chunks through a single fd.
const listFileSync = (opt) => {
    const p = new parse_js_1.Parser(opt);
    const file = opt.file;
    let fd;
    try {
        const stat = node_fs_1.default.statSync(file);
        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
        if (stat.size < readSize) {
            p.end(node_fs_1.default.readFileSync(file));
        }
        else {
            let pos = 0;
            const buf = Buffer.allocUnsafe(readSize);
            fd = node_fs_1.default.openSync(file, 'r');
            while (pos < stat.size) {
                const bytesRead = node_fs_1.default.readSync(fd, buf, 0, readSize, pos);
                if (bytesRead === 0) {
                    // EOF reached before stat.size bytes were read (the
                    // file was truncated after statSync): stop rather
                    // than spinning forever on zero-byte reads.
                    break;
                }
                pos += bytesRead;
                p.write(buf.subarray(0, bytesRead));
            }
            p.end();
        }
    }
    finally {
        // always release the fd; close errors are deliberately ignored
        // since the parse output has already been produced
        if (typeof fd === 'number') {
            try {
                node_fs_1.default.closeSync(fd);
                /* c8 ignore next */
            }
            catch (er) { }
        }
    }
};
// Asynchronously stream the tar archive at opt.file into a Parser via
// an fs-minipass ReadStream. Resolves when parsing ends, rejects on
// the first stream or parse error.
const listFile = (opt, _files) => {
    const parse = new parse_js_1.Parser(opt);
    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
    const file = opt.file;
    const p = new Promise((resolve, reject) => {
        parse.on('error', reject);
        parse.on('end', resolve);
        node_fs_1.default.stat(file, (er, stat) => {
            if (er) {
                reject(er);
            }
            else {
                // size is passed so the read stream stops exactly at EOF
                const stream = new fsm.ReadStream(file, {
                    readSize: readSize,
                    size: stat.size,
                });
                stream.on('error', reject);
                stream.pipe(parse);
            }
        });
    });
    return p;
};
// tar -t: assemble the sync/async, file/no-file variants into a single
// `list` command. Before dispatch: narrow to the requested paths when
// `files` is given, and auto-resume entries unless noResume is set.
exports.list = (0, make_command_js_1.makeCommand)(listFileSync, listFile, opt => new parse_js_1.Parser(opt), opt => new parse_js_1.Parser(opt), (opt, files) => {
    if (files?.length)
        (0, exports.filesFilter)(opt, files);
    if (!opt.noResume)
        onReadEntryFunction(opt);
});
//# sourceMappingURL=list.js.map

1
node_modules/tar/dist/commonjs/list.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

49
node_modules/tar/dist/commonjs/make-command.d.ts generated vendored Normal file
View File

@@ -0,0 +1,49 @@
import { TarOptions, TarOptionsAsyncFile, TarOptionsAsyncNoFile, TarOptionsSyncFile, TarOptionsSyncNoFile, TarOptionsWithAliases, TarOptionsWithAliasesAsync, TarOptionsWithAliasesAsyncFile, TarOptionsWithAliasesAsyncNoFile, TarOptionsWithAliasesFile, TarOptionsWithAliasesNoFile, TarOptionsWithAliasesSync, TarOptionsWithAliasesSyncFile, TarOptionsWithAliasesSyncNoFile } from './options.js';
export type CB = (er?: Error) => any;
export type TarCommand<AsyncClass, SyncClass extends {
sync: true;
}> = {
(): AsyncClass;
(opt: TarOptionsWithAliasesAsyncNoFile): AsyncClass;
(entries: string[]): AsyncClass;
(opt: TarOptionsWithAliasesAsyncNoFile, entries: string[]): AsyncClass;
} & {
(opt: TarOptionsWithAliasesSyncNoFile): SyncClass;
(opt: TarOptionsWithAliasesSyncNoFile, entries: string[]): SyncClass;
} & {
(opt: TarOptionsWithAliasesAsyncFile): Promise<void>;
(opt: TarOptionsWithAliasesAsyncFile, entries: string[]): Promise<void>;
(opt: TarOptionsWithAliasesAsyncFile, cb: CB): Promise<void>;
(opt: TarOptionsWithAliasesAsyncFile, entries: string[], cb: CB): Promise<void>;
} & {
(opt: TarOptionsWithAliasesSyncFile): void;
(opt: TarOptionsWithAliasesSyncFile, entries: string[]): void;
} & {
(opt: TarOptionsWithAliasesSync): typeof opt extends (TarOptionsWithAliasesFile) ? void : typeof opt extends TarOptionsWithAliasesNoFile ? SyncClass : void | SyncClass;
(opt: TarOptionsWithAliasesSync, entries: string[]): typeof opt extends TarOptionsWithAliasesFile ? void : typeof opt extends TarOptionsWithAliasesNoFile ? SyncClass : void | SyncClass;
} & {
(opt: TarOptionsWithAliasesAsync): typeof opt extends (TarOptionsWithAliasesFile) ? Promise<void> : typeof opt extends TarOptionsWithAliasesNoFile ? AsyncClass : Promise<void> | AsyncClass;
(opt: TarOptionsWithAliasesAsync, entries: string[]): typeof opt extends TarOptionsWithAliasesFile ? Promise<void> : typeof opt extends TarOptionsWithAliasesNoFile ? AsyncClass : Promise<void> | AsyncClass;
(opt: TarOptionsWithAliasesAsync, cb: CB): Promise<void>;
(opt: TarOptionsWithAliasesAsync, entries: string[], cb: CB): typeof opt extends TarOptionsWithAliasesFile ? Promise<void> : typeof opt extends TarOptionsWithAliasesNoFile ? never : Promise<void>;
} & {
(opt: TarOptionsWithAliasesFile): Promise<void> | void;
(opt: TarOptionsWithAliasesFile, entries: string[]): typeof opt extends TarOptionsWithAliasesSync ? void : typeof opt extends TarOptionsWithAliasesAsync ? Promise<void> : Promise<void> | void;
(opt: TarOptionsWithAliasesFile, cb: CB): Promise<void>;
(opt: TarOptionsWithAliasesFile, entries: string[], cb: CB): typeof opt extends TarOptionsWithAliasesSync ? never : typeof opt extends TarOptionsWithAliasesAsync ? Promise<void> : Promise<void>;
} & {
(opt: TarOptionsWithAliasesNoFile): typeof opt extends (TarOptionsWithAliasesSync) ? SyncClass : typeof opt extends TarOptionsWithAliasesAsync ? AsyncClass : SyncClass | AsyncClass;
(opt: TarOptionsWithAliasesNoFile, entries: string[]): typeof opt extends TarOptionsWithAliasesSync ? SyncClass : typeof opt extends TarOptionsWithAliasesAsync ? AsyncClass : SyncClass | AsyncClass;
} & {
(opt: TarOptionsWithAliases): typeof opt extends (TarOptionsWithAliasesFile) ? typeof opt extends TarOptionsWithAliasesSync ? void : typeof opt extends TarOptionsWithAliasesAsync ? Promise<void> : void | Promise<void> : typeof opt extends TarOptionsWithAliasesNoFile ? typeof opt extends TarOptionsWithAliasesSync ? SyncClass : typeof opt extends TarOptionsWithAliasesAsync ? AsyncClass : SyncClass | AsyncClass : typeof opt extends TarOptionsWithAliasesSync ? SyncClass | void : typeof opt extends TarOptionsWithAliasesAsync ? AsyncClass | Promise<void> : SyncClass | void | AsyncClass | Promise<void>;
} & {
syncFile: (opt: TarOptionsSyncFile, entries: string[]) => void;
asyncFile: (opt: TarOptionsAsyncFile, entries: string[], cb?: CB) => Promise<void>;
syncNoFile: (opt: TarOptionsSyncNoFile, entries: string[]) => SyncClass;
asyncNoFile: (opt: TarOptionsAsyncNoFile, entries: string[]) => AsyncClass;
validate?: (opt: TarOptions, entries?: string[]) => void;
};
export declare const makeCommand: <AsyncClass, SyncClass extends {
sync: true;
}>(syncFile: (opt: TarOptionsSyncFile, entries: string[]) => void, asyncFile: (opt: TarOptionsAsyncFile, entries: string[], cb?: CB) => Promise<void>, syncNoFile: (opt: TarOptionsSyncNoFile, entries: string[]) => SyncClass, asyncNoFile: (opt: TarOptionsAsyncNoFile, entries: string[]) => AsyncClass, validate?: (opt: TarOptions, entries?: string[]) => void) => TarCommand<AsyncClass, SyncClass>;
//# sourceMappingURL=make-command.d.ts.map

1
node_modules/tar/dist/commonjs/make-command.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"make-command.d.ts","sourceRoot":"","sources":["../../src/make-command.ts"],"names":[],"mappings":"AAAA,OAAO,EAML,UAAU,EACV,mBAAmB,EACnB,qBAAqB,EACrB,kBAAkB,EAClB,oBAAoB,EACpB,qBAAqB,EACrB,0BAA0B,EAC1B,8BAA8B,EAC9B,gCAAgC,EAChC,yBAAyB,EACzB,2BAA2B,EAC3B,yBAAyB,EACzB,6BAA6B,EAC7B,+BAA+B,EAChC,MAAM,cAAc,CAAA;AAErB,MAAM,MAAM,EAAE,GAAG,CAAC,EAAE,CAAC,EAAE,KAAK,KAAK,GAAG,CAAA;AAEpC,MAAM,MAAM,UAAU,CACpB,UAAU,EACV,SAAS,SAAS;IAAE,IAAI,EAAE,IAAI,CAAA;CAAE,IAC9B;IAEF,IAAI,UAAU,CAAA;IACd,CAAC,GAAG,EAAE,gCAAgC,GAAG,UAAU,CAAA;IACnD,CAAC,OAAO,EAAE,MAAM,EAAE,GAAG,UAAU,CAAA;IAC/B,CACE,GAAG,EAAE,gCAAgC,EACrC,OAAO,EAAE,MAAM,EAAE,GAChB,UAAU,CAAA;CACd,GAAG;IAEF,CAAC,GAAG,EAAE,+BAA+B,GAAG,SAAS,CAAA;IACjD,CAAC,GAAG,EAAE,+BAA+B,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,SAAS,CAAA;CACrE,GAAG;IAEF,CAAC,GAAG,EAAE,8BAA8B,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpD,CACE,GAAG,EAAE,8BAA8B,EACnC,OAAO,EAAE,MAAM,EAAE,GAChB,OAAO,CAAC,IAAI,CAAC,CAAA;IAChB,CAAC,GAAG,EAAE,8BAA8B,EAAE,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5D,CACE,GAAG,EAAE,8BAA8B,EACnC,OAAO,EAAE,MAAM,EAAE,EACjB,EAAE,EAAE,EAAE,GACL,OAAO,CAAC,IAAI,CAAC,CAAA;CACjB,GAAG;IAEF,CAAC,GAAG,EAAE,6BAA6B,GAAG,IAAI,CAAA;IAC1C,CAAC,GAAG,EAAE,6BAA6B,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,IAAI,CAAA;CAC9D,GAAG;IAEF,CAAC,GAAG,EAAE,yBAAyB,GAAG,OAAO,GAAG,SAAS,CACnD,yBAAyB,CAC1B,GACC,IAAI,GACJ,OAAO,GAAG,SAAS,2BAA2B,GAAG,SAAS,GAC1D,IAAI,GAAG,SAAS,CAAA;IAClB,CACE,GAAG,EAAE,yBAAyB,EAC9B,OAAO,EAAE,MAAM,EAAE,GAChB,OAAO,GAAG,SAAS,yBAAyB,GAAG,IAAI,GACpD,OAAO,GAAG,SAAS,2BAA2B,GAAG,SAAS,GAC1D,IAAI,GAAG,SAAS,CAAA;CACnB,GAAG;IAEF,CAAC,GAAG,EAAE,0BAA0B,GAAG,OAAO,GAAG,SAAS,CACpD,yBAAyB,CAC1B,GACC,OAAO,CAAC,IAAI,CAAC,GACb,OAAO,GAAG,SAAS,2BAA2B,GAAG,UAAU,GAC3D,OAAO,CAAC,IAAI,CAAC,GAAG,UAAU,CAAA;IAC5B,CACE,GAAG,EAAE,0BAA0B,EAC/B,OAAO,EAAE,MAAM,EAAE,GAChB,OAAO,GAAG,SAAS,yBAAyB,GAAG,OAAO,CAAC,IAAI,CAAC,GAC7D,OAAO,GAAG,SAAS,2BAA2B,GAAG,UAAU,GAC3D,OAAO,CAAC,IAAI,CAAC,GAAG,UAAU,CAAA;IAC5B,CAAC,GAAG,EAAE,0BAA0B,EAAE,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACxD,CACE,GAAG,EA
AE,0BAA0B,EAC/B,OAAO,EAAE,MAAM,EAAE,EACjB,EAAE,EAAE,EAAE,GACL,OAAO,GAAG,SAAS,yBAAyB,GAAG,OAAO,CAAC,IAAI,CAAC,GAC7D,OAAO,GAAG,SAAS,2BAA2B,GAAG,KAAK,GACtD,OAAO,CAAC,IAAI,CAAC,CAAA;CAChB,GAAG;IAEF,CAAC,GAAG,EAAE,yBAAyB,GAAG,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAA;IACtD,CACE,GAAG,EAAE,yBAAyB,EAC9B,OAAO,EAAE,MAAM,EAAE,GAChB,OAAO,GAAG,SAAS,yBAAyB,GAAG,IAAI,GACpD,OAAO,GAAG,SAAS,0BAA0B,GAAG,OAAO,CAAC,IAAI,CAAC,GAC7D,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAA;IACtB,CAAC,GAAG,EAAE,yBAAyB,EAAE,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACvD,CACE,GAAG,EAAE,yBAAyB,EAC9B,OAAO,EAAE,MAAM,EAAE,EACjB,EAAE,EAAE,EAAE,GACL,OAAO,GAAG,SAAS,yBAAyB,GAAG,KAAK,GACrD,OAAO,GAAG,SAAS,0BAA0B,GAAG,OAAO,CAAC,IAAI,CAAC,GAC7D,OAAO,CAAC,IAAI,CAAC,CAAA;CAChB,GAAG;IAEF,CAAC,GAAG,EAAE,2BAA2B,GAAG,OAAO,GAAG,SAAS,CACrD,yBAAyB,CAC1B,GACC,SAAS,GACT,OAAO,GAAG,SAAS,0BAA0B,GAAG,UAAU,GAC1D,SAAS,GAAG,UAAU,CAAA;IACxB,CACE,GAAG,EAAE,2BAA2B,EAChC,OAAO,EAAE,MAAM,EAAE,GAChB,OAAO,GAAG,SAAS,yBAAyB,GAAG,SAAS,GACzD,OAAO,GAAG,SAAS,0BAA0B,GAAG,UAAU,GAC1D,SAAS,GAAG,UAAU,CAAA;CACzB,GAAG;IAEF,CAAC,GAAG,EAAE,qBAAqB,GAAG,OAAO,GAAG,SAAS,CAC/C,yBAAyB,CAC1B,GACC,OAAO,GAAG,SAAS,yBAAyB,GAAG,IAAI,GACjD,OAAO,GAAG,SAAS,0BAA0B,GAAG,OAAO,CAAC,IAAI,CAAC,GAC7D,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,GACtB,OAAO,GAAG,SAAS,2BAA2B,GAC9C,OAAO,GAAG,SAAS,yBAAyB,GAAG,SAAS,GACtD,OAAO,GAAG,SAAS,0BAA0B,GAAG,UAAU,GAC1D,SAAS,GAAG,UAAU,GACxB,OAAO,GAAG,SAAS,yBAAyB,GAAG,SAAS,GAAG,IAAI,GAC/D,OAAO,GAAG,SAAS,0BAA0B,GAC7C,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,GAC1B,SAAS,GAAG,IAAI,GAAG,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;CAChD,GAAG;IAEF,QAAQ,EAAE,CAAC,GAAG,EAAE,kBAAkB,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,IAAI,CAAA;IAC9D,SAAS,EAAE,CACT,GAAG,EAAE,mBAAmB,EACxB,OAAO,EAAE,MAAM,EAAE,EACjB,EAAE,CAAC,EAAE,EAAE,KACJ,OAAO,CAAC,IAAI,CAAC,CAAA;IAClB,UAAU,EAAE,CACV,GAAG,EAAE,oBAAoB,EACzB,OAAO,EAAE,MAAM,EAAE,KACd,SAAS,CAAA;IACd,WAAW,EAAE,CACX,GAAG,EAAE,qBAAqB,EAC1B,OAAO,EAAE,MAAM,EAAE,KACd,UAAU,CAAA;IACf,QAAQ,CAAC,EAAE,CAAC,GAAG,EAAE,UAAU,EAAE,OAAO,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,CAAA;CACzD,CAAA;AAED,eAAO,MAAM,W
AAW;UAEI,IAAI;aAEpB,CAAC,GAAG,EAAE,kBAAkB,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,IAAI,aACnD,CACT,GAAG,EAAE,mBAAmB,EACxB,OAAO,EAAE,MAAM,EAAE,EACjB,EAAE,CAAC,EAAE,EAAE,KACJ,QAAQ,IAAI,CAAC,cACN,CACV,GAAG,EAAE,oBAAoB,EACzB,OAAO,EAAE,MAAM,EAAE,KACd,SAAS,eACD,CACX,GAAG,EAAE,qBAAqB,EAC1B,OAAO,EAAE,MAAM,EAAE,KACd,UAAU,aACJ,CAAC,GAAG,EAAE,UAAU,EAAE,OAAO,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,KACvD,WAAW,UAAU,EAAE,SAAS,CAmElC,CAAA"}

61
node_modules/tar/dist/commonjs/make-command.js generated vendored Normal file
View File

@@ -0,0 +1,61 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.makeCommand = void 0;
const options_js_1 = require("./options.js");
// Build a tar command function from its four concrete implementations
// (sync/async x file/no-file). The returned function normalizes its
// arguments (options object, entry list, optional callback), runs the
// optional `validate` hook, then dispatches on the dealiased options.
const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
    return Object.assign((opt_ = [], entries, cb) => {
        // called as command(entries) with no options object
        if (Array.isArray(opt_)) {
            entries = opt_;
            opt_ = {};
        }
        // called as command(opt, cb) with no entries list
        if (typeof entries === 'function') {
            cb = entries;
            entries = undefined;
        }
        if (!entries) {
            entries = [];
        }
        else {
            // defensive copy: dispatch targets may consume/mutate the list
            entries = Array.from(entries);
        }
        const opt = (0, options_js_1.dealias)(opt_);
        validate?.(opt, entries);
        if ((0, options_js_1.isSyncFile)(opt)) {
            if (typeof cb === 'function') {
                throw new TypeError('callback not supported for sync tar functions');
            }
            return syncFile(opt, entries);
        }
        else if ((0, options_js_1.isAsyncFile)(opt)) {
            const p = asyncFile(opt, entries);
            // weirdness to make TS happy
            const c = cb ? cb : undefined;
            // with a callback, report completion/failure through it;
            // otherwise hand back the promise
            return c ? p.then(() => c(), c) : p;
        }
        else if ((0, options_js_1.isSyncNoFile)(opt)) {
            if (typeof cb === 'function') {
                throw new TypeError('callback not supported for sync tar functions');
            }
            return syncNoFile(opt, entries);
        }
        else if ((0, options_js_1.isAsyncNoFile)(opt)) {
            // no file write to wait for, so a callback makes no sense here
            if (typeof cb === 'function') {
                throw new TypeError('callback only supported with file option');
            }
            return asyncNoFile(opt, entries);
            /* c8 ignore start */
        }
        else {
            throw new Error('impossible options??');
        }
        /* c8 ignore stop */
    }, {
        // expose the four implementations and validator for direct use
        syncFile,
        asyncFile,
        syncNoFile,
        asyncNoFile,
        validate,
    });
};
exports.makeCommand = makeCommand;
//# sourceMappingURL=make-command.js.map

1
node_modules/tar/dist/commonjs/make-command.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

27
node_modules/tar/dist/commonjs/mkdir.d.ts generated vendored Normal file
View File

@@ -0,0 +1,27 @@
/// <reference types="node" />
import { CwdError } from './cwd-error.js';
import { SymlinkError } from './symlink-error.js';
// Options accepted by mkdir/mkdirSync below.
export type MkdirOptions = {
    uid?: number; // target owner for created directories
    gid?: number; // target group for created directories
    processUid?: number; // current process uid (skip chown when equal)
    processGid?: number; // current process gid (skip chown when equal)
    umask?: number; // mask used to decide whether an explicit chmod is needed
    preserve: boolean; // use mkdirp-style recursive creation
    unlink: boolean; // remove non-directory obstructions and retry
    cache: Map<string, boolean>; // dirs already known to exist
    cwd: string; // extraction root; never created, only verified
    mode: number; // mode for created directories
};
// union of everything mkdir can report: plain fs errors, a bad cwd,
// or an entry that would extract through a symlink
export type MkdirError = NodeJS.ErrnoException | CwdError | SymlinkError;
/**
* Wrapper around mkdirp for tar's needs.
*
* The main purpose is to avoid creating directories if we know that
* they already exist (and track which ones exist for this purpose),
* and prevent entries from being extracted into symlinked folders,
* if `preservePaths` is not set.
*/
export declare const mkdir: (dir: string, opt: MkdirOptions, cb: (er?: null | MkdirError, made?: string) => void) => void | Promise<void>;
export declare const mkdirSync: (dir: string, opt: MkdirOptions) => void | SymlinkError;
//# sourceMappingURL=mkdir.d.ts.map

1
node_modules/tar/dist/commonjs/mkdir.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"mkdir.d.ts","sourceRoot":"","sources":["../../src/mkdir.ts"],"names":[],"mappings":";AAIA,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AAEzC,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAA;AAEjD,MAAM,MAAM,YAAY,GAAG;IACzB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;IAC3B,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,EAAE,MAAM,CAAA;CACb,CAAA;AAED,MAAM,MAAM,UAAU,GAClB,MAAM,CAAC,cAAc,GACrB,QAAQ,GACR,YAAY,CAAA;AAyBhB;;;;;;;GAOG;AACH,eAAO,MAAM,KAAK,QACX,MAAM,OACN,YAAY,MACb,CAAC,EAAE,CAAC,EAAE,IAAI,GAAG,UAAU,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,IAAI,yBA0DpD,CAAA;AA+FD,eAAO,MAAM,SAAS,QAAS,MAAM,OAAO,YAAY,wBA+EvD,CAAA"}

209
node_modules/tar/dist/commonjs/mkdir.js generated vendored Normal file
View File

@@ -0,0 +1,209 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.mkdirSync = exports.mkdir = void 0;
const chownr_1 = require("chownr");
const fs_1 = __importDefault(require("fs"));
const mkdirp_1 = require("mkdirp");
const node_path_1 = __importDefault(require("node:path"));
const cwd_error_js_1 = require("./cwd-error.js");
const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
const symlink_error_js_1 = require("./symlink-error.js");
// cache accessors that normalize windows path separators in keys
const cGet = (cache, key) => cache.get((0, normalize_windows_path_js_1.normalizeWindowsPath)(key));
const cSet = (cache, key, val) => cache.set((0, normalize_windows_path_js_1.normalizeWindowsPath)(key), val);
// verify the extraction cwd exists and is a directory; reports a
// CwdError (never creates the cwd itself)
const checkCwd = (dir, cb) => {
    fs_1.default.stat(dir, (er, st) => {
        if (er || !st.isDirectory()) {
            er = new cwd_error_js_1.CwdError(dir, er?.code || 'ENOTDIR');
        }
        cb(er);
    });
};
/**
* Wrapper around mkdirp for tar's needs.
*
* The main purpose is to avoid creating directories if we know that
* they already exist (and track which ones exist for this purpose),
* and prevent entries from being extracted into symlinked folders,
* if `preservePaths` is not set.
*/
const mkdir = (dir, opt, cb) => {
    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
    // if there's any overlap between mask and mode,
    // then we'll need an explicit chmod
    /* c8 ignore next */
    const umask = opt.umask ?? 0o22;
    // always keep owner rwx on directories we create
    const mode = opt.mode | 0o0700;
    const needChmod = (mode & umask) !== 0;
    const uid = opt.uid;
    const gid = opt.gid;
    // only chown when explicit ids are given and differ from the process
    const doChown = typeof uid === 'number' &&
        typeof gid === 'number' &&
        (uid !== opt.processUid || gid !== opt.processGid);
    const preserve = opt.preserve;
    const unlink = opt.unlink;
    const cache = opt.cache;
    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
    // shared completion: record success in the cache, then apply
    // ownership/mode fixups before reporting back to the caller.
    // NOTE: chownr re-enters done() without `created`, so the chown
    // happens at most once.
    const done = (er, created) => {
        if (er) {
            cb(er);
        }
        else {
            cSet(cache, dir, true);
            if (created && doChown) {
                (0, chownr_1.chownr)(created, uid, gid, er => done(er));
            }
            else if (needChmod) {
                fs_1.default.chmod(dir, mode, cb);
            }
            else {
                cb();
            }
        }
    };
    if (cache && cGet(cache, dir) === true) {
        return done();
    }
    if (dir === cwd) {
        // never create the cwd itself, just verify it
        return checkCwd(dir, done);
    }
    if (preserve) {
        return (0, mkdirp_1.mkdirp)(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
        done);
    }
    // otherwise walk down from cwd creating each missing component
    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
    const parts = sub.split('/');
    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
};
exports.mkdir = mkdir;
// recursively create each path component in `parts` under `base`.
// `created` carries the first directory actually created (the chown
// root). Terminates by calling cb(null, created) when parts is empty.
const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
    if (!parts.length) {
        return cb(null, created);
    }
    const p = parts.shift();
    const part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(base + '/' + p));
    if (cGet(cache, part)) {
        // already known to exist: skip ahead to the next component
        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
    }
    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
};
// handle the result of one fs.mkdir: on failure, lstat the obstruction
// and decide whether it is a directory (recurse into it), something
// removable (unlink and retry when allowed), or a symlink (always an
// error during extraction).
const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
    if (er) {
        fs_1.default.lstat(part, (statEr, st) => {
            if (statEr) {
                statEr.path =
                    statEr.path && (0, normalize_windows_path_js_1.normalizeWindowsPath)(statEr.path);
                cb(statEr);
            }
            else if (st.isDirectory()) {
                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
            }
            else if (unlink) {
                // replace the non-directory obstruction, then retry mkdir
                fs_1.default.unlink(part, er => {
                    if (er) {
                        return cb(er);
                    }
                    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
                });
            }
            else if (st.isSymbolicLink()) {
                // refuse to extract through symlinked directories
                return cb(new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/')));
            }
            else {
                // obstruction exists but may not be replaced: report the
                // original mkdir error
                cb(er);
            }
        });
    }
    else {
        created = created || part;
        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
    }
};
// synchronous variant of checkCwd: throws CwdError when `dir` is
// missing or not a directory (the throw lives in `finally` so both the
// stat-failed and not-a-directory cases funnel through one place)
const checkCwdSync = (dir) => {
    let ok = false;
    let code = undefined;
    try {
        ok = fs_1.default.statSync(dir).isDirectory();
    }
    catch (er) {
        code = er?.code;
    }
    finally {
        if (!ok) {
            throw new cwd_error_js_1.CwdError(dir, code ?? 'ENOTDIR');
        }
    }
};
// synchronous variant of mkdir above. Same semantics, except a symlink
// obstruction is *returned* as a SymlinkError rather than thrown.
const mkdirSync = (dir, opt) => {
    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
    // if there's any overlap between mask and mode,
    // then we'll need an explicit chmod
    /* c8 ignore next */
    const umask = opt.umask ?? 0o22;
    // always keep owner rwx on directories we create
    const mode = opt.mode | 0o700;
    const needChmod = (mode & umask) !== 0;
    const uid = opt.uid;
    const gid = opt.gid;
    // only chown when explicit ids are given and differ from the process
    const doChown = typeof uid === 'number' &&
        typeof gid === 'number' &&
        (uid !== opt.processUid || gid !== opt.processGid);
    const preserve = opt.preserve;
    const unlink = opt.unlink;
    const cache = opt.cache;
    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
    // shared completion: record success, then apply ownership/mode fixups
    const done = (created) => {
        cSet(cache, dir, true);
        if (created && doChown) {
            (0, chownr_1.chownrSync)(created, uid, gid);
        }
        if (needChmod) {
            fs_1.default.chmodSync(dir, mode);
        }
    };
    if (cache && cGet(cache, dir) === true) {
        return done();
    }
    if (dir === cwd) {
        // never create the cwd itself, just verify it
        checkCwdSync(cwd);
        return done();
    }
    if (preserve) {
        return done((0, mkdirp_1.mkdirpSync)(dir, mode) ?? undefined);
    }
    // walk down from cwd creating each missing path component
    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
    const parts = sub.split('/');
    let created = undefined;
    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
        part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(part));
        if (cGet(cache, part)) {
            continue;
        }
        try {
            fs_1.default.mkdirSync(part, mode);
            created = created || part;
            cSet(cache, part, true);
        }
        catch (er) {
            // mkdir failed: inspect the obstruction (lstat may itself
            // throw, propagating e.g. a permission error)
            const st = fs_1.default.lstatSync(part);
            if (st.isDirectory()) {
                cSet(cache, part, true);
                continue;
            }
            else if (unlink) {
                // replace the non-directory obstruction, then retry
                fs_1.default.unlinkSync(part);
                fs_1.default.mkdirSync(part, mode);
                created = created || part;
                cSet(cache, part, true);
                continue;
            }
            else if (st.isSymbolicLink()) {
                return new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/'));
            }
        }
    }
    return done(created);
};
exports.mkdirSync = mkdirSync;
//# sourceMappingURL=mkdir.js.map

1
node_modules/tar/dist/commonjs/mkdir.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

2
node_modules/tar/dist/commonjs/mode-fix.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
export declare const modeFix: (mode: number, isDir: boolean, portable: boolean) => number;
//# sourceMappingURL=mode-fix.d.ts.map

1
node_modules/tar/dist/commonjs/mode-fix.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"mode-fix.d.ts","sourceRoot":"","sources":["../../src/mode-fix.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,OAAO,SACZ,MAAM,SACL,OAAO,YACJ,OAAO,WA0BlB,CAAA"}

29
node_modules/tar/dist/commonjs/mode-fix.js generated vendored Normal file
View File

@@ -0,0 +1,29 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.modeFix = void 0;
// Clamp a file mode to its permission bits and apply tar's rules:
// portable archives get a minimum-umask normalization, and readable
// directories are made traversable as well.
const modeFix = (mode, isDir, portable) => {
    let fixed = mode & 0o7777;
    // in portable mode, use the minimum reasonable umask: the owner can
    // always read/write, and group/other write bits are stripped (so a
    // 0o664-default system still produces 0o644 archives)
    if (portable) {
        fixed = (fixed | 0o600) & ~0o22;
    }
    // if dirs are readable, then they should be listable: promote each
    // read bit (owner/group/other) to the matching execute bit
    if (isDir) {
        const readToExec = [
            [0o400, 0o100],
            [0o40, 0o10],
            [0o4, 0o1],
        ];
        for (const [r, x] of readToExec) {
            if (fixed & r) {
                fixed |= x;
            }
        }
    }
    return fixed;
};
exports.modeFix = modeFix;
//# sourceMappingURL=mode-fix.js.map

1
node_modules/tar/dist/commonjs/mode-fix.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"mode-fix.js","sourceRoot":"","sources":["../../src/mode-fix.ts"],"names":[],"mappings":";;;AAAO,MAAM,OAAO,GAAG,CACrB,IAAY,EACZ,KAAc,EACd,QAAiB,EACjB,EAAE;IACF,IAAI,IAAI,MAAM,CAAA;IAEd,qDAAqD;IACrD,qDAAqD;IACrD,mDAAmD;IACnD,uDAAuD;IACvD,qDAAqD;IACrD,IAAI,QAAQ,EAAE,CAAC;QACb,IAAI,GAAG,CAAC,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,IAAI,CAAA;IAC/B,CAAC;IAED,qDAAqD;IACrD,IAAI,KAAK,EAAE,CAAC;QACV,IAAI,IAAI,GAAG,KAAK,EAAE,CAAC;YACjB,IAAI,IAAI,KAAK,CAAA;QACf,CAAC;QACD,IAAI,IAAI,GAAG,IAAI,EAAE,CAAC;YAChB,IAAI,IAAI,IAAI,CAAA;QACd,CAAC;QACD,IAAI,IAAI,GAAG,GAAG,EAAE,CAAC;YACf,IAAI,IAAI,GAAG,CAAA;QACb,CAAC;IACH,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AA7BY,QAAA,OAAO,WA6BnB","sourcesContent":["export const modeFix = (\n mode: number,\n isDir: boolean,\n portable: boolean,\n) => {\n mode &= 0o7777\n\n // in portable mode, use the minimum reasonable umask\n // if this system creates files with 0o664 by default\n // (as some linux distros do), then we'll write the\n // archive with 0o644 instead. Also, don't ever create\n // a file that is not readable/writable by the owner.\n if (portable) {\n mode = (mode | 0o600) & ~0o22\n }\n\n // if dirs are readable, then they should be listable\n if (isDir) {\n if (mode & 0o400) {\n mode |= 0o100\n }\n if (mode & 0o40) {\n mode |= 0o10\n }\n if (mode & 0o4) {\n mode |= 0o1\n }\n }\n return mode\n}\n"]}

View File

@@ -0,0 +1,2 @@
/**
 * Return `s` normalized to Unicode NFD (canonical decomposition) form,
 * memoized for the life of the process.
 *
 * The implementation caches results in an untyped null-prototype object,
 * which made the generated return type leak `any`; the stored value is
 * always `s.normalize('NFD')`, so the accurate return type is `string`.
 */
export declare const normalizeUnicode: (s: string) => string;
//# sourceMappingURL=normalize-unicode.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"normalize-unicode.d.ts","sourceRoot":"","sources":["../../src/normalize-unicode.ts"],"names":[],"mappings":"AAMA,eAAO,MAAM,gBAAgB,MAAO,MAAM,QAKzC,CAAA"}

17
node_modules/tar/dist/commonjs/normalize-unicode.js generated vendored Normal file
View File

@@ -0,0 +1,17 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.normalizeUnicode = void 0;
// warning: extremely hot code path.
// This has been meticulously optimized for use
// within npm install on large package trees.
// Do not edit without careful benchmarking.
// Memoization cache for NFD results. Null-prototype so lookups never hit
// Object.prototype; note it grows without bound for the process lifetime —
// an accepted trade-off for this hot path.
const normalizeCache = Object.create(null);
// hasOwnProperty must be borrowed from Object.prototype because the
// null-prototype cache object has no own copy of it.
const { hasOwnProperty } = Object.prototype;
// Return `s` in Unicode NFD (canonical decomposition) form, computing the
// normalization at most once per distinct input string.
const normalizeUnicode = (s) => {
    if (!hasOwnProperty.call(normalizeCache, s)) {
        normalizeCache[s] = s.normalize('NFD');
    }
    return normalizeCache[s];
};
exports.normalizeUnicode = normalizeUnicode;
//# sourceMappingURL=normalize-unicode.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"normalize-unicode.js","sourceRoot":"","sources":["../../src/normalize-unicode.ts"],"names":[],"mappings":";;;AAAA,oCAAoC;AACpC,+CAA+C;AAC/C,6CAA6C;AAC7C,4CAA4C;AAC5C,MAAM,cAAc,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;AAC1C,MAAM,EAAE,cAAc,EAAE,GAAG,MAAM,CAAC,SAAS,CAAA;AACpC,MAAM,gBAAgB,GAAG,CAAC,CAAS,EAAE,EAAE;IAC5C,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE,CAAC;QAC5C,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,CAAA;IACxC,CAAC;IACD,OAAO,cAAc,CAAC,CAAC,CAAC,CAAA;AAC1B,CAAC,CAAA;AALY,QAAA,gBAAgB,oBAK5B","sourcesContent":["// warning: extremely hot code path.\n// This has been meticulously optimized for use\n// within npm install on large package trees.\n// Do not edit without careful benchmarking.\nconst normalizeCache = Object.create(null)\nconst { hasOwnProperty } = Object.prototype\nexport const normalizeUnicode = (s: string) => {\n if (!hasOwnProperty.call(normalizeCache, s)) {\n normalizeCache[s] = s.normalize('NFD')\n }\n return normalizeCache[s]\n}\n"]}

View File

@@ -0,0 +1,2 @@
/**
 * On win32, replace every `\` with `/` so `/` is the single separator;
 * on every other platform this is the identity function, since `\` is a
 * legal filename character there.
 */
export declare const normalizeWindowsPath: (p: string) => string;
//# sourceMappingURL=normalize-windows-path.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"normalize-windows-path.d.ts","sourceRoot":"","sources":["../../src/normalize-windows-path.ts"],"names":[],"mappings":"AAQA,eAAO,MAAM,oBAAoB,MAEzB,MAAM,WAC+B,CAAA"}

View File

@@ -0,0 +1,12 @@
"use strict";
// on windows, either \ or / are valid directory separators.
// on unix, \ is a valid character in filenames.
// so, on windows, and only on windows, we replace all \ chars with /,
// so that we can use / as our one and only directory separator char.
Object.defineProperty(exports, "__esModule", { value: true });
exports.normalizeWindowsPath = void 0;
// Pick the normalizer once at module load: a no-op everywhere except
// win32, where every backslash becomes a forward slash. The env var hook
// lets tests force the win32 behavior on other platforms.
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
// falsy inputs (e.g. '') pass through untouched, matching `p && p.replace`
const toPosix = (p) => (p ? p.replace(/\\/g, '/') : p);
exports.normalizeWindowsPath = platform === 'win32' ? toPosix : (p) => p;
//# sourceMappingURL=normalize-windows-path.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"normalize-windows-path.js","sourceRoot":"","sources":["../../src/normalize-windows-path.ts"],"names":[],"mappings":";AAAA,4DAA4D;AAC5D,gDAAgD;AAChD,sEAAsE;AACtE,qEAAqE;;;AAErE,MAAM,QAAQ,GACZ,OAAO,CAAC,GAAG,CAAC,yBAAyB,IAAI,OAAO,CAAC,QAAQ,CAAA;AAE9C,QAAA,oBAAoB,GAC/B,QAAQ,KAAK,OAAO,CAAC,CAAC;IACpB,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC;IAClB,CAAC,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAA","sourcesContent":["// on windows, either \\ or / are valid directory separators.\n// on unix, \\ is a valid character in filenames.\n// so, on windows, and only on windows, we replace all \\ chars with /,\n// so that we can use / as our one and only directory separator char.\n\nconst platform =\n process.env.TESTING_TAR_FAKE_PLATFORM || process.platform\n\nexport const normalizeWindowsPath =\n platform !== 'win32' ?\n (p: string) => p\n : (p: string) => p && p.replace(/\\\\/g, '/')\n"]}

605
node_modules/tar/dist/commonjs/options.d.ts generated vendored Normal file
View File

@@ -0,0 +1,605 @@
/// <reference types="node" />
import { type GzipOptions, type ZlibOptions } from 'minizlib';
import { type Stats } from 'node:fs';
import { type ReadEntry } from './read-entry.js';
import { type WarnData } from './warn-method.js';
import { WriteEntry } from './write-entry.js';
/**
* The options that can be provided to tar commands.
*
* Note that some of these are only relevant for certain commands, since
* they are specific to reading or writing.
*
* Aliases are provided in the {@link TarOptionsWithAliases} type.
*/
export interface TarOptions {
/**
* Perform all I/O operations synchronously. If the stream is ended
* immediately, then it will be processed entirely synchronously.
*/
sync?: boolean;
/**
* The tar file to be read and/or written. When this is set, a stream
* is not returned. Asynchronous commands will return a promise indicating
* when the operation is completed, and synchronous commands will return
* immediately.
*/
file?: string;
/**
* Treat warnings as crash-worthy errors. Defaults false.
*/
strict?: boolean;
/**
* The effective current working directory for this tar command
*/
cwd?: string;
/**
* When creating a tar archive, this can be used to compress it as well.
* Set to `true` to use the default gzip options, or customize them as
* needed.
*
* When reading, if this is unset, then the compression status will be
* inferred from the archive data. This is generally best, unless you are
* sure of the compression settings in use to create the archive, and want to
* fail if the archive doesn't match expectations.
*/
gzip?: boolean | GzipOptions;
/**
* When creating archives, preserve absolute and `..` paths in the archive,
* rather than sanitizing them under the cwd.
*
* When extracting, allow absolute paths, paths containing `..`, and
* extracting through symbolic links. By default, the root `/` is stripped
* from absolute paths (eg, turning `/x/y/z` into `x/y/z`), paths containing
* `..` are not extracted, and any file whose location would be modified by a
* symbolic link is not extracted.
*
* **WARNING** This is almost always unsafe, and must NEVER be used on
* archives from untrusted sources, such as user input, and every entry must
* be validated to ensure it is safe to write. Even if the input is not
* malicious, mistakes can cause a lot of damage!
*/
preservePaths?: boolean;
/**
* When extracting, do not set the `mtime` value for extracted entries to
* match the `mtime` in the archive.
*
* When creating archives, do not store the `mtime` value in the entry. Note
* that this prevents properly using other mtime-based features (such as
* `tar.update` or the `newer` option) with the resulting archive.
*/
noMtime?: boolean;
/**
* Set to `true` or an object with settings for `zlib.BrotliCompress()` to
* create a brotli-compressed archive
*
* When extracting, this will cause the archive to be treated as a
* brotli-compressed file if set to `true` or a ZlibOptions object.
*
* If set `false`, then brotli options will not be used.
*
* If both this and the `gzip` option are left `undefined`, then tar will
* attempt to infer the brotli compression status, but can only do so based
* on the filename. If the filename ends in `.tbr` or `.tar.br`, and the
* first 512 bytes are not a valid tar header, then brotli decompression
* will be attempted.
*/
brotli?: boolean | ZlibOptions;
/**
* A function that is called with `(path, stat)` when creating an archive, or
* `(path, entry)` when extracting. Return true to process the file/entry, or
* false to exclude it.
*/
filter?: (path: string, entry: Stats | ReadEntry) => boolean;
/**
* A function that gets called for any warning encountered.
*
* Note: if `strict` is set, then the warning will throw, and this method
* will not be called.
*/
onwarn?: (code: string, message: string, data: WarnData) => any;
/**
* When extracting, unlink files before creating them. Without this option,
* tar overwrites existing files, which preserves existing hardlinks. With
* this option, existing hardlinks will be broken, as will any symlink that
* would affect the location of an extracted file.
*/
unlink?: boolean;
/**
* When extracting, strip the specified number of path portions from the
* entry path. For example, with `{strip: 2}`, the entry `a/b/c/d` would be
* extracted to `{cwd}/c/d`.
*
* Any entry whose entire path is stripped will be excluded.
*/
strip?: number;
/**
* When extracting, keep the existing file on disk if it's newer than the
* file in the archive.
*/
newer?: boolean;
/**
* When extracting, do not overwrite existing files at all.
*/
keep?: boolean;
/**
* When extracting, set the `uid` and `gid` of extracted entries to the `uid`
* and `gid` fields in the archive. Defaults to true when run as root, and
* false otherwise.
*
* If false, then files and directories will be set with the owner and group
* of the user running the process. This is similar to `-p` in `tar(1)`, but
* ACLs and other system-specific data is never unpacked in this
* implementation, and modes are set by default already.
*/
preserveOwner?: boolean;
/**
* The maximum depth of subfolders to extract into. This defaults to 1024.
* Anything deeper than the limit will raise a warning and skip the entry.
* Set to `Infinity` to remove the limitation.
*/
maxDepth?: number;
/**
* When extracting, force all created files and directories, and all
* implicitly created directories, to be owned by the specified user id,
* regardless of the `uid` field in the archive.
*
* Cannot be used along with `preserveOwner`. Requires also setting the `gid`
* option.
*/
uid?: number;
/**
* When extracting, force all created files and directories, and all
* implicitly created directories, to be owned by the specified group id,
* regardless of the `gid` field in the archive.
*
* Cannot be used along with `preserveOwner`. Requires also setting the `uid`
* option.
*/
gid?: number;
/**
* When extracting, provide a function that takes an `entry` object, and
* returns a stream, or any falsey value. If a stream is provided, then that
* stream's data will be written instead of the contents of the archive
* entry. If a falsey value is provided, then the entry is written to disk as
* normal.
*
* To exclude items from extraction, use the `filter` option.
*
* Note that using an asynchronous stream type with the `transform` option
* will cause undefined behavior in synchronous extractions.
* [MiniPass](http://npm.im/minipass)-based streams are designed for this use
* case.
*/
transform?: (entry: ReadEntry) => any;
/**
* Call `chmod()` to ensure that extracted files match the entry's mode
* field. Without this field set, all mode fields in archive entries are a
* best effort attempt only.
*
* Setting this necessitates a call to the deprecated `process.umask()`
* method to determine the default umask value, unless a `processUmask`
* config is provided as well.
*
* If not set, tar will attempt to create file system entries with whatever
* mode is provided, and let the implicit process `umask` apply normally, but
* if a file already exists to be written to, then its existing mode will not
* be modified.
*
* When setting `chmod: true`, it is highly recommend to set the
* {@link TarOptions#processUmask} option as well, to avoid the call to the
* deprecated (and thread-unsafe) `process.umask()` method.
*/
chmod?: boolean;
/**
* When setting the {@link TarOptions#chmod} option to `true`, you may
* provide a value here to avoid having to call the deprecated and
* thread-unsafe `process.umask()` method.
*
     * This has no effect when `chmod` is not set to true, as mode values are not
* set explicitly anyway. If `chmod` is set to `true`, and a value is not
* provided here, then `process.umask()` must be called, which will result in
* deprecation warnings.
*
* The most common values for this are `0o22` (resulting in directories
* created with mode `0o755` and files with `0o644` by default) and `0o2`
     * (resulting in directories created with mode `0o775` and files `0o664`, so
* they are group-writable).
*/
processUmask?: number;
/**
* When parsing/listing archives, `entry` streams are by default resumed
* (set into "flowing" mode) immediately after the call to `onReadEntry()`.
* Set `noResume: true` to suppress this behavior.
*
* Note that when this is set, the stream will never complete until the
* data is consumed somehow.
*
* Set automatically in extract operations, since the entry is piped to
* a file system entry right away. Only relevant when parsing.
*/
noResume?: boolean;
/**
* When creating, updating, or replacing within archives, this method will
* be called with each WriteEntry that is created.
*/
onWriteEntry?: (entry: WriteEntry) => any;
/**
* When extracting or listing archives, this method will be called with
* each entry that is not excluded by a `filter`.
*
* Important when listing archives synchronously from a file, because there
* is otherwise no way to interact with the data!
*/
onReadEntry?: (entry: ReadEntry) => any;
/**
* Pack the targets of symbolic links rather than the link itself.
*/
follow?: boolean;
/**
* When creating archives, omit any metadata that is system-specific:
* `ctime`, `atime`, `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and
* `nlink`. Note that `mtime` is still included, because this is necessary
* for other time-based operations such as `tar.update`. Additionally, `mode`
     * is set to a "reasonable default" for most unix systems, based on an
* effective `umask` of `0o22`.
*
* This also defaults the `portable` option in the gzip configs when creating
* a compressed archive, in order to produce deterministic archives that are
* not operating-system specific.
*/
portable?: boolean;
/**
* When creating archives, do not recursively archive the contents of
* directories. By default, archiving a directory archives all of its
* contents as well.
*/
noDirRecurse?: boolean;
/**
* Suppress Pax extended headers when creating archives. Note that this means
* long paths and linkpaths will be truncated, and large or negative numeric
* values may be interpreted incorrectly.
*/
noPax?: boolean;
/**
* Set to a `Date` object to force a specific `mtime` value for everything
* written to an archive.
*
* This is useful when creating archives that are intended to be
* deterministic based on their contents, irrespective of the file's last
* modification time.
*
* Overridden by `noMtime`.
*/
mtime?: Date;
/**
* A path portion to prefix onto the entries added to an archive.
*/
prefix?: string;
/**
* The mode to set on any created file archive, defaults to 0o666
* masked by the process umask, often resulting in 0o644.
*
* This does *not* affect the mode fields of individual entries, or the
* mode status of extracted entries on the filesystem.
*/
mode?: number;
/**
* A cache of mtime values, to avoid having to stat the same file repeatedly.
*
* @internal
*/
mtimeCache?: Map<string, Date>;
/**
* maximum buffer size for `fs.read()` operations.
*
* @internal
*/
maxReadSize?: number;
/**
* Filter modes of entries being unpacked, like `process.umask()`
*
* @internal
*/
umask?: number;
/**
* Default mode for directories. Used for all implicitly created directories,
* and any directories in the archive that do not have a mode field.
*
* @internal
*/
dmode?: number;
/**
* default mode for files
*
* @internal
*/
fmode?: number;
/**
* Map that tracks which directories already exist, for extraction
*
* @internal
*/
dirCache?: Map<string, boolean>;
/**
* maximum supported size of meta entries. Defaults to 1MB
*
* @internal
*/
maxMetaEntrySize?: number;
/**
* A Map object containing the device and inode value for any file whose
* `nlink` value is greater than 1, to identify hard links when creating
* archives.
*
* @internal
*/
linkCache?: Map<LinkCacheKey, string>;
/**
* A map object containing the results of `fs.readdir()` calls.
*
* @internal
*/
readdirCache?: Map<string, string[]>;
/**
* A cache of all `lstat` results, for use in creating archives.
*
* @internal
*/
statCache?: Map<string, Stats>;
/**
* Number of concurrent jobs to run when creating archives.
*
* Defaults to 4.
*
* @internal
*/
jobs?: number;
/**
* Automatically set to true on Windows systems.
*
* When extracting, causes behavior where filenames containing `<|>?:`
* characters are converted to windows-compatible escape sequences in the
* created filesystem entries.
*
* When packing, causes behavior where paths replace `\` with `/`, and
* filenames containing the windows-compatible escaped forms of `<|>?:` are
* converted to actual `<|>?:` characters in the archive.
*
* @internal
*/
win32?: boolean;
/**
* For `WriteEntry` objects, the absolute path to the entry on the
* filesystem. By default, this is `resolve(cwd, entry.path)`, but it can be
* overridden explicitly.
*
* @internal
*/
absolute?: string;
/**
* Used with Parser stream interface, to attach and take over when the
* stream is completely parsed. If this is set, then the prefinish,
* finish, and end events will not fire, and are the responsibility of
* the ondone method to emit properly.
*
* @internal
*/
ondone?: () => void;
/**
* Mostly for testing, but potentially useful in some cases.
* Forcibly trigger a chown on every entry, no matter what.
*/
forceChown?: boolean;
/**
* ambiguous deprecated name for {@link onReadEntry}
*
* @deprecated
*/
onentry?: (entry: ReadEntry) => any;
}
export type TarOptionsSync = TarOptions & {
sync: true;
};
export type TarOptionsAsync = TarOptions & {
sync?: false;
};
export type TarOptionsFile = TarOptions & {
file: string;
};
export type TarOptionsNoFile = TarOptions & {
file?: undefined;
};
export type TarOptionsSyncFile = TarOptionsSync & TarOptionsFile;
export type TarOptionsAsyncFile = TarOptionsAsync & TarOptionsFile;
export type TarOptionsSyncNoFile = TarOptionsSync & TarOptionsNoFile;
export type TarOptionsAsyncNoFile = TarOptionsAsync & TarOptionsNoFile;
export type LinkCacheKey = `${number}:${number}`;
export interface TarOptionsWithAliases extends TarOptions {
/**
* The effective current working directory for this tar command
*/
C?: TarOptions['cwd'];
/**
* The tar file to be read and/or written. When this is set, a stream
* is not returned. Asynchronous commands will return a promise indicating
* when the operation is completed, and synchronous commands will return
* immediately.
*/
f?: TarOptions['file'];
/**
* When creating a tar archive, this can be used to compress it as well.
* Set to `true` to use the default gzip options, or customize them as
* needed.
*
* When reading, if this is unset, then the compression status will be
* inferred from the archive data. This is generally best, unless you are
* sure of the compression settings in use to create the archive, and want to
* fail if the archive doesn't match expectations.
*/
z?: TarOptions['gzip'];
/**
* When creating archives, preserve absolute and `..` paths in the archive,
* rather than sanitizing them under the cwd.
*
* When extracting, allow absolute paths, paths containing `..`, and
* extracting through symbolic links. By default, the root `/` is stripped
* from absolute paths (eg, turning `/x/y/z` into `x/y/z`), paths containing
* `..` are not extracted, and any file whose location would be modified by a
* symbolic link is not extracted.
*
* **WARNING** This is almost always unsafe, and must NEVER be used on
* archives from untrusted sources, such as user input, and every entry must
* be validated to ensure it is safe to write. Even if the input is not
* malicious, mistakes can cause a lot of damage!
*/
P?: TarOptions['preservePaths'];
/**
* When extracting, unlink files before creating them. Without this option,
* tar overwrites existing files, which preserves existing hardlinks. With
* this option, existing hardlinks will be broken, as will any symlink that
* would affect the location of an extracted file.
*/
U?: TarOptions['unlink'];
/**
* When extracting, strip the specified number of path portions from the
* entry path. For example, with `{strip: 2}`, the entry `a/b/c/d` would be
* extracted to `{cwd}/c/d`.
*/
'strip-components'?: TarOptions['strip'];
/**
* When extracting, strip the specified number of path portions from the
* entry path. For example, with `{strip: 2}`, the entry `a/b/c/d` would be
* extracted to `{cwd}/c/d`.
*/
stripComponents?: TarOptions['strip'];
/**
* When extracting, keep the existing file on disk if it's newer than the
* file in the archive.
*/
'keep-newer'?: TarOptions['newer'];
/**
* When extracting, keep the existing file on disk if it's newer than the
* file in the archive.
*/
keepNewer?: TarOptions['newer'];
/**
* When extracting, keep the existing file on disk if it's newer than the
* file in the archive.
*/
'keep-newer-files'?: TarOptions['newer'];
/**
* When extracting, keep the existing file on disk if it's newer than the
* file in the archive.
*/
keepNewerFiles?: TarOptions['newer'];
/**
* When extracting, do not overwrite existing files at all.
*/
k?: TarOptions['keep'];
/**
* When extracting, do not overwrite existing files at all.
*/
'keep-existing'?: TarOptions['keep'];
/**
* When extracting, do not overwrite existing files at all.
*/
keepExisting?: TarOptions['keep'];
/**
* When extracting, do not set the `mtime` value for extracted entries to
* match the `mtime` in the archive.
*
* When creating archives, do not store the `mtime` value in the entry. Note
* that this prevents properly using other mtime-based features (such as
* `tar.update` or the `newer` option) with the resulting archive.
*/
m?: TarOptions['noMtime'];
/**
* When extracting, do not set the `mtime` value for extracted entries to
* match the `mtime` in the archive.
*
* When creating archives, do not store the `mtime` value in the entry. Note
* that this prevents properly using other mtime-based features (such as
* `tar.update` or the `newer` option) with the resulting archive.
*/
'no-mtime'?: TarOptions['noMtime'];
/**
* When extracting, set the `uid` and `gid` of extracted entries to the `uid`
* and `gid` fields in the archive. Defaults to true when run as root, and
* false otherwise.
*
* If false, then files and directories will be set with the owner and group
* of the user running the process. This is similar to `-p` in `tar(1)`, but
* ACLs and other system-specific data is never unpacked in this
* implementation, and modes are set by default already.
*/
p?: TarOptions['preserveOwner'];
/**
* Pack the targets of symbolic links rather than the link itself.
*/
L?: TarOptions['follow'];
/**
* Pack the targets of symbolic links rather than the link itself.
*/
h?: TarOptions['follow'];
/**
* Deprecated option. Set explicitly false to set `chmod: true`. Ignored
* if {@link TarOptions#chmod} is set to any boolean value.
*
* @deprecated
*/
noChmod?: boolean;
}
export type TarOptionsWithAliasesSync = TarOptionsWithAliases & {
sync: true;
};
export type TarOptionsWithAliasesAsync = TarOptionsWithAliases & {
sync?: false;
};
export type TarOptionsWithAliasesFile = (TarOptionsWithAliases & {
file: string;
}) | (TarOptionsWithAliases & {
f: string;
});
export type TarOptionsWithAliasesSyncFile = TarOptionsWithAliasesSync & TarOptionsWithAliasesFile;
export type TarOptionsWithAliasesAsyncFile = TarOptionsWithAliasesAsync & TarOptionsWithAliasesFile;
export type TarOptionsWithAliasesNoFile = TarOptionsWithAliases & {
f?: undefined;
file?: undefined;
};
export type TarOptionsWithAliasesSyncNoFile = TarOptionsWithAliasesSync & TarOptionsWithAliasesNoFile;
export type TarOptionsWithAliasesAsyncNoFile = TarOptionsWithAliasesAsync & TarOptionsWithAliasesNoFile;
export declare const isSyncFile: <O extends TarOptions>(o: O) => o is O & TarOptions & {
sync: true;
} & {
file: string;
};
export declare const isAsyncFile: <O extends TarOptions>(o: O) => o is O & TarOptions & {
sync?: false | undefined;
} & {
file: string;
};
export declare const isSyncNoFile: <O extends TarOptions>(o: O) => o is O & TarOptions & {
sync: true;
} & {
file?: undefined;
};
export declare const isAsyncNoFile: <O extends TarOptions>(o: O) => o is O & TarOptions & {
sync?: false | undefined;
} & {
file?: undefined;
};
export declare const isSync: <O extends TarOptions>(o: O) => o is O & TarOptions & {
sync: true;
};
export declare const isAsync: <O extends TarOptions>(o: O) => o is O & TarOptions & {
sync?: false | undefined;
};
export declare const isFile: <O extends TarOptions>(o: O) => o is O & TarOptions & {
file: string;
};
export declare const isNoFile: <O extends TarOptions>(o: O) => o is O & TarOptions & {
file?: undefined;
};
export declare const dealias: (opt?: TarOptionsWithAliases) => TarOptions;
//# sourceMappingURL=options.d.ts.map

1
node_modules/tar/dist/commonjs/options.d.ts.map generated vendored Normal file

File diff suppressed because one or more lines are too long

66
node_modules/tar/dist/commonjs/options.js generated vendored Normal file
View File

@@ -0,0 +1,66 @@
"use strict";
// turn tar(1) style args like `C` into the more verbose things like `cwd`
Object.defineProperty(exports, "__esModule", { value: true });
exports.dealias = exports.isNoFile = exports.isFile = exports.isAsync = exports.isSync = exports.isAsyncNoFile = exports.isSyncNoFile = exports.isAsyncFile = exports.isSyncFile = void 0;
// tar(1)-style short options and legacy spellings, mapped to the verbose
// canonical option names. All keys are non-integer strings, so
// Object.entries preserves the declaration order when building the Map.
const argmap = new Map(Object.entries({
    C: 'cwd',
    f: 'file',
    z: 'gzip',
    P: 'preservePaths',
    U: 'unlink',
    'strip-components': 'strip',
    stripComponents: 'strip',
    'keep-newer': 'newer',
    keepNewer: 'newer',
    'keep-newer-files': 'newer',
    keepNewerFiles: 'newer',
    k: 'keep',
    'keep-existing': 'keep',
    keepExisting: 'keep',
    m: 'noMtime',
    'no-mtime': 'noMtime',
    p: 'preserveOwner',
    L: 'follow',
    h: 'follow',
    onentry: 'onReadEntry',
}));
// true when both a sync flag and a target file are present
const isSyncFile = (o) => Boolean(o.sync && o.file);
exports.isSyncFile = isSyncFile;
// true when a target file is present and sync mode is not requested
const isAsyncFile = (o) => Boolean(!o.sync && o.file);
exports.isAsyncFile = isAsyncFile;
// true when sync mode is requested and no target file is given
const isSyncNoFile = (o) => Boolean(o.sync && !o.file);
exports.isSyncNoFile = isSyncNoFile;
// true when neither sync mode nor a target file is given (De Morgan form)
const isAsyncNoFile = (o) => !(o.sync || o.file);
exports.isAsyncNoFile = isAsyncNoFile;
// true when sync mode is requested
const isSync = (o) => Boolean(o.sync);
exports.isSync = isSync;
// true when sync mode is NOT requested
const isAsync = (o) => !Boolean(o.sync);
exports.isAsync = isAsync;
// true when a target file path is provided
const isFile = (o) => Boolean(o.file);
exports.isFile = isFile;
// true when no target file path is provided
const isNoFile = (o) => !Boolean(o.file);
exports.isNoFile = isNoFile;
// Translate a possibly-aliased option name to its canonical spelling,
// falling through to the key itself when it is not an alias.
const dealiasKey = (k) => {
    // every alias maps to a non-empty string, so truthiness is safe here
    const verbose = argmap.get(k);
    return verbose || k;
};
// Copy `opt` into a fresh object with every aliased key replaced by its
// canonical name. Also resolves the deprecated `noChmod: false` spelling
// into `chmod: true`, and always drops `noChmod` from the result.
const dealias = (opt = {}) => {
    if (!opt)
        return {};
    // TS doesn't know that aliases are going to always be the same type
    const result = Object.entries(opt).reduce((acc, [alias, value]) => {
        acc[dealiasKey(alias)] = value;
        return acc;
    }, {});
    // affordance for deprecated noChmod -> chmod
    if (result.chmod === undefined && result.noChmod === false) {
        result.chmod = true;
    }
    delete result.noChmod;
    return result;
};
exports.dealias = dealias;
//# sourceMappingURL=options.js.map

1
node_modules/tar/dist/commonjs/options.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

102
node_modules/tar/dist/commonjs/pack.d.ts generated vendored Normal file
View File

@@ -0,0 +1,102 @@
/// <reference types="node" />
/// <reference types="node" />
import { type Stats } from 'fs';
import { WriteEntry, WriteEntrySync, WriteEntryTar } from './write-entry.js';
export declare class PackJob {
path: string;
absolute: string;
entry?: WriteEntry | WriteEntryTar;
stat?: Stats;
readdir?: string[];
pending: boolean;
ignore: boolean;
piped: boolean;
constructor(path: string, absolute: string);
}
import { Minipass } from 'minipass';
import * as zlib from 'minizlib';
import { Yallist } from 'yallist';
import { ReadEntry } from './read-entry.js';
import { WarnEvent, type WarnData, type Warner } from './warn-method.js';
declare const ONSTAT: unique symbol;
declare const ENDED: unique symbol;
declare const QUEUE: unique symbol;
declare const CURRENT: unique symbol;
declare const PROCESS: unique symbol;
declare const PROCESSING: unique symbol;
declare const PROCESSJOB: unique symbol;
declare const JOBS: unique symbol;
declare const JOBDONE: unique symbol;
declare const ADDFSENTRY: unique symbol;
declare const ADDTARENTRY: unique symbol;
declare const STAT: unique symbol;
declare const READDIR: unique symbol;
declare const ONREADDIR: unique symbol;
declare const PIPE: unique symbol;
declare const ENTRY: unique symbol;
declare const ENTRYOPT: unique symbol;
declare const WRITEENTRYCLASS: unique symbol;
declare const WRITE: unique symbol;
declare const ONDRAIN: unique symbol;
import { TarOptions } from './options.js';
export declare class Pack extends Minipass<Buffer, ReadEntry | string, WarnEvent<Buffer>> implements Warner {
opt: TarOptions;
cwd: string;
maxReadSize?: number;
preservePaths: boolean;
strict: boolean;
noPax: boolean;
prefix: string;
linkCache: Exclude<TarOptions['linkCache'], undefined>;
statCache: Exclude<TarOptions['statCache'], undefined>;
file: string;
portable: boolean;
zip?: zlib.BrotliCompress | zlib.Gzip;
readdirCache: Exclude<TarOptions['readdirCache'], undefined>;
noDirRecurse: boolean;
follow: boolean;
noMtime: boolean;
mtime?: Date;
filter: Exclude<TarOptions['filter'], undefined>;
jobs: number;
[WRITEENTRYCLASS]: typeof WriteEntry | typeof WriteEntrySync;
onWriteEntry?: (entry: WriteEntry) => void;
[QUEUE]: Yallist<PackJob>;
[JOBS]: number;
[PROCESSING]: boolean;
[ENDED]: boolean;
constructor(opt?: TarOptions);
[WRITE](chunk: Buffer): boolean;
add(path: string | ReadEntry): this;
end(cb?: () => void): this;
end(path: string | ReadEntry, cb?: () => void): this;
end(path: string | ReadEntry, encoding?: Minipass.Encoding, cb?: () => void): this;
write(path: string | ReadEntry): boolean;
[ADDTARENTRY](p: ReadEntry): void;
[ADDFSENTRY](p: string): void;
[STAT](job: PackJob): void;
[ONSTAT](job: PackJob, stat: Stats): void;
[READDIR](job: PackJob): void;
[ONREADDIR](job: PackJob, entries: string[]): void;
[PROCESS](): void;
get [CURRENT](): PackJob | undefined;
[JOBDONE](_job: PackJob): void;
[PROCESSJOB](job: PackJob): void;
[ENTRYOPT](job: PackJob): TarOptions;
[ENTRY](job: PackJob): WriteEntry | undefined;
[ONDRAIN](): void;
[PIPE](job: PackJob): void;
pause(): void;
warn(code: string, message: string | Error, data?: WarnData): void;
}
export declare class PackSync extends Pack {
sync: true;
constructor(opt: TarOptions);
pause(): void;
resume(): void;
[STAT](job: PackJob): void;
[READDIR](job: PackJob): void;
[PIPE](job: PackJob): void;
}
export {};
//# sourceMappingURL=pack.d.ts.map

1
node_modules/tar/dist/commonjs/pack.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"pack.d.ts","sourceRoot":"","sources":["../../src/pack.ts"],"names":[],"mappings":";;AASA,OAAW,EAAE,KAAK,KAAK,EAAE,MAAM,IAAI,CAAA;AACnC,OAAO,EACL,UAAU,EACV,cAAc,EACd,aAAa,EACd,MAAM,kBAAkB,CAAA;AAEzB,qBAAa,OAAO;IAClB,IAAI,EAAE,MAAM,CAAA;IACZ,QAAQ,EAAE,MAAM,CAAA;IAChB,KAAK,CAAC,EAAE,UAAU,GAAG,aAAa,CAAA;IAClC,IAAI,CAAC,EAAE,KAAK,CAAA;IACZ,OAAO,CAAC,EAAE,MAAM,EAAE,CAAA;IAClB,OAAO,EAAE,OAAO,CAAQ;IACxB,MAAM,EAAE,OAAO,CAAQ;IACvB,KAAK,EAAE,OAAO,CAAQ;gBACV,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;CAI3C;AAED,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,KAAK,IAAI,MAAM,UAAU,CAAA;AAChC,OAAO,EAAE,OAAO,EAAE,MAAM,SAAS,CAAA;AACjC,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EACL,SAAS,EAET,KAAK,QAAQ,EACb,KAAK,MAAM,EACZ,MAAM,kBAAkB,CAAA;AAGzB,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,WAAW,eAAwB,CAAA;AACzC,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,eAAe,eAA4B,CAAA;AACjD,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,OAAO,eAAoB,CAAA;AAIjC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAEzC,qBAAa,IACX,SAAQ,QAAQ,CAAC,MAAM,EAAE,SAAS,GAAG,MAAM,EAAE,SAAS,CAAC,MAAM,CAAC,CAC9D,YAAW,MAAM;IAEjB,GAAG,EAAE,UAAU,CAAA;IACf,GAAG,EAAE,MAAM,CAAA;IACX,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,aAAa,EAAE,OAAO,CAAA;IACtB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,MAAM,EAAE,MAAM,CAAA;IACd,SAAS,EAAE,OAAO,CAAC,UAAU,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;IACtD,SAAS,EAAE,OAAO,CAAC,UAAU,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;IACtD,IAAI,EAAE,MAAM,CAAA;IACZ,QAAQ,EAAE,OAAO,CAAA;IACjB,GAAG,CAAC,EAAE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,IAAI,CAAA;IACrC,YAAY,EAAE,OAAO,CAAC,UAAU,CAAC,cAAc,CAAC,EAA
E,SAAS,CAAC,CAAA;IAC5D,YAAY,EAAE,OAAO,CAAA;IACrB,MAAM,EAAE,OAAO,CAAA;IACf,OAAO,EAAE,OAAO,CAAA;IAChB,KAAK,CAAC,EAAE,IAAI,CAAA;IACZ,MAAM,EAAE,OAAO,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,SAAS,CAAC,CAAA;IAChD,IAAI,EAAE,MAAM,CAAC;IAEb,CAAC,eAAe,CAAC,EAAE,OAAO,UAAU,GAAG,OAAO,cAAc,CAAA;IAC5D,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,UAAU,KAAK,IAAI,CAAC;IAC3C,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;IAC1B,CAAC,IAAI,CAAC,EAAE,MAAM,CAAK;IACnB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAS;IAC9B,CAAC,KAAK,CAAC,EAAE,OAAO,CAAQ;gBAEZ,GAAG,GAAE,UAAe;IAoEhC,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,MAAM;IAIrB,GAAG,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS;IAK5B,GAAG,CAAC,EAAE,CAAC,EAAE,MAAM,IAAI,GAAG,IAAI;IAC1B,GAAG,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,EAAE,EAAE,CAAC,EAAE,MAAM,IAAI,GAAG,IAAI;IACpD,GAAG,CACD,IAAI,EAAE,MAAM,GAAG,SAAS,EACxB,QAAQ,CAAC,EAAE,QAAQ,CAAC,QAAQ,EAC5B,EAAE,CAAC,EAAE,MAAM,IAAI,GACd,IAAI;IA0BP,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS;IAa9B,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,SAAS;IAkB1B,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,MAAM;IAMtB,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,OAAO;IAenB,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK;IAYlC,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,OAAO;IAatB,CAAC,SAAS,CAAC,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE;IAM3C,CAAC,OAAO,CAAC;IA+BT,IAAI,CAAC,OAAO,CAAC,wBAEZ;IAED,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,OAAO;IAMvB,CAAC,UAAU,CAAC,CAAC,GAAG,EAAE,OAAO;IAyDzB,CAAC,QAAQ,CAAC,CAAC,GAAG,EAAE,OAAO,GAAG,UAAU;IAmBpC,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,OAAO;IAepB,CAAC,OAAO,CAAC;IAOT,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,OAAO;IAgCnB,KAAK;IAML,IAAI,CACF,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,MAAM,GAAG,KAAK,EACvB,IAAI,GAAE,QAAa,GAClB,IAAI;CAGR;AAED,qBAAa,QAAS,SAAQ,IAAI;IAChC,IAAI,EAAE,IAAI,CAAO;gBACL,GAAG,EAAE,UAAU;IAM3B,KAAK;IACL,MAAM;IAEN,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,OAAO;IAKnB,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,OAAO;IAKtB,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,OAAO;CA0BpB"}

477
node_modules/tar/dist/commonjs/pack.js generated vendored Normal file
View File

@@ -0,0 +1,477 @@
"use strict";
// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and end() return `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
// Standard TypeScript-compiler interop helpers (tsc CommonJS emit).
// __createBinding re-exports property k of module m as k2 on o,
// preserving live-binding semantics via a getter when possible.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the module itself as the `default` export of the namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Emulates `import * as ns` for CommonJS modules: copies every own
// non-default key, then sets the default binding.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// Emulates `import x from` for CommonJS modules lacking __esModule.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.PackSync = exports.Pack = exports.PackJob = void 0;
const fs_1 = __importDefault(require("fs"));
const write_entry_js_1 = require("./write-entry.js");
// One unit of work in the Pack queue: a path waiting to be stat'ed,
// possibly readdir'ed, turned into a WriteEntry, and piped out.
class PackJob {
    /**
     * @param {string} [path] archive-relative path; empty/missing becomes './'
     * @param {string} [absolute] resolved absolute filesystem path
     */
    constructor(path, absolute) {
        this.path = path ? path : './';
        this.absolute = absolute;
        // populated later in the job lifecycle
        this.entry = undefined;
        this.stat = undefined;
        this.readdir = undefined;
        // state flags: waiting on fs, filtered out, already piped
        this.pending = false;
        this.ignore = false;
        this.piped = false;
    }
}
exports.PackJob = PackJob;
const minipass_1 = require("minipass");
const zlib = __importStar(require("minizlib"));
const yallist_1 = require("yallist");
const read_entry_js_1 = require("./read-entry.js");
const warn_method_js_1 = require("./warn-method.js");
// 1024 zero bytes: the two null 512-byte blocks that terminate a tar archive.
const EOF = Buffer.alloc(1024);
// Private method/state keys, kept as symbols so they cannot collide with
// the public stream API surface.
const ONSTAT = Symbol('onStat');
const ENDED = Symbol('ended');
const QUEUE = Symbol('queue');
const CURRENT = Symbol('current');
const PROCESS = Symbol('process');
const PROCESSING = Symbol('processing');
const PROCESSJOB = Symbol('processJob');
const JOBS = Symbol('jobs');
const JOBDONE = Symbol('jobDone');
const ADDFSENTRY = Symbol('addFSEntry');
const ADDTARENTRY = Symbol('addTarEntry');
const STAT = Symbol('stat');
const READDIR = Symbol('readdir');
const ONREADDIR = Symbol('onreaddir');
const PIPE = Symbol('pipe');
const ENTRY = Symbol('entry');
const ENTRYOPT = Symbol('entryOpt');
const WRITEENTRYCLASS = Symbol('writeEntryClass');
const WRITE = Symbol('write');
const ONDRAIN = Symbol('ondrain');
const path_1 = __importDefault(require("path"));
const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
// Tar archive creator. Paths (or ReadEntry objects) are written in via
// write()/add(); raw tar bytes (optionally gzip/brotli compressed) come
// out the readable side. Generated CommonJS output of src/pack.ts.
class Pack extends minipass_1.Minipass {
    opt;
    cwd;
    maxReadSize;
    preservePaths;
    strict;
    noPax;
    prefix;
    linkCache;
    statCache;
    file;
    portable;
    zip;
    readdirCache;
    noDirRecurse;
    follow;
    noMtime;
    mtime;
    filter;
    jobs;
    [WRITEENTRYCLASS];
    onWriteEntry;
    [QUEUE];
    [JOBS] = 0;
    [PROCESSING] = false;
    [ENDED] = false;
    constructor(opt = {}) {
        //@ts-ignore
        super();
        this.opt = opt;
        this.file = opt.file || '';
        this.cwd = opt.cwd || process.cwd();
        this.maxReadSize = opt.maxReadSize;
        this.preservePaths = !!opt.preservePaths;
        this.strict = !!opt.strict;
        this.noPax = !!opt.noPax;
        this.prefix = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix || '');
        this.linkCache = opt.linkCache || new Map();
        this.statCache = opt.statCache || new Map();
        this.readdirCache = opt.readdirCache || new Map();
        this.onWriteEntry = opt.onWriteEntry;
        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntry;
        if (typeof opt.onwarn === 'function') {
            this.on('warn', opt.onwarn);
        }
        this.portable = !!opt.portable;
        // gzip and brotli are mutually exclusive compression choices;
        // whichever is chosen becomes this.zip and is wired between the
        // entry sources and the readable side of this stream.
        if (opt.gzip || opt.brotli) {
            if (opt.gzip && opt.brotli) {
                throw new TypeError('gzip and brotli are mutually exclusive');
            }
            if (opt.gzip) {
                if (typeof opt.gzip !== 'object') {
                    opt.gzip = {};
                }
                if (this.portable) {
                    opt.gzip.portable = true;
                }
                this.zip = new zlib.Gzip(opt.gzip);
            }
            if (opt.brotli) {
                if (typeof opt.brotli !== 'object') {
                    opt.brotli = {};
                }
                this.zip = new zlib.BrotliCompress(opt.brotli);
            }
            /* c8 ignore next */
            if (!this.zip)
                throw new Error('impossible');
            const zip = this.zip;
            // compressed output is forwarded to the underlying Minipass
            zip.on('data', chunk => super.write(chunk));
            zip.on('end', () => super.end());
            zip.on('drain', () => this[ONDRAIN]());
            this.on('resume', () => zip.resume());
        }
        else {
            this.on('drain', this[ONDRAIN]);
        }
        this.noDirRecurse = !!opt.noDirRecurse;
        this.follow = !!opt.follow;
        this.noMtime = !!opt.noMtime;
        if (opt.mtime)
            this.mtime = opt.mtime;
        this.filter =
            typeof opt.filter === 'function' ? opt.filter : () => true;
        this[QUEUE] = new yallist_1.Yallist();
        this[JOBS] = 0;
        // cap on concurrently-pending fs jobs (default 4)
        this.jobs = Number(opt.jobs) || 4;
        this[PROCESSING] = false;
        this[ENDED] = false;
    }
    // raw write to the underlying stream, bypassing the path-handling write()
    [WRITE](chunk) {
        return super.write(chunk);
    }
    // like write(), but chainable
    add(path) {
        this.write(path);
        return this;
    }
    end(path, encoding, cb) {
        /* c8 ignore start */
        if (typeof path === 'function') {
            cb = path;
            path = undefined;
        }
        if (typeof encoding === 'function') {
            cb = encoding;
            encoding = undefined;
        }
        /* c8 ignore stop */
        if (path) {
            this.add(path);
        }
        this[ENDED] = true;
        this[PROCESS]();
        /* c8 ignore next */
        if (cb)
            cb();
        return this;
    }
    // queue a path or ReadEntry for inclusion in the archive;
    // returns the flowing state of the readable side
    write(path) {
        if (this[ENDED]) {
            throw new Error('write after end');
        }
        if (path instanceof read_entry_js_1.ReadEntry) {
            this[ADDTARENTRY](path);
        }
        else {
            this[ADDFSENTRY](path);
        }
        return this.flowing;
    }
    // queue an entry read from another tar stream
    [ADDTARENTRY](p) {
        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p.path));
        // in this case, we don't have to wait for the stat
        if (!this.filter(p.path, p)) {
            p.resume();
        }
        else {
            const job = new PackJob(p.path, absolute);
            job.entry = new write_entry_js_1.WriteEntryTar(p, this[ENTRYOPT](job));
            job.entry.on('end', () => this[JOBDONE](job));
            this[JOBS] += 1;
            this[QUEUE].push(job);
        }
        this[PROCESS]();
    }
    // queue a filesystem path
    [ADDFSENTRY](p) {
        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p));
        this[QUEUE].push(new PackJob(p, absolute));
        this[PROCESS]();
    }
    // async stat (or lstat when not following symlinks); errors abort the stream
    [STAT](job) {
        job.pending = true;
        this[JOBS] += 1;
        const stat = this.follow ? 'stat' : 'lstat';
        fs_1.default[stat](job.absolute, (er, stat) => {
            job.pending = false;
            this[JOBS] -= 1;
            if (er) {
                this.emit('error', er);
            }
            else {
                this[ONSTAT](job, stat);
            }
        });
    }
    [ONSTAT](job, stat) {
        this.statCache.set(job.absolute, stat);
        job.stat = stat;
        // now we have the stat, we can filter it.
        if (!this.filter(job.path, stat)) {
            job.ignore = true;
        }
        this[PROCESS]();
    }
    // async readdir for directory entries; errors abort the stream
    [READDIR](job) {
        job.pending = true;
        this[JOBS] += 1;
        fs_1.default.readdir(job.absolute, (er, entries) => {
            job.pending = false;
            this[JOBS] -= 1;
            if (er) {
                return this.emit('error', er);
            }
            this[ONREADDIR](job, entries);
        });
    }
    [ONREADDIR](job, entries) {
        this.readdirCache.set(job.absolute, entries);
        job.readdir = entries;
        this[PROCESS]();
    }
    // main scheduler: walk the queue, starting jobs up to the concurrency
    // limit, dropping ignored ones, and emitting the EOF blocks once all
    // work is done after end() was called. Re-entrancy is blocked via
    // the PROCESSING flag.
    [PROCESS]() {
        if (this[PROCESSING]) {
            return;
        }
        this[PROCESSING] = true;
        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
            this[PROCESSJOB](w.value);
            if (w.value.ignore) {
                // removeNode detaches w; restore its next pointer so the
                // for-loop can keep advancing through the list
                const p = w.next;
                this[QUEUE].removeNode(w);
                w.next = p;
            }
        }
        this[PROCESSING] = false;
        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
            if (this.zip) {
                this.zip.end(EOF);
            }
            else {
                super.write(EOF);
                super.end();
            }
        }
    }
    // the job at the head of the queue, if any
    get [CURRENT]() {
        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
    }
    [JOBDONE](_job) {
        this[QUEUE].shift();
        this[JOBS] -= 1;
        this[PROCESS]();
    }
    // advance a single job through its state machine:
    // stat -> (readdir for dirs) -> entry creation -> pipe when current
    [PROCESSJOB](job) {
        if (job.pending) {
            return;
        }
        if (job.entry) {
            if (job === this[CURRENT] && !job.piped) {
                this[PIPE](job);
            }
            return;
        }
        if (!job.stat) {
            const sc = this.statCache.get(job.absolute);
            if (sc) {
                this[ONSTAT](job, sc);
            }
            else {
                this[STAT](job);
            }
        }
        if (!job.stat) {
            return;
        }
        // filtered out!
        if (job.ignore) {
            return;
        }
        if (!this.noDirRecurse &&
            job.stat.isDirectory() &&
            !job.readdir) {
            const rc = this.readdirCache.get(job.absolute);
            if (rc) {
                this[ONREADDIR](job, rc);
            }
            else {
                this[READDIR](job);
            }
            if (!job.readdir) {
                return;
            }
        }
        // we know it doesn't have an entry, because that got checked above
        job.entry = this[ENTRY](job);
        if (!job.entry) {
            job.ignore = true;
            return;
        }
        if (job === this[CURRENT] && !job.piped) {
            this[PIPE](job);
        }
    }
    // options passed to each WriteEntry, derived from this Pack's settings
    [ENTRYOPT](job) {
        return {
            onwarn: (code, msg, data) => this.warn(code, msg, data),
            noPax: this.noPax,
            cwd: this.cwd,
            absolute: job.absolute,
            preservePaths: this.preservePaths,
            maxReadSize: this.maxReadSize,
            strict: this.strict,
            portable: this.portable,
            linkCache: this.linkCache,
            statCache: this.statCache,
            noMtime: this.noMtime,
            mtime: this.mtime,
            prefix: this.prefix,
            onWriteEntry: this.onWriteEntry,
        };
    }
    // construct the WriteEntry for a job; returns undefined (and emits
    // 'error') if construction throws
    [ENTRY](job) {
        this[JOBS] += 1;
        try {
            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
            return e
                .on('end', () => this[JOBDONE](job))
                .on('error', er => this.emit('error', er));
        }
        catch (er) {
            this.emit('error', er);
        }
    }
    // downstream drained: resume the currently-piping entry, if any
    [ONDRAIN]() {
        if (this[CURRENT] && this[CURRENT].entry) {
            this[CURRENT].entry.resume();
        }
    }
    // like .pipe() but using super, because our write() is special
    [PIPE](job) {
        job.piped = true;
        if (job.readdir) {
            // enqueue the directory's children as filesystem entries
            job.readdir.forEach(entry => {
                const p = job.path;
                const base = p === './' ? '' : p.replace(/\/*$/, '/');
                this[ADDFSENTRY](base + entry);
            });
        }
        const source = job.entry;
        const zip = this.zip;
        /* c8 ignore start */
        if (!source)
            throw new Error('cannot pipe without source');
        /* c8 ignore stop */
        if (zip) {
            // pause the source when the compressor's buffer is full
            source.on('data', chunk => {
                if (!zip.write(chunk)) {
                    source.pause();
                }
            });
        }
        else {
            source.on('data', chunk => {
                if (!super.write(chunk)) {
                    source.pause();
                }
            });
        }
    }
    pause() {
        if (this.zip) {
            this.zip.pause();
        }
        return super.pause();
    }
    // emit a warning (or throw, in strict mode) via the shared warn helper
    warn(code, message, data = {}) {
        (0, warn_method_js_1.warnMethod)(this, code, message, data);
    }
}
exports.Pack = Pack;
class PackSync extends Pack {
sync = true;
constructor(opt) {
super(opt);
this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntrySync;
}
// pause/resume are no-ops in sync streams.
pause() { }
resume() { }
[STAT](job) {
const stat = this.follow ? 'statSync' : 'lstatSync';
this[ONSTAT](job, fs_1.default[stat](job.absolute));
}
[READDIR](job) {
this[ONREADDIR](job, fs_1.default.readdirSync(job.absolute));
}
// gotta get it all in this tick
[PIPE](job) {
const source = job.entry;
const zip = this.zip;
if (job.readdir) {
job.readdir.forEach(entry => {
const p = job.path;
const base = p === './' ? '' : p.replace(/\/*$/, '/');
this[ADDFSENTRY](base + entry);
});
}
/* c8 ignore start */
if (!source)
throw new Error('Cannot pipe without source');
/* c8 ignore stop */
if (zip) {
source.on('data', chunk => {
zip.write(chunk);
});
}
else {
source.on('data', chunk => {
super[WRITE](chunk);
});
}
}
}
exports.PackSync = PackSync;
//# sourceMappingURL=pack.js.map

1
node_modules/tar/dist/commonjs/pack.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

3
node_modules/tar/dist/commonjs/package.json generated vendored Normal file
View File

@@ -0,0 +1,3 @@
{
"type": "commonjs"
}

87
node_modules/tar/dist/commonjs/parse.d.ts generated vendored Normal file
View File

@@ -0,0 +1,87 @@
/// <reference types="node" />
/// <reference types="node" />
import { EventEmitter as EE } from 'events';
import { BrotliDecompress, Unzip } from 'minizlib';
import { Yallist } from 'yallist';
import { TarOptions } from './options.js';
import { Pax } from './pax.js';
import { ReadEntry } from './read-entry.js';
import { type WarnData, type Warner } from './warn-method.js';
// Private state/method keys for Parser, declared as unique symbols.
declare const STATE: unique symbol;
declare const WRITEENTRY: unique symbol;
declare const READENTRY: unique symbol;
declare const NEXTENTRY: unique symbol;
declare const PROCESSENTRY: unique symbol;
declare const EX: unique symbol;
declare const GEX: unique symbol;
declare const META: unique symbol;
declare const EMITMETA: unique symbol;
declare const BUFFER: unique symbol;
declare const QUEUE: unique symbol;
declare const ENDED: unique symbol;
declare const EMITTEDEND: unique symbol;
declare const EMIT: unique symbol;
declare const UNZIP: unique symbol;
declare const CONSUMECHUNK: unique symbol;
declare const CONSUMECHUNKSUB: unique symbol;
declare const CONSUMEBODY: unique symbol;
declare const CONSUMEMETA: unique symbol;
declare const CONSUMEHEADER: unique symbol;
declare const CONSUMING: unique symbol;
declare const BUFFERCONCAT: unique symbol;
declare const MAYBEEND: unique symbol;
declare const WRITING: unique symbol;
declare const ABORTED: unique symbol;
declare const SAW_VALID_ENTRY: unique symbol;
declare const SAW_NULL_BLOCK: unique symbol;
declare const SAW_EOF: unique symbol;
declare const CLOSESTREAM: unique symbol;
/** current position in the tar parse state machine */
export type State = 'begin' | 'header' | 'ignore' | 'meta' | 'body';
/**
 * A writable tar parser: raw (possibly gzip/brotli compressed) tar
 * bytes are written in, and 'entry' events carrying ReadEntry objects
 * are emitted as each archive member is recognized.
 */
export declare class Parser extends EE implements Warner {
    file: string;
    strict: boolean;
    /** cap on the size of pax/extended-header meta entries */
    maxMetaEntrySize: number;
    filter: Exclude<TarOptions['filter'], undefined>;
    brotli?: TarOptions['brotli'];
    writable: true;
    readable: false;
    /** entries (or buffered events) not yet emitted downstream */
    [QUEUE]: Yallist<ReadEntry | [string | symbol, any, any]>;
    [BUFFER]?: Buffer;
    [READENTRY]?: ReadEntry;
    [WRITEENTRY]?: ReadEntry;
    [STATE]: State;
    [META]: string;
    [EX]?: Pax;
    [GEX]?: Pax;
    [ENDED]: boolean;
    /** decompressor, or false once input is known to be plain tar */
    [UNZIP]?: false | Unzip | BrotliDecompress;
    [ABORTED]: boolean;
    [SAW_VALID_ENTRY]?: boolean;
    [SAW_NULL_BLOCK]: boolean;
    [SAW_EOF]: boolean;
    [WRITING]: boolean;
    [CONSUMING]: boolean;
    [EMITTEDEND]: boolean;
    constructor(opt?: TarOptions);
    warn(code: string, message: string | Error, data?: WarnData): void;
    [CONSUMEHEADER](chunk: Buffer, position: number): void;
    [CLOSESTREAM](): void;
    [PROCESSENTRY](entry?: ReadEntry | [string | symbol, any, any]): boolean;
    [NEXTENTRY](): void;
    [CONSUMEBODY](chunk: Buffer, position: number): number;
    [CONSUMEMETA](chunk: Buffer, position: number): number;
    [EMIT](ev: string | symbol, data?: any, extra?: any): void;
    [EMITMETA](entry: ReadEntry): void;
    /** abandon parsing; always warns with recoverable=false */
    abort(error: Error): void;
    write(buffer: Uint8Array | string, cb?: (err?: Error | null) => void): boolean;
    write(str: string, encoding?: BufferEncoding, cb?: (err?: Error | null) => void): boolean;
    [BUFFERCONCAT](c: Buffer): void;
    [MAYBEEND](): void;
    [CONSUMECHUNK](chunk?: Buffer): void;
    [CONSUMECHUNKSUB](chunk: Buffer): void;
    end(cb?: () => void): this;
    end(data: string | Buffer, cb?: () => void): this;
    end(str: string, encoding?: BufferEncoding, cb?: () => void): this;
}
export {};
//# sourceMappingURL=parse.d.ts.map

1
node_modules/tar/dist/commonjs/parse.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"parse.d.ts","sourceRoot":"","sources":["../../src/parse.ts"],"names":[],"mappings":";;AAoBA,OAAO,EAAE,YAAY,IAAI,EAAE,EAAE,MAAM,QAAQ,CAAA;AAC3C,OAAO,EAAE,gBAAgB,EAAE,KAAK,EAAE,MAAM,UAAU,CAAA;AAClD,OAAO,EAAE,OAAO,EAAE,MAAM,SAAS,CAAA;AAEjC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AACzC,OAAO,EAAE,GAAG,EAAE,MAAM,UAAU,CAAA;AAC9B,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAEL,KAAK,QAAQ,EACb,KAAK,MAAM,EACZ,MAAM,kBAAkB,CAAA;AAKzB,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,EAAE,eAA2B,CAAA;AACnC,QAAA,MAAM,GAAG,eAAiC,CAAA;AAC1C,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,eAAe,eAA4B,CAAA;AACjD,QAAA,MAAM,WAAW,eAAwB,CAAA;AACzC,QAAA,MAAM,WAAW,eAAwB,CAAA;AACzC,QAAA,MAAM,aAAa,eAA0B,CAAA;AAC7C,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,OAAO,eAAoB,CAAA;AAEjC,QAAA,MAAM,eAAe,eAA0B,CAAA;AAC/C,QAAA,MAAM,cAAc,eAAyB,CAAA;AAC7C,QAAA,MAAM,OAAO,eAAmB,CAAA;AAChC,QAAA,MAAM,WAAW,eAAwB,CAAA;AAIzC,MAAM,MAAM,KAAK,GAAG,OAAO,GAAG,QAAQ,GAAG,QAAQ,GAAG,MAAM,GAAG,MAAM,CAAA;AAEnE,qBAAa,MAAO,SAAQ,EAAG,YAAW,MAAM;IAC9C,IAAI,EAAE,MAAM,CAAA;IACZ,MAAM,EAAE,OAAO,CAAA;IACf,gBAAgB,EAAE,MAAM,CAAA;IACxB,MAAM,EAAE,OAAO,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,SAAS,CAAC,CAAA;IAChD,MAAM,CAAC,EAAE,UAAU,CAAC,QAAQ,CAAC,CAAA;IAE7B,QAAQ,EAAE,IAAI,CAAO;IACrB,QAAQ,EAAE,KAAK,CAAS;IAExB,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC,SAAS,GAAG,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,CACzC;IAChB,CAAC,MAAM,CAAC,CAAC,EAAE,MAAM,CAAC;IAClB,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC;IACxB,CAAC,UAAU,CAAC,CAAC,EAAE,SAAS,CAAC;IACzB,CAAC,KAAK,CAAC,EAAE,KAAK,CAAW;IACzB,CAAC,IAAI,
CAAC,EAAE,MAAM,CAAM;IACpB,CAAC,EAAE,CAAC,CAAC,EAAE,GAAG,CAAC;IACX,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;IACZ,CAAC,KAAK,CAAC,EAAE,OAAO,CAAS;IACzB,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,GAAG,KAAK,GAAG,gBAAgB,CAAC;IAC3C,CAAC,OAAO,CAAC,EAAE,OAAO,CAAS;IAC3B,CAAC,eAAe,CAAC,CAAC,EAAE,OAAO,CAAC;IAC5B,CAAC,cAAc,CAAC,EAAE,OAAO,CAAS;IAClC,CAAC,OAAO,CAAC,EAAE,OAAO,CAAS;IAC3B,CAAC,OAAO,CAAC,EAAE,OAAO,CAAS;IAC3B,CAAC,SAAS,CAAC,EAAE,OAAO,CAAS;IAC7B,CAAC,UAAU,CAAC,EAAE,OAAO,CAAQ;gBAEjB,GAAG,GAAE,UAAe;IAsDhC,IAAI,CACF,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,MAAM,GAAG,KAAK,EACvB,IAAI,GAAE,QAAa,GAClB,IAAI;IAIP,CAAC,aAAa,CAAC,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;IA4G/C,CAAC,WAAW,CAAC;IAIb,CAAC,YAAY,CAAC,CAAC,KAAK,CAAC,EAAE,SAAS,GAAG,CAAC,MAAM,GAAG,MAAM,EAAE,GAAG,EAAE,GAAG,CAAC;IAqB9D,CAAC,SAAS,CAAC;IAuBX,CAAC,WAAW,CAAC,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;IAyB7C,CAAC,WAAW,CAAC,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;IAY7C,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,MAAM,GAAG,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,EAAE,KAAK,CAAC,EAAE,GAAG;IAQnD,CAAC,QAAQ,CAAC,CAAC,KAAK,EAAE,SAAS;IAkC3B,KAAK,CAAC,KAAK,EAAE,KAAK;IAOlB,KAAK,CACH,MAAM,EAAE,UAAU,GAAG,MAAM,EAC3B,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAChC,OAAO;IACV,KAAK,CACH,GAAG,EAAE,MAAM,EACX,QAAQ,CAAC,EAAE,cAAc,EACzB,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAChC,OAAO;IA6HV,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,MAAM;IAOxB,CAAC,QAAQ,CAAC;IA0BV,CAAC,YAAY,CAAC,CAAC,KAAK,CAAC,EAAE,MAAM;IAkC7B,CAAC,eAAe,CAAC,CAAC,KAAK,EAAE,MAAM;IA6C/B,GAAG,CAAC,EAAE,CAAC,EAAE,MAAM,IAAI,GAAG,IAAI;IAC1B,GAAG,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,CAAC,EAAE,MAAM,IAAI,GAAG,IAAI;IACjD,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,cAAc,EAAE,EAAE,CAAC,EAAE,MAAM,IAAI,GAAG,IAAI;CAmCnE"}

599
node_modules/tar/dist/commonjs/parse.js generated vendored Normal file
View File

@@ -0,0 +1,599 @@
"use strict";
// this[BUFFER] is the remainder of a chunk if we're waiting for
// the full 512 bytes of a header to come in. We will Buffer.concat()
// it to the next write(), which is a mem copy, but a small one.
//
// this[QUEUE] is a Yallist of entries that haven't been emitted
// yet this can only get filled up if the user keeps write()ing after
// a write() returns false, or does a write() with more than one entry
//
// We don't buffer chunks, we always parse them and either create an
// entry, or push it into the active entry. The ReadEntry class knows
// to throw data away if .ignore=true
//
// Shift entry off the buffer when it emits 'end', and emit 'entry' for
// the next one in the list.
//
// At any time, we're pushing body chunks into the entry at WRITEENTRY,
// and waiting for 'end' on the entry at READENTRY
//
// ignored entries get .resume() called on them straight away
Object.defineProperty(exports, "__esModule", { value: true });
exports.Parser = void 0;
const events_1 = require("events");
const minizlib_1 = require("minizlib");
const yallist_1 = require("yallist");
const header_js_1 = require("./header.js");
const pax_js_1 = require("./pax.js");
const read_entry_js_1 = require("./read-entry.js");
const warn_method_js_1 = require("./warn-method.js");
// default 1 MiB cap on pax/extended-header meta entry bodies
const maxMetaEntrySize = 1024 * 1024;
// gzip magic bytes, used to sniff compressed input on first write
const gzipHeader = Buffer.from([0x1f, 0x8b]);
// Private method/state keys, kept as symbols so they cannot collide
// with the public EventEmitter API surface.
const STATE = Symbol('state');
const WRITEENTRY = Symbol('writeEntry');
const READENTRY = Symbol('readEntry');
const NEXTENTRY = Symbol('nextEntry');
const PROCESSENTRY = Symbol('processEntry');
const EX = Symbol('extendedHeader');
const GEX = Symbol('globalExtendedHeader');
const META = Symbol('meta');
const EMITMETA = Symbol('emitMeta');
const BUFFER = Symbol('buffer');
const QUEUE = Symbol('queue');
const ENDED = Symbol('ended');
const EMITTEDEND = Symbol('emittedEnd');
const EMIT = Symbol('emit');
const UNZIP = Symbol('unzip');
const CONSUMECHUNK = Symbol('consumeChunk');
const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
const CONSUMEBODY = Symbol('consumeBody');
const CONSUMEMETA = Symbol('consumeMeta');
const CONSUMEHEADER = Symbol('consumeHeader');
const CONSUMING = Symbol('consuming');
const BUFFERCONCAT = Symbol('bufferConcat');
const MAYBEEND = Symbol('maybeEnd');
const WRITING = Symbol('writing');
const ABORTED = Symbol('aborted');
const DONE = Symbol('onDone');
const SAW_VALID_ENTRY = Symbol('sawValidEntry');
const SAW_NULL_BLOCK = Symbol('sawNullBlock');
const SAW_EOF = Symbol('sawEOF');
const CLOSESTREAM = Symbol('closeStream');
// default filter: accept every entry
const noop = () => true;
class Parser extends events_1.EventEmitter {
file;
strict;
maxMetaEntrySize;
filter;
brotli;
writable = true;
readable = false;
[QUEUE] = new yallist_1.Yallist();
[BUFFER];
[READENTRY];
[WRITEENTRY];
[STATE] = 'begin';
[META] = '';
[EX];
[GEX];
[ENDED] = false;
[UNZIP];
[ABORTED] = false;
[SAW_VALID_ENTRY];
[SAW_NULL_BLOCK] = false;
[SAW_EOF] = false;
[WRITING] = false;
[CONSUMING] = false;
[EMITTEDEND] = false;
constructor(opt = {}) {
super();
this.file = opt.file || '';
// these BADARCHIVE errors can't be detected early. listen on DONE.
this.on(DONE, () => {
if (this[STATE] === 'begin' ||
this[SAW_VALID_ENTRY] === false) {
// either less than 1 block of data, or all entries were invalid.
// Either way, probably not even a tarball.
this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
}
});
if (opt.ondone) {
this.on(DONE, opt.ondone);
}
else {
this.on(DONE, () => {
this.emit('prefinish');
this.emit('finish');
this.emit('end');
});
}
this.strict = !!opt.strict;
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
// Unlike gzip, brotli doesn't have any magic bytes to identify it
// Users need to explicitly tell us they're extracting a brotli file
// Or we infer from the file extension
const isTBR = opt.file &&
(opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
// if it's a tbr file it MIGHT be brotli, but we don't know until
// we look at it and verify it's not a valid tar file.
this.brotli =
!opt.gzip && opt.brotli !== undefined ? opt.brotli
: isTBR ? undefined
: false;
// have to set this so that streams are ok piping into it
this.on('end', () => this[CLOSESTREAM]());
if (typeof opt.onwarn === 'function') {
this.on('warn', opt.onwarn);
}
if (typeof opt.onReadEntry === 'function') {
this.on('entry', opt.onReadEntry);
}
}
warn(code, message, data = {}) {
(0, warn_method_js_1.warnMethod)(this, code, message, data);
}
[CONSUMEHEADER](chunk, position) {
if (this[SAW_VALID_ENTRY] === undefined) {
this[SAW_VALID_ENTRY] = false;
}
let header;
try {
header = new header_js_1.Header(chunk, position, this[EX], this[GEX]);
}
catch (er) {
return this.warn('TAR_ENTRY_INVALID', er);
}
if (header.nullBlock) {
if (this[SAW_NULL_BLOCK]) {
this[SAW_EOF] = true;
// ending an archive with no entries. pointless, but legal.
if (this[STATE] === 'begin') {
this[STATE] = 'header';
}
this[EMIT]('eof');
}
else {
this[SAW_NULL_BLOCK] = true;
this[EMIT]('nullBlock');
}
}
else {
this[SAW_NULL_BLOCK] = false;
if (!header.cksumValid) {
this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
}
else if (!header.path) {
this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
}
else {
const type = header.type;
if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
header,
});
}
else if (!/^(Symbolic)?Link$/.test(type) &&
!/^(Global)?ExtendedHeader$/.test(type) &&
header.linkpath) {
this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
header,
});
}
else {
const entry = (this[WRITEENTRY] = new read_entry_js_1.ReadEntry(header, this[EX], this[GEX]));
// we do this for meta & ignored entries as well, because they
// are still valid tar, or else we wouldn't know to ignore them
if (!this[SAW_VALID_ENTRY]) {
if (entry.remain) {
// this might be the one!
const onend = () => {
if (!entry.invalid) {
this[SAW_VALID_ENTRY] = true;
}
};
entry.on('end', onend);
}
else {
this[SAW_VALID_ENTRY] = true;
}
}
if (entry.meta) {
if (entry.size > this.maxMetaEntrySize) {
entry.ignore = true;
this[EMIT]('ignoredEntry', entry);
this[STATE] = 'ignore';
entry.resume();
}
else if (entry.size > 0) {
this[META] = '';
entry.on('data', c => (this[META] += c));
this[STATE] = 'meta';
}
}
else {
this[EX] = undefined;
entry.ignore =
entry.ignore || !this.filter(entry.path, entry);
if (entry.ignore) {
// probably valid, just not something we care about
this[EMIT]('ignoredEntry', entry);
this[STATE] = entry.remain ? 'ignore' : 'header';
entry.resume();
}
else {
if (entry.remain) {
this[STATE] = 'body';
}
else {
this[STATE] = 'header';
entry.end();
}
if (!this[READENTRY]) {
this[QUEUE].push(entry);
this[NEXTENTRY]();
}
else {
this[QUEUE].push(entry);
}
}
}
}
}
}
}
[CLOSESTREAM]() {
queueMicrotask(() => this.emit('close'));
}
[PROCESSENTRY](entry) {
let go = true;
if (!entry) {
this[READENTRY] = undefined;
go = false;
}
else if (Array.isArray(entry)) {
const [ev, ...args] = entry;
this.emit(ev, ...args);
}
else {
this[READENTRY] = entry;
this.emit('entry', entry);
if (!entry.emittedEnd) {
entry.on('end', () => this[NEXTENTRY]());
go = false;
}
}
return go;
}
[NEXTENTRY]() {
do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
if (!this[QUEUE].length) {
// At this point, there's nothing in the queue, but we may have an
// entry which is being consumed (readEntry).
// If we don't, then we definitely can handle more data.
// If we do, and either it's flowing, or it has never had any data
// written to it, then it needs more.
// The only other possibility is that it has returned false from a
// write() call, so we wait for the next drain to continue.
const re = this[READENTRY];
const drainNow = !re || re.flowing || re.size === re.remain;
if (drainNow) {
if (!this[WRITING]) {
this.emit('drain');
}
}
else {
re.once('drain', () => this.emit('drain'));
}
}
}
[CONSUMEBODY](chunk, position) {
// write up to but no more than writeEntry.blockRemain
const entry = this[WRITEENTRY];
/* c8 ignore start */
if (!entry) {
throw new Error('attempt to consume body without entry??');
}
const br = entry.blockRemain ?? 0;
/* c8 ignore stop */
const c = br >= chunk.length && position === 0 ?
chunk
: chunk.subarray(position, position + br);
entry.write(c);
if (!entry.blockRemain) {
this[STATE] = 'header';
this[WRITEENTRY] = undefined;
entry.end();
}
return c.length;
}
[CONSUMEMETA](chunk, position) {
const entry = this[WRITEENTRY];
const ret = this[CONSUMEBODY](chunk, position);
// if we finished, then the entry is reset
if (!this[WRITEENTRY] && entry) {
this[EMITMETA](entry);
}
return ret;
}
[EMIT](ev, data, extra) {
if (!this[QUEUE].length && !this[READENTRY]) {
this.emit(ev, data, extra);
}
else {
this[QUEUE].push([ev, data, extra]);
}
}
[EMITMETA](entry) {
this[EMIT]('meta', this[META]);
switch (entry.type) {
case 'ExtendedHeader':
case 'OldExtendedHeader':
this[EX] = pax_js_1.Pax.parse(this[META], this[EX], false);
break;
case 'GlobalExtendedHeader':
this[GEX] = pax_js_1.Pax.parse(this[META], this[GEX], true);
break;
case 'NextFileHasLongPath':
case 'OldGnuLongPath': {
const ex = this[EX] ?? Object.create(null);
this[EX] = ex;
ex.path = this[META].replace(/\0.*/, '');
break;
}
case 'NextFileHasLongLinkpath': {
const ex = this[EX] || Object.create(null);
this[EX] = ex;
ex.linkpath = this[META].replace(/\0.*/, '');
break;
}
/* c8 ignore start */
default:
throw new Error('unknown meta: ' + entry.type);
/* c8 ignore stop */
}
}
    // Abort parsing entirely. Sets the aborted flag (so subsequent writes
    // are dropped) before emitting, then surfaces the error via warn.
    abort(error) {
        this[ABORTED] = true;
        this.emit('abort', error);
        // always throws, even in non-strict mode
        this.warn('TAR_ABORT', error, { recoverable: false });
    }
    // Stream write entry point. On the first chunk(s) this sniffs for a
    // gzip magic header and, for .tbr/.tar.br ambiguity, decides between
    // raw tar and brotli by attempting to parse the first 512 bytes as a
    // tar header. Once the transport is decided, bytes flow either through
    // the decompressor or directly into [CONSUMECHUNK].
    // Returns false when the consumer needs to wait for 'drain'.
    write(chunk, encoding, cb) {
        if (typeof encoding === 'function') {
            cb = encoding;
            encoding = undefined;
        }
        if (typeof chunk === 'string') {
            chunk = Buffer.from(chunk,
            /* c8 ignore next */
            typeof encoding === 'string' ? encoding : 'utf8');
        }
        if (this[ABORTED]) {
            /* c8 ignore next */
            cb?.();
            return false;
        }
        // first write, might be gzipped
        // sniff again when gzip was ruled out but brotli is still undecided
        const needSniff = this[UNZIP] === undefined ||
            (this.brotli === undefined && this[UNZIP] === false);
        if (needSniff && chunk) {
            // prepend any bytes held back from a previous too-small write
            if (this[BUFFER]) {
                chunk = Buffer.concat([this[BUFFER], chunk]);
                this[BUFFER] = undefined;
            }
            // not enough bytes yet to check the magic number: hold and wait
            if (chunk.length < gzipHeader.length) {
                this[BUFFER] = chunk;
                /* c8 ignore next */
                cb?.();
                return true;
            }
            // look for gzip header
            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
                if (chunk[i] !== gzipHeader[i]) {
                    this[UNZIP] = false;
                }
            }
            const maybeBrotli = this.brotli === undefined;
            if (this[UNZIP] === false && maybeBrotli) {
                // read the first header to see if it's a valid tar file. If so,
                // we can safely assume that it's not actually brotli, despite the
                // .tbr or .tar.br file extension.
                // if we ended before getting a full chunk, yes, def brotli
                if (chunk.length < 512) {
                    if (this[ENDED]) {
                        this.brotli = true;
                    }
                    else {
                        this[BUFFER] = chunk;
                        /* c8 ignore next */
                        cb?.();
                        return true;
                    }
                }
                else {
                    // if it's tar, it's pretty reliably not brotli, chances of
                    // that happening are astronomical.
                    try {
                        new header_js_1.Header(chunk.subarray(0, 512));
                        this.brotli = false;
                    }
                    catch (_) {
                        this.brotli = true;
                    }
                }
            }
            // set up the decompressor (gzip, or brotli if decided above) and
            // replay this chunk through it
            if (this[UNZIP] === undefined ||
                (this[UNZIP] === false && this.brotli)) {
                const ended = this[ENDED];
                this[ENDED] = false;
                this[UNZIP] =
                    this[UNZIP] === undefined ?
                        new minizlib_1.Unzip({})
                        : new minizlib_1.BrotliDecompress({});
                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
                this[UNZIP].on('error', er => this.abort(er));
                this[UNZIP].on('end', () => {
                    this[ENDED] = true;
                    this[CONSUMECHUNK]();
                });
                this[WRITING] = true;
                // if end() already ran, this chunk is also the last one
                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
                this[WRITING] = false;
                cb?.();
                return ret;
            }
        }
        this[WRITING] = true;
        if (this[UNZIP]) {
            this[UNZIP].write(chunk);
        }
        else {
            this[CONSUMECHUNK](chunk);
        }
        this[WRITING] = false;
        // return false if there's a queue, or if the current entry isn't flowing
        const ret = this[QUEUE].length ? false
            : this[READENTRY] ? this[READENTRY].flowing
                : true;
        // if we have no queue, then that means a clogged READENTRY
        if (!ret && !this[QUEUE].length) {
            this[READENTRY]?.once('drain', () => this.emit('drain'));
        }
        /* c8 ignore next */
        cb?.();
        return ret;
    }
[BUFFERCONCAT](c) {
if (c && !this[ABORTED]) {
this[BUFFER] =
this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
}
}
    // Fire the final DONE event exactly once, but only after input has
    // ended, nothing is mid-consume, and we haven't aborted. If an entry
    // is still expecting body bytes at this point, the archive was
    // truncated: warn, flush whatever is buffered, and close the entry.
    [MAYBEEND]() {
        if (this[ENDED] &&
            !this[EMITTEDEND] &&
            !this[ABORTED] &&
            !this[CONSUMING]) {
            this[EMITTEDEND] = true;
            const entry = this[WRITEENTRY];
            if (entry && entry.blockRemain) {
                // truncated, likely a damaged file
                const have = this[BUFFER] ? this[BUFFER].length : 0;
                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
                if (this[BUFFER]) {
                    entry.write(this[BUFFER]);
                }
                entry.end();
            }
            this[EMIT](DONE);
        }
    }
    // Entry point for decompressed (or raw) archive bytes. Guards against
    // re-entrancy: bytes arriving while already consuming (e.g. from a
    // decompressor 'data' event fired during consumption) are buffered and
    // drained by the loop below. Called with no chunk to flush buffered
    // data and/or finalize at end of input.
    [CONSUMECHUNK](chunk) {
        if (this[CONSUMING] && chunk) {
            // re-entrant call: just stash the bytes for the outer pass
            this[BUFFERCONCAT](chunk);
        }
        else if (!chunk && !this[BUFFER]) {
            this[MAYBEEND]();
        }
        else if (chunk) {
            this[CONSUMING] = true;
            if (this[BUFFER]) {
                this[BUFFERCONCAT](chunk);
                const c = this[BUFFER];
                this[BUFFER] = undefined;
                this[CONSUMECHUNKSUB](c);
            }
            else {
                this[CONSUMECHUNKSUB](chunk);
            }
            // drain anything buffered re-entrantly while we were consuming,
            // as long as at least one full 512-byte block remains
            while (this[BUFFER] &&
                this[BUFFER]?.length >= 512 &&
                !this[ABORTED] &&
                !this[SAW_EOF]) {
                const c = this[BUFFER];
                this[BUFFER] = undefined;
                this[CONSUMECHUNKSUB](c);
            }
            this[CONSUMING] = false;
        }
        if (!this[BUFFER] || this[ENDED]) {
            this[MAYBEEND]();
        }
    }
    // Walk `chunk` in 512-byte blocks, dispatching each block to the
    // header/body/meta consumer for the current parser state. Any
    // remainder smaller than a full block is pushed back onto the buffer.
    [CONSUMECHUNKSUB](chunk) {
        // we know that we are in CONSUMING mode, so anything written goes into
        // the buffer.  Advance the position and put any remainder in the buffer.
        let position = 0;
        const length = chunk.length;
        while (position + 512 <= length &&
            !this[ABORTED] &&
            !this[SAW_EOF]) {
            switch (this[STATE]) {
                case 'begin':
                case 'header':
                    this[CONSUMEHEADER](chunk, position);
                    position += 512;
                    break;
                case 'ignore':
                case 'body':
                    // body consumers report how many bytes they actually took
                    position += this[CONSUMEBODY](chunk, position);
                    break;
                case 'meta':
                    position += this[CONSUMEMETA](chunk, position);
                    break;
                /* c8 ignore start */
                default:
                    throw new Error('invalid state: ' + this[STATE]);
                /* c8 ignore stop */
            }
        }
        if (position < length) {
            if (this[BUFFER]) {
                // bytes buffered re-entrantly during this pass come AFTER the
                // unconsumed remainder of this chunk
                this[BUFFER] = Buffer.concat([
                    chunk.subarray(position),
                    this[BUFFER],
                ]);
            }
            else {
                this[BUFFER] = chunk.subarray(position);
            }
        }
    }
    // Finalize the stream. Writes any final chunk, ends the decompressor
    // if one is active, and triggers the end-of-archive checks.
    // `cb`, when given, is invoked on 'finish'.
    end(chunk, encoding, cb) {
        if (typeof chunk === 'function') {
            cb = chunk;
            encoding = undefined;
            chunk = undefined;
        }
        if (typeof encoding === 'function') {
            cb = encoding;
            encoding = undefined;
        }
        if (typeof chunk === 'string') {
            chunk = Buffer.from(chunk, encoding);
        }
        if (cb)
            this.once('finish', cb);
        if (!this[ABORTED]) {
            if (this[UNZIP]) {
                /* c8 ignore start */
                if (chunk)
                    this[UNZIP].write(chunk);
                /* c8 ignore stop */
                this[UNZIP].end();
            }
            else {
                this[ENDED] = true;
                // if brotli was never decided, force one (possibly empty)
                // write so the sniffing logic can settle before finishing
                if (this.brotli === undefined)
                    chunk = chunk || Buffer.alloc(0);
                if (chunk)
                    this.write(chunk);
                this[MAYBEEND]();
            }
        }
        return this;
    }
}
exports.Parser = Parser;
//# sourceMappingURL=parse.js.map

1
node_modules/tar/dist/commonjs/parse.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

11
node_modules/tar/dist/commonjs/path-reservations.d.ts generated vendored Normal file
View File

@@ -0,0 +1,11 @@
export type Reservation = {
paths: string[];
dirs: Set<string>;
};
export type Handler = (clear: () => void) => void;
export declare class PathReservations {
#private;
reserve(paths: string[], fn: Handler): boolean;
check(fn: Handler): boolean;
}
//# sourceMappingURL=path-reservations.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"path-reservations.d.ts","sourceRoot":"","sources":["../../src/path-reservations.ts"],"names":[],"mappings":"AAgBA,MAAM,MAAM,WAAW,GAAG;IACxB,KAAK,EAAE,MAAM,EAAE,CAAA;IACf,IAAI,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,OAAO,GAAG,CAAC,KAAK,EAAE,MAAM,IAAI,KAAK,IAAI,CAAA;AAmBjD,qBAAa,gBAAgB;;IAY3B,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,EAAE,EAAE,OAAO;IAgEpC,KAAK,CAAC,EAAE,EAAE,OAAO;CA8ElB"}

170
node_modules/tar/dist/commonjs/path-reservations.js generated vendored Normal file
View File

@@ -0,0 +1,170 @@
"use strict";
// A path exclusive reservation system
// reserve([list, of, paths], fn)
// When the fn is first in line for all its paths, it
// is called with a cb that clears the reservation.
//
// Used by async unpack to avoid clobbering paths in use,
// while still allowing maximal safe parallelization.
Object.defineProperty(exports, "__esModule", { value: true });
exports.PathReservations = void 0;
const node_path_1 = require("node:path");
const normalize_unicode_js_1 = require("./normalize-unicode.js");
const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
const isWindows = platform === 'win32';
// return the list of parent dirs for a given path (the path itself is
// excluded): '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c']
// Build the chain of ancestor directories for `path`, shortest first.
// Each entry is the previous entry joined with the next path segment;
// an empty leading segment (absolute path) becomes '/'.
const getDirs = (path) => {
    const dirs = [];
    for (const segment of path.split('/').slice(0, -1)) {
        const prev = dirs[dirs.length - 1];
        const joined = prev === undefined ?
            segment
            : (0, node_path_1.join)(prev, segment);
        dirs.push(joined || '/');
    }
    return dirs;
};
// Grants exclusive access to a set of paths (and shared access to their
// parent directories) to one function at a time. A function runs only
// when it is first in every queue it participates in; clearing its
// reservation promotes the next waiters.
class PathReservations {
    // path => [function or Set]
    // A Set object means a directory reservation
    // A fn is a direct reservation on that path
    #queues = new Map();
    // fn => {paths:[path,...], dirs:[path, ...]}
    #reservations = new Map();
    // functions currently running
    #running = new Set();
    // Queue `fn` behind the given paths (plus all their parent dirs) and
    // run it immediately if it is already first in line everywhere.
    // Returns true if fn was started synchronously.
    reserve(paths, fn) {
        // on Windows, serialize everything through a single fake path;
        // case folding / 8.3 names make path-equality unreliable there
        paths =
            isWindows ?
                ['win32 parallelization disabled']
                : paths.map(p => {
                    // don't need normPath, because we skip this entirely for windows
                    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, node_path_1.join)((0, normalize_unicode_js_1.normalizeUnicode)(p))).toLowerCase();
                });
        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
        this.#reservations.set(fn, { dirs, paths });
        // direct (exclusive) reservations: one queue slot per path
        for (const p of paths) {
            const q = this.#queues.get(p);
            if (!q) {
                this.#queues.set(p, [fn]);
            }
            else {
                q.push(fn);
            }
        }
        // directory (shared) reservations: consecutive dir-waiters share
        // one Set so they can all run concurrently
        for (const dir of dirs) {
            const q = this.#queues.get(dir);
            if (!q) {
                this.#queues.set(dir, [new Set([fn])]);
            }
            else {
                const l = q[q.length - 1];
                if (l instanceof Set) {
                    l.add(fn);
                }
                else {
                    q.push(new Set([fn]));
                }
            }
        }
        return this.#run(fn);
    }
    // return the queues for each path the function cares about
    // fn => {paths, dirs}
    #getQueues(fn) {
        const res = this.#reservations.get(fn);
        /* c8 ignore start */
        if (!res) {
            throw new Error('function does not have any path reservations');
        }
        /* c8 ignore stop */
        return {
            paths: res.paths.map((path) => this.#queues.get(path)),
            dirs: [...res.dirs].map(path => this.#queues.get(path)),
        };
    }
    // check if fn is first in line for all its paths, and is
    // included in the first set for all its dir queues
    check(fn) {
        const { paths, dirs } = this.#getQueues(fn);
        return (paths.every(q => q && q[0] === fn) &&
            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
    }
    // run the function if it's first in line and not already running
    #run(fn) {
        if (this.#running.has(fn) || !this.check(fn)) {
            return false;
        }
        this.#running.add(fn);
        // fn receives a callback that releases its reservation
        fn(() => this.#clear(fn));
        return true;
    }
    // Release fn's reservation, drop empty queues, and start any waiters
    // that have just become eligible. Returns false if fn wasn't running.
    #clear(fn) {
        if (!this.#running.has(fn)) {
            return false;
        }
        const res = this.#reservations.get(fn);
        /* c8 ignore start */
        if (!res) {
            throw new Error('invalid reservation');
        }
        /* c8 ignore stop */
        const { paths, dirs } = res;
        // candidates to (re)start after this release
        const next = new Set();
        for (const path of paths) {
            const q = this.#queues.get(path);
            /* c8 ignore start */
            if (!q || q?.[0] !== fn) {
                continue;
            }
            /* c8 ignore stop */
            const q0 = q[1];
            if (!q0) {
                this.#queues.delete(path);
                continue;
            }
            q.shift();
            if (typeof q0 === 'function') {
                next.add(q0);
            }
            else {
                for (const f of q0) {
                    next.add(f);
                }
            }
        }
        for (const dir of dirs) {
            const q = this.#queues.get(dir);
            const q0 = q?.[0];
            /* c8 ignore next - type safety only */
            if (!q || !(q0 instanceof Set))
                continue;
            if (q0.size === 1 && q.length === 1) {
                this.#queues.delete(dir);
                continue;
            }
            else if (q0.size === 1) {
                q.shift();
                // next one must be a function,
                // or else the Set would've been reused
                const n = q[0];
                if (typeof n === 'function') {
                    next.add(n);
                }
            }
            else {
                q0.delete(fn);
            }
        }
        this.#running.delete(fn);
        next.forEach(fn => this.#run(fn));
        return true;
    }
}
exports.PathReservations = PathReservations;
//# sourceMappingURL=path-reservations.js.map

File diff suppressed because one or more lines are too long

27
node_modules/tar/dist/commonjs/pax.d.ts generated vendored Normal file
View File

@@ -0,0 +1,27 @@
/// <reference types="node" />
import { HeaderData } from './header.js';
export declare class Pax implements HeaderData {
atime?: Date;
mtime?: Date;
ctime?: Date;
charset?: string;
comment?: string;
gid?: number;
uid?: number;
gname?: string;
uname?: string;
linkpath?: string;
dev?: number;
ino?: number;
nlink?: number;
path?: string;
size?: number;
mode?: number;
global: boolean;
constructor(obj: HeaderData, global?: boolean);
encode(): Buffer;
encodeBody(): string;
encodeField(field: keyof Pax): string;
static parse(str: string, ex?: HeaderData, g?: boolean): Pax;
}
//# sourceMappingURL=pax.d.ts.map

1
node_modules/tar/dist/commonjs/pax.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"pax.d.ts","sourceRoot":"","sources":["../../src/pax.ts"],"names":[],"mappings":";AACA,OAAO,EAAU,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,qBAAa,GAAI,YAAW,UAAU;IACpC,KAAK,CAAC,EAAE,IAAI,CAAA;IACZ,KAAK,CAAC,EAAE,IAAI,CAAA;IACZ,KAAK,CAAC,EAAE,IAAI,CAAA;IAEZ,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,OAAO,CAAC,EAAE,MAAM,CAAA;IAEhB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,GAAG,CAAC,EAAE,MAAM,CAAA;IAEZ,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb,MAAM,EAAE,OAAO,CAAA;gBAEH,GAAG,EAAE,UAAU,EAAE,MAAM,GAAE,OAAe;IAmBpD,MAAM;IAiDN,UAAU;IAoBV,WAAW,CAAC,KAAK,EAAE,MAAM,GAAG,GAAG,MAAM;IA2BrC,MAAM,CAAC,KAAK,CAAC,GAAG,EAAE,MAAM,EAAE,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,GAAE,OAAe;CAG9D"}

158
node_modules/tar/dist/commonjs/pax.js generated vendored Normal file
View File

@@ -0,0 +1,158 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Pax = void 0;
const node_path_1 = require("node:path");
const header_js_1 = require("./header.js");
// A pax extended header: a bag of extended attributes that can be
// serialized to (encode) and parsed from (parse) the pax
// "<len> <key>=<value>\n" record format. `global` selects between
// GlobalExtendedHeader and per-file ExtendedHeader semantics.
class Pax {
    atime;
    mtime;
    ctime;
    charset;
    comment;
    gid;
    uid;
    gname;
    uname;
    linkpath;
    dev;
    ino;
    nlink;
    path;
    size;
    mode;
    global;
    constructor(obj, global = false) {
        this.atime = obj.atime;
        this.charset = obj.charset;
        this.comment = obj.comment;
        this.ctime = obj.ctime;
        this.dev = obj.dev;
        this.gid = obj.gid;
        this.global = global;
        this.gname = obj.gname;
        this.ino = obj.ino;
        this.linkpath = obj.linkpath;
        this.mtime = obj.mtime;
        this.nlink = obj.nlink;
        this.path = obj.path;
        this.size = obj.size;
        this.uid = obj.uid;
        this.uname = obj.uname;
    }
    // Serialize to a tar-ready buffer: one 512-byte header block followed
    // by the pax body, zero-padded to a 512-byte boundary. Returns an
    // empty buffer when there is nothing to encode.
    encode() {
        const body = this.encodeBody();
        if (body === '') {
            return Buffer.allocUnsafe(0);
        }
        const bodyLen = Buffer.byteLength(body);
        // round up to 512 bytes
        // add 512 for header
        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
        const buf = Buffer.allocUnsafe(bufLen);
        // 0-fill the header section, it might not hit every field
        for (let i = 0; i < 512; i++) {
            buf[i] = 0;
        }
        new header_js_1.Header({
            // XXX split the path
            // then the path should be PaxHeader + basename, but less than 99,
            // prepend with the dirname
            /* c8 ignore start */
            path: ('PaxHeader/' + (0, node_path_1.basename)(this.path ?? '')).slice(0, 99),
            /* c8 ignore stop */
            mode: this.mode || 0o644,
            uid: this.uid,
            gid: this.gid,
            size: bodyLen,
            mtime: this.mtime,
            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
            linkpath: '',
            uname: this.uname || '',
            gname: this.gname || '',
            devmaj: 0,
            devmin: 0,
            atime: this.atime,
            ctime: this.ctime,
        }).encode(buf);
        buf.write(body, 512, bodyLen, 'utf8');
        // null pad after the body
        for (let i = bodyLen + 512; i < buf.length; i++) {
            buf[i] = 0;
        }
        return buf;
    }
    // Concatenate the encoded records for every supported field
    // (undefined fields encode to '').
    encodeBody() {
        return (this.encodeField('path') +
            this.encodeField('ctime') +
            this.encodeField('atime') +
            this.encodeField('dev') +
            this.encodeField('ino') +
            this.encodeField('nlink') +
            this.encodeField('charset') +
            this.encodeField('comment') +
            this.encodeField('gid') +
            this.encodeField('gname') +
            this.encodeField('linkpath') +
            this.encodeField('mtime') +
            this.encodeField('size') +
            this.encodeField('uid') +
            this.encodeField('uname'));
    }
    // Encode one field as "<len> <key>=<value>\n" where <len> is the total
    // byte length of the record including the length digits themselves.
    // Dates encode as epoch seconds; dev/ino/nlink get the SCHILY. prefix.
    encodeField(field) {
        if (this[field] === undefined) {
            return '';
        }
        const r = this[field];
        const v = r instanceof Date ? r.getTime() / 1000 : r;
        const s = ' ' +
            (field === 'dev' || field === 'ino' || field === 'nlink' ?
                'SCHILY.'
                : '') +
            field +
            '=' +
            v +
            '\n';
        const byteLen = Buffer.byteLength(s);
        // the digits includes the length of the digits in ascii base-10
        // so if it's 9 characters, then adding 1 for the 9 makes it 10
        // which makes it 11 chars.
        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
        if (byteLen + digits >= Math.pow(10, digits)) {
            digits += 1;
        }
        const len = digits + byteLen;
        return len + s;
    }
    // Parse a pax body, overlaying the parsed records on `ex` (earlier
    // values win over `ex`). `g` marks the result as a global header.
    static parse(str, ex, g = false) {
        return new Pax(merge(parseKV(str), ex), g);
    }
}
exports.Pax = Pax;
// Overlay `a` on top of `b` into a fresh object; when `b` is absent,
// return `a` itself (no copy is made in that case).
const merge = (a, b) => {
    if (!b) {
        return a;
    }
    return Object.assign({}, b, a);
};
// Parse a pax extended-header body ("<len> <key>=<value>\n" records)
// into a null-prototype map of key -> value.
const parseKV = (str) => {
    const set = Object.create(null);
    for (const line of str.replace(/\n$/, '').split('\n')) {
        parseKVLine(set, line);
    }
    return set;
};
// Parse one "<len> <key>=<value>" record into `set` and return it.
// Records whose declared byte length is wrong, or whose key is empty,
// are silently skipped. Time-like keys become Dates, all-digit values
// become numbers, everything else stays a string.
// XXX Values with \n in them will fail this.
// Refactor to not be a naive line-by-line parse.
const parseKVLine = (set, line) => {
    const declaredLen = parseInt(line, 10);
    // the declared length counts the trailing \n that split() removed
    if (declaredLen !== Buffer.byteLength(line) + 1) {
        return set;
    }
    const body = line.slice((declaredLen + ' ').length);
    const eq = body.indexOf('=');
    const rawKey = eq === -1 ? body : body.slice(0, eq);
    if (!rawKey) {
        return set;
    }
    const value = eq === -1 ? '' : body.slice(eq + 1);
    const key = rawKey.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
    if (/^([A-Z]+\.)?([mac]|birth|creation)time$/.test(key)) {
        set[key] = new Date(Number(value) * 1000);
    }
    else if (/^[0-9]+$/.test(value)) {
        set[key] = +value;
    }
    else {
        set[key] = value;
    }
    return set;
};
//# sourceMappingURL=pax.js.map

1
node_modules/tar/dist/commonjs/pax.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

37
node_modules/tar/dist/commonjs/read-entry.d.ts generated vendored Normal file
View File

@@ -0,0 +1,37 @@
/// <reference types="node" />
import { Minipass } from 'minipass';
import { Header } from './header.js';
import { Pax } from './pax.js';
import { EntryTypeName } from './types.js';
export declare class ReadEntry extends Minipass<Buffer, Buffer> {
#private;
extended?: Pax;
globalExtended?: Pax;
header: Header;
startBlockSize: number;
blockRemain: number;
remain: number;
type: EntryTypeName;
meta: boolean;
ignore: boolean;
path: string;
mode?: number;
uid?: number;
gid?: number;
uname?: string;
gname?: string;
size: number;
mtime?: Date;
atime?: Date;
ctime?: Date;
linkpath?: string;
dev?: number;
ino?: number;
nlink?: number;
invalid: boolean;
absolute?: string;
unsupported: boolean;
constructor(header: Header, ex?: Pax, gex?: Pax);
write(data: Buffer): boolean;
}
//# sourceMappingURL=read-entry.d.ts.map

1
node_modules/tar/dist/commonjs/read-entry.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"read-entry.d.ts","sourceRoot":"","sources":["../../src/read-entry.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAEpC,OAAO,EAAE,GAAG,EAAE,MAAM,UAAU,CAAA;AAC9B,OAAO,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAE1C,qBAAa,SAAU,SAAQ,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;;IACrD,QAAQ,CAAC,EAAE,GAAG,CAAA;IACd,cAAc,CAAC,EAAE,GAAG,CAAA;IACpB,MAAM,EAAE,MAAM,CAAA;IACd,cAAc,EAAE,MAAM,CAAA;IACtB,WAAW,EAAE,MAAM,CAAA;IACnB,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,EAAE,aAAa,CAAA;IACnB,IAAI,EAAE,OAAO,CAAQ;IACrB,MAAM,EAAE,OAAO,CAAQ;IACvB,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,IAAI,EAAE,MAAM,CAAI;IAChB,KAAK,CAAC,EAAE,IAAI,CAAA;IACZ,KAAK,CAAC,EAAE,IAAI,CAAA;IACZ,KAAK,CAAC,EAAE,IAAI,CAAA;IACZ,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,OAAO,EAAE,OAAO,CAAQ;IACxB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,WAAW,EAAE,OAAO,CAAQ;gBAEhB,MAAM,EAAE,MAAM,EAAE,EAAE,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE,GAAG;IA+E/C,KAAK,CAAC,IAAI,EAAE,MAAM;CAyCnB"}

140
node_modules/tar/dist/commonjs/read-entry.js generated vendored Normal file
View File

@@ -0,0 +1,140 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ReadEntry = void 0;
const minipass_1 = require("minipass");
const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
// One entry read out of a tar archive: a Minipass stream of the entry's
// body bytes, carrying the decoded header fields (merged with any pax
// extended / global extended headers) as properties.
class ReadEntry extends minipass_1.Minipass {
    extended;
    globalExtended;
    header;
    startBlockSize;
    blockRemain;
    remain;
    type;
    meta = false;
    ignore = false;
    path;
    mode;
    uid;
    gid;
    uname;
    gname;
    size = 0;
    mtime;
    atime;
    ctime;
    linkpath;
    dev;
    ino;
    nlink;
    invalid = false;
    absolute;
    unsupported = false;
    constructor(header, ex, gex) {
        super({});
        // read entries always start life paused. this is to avoid the
        // situation where Minipass's auto-ending empty streams results
        // in an entry ending before we're ready for it.
        this.pause();
        this.extended = ex;
        this.globalExtended = gex;
        this.header = header;
        /* c8 ignore start */
        this.remain = header.size ?? 0;
        /* c8 ignore stop */
        // body is stored in whole 512-byte blocks, zero-padded at the end
        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
        this.blockRemain = this.startBlockSize;
        this.type = header.type;
        switch (this.type) {
            case 'File':
            case 'OldFile':
            case 'Link':
            case 'SymbolicLink':
            case 'CharacterDevice':
            case 'BlockDevice':
            case 'Directory':
            case 'FIFO':
            case 'ContiguousFile':
            case 'GNUDumpDir':
                break;
            case 'NextFileHasLongLinkpath':
            case 'NextFileHasLongPath':
            case 'OldGnuLongPath':
            case 'GlobalExtendedHeader':
            case 'ExtendedHeader':
            case 'OldExtendedHeader':
                // meta entries modify the NEXT entry rather than standing alone
                this.meta = true;
                break;
            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
            // it may be worth doing the same, but with a warning.
            default:
                this.ignore = true;
        }
        /* c8 ignore start */
        if (!header.path) {
            throw new Error('no path provided for tar.ReadEntry');
        }
        /* c8 ignore stop */
        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.path);
        this.mode = header.mode;
        if (this.mode) {
            // strip type bits; keep only the permission bits
            this.mode = this.mode & 0o7777;
        }
        this.uid = header.uid;
        this.gid = header.gid;
        this.uname = header.uname;
        this.gname = header.gname;
        this.size = this.remain;
        this.mtime = header.mtime;
        this.atime = header.atime;
        this.ctime = header.ctime;
        /* c8 ignore start */
        this.linkpath =
            header.linkpath ?
                (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.linkpath)
                : undefined;
        /* c8 ignore stop */
        this.uname = header.uname;
        this.gname = header.gname;
        // extended headers override the base header fields
        if (ex) {
            this.#slurp(ex);
        }
        if (gex) {
            this.#slurp(gex, true);
        }
    }
    // Accept body bytes from the parser. Tracks both the logical size
    // (remain) and block-padded size (blockRemain); trailing zero padding
    // beyond `remain` is swallowed rather than emitted downstream.
    write(data) {
        const writeLen = data.length;
        if (writeLen > this.blockRemain) {
            throw new Error('writing more to entry than is appropriate');
        }
        const r = this.remain;
        const br = this.blockRemain;
        this.remain = Math.max(0, r - writeLen);
        this.blockRemain = Math.max(0, br - writeLen);
        if (this.ignore) {
            return true;
        }
        if (r >= writeLen) {
            return super.write(data);
        }
        // r < writeLen
        return super.write(data.subarray(0, r));
    }
    // Overlay extended-header fields onto this entry.
    #slurp(ex, gex = false) {
        if (ex.path)
            ex.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.path);
        if (ex.linkpath)
            ex.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.linkpath);
        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
            // we slurp in everything except for the path attribute in
            // a global extended header, because that's weird. Also, any
            // null/undefined values are ignored.
            return !(v === null ||
                v === undefined ||
                (k === 'path' && gex));
        })));
    }
}
exports.ReadEntry = ReadEntry;
//# sourceMappingURL=read-entry.js.map

1
node_modules/tar/dist/commonjs/read-entry.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

2
node_modules/tar/dist/commonjs/replace.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
export declare const replace: import("./make-command.js").TarCommand<never, never>;
//# sourceMappingURL=replace.d.ts.map

1
node_modules/tar/dist/commonjs/replace.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"replace.d.ts","sourceRoot":"","sources":["../../src/replace.ts"],"names":[],"mappings":"AA6QA,eAAO,MAAM,OAAO,sDA6BnB,CAAA"}

231
node_modules/tar/dist/commonjs/replace.js generated vendored Normal file
View File

@@ -0,0 +1,231 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.replace = void 0;
// tar -r
const fs_minipass_1 = require("@isaacs/fs-minipass");
const node_fs_1 = __importDefault(require("node:fs"));
const node_path_1 = __importDefault(require("node:path"));
const header_js_1 = require("./header.js");
const list_js_1 = require("./list.js");
const make_command_js_1 = require("./make-command.js");
const options_js_1 = require("./options.js");
const pack_js_1 = require("./pack.js");
// starting at the head of the file, read a Header
// If the checksum is invalid, that's our position to start writing
// If it is, jump forward by the specified size (round up to 512)
// and try again.
// Write the new Pack stream starting there.
// Synchronous `tar -r`: scan the existing archive header-by-header to
// find the first invalid/truncated position (i.e. where the trailing
// null blocks or garbage begin), then start writing the new entries
// there. Creates the file if it doesn't exist; refuses gzipped input.
const replaceSync = (opt, files) => {
    const p = new pack_js_1.PackSync(opt);
    let threw = true;
    let fd;
    let position;
    try {
        try {
            fd = node_fs_1.default.openSync(opt.file, 'r+');
        }
        catch (er) {
            // no archive yet: create one from scratch
            if (er?.code === 'ENOENT') {
                fd = node_fs_1.default.openSync(opt.file, 'w+');
            }
            else {
                throw er;
            }
        }
        const st = node_fs_1.default.fstatSync(fd);
        const headBuf = Buffer.alloc(512);
        // walk the archive one header block at a time
        POSITION: for (position = 0; position < st.size; position += 512) {
            // inner loop: a single read may return fewer than 512 bytes
            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
                bytes = node_fs_1.default.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
                // gzip magic at offset 0: appending in place is impossible
                if (position === 0 &&
                    headBuf[0] === 0x1f &&
                    headBuf[1] === 0x8b) {
                    throw new Error('cannot append to compressed archives');
                }
                if (!bytes) {
                    break POSITION;
                }
            }
            const h = new header_js_1.Header(headBuf);
            // invalid checksum marks the end of the valid entries
            if (!h.cksumValid) {
                break;
            }
            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
            // entry body would run past EOF: archive is truncated here
            if (position + entryBlockSize + 512 > st.size) {
                break;
            }
            // the 512 for the header we just parsed will be added as well
            // also jump ahead all the blocks for the body
            position += entryBlockSize;
            if (opt.mtimeCache && h.mtime) {
                opt.mtimeCache.set(String(h.path), h.mtime);
            }
        }
        threw = false;
        streamSync(opt, p, position, fd, files);
    }
    finally {
        // on error, make a best-effort attempt to release the fd
        if (threw) {
            try {
                node_fs_1.default.closeSync(fd);
            }
            catch (er) { }
        }
    }
};
// Pipe the pack stream into the already-open archive fd, starting at
// `position` (the offset just past the last valid entry), then queue
// all the requested files.
const streamSync = (opt, p, position, fd, files) => {
    const writeOpts = { fd, start: position };
    const out = new fs_minipass_1.WriteStreamSync(opt.file, writeOpts);
    p.pipe(out);
    addFilesSync(p, files);
};
// Async `tar -r`: same scan-then-append strategy as replaceSync, but
// driven by callback-style fs reads. Resolves when the write stream
// closes; rejects on any fs/pack/stream error.
const replaceAsync = (opt, files) => {
    files = Array.from(files);
    const p = new pack_js_1.Pack(opt);
    // Find the append position by reading headers until one is invalid
    // or the file ends; calls cb_(err) or cb_(null, position).
    const getPos = (fd, size, cb_) => {
        // wrap the callback so the fd is closed before reporting errors
        const cb = (er, pos) => {
            if (er) {
                node_fs_1.default.close(fd, _ => cb_(er));
            }
            else {
                cb_(null, pos);
            }
        };
        let position = 0;
        if (size === 0) {
            return cb(null, 0);
        }
        let bufPos = 0;
        const headBuf = Buffer.alloc(512);
        // re-entrant read handler: accumulates one full 512-byte header,
        // validates it, then advances past the entry and reads the next
        const onread = (er, bytes) => {
            if (er || typeof bytes === 'undefined') {
                return cb(er);
            }
            bufPos += bytes;
            // short read: keep filling the current header block
            if (bufPos < 512 && bytes) {
                return node_fs_1.default.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
            }
            // gzip magic at offset 0: appending in place is impossible
            if (position === 0 &&
                headBuf[0] === 0x1f &&
                headBuf[1] === 0x8b) {
                return cb(new Error('cannot append to compressed archives'));
            }
            // truncated header
            if (bufPos < 512) {
                return cb(null, position);
            }
            const h = new header_js_1.Header(headBuf);
            // invalid checksum marks the end of the valid entries
            if (!h.cksumValid) {
                return cb(null, position);
            }
            /* c8 ignore next */
            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
            // entry body would run past EOF: archive is truncated here
            if (position + entryBlockSize + 512 > size) {
                return cb(null, position);
            }
            position += entryBlockSize + 512;
            if (position >= size) {
                return cb(null, position);
            }
            if (opt.mtimeCache && h.mtime) {
                opt.mtimeCache.set(String(h.path), h.mtime);
            }
            bufPos = 0;
            node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
        };
        node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
    };
    const promise = new Promise((resolve, reject) => {
        p.on('error', reject);
        let flag = 'r+';
        const onopen = (er, fd) => {
            // no archive yet: retry once in create mode
            if (er && er.code === 'ENOENT' && flag === 'r+') {
                flag = 'w+';
                return node_fs_1.default.open(opt.file, flag, onopen);
            }
            if (er || !fd) {
                return reject(er);
            }
            node_fs_1.default.fstat(fd, (er, st) => {
                if (er) {
                    return node_fs_1.default.close(fd, () => reject(er));
                }
                getPos(fd, st.size, (er, position) => {
                    if (er) {
                        return reject(er);
                    }
                    // append new entries starting at the computed offset
                    const stream = new fs_minipass_1.WriteStream(opt.file, {
                        fd: fd,
                        start: position,
                    });
                    p.pipe(stream);
                    stream.on('error', reject);
                    stream.on('close', resolve);
                    addFilesAsync(p, files);
                });
            });
        };
        node_fs_1.default.open(opt.file, flag, onopen);
    });
    return promise;
};
// Add each file to the pack stream synchronously, then end it.
// Arguments beginning with '@' name an existing archive whose entries
// are re-added to the new pack.
const addFilesSync = (p, files) => {
    for (const file of files) {
        if (file.charAt(0) !== '@') {
            p.add(file);
            continue;
        }
        // '@archive.tar' -> copy every entry out of that archive
        (0, list_js_1.list)({
            file: node_path_1.default.resolve(p.cwd, file.slice(1)),
            sync: true,
            noResume: true,
            onReadEntry: entry => p.add(entry),
        });
    }
    p.end();
};
// Add each file to the pack stream, awaiting '@archive' expansions so
// entries are appended strictly in argument order, then end the pack.
const addFilesAsync = async (p, files) => {
    for (const f of files) {
        const file = String(f);
        if (file.charAt(0) !== '@') {
            p.add(file);
            continue;
        }
        // '@archive.tar' -> asynchronously re-add that archive's entries
        await (0, list_js_1.list)({
            file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
            noResume: true,
            onReadEntry: entry => p.add(entry),
        });
    }
    p.end();
};
// Public `tar -r` command: wires the sync/async implementations above
// into makeCommand. The no-file variants always throw, and the final
// validator enforces: a file option, no compression, and at least one
// path to add.
exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync,
/* c8 ignore start */
() => {
    throw new TypeError('file is required');
}, () => {
    throw new TypeError('file is required');
},
/* c8 ignore stop */
(opt, entries) => {
    if (!(0, options_js_1.isFile)(opt)) {
        throw new TypeError('file is required');
    }
    // appending requires rewriting the tail of the file in place, which
    // cannot be done on a compressed stream
    if (opt.gzip ||
        opt.brotli ||
        opt.file.endsWith('.br') ||
        opt.file.endsWith('.tbr')) {
        throw new TypeError('cannot append to compressed archives');
    }
    if (!entries?.length) {
        throw new TypeError('no paths specified to add/replace');
    }
});
//# sourceMappingURL=replace.js.map

1
node_modules/tar/dist/commonjs/replace.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,2 @@
export declare const stripAbsolutePath: (path: string) => string[];
//# sourceMappingURL=strip-absolute-path.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"strip-absolute-path.d.ts","sourceRoot":"","sources":["../../src/strip-absolute-path.ts"],"names":[],"mappings":"AAUA,eAAO,MAAM,iBAAiB,SAAU,MAAM,aAgB7C,CAAA"}

29
node_modules/tar/dist/commonjs/strip-absolute-path.js generated vendored Normal file
View File

@@ -0,0 +1,29 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.stripAbsolutePath = void 0;
// unix absolute paths are also absolute on win32, so we use this for both
const node_path_1 = require("node:path");
const { isAbsolute, parse } = node_path_1.win32;
// returns [root, stripped]
// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
// explicitly if it's the first character.
// drive-specific relative paths on Windows get their root stripped off even
// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
// Repeatedly peel the root off `path` until nothing absolute remains,
// returning [strippedRoots, remainder]. A leading '/' is peeled one
// character at a time (so '//x/y/z/a' sanitizes to 'x/y/z/a'), except
// the '//?/' long-path prefix, whose whole root is removed at once.
const stripAbsolutePath = (path) => {
    let stripped = '';
    for (let parsed = parse(path); isAbsolute(path) || parsed.root; parsed = parse(path)) {
        const plainSlash = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/';
        const root = plainSlash ? '/' : parsed.root;
        path = path.slice(root.length);
        stripped += root;
    }
    return [stripped, path];
};
exports.stripAbsolutePath = stripAbsolutePath;
//# sourceMappingURL=strip-absolute-path.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"strip-absolute-path.js","sourceRoot":"","sources":["../../src/strip-absolute-path.ts"],"names":[],"mappings":";;;AAAA,0EAA0E;AAC1E,yCAAiC;AACjC,MAAM,EAAE,UAAU,EAAE,KAAK,EAAE,GAAG,iBAAK,CAAA;AAEnC,2BAA2B;AAC3B,4EAA4E;AAC5E,yEAAyE;AACzE,0CAA0C;AAC1C,4EAA4E;AAC5E,uEAAuE;AAChE,MAAM,iBAAiB,GAAG,CAAC,IAAY,EAAE,EAAE;IAChD,IAAI,CAAC,GAAG,EAAE,CAAA;IAEV,IAAI,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IACxB,OAAO,UAAU,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC;QACvC,yDAAyD;QACzD,2CAA2C;QAC3C,MAAM,IAAI,GACR,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,MAAM,CAAC,CAAC;YACrD,GAAG;YACL,CAAC,CAAC,MAAM,CAAC,IAAI,CAAA;QACf,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QAC9B,CAAC,IAAI,IAAI,CAAA;QACT,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IACtB,CAAC;IACD,OAAO,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;AAClB,CAAC,CAAA;AAhBY,QAAA,iBAAiB,qBAgB7B","sourcesContent":["// unix absolute paths are also absolute on win32, so we use this for both\nimport { win32 } from 'node:path'\nconst { isAbsolute, parse } = win32\n\n// returns [root, stripped]\n// Note that windows will think that //x/y/z/a has a \"root\" of //x/y, and in\n// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /\n// explicitly if it's the first character.\n// drive-specific relative paths on Windows get their root stripped off even\n// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']\nexport const stripAbsolutePath = (path: string) => {\n let r = ''\n\n let parsed = parse(path)\n while (isAbsolute(path) || parsed.root) {\n // windows will think that //x/y/z has a \"root\" of //x/y/\n // but strip the //?/C:/ off of //?/C:/path\n const root =\n path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?\n '/'\n : parsed.root\n path = path.slice(root.length)\n r += root\n parsed = parse(path)\n }\n return [r, path]\n}\n"]}

View File

@@ -0,0 +1,2 @@
/**
 * Remove all trailing '/' characters from a string. Hot path: see the
 * performance warning in strip-trailing-slashes.js before changing.
 */
export declare const stripTrailingSlashes: (str: string) => string;
//# sourceMappingURL=strip-trailing-slashes.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"strip-trailing-slashes.d.ts","sourceRoot":"","sources":["../../src/strip-trailing-slashes.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,oBAAoB,QAAS,MAAM,WAQ/C,CAAA"}

View File

@@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.stripTrailingSlashes = void 0;
// warning: extremely hot code path.
// This has been meticulously optimized for use
// within npm install on large package trees.
// Do not edit without careful benchmarking.
const stripTrailingSlashes = (str) => {
let i = str.length - 1;
let slashesStart = -1;
while (i > -1 && str.charAt(i) === '/') {
slashesStart = i;
i--;
}
return slashesStart === -1 ? str : str.slice(0, slashesStart);
};
exports.stripTrailingSlashes = stripTrailingSlashes;
//# sourceMappingURL=strip-trailing-slashes.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"strip-trailing-slashes.js","sourceRoot":"","sources":["../../src/strip-trailing-slashes.ts"],"names":[],"mappings":";;;AAAA,oCAAoC;AACpC,+CAA+C;AAC/C,6CAA6C;AAC7C,4CAA4C;AACrC,MAAM,oBAAoB,GAAG,CAAC,GAAW,EAAE,EAAE;IAClD,IAAI,CAAC,GAAG,GAAG,CAAC,MAAM,GAAG,CAAC,CAAA;IACtB,IAAI,YAAY,GAAG,CAAC,CAAC,CAAA;IACrB,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC;QACvC,YAAY,GAAG,CAAC,CAAA;QAChB,CAAC,EAAE,CAAA;IACL,CAAC;IACD,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC,CAAA;AAC/D,CAAC,CAAA;AARY,QAAA,oBAAoB,wBAQhC","sourcesContent":["// warning: extremely hot code path.\n// This has been meticulously optimized for use\n// within npm install on large package trees.\n// Do not edit without careful benchmarking.\nexport const stripTrailingSlashes = (str: string) => {\n let i = str.length - 1\n let slashesStart = -1\n while (i > -1 && str.charAt(i) === '/') {\n slashesStart = i\n i--\n }\n return slashesStart === -1 ? str : str.slice(0, slashesStart)\n}\n"]}

9
node_modules/tar/dist/commonjs/symlink-error.d.ts generated vendored Normal file
View File

@@ -0,0 +1,9 @@
/**
 * Error raised when extraction would have to write through a symbolic
 * link (message: 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link').
 */
export declare class SymlinkError extends Error {
    path: string;
    symlink: string;
    syscall: 'symlink';
    code: 'TAR_SYMLINK_ERROR';
    constructor(symlink: string, path: string);
    get name(): string;
}
//# sourceMappingURL=symlink-error.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"symlink-error.d.ts","sourceRoot":"","sources":["../../src/symlink-error.ts"],"names":[],"mappings":"AAAA,qBAAa,YAAa,SAAQ,KAAK;IACrC,IAAI,EAAE,MAAM,CAAA;IACZ,OAAO,EAAE,MAAM,CAAA;IACf,OAAO,EAAE,SAAS,CAAY;IAC9B,IAAI,EAAE,mBAAmB,CAAsB;gBACnC,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM;IAKzC,IAAI,IAAI,WAEP;CACF"}

19
node_modules/tar/dist/commonjs/symlink-error.js generated vendored Normal file
View File

@@ -0,0 +1,19 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SymlinkError = void 0;
class SymlinkError extends Error {
path;
symlink;
syscall = 'symlink';
code = 'TAR_SYMLINK_ERROR';
constructor(symlink, path) {
super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
this.symlink = symlink;
this.path = path;
}
get name() {
return 'SymlinkError';
}
}
exports.SymlinkError = SymlinkError;
//# sourceMappingURL=symlink-error.js.map

1
node_modules/tar/dist/commonjs/symlink-error.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"symlink-error.js","sourceRoot":"","sources":["../../src/symlink-error.ts"],"names":[],"mappings":";;;AAAA,MAAa,YAAa,SAAQ,KAAK;IACrC,IAAI,CAAQ;IACZ,OAAO,CAAQ;IACf,OAAO,GAAc,SAAS,CAAA;IAC9B,IAAI,GAAwB,mBAAmB,CAAA;IAC/C,YAAY,OAAe,EAAE,IAAY;QACvC,KAAK,CAAC,yDAAyD,CAAC,CAAA;QAChE,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;IAClB,CAAC;IACD,IAAI,IAAI;QACN,OAAO,cAAc,CAAA;IACvB,CAAC;CACF;AAbD,oCAaC","sourcesContent":["export class SymlinkError extends Error {\n path: string\n symlink: string\n syscall: 'symlink' = 'symlink'\n code: 'TAR_SYMLINK_ERROR' = 'TAR_SYMLINK_ERROR'\n constructor(symlink: string, path: string) {\n super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link')\n this.symlink = symlink\n this.path = path\n }\n get name() {\n return 'SymlinkError'\n }\n}\n"]}

7
node_modules/tar/dist/commonjs/types.d.ts generated vendored Normal file
View File

@@ -0,0 +1,7 @@
/** True if `c` is one of the single-character entry type codes below. */
export declare const isCode: (c: string) => c is EntryTypeCode;
/** True if `c` is one of the human-friendly entry type names below. */
export declare const isName: (c: string) => c is EntryTypeName;
/** Single-character type codes from the tar header ('' behaves like '0'/File). */
export type EntryTypeCode = '0' | '' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | 'g' | 'x' | 'A' | 'D' | 'I' | 'K' | 'L' | 'M' | 'N' | 'S' | 'V' | 'X';
/** Human-friendly names for the codes above, plus 'Unsupported'. */
export type EntryTypeName = 'File' | 'OldFile' | 'Link' | 'SymbolicLink' | 'CharacterDevice' | 'BlockDevice' | 'Directory' | 'FIFO' | 'ContiguousFile' | 'GlobalExtendedHeader' | 'ExtendedHeader' | 'SolarisACL' | 'GNUDumpDir' | 'Inode' | 'NextFileHasLongLinkpath' | 'NextFileHasLongPath' | 'ContinuationFile' | 'OldGnuLongPath' | 'SparseFile' | 'TapeVolumeHeader' | 'OldExtendedHeader' | 'Unsupported';
/** Maps each type code to its human-friendly name. */
export declare const name: Map<EntryTypeCode, EntryTypeName>;
/** Reverse of `name`: human-friendly name back to type code. */
export declare const code: Map<EntryTypeName, EntryTypeCode>;
//# sourceMappingURL=types.d.ts.map

1
node_modules/tar/dist/commonjs/types.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,MAAM,MAAO,MAAM,uBACF,CAAA;AAE9B,eAAO,MAAM,MAAM,MAAO,MAAM,uBACF,CAAA;AAE9B,MAAM,MAAM,aAAa,GACrB,GAAG,GACH,EAAE,GACF,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,GACH,GAAG,CAAA;AAEP,MAAM,MAAM,aAAa,GACrB,MAAM,GACN,SAAS,GACT,MAAM,GACN,cAAc,GACd,iBAAiB,GACjB,aAAa,GACb,WAAW,GACX,MAAM,GACN,gBAAgB,GAChB,sBAAsB,GACtB,gBAAgB,GAChB,YAAY,GACZ,YAAY,GACZ,OAAO,GACP,yBAAyB,GACzB,qBAAqB,GACrB,kBAAkB,GAClB,gBAAgB,GAChB,YAAY,GACZ,kBAAkB,GAClB,mBAAmB,GACnB,aAAa,CAAA;AAGjB,eAAO,MAAM,IAAI,mCAsCf,CAAA;AAGF,eAAO,MAAM,IAAI,mCAEhB,CAAA"}

50
node_modules/tar/dist/commonjs/types.js generated vendored Normal file
View File

@@ -0,0 +1,50 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.code = exports.name = exports.isName = exports.isCode = void 0;
const isCode = (c) => exports.name.has(c);
exports.isCode = isCode;
const isName = (c) => exports.code.has(c);
exports.isName = isName;
// map types from key to human-friendly name
exports.name = new Map([
['0', 'File'],
// same as File
['', 'OldFile'],
['1', 'Link'],
['2', 'SymbolicLink'],
// Devices and FIFOs aren't fully supported
// they are parsed, but skipped when unpacking
['3', 'CharacterDevice'],
['4', 'BlockDevice'],
['5', 'Directory'],
['6', 'FIFO'],
// same as File
['7', 'ContiguousFile'],
// pax headers
['g', 'GlobalExtendedHeader'],
['x', 'ExtendedHeader'],
// vendor-specific stuff
// skip
['A', 'SolarisACL'],
// like 5, but with data, which should be skipped
['D', 'GNUDumpDir'],
// metadata only, skip
['I', 'Inode'],
// data = link path of next file
['K', 'NextFileHasLongLinkpath'],
// data = path of next file
['L', 'NextFileHasLongPath'],
// skip
['M', 'ContinuationFile'],
// like L
['N', 'OldGnuLongPath'],
// skip
['S', 'SparseFile'],
// skip
['V', 'TapeVolumeHeader'],
// like x
['X', 'OldExtendedHeader'],
]);
// map the other direction
exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]));
//# sourceMappingURL=types.js.map

1
node_modules/tar/dist/commonjs/types.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":";;;AAAO,MAAM,MAAM,GAAG,CAAC,CAAS,EAAsB,EAAE,CACtD,YAAI,CAAC,GAAG,CAAC,CAAkB,CAAC,CAAA;AADjB,QAAA,MAAM,UACW;AAEvB,MAAM,MAAM,GAAG,CAAC,CAAS,EAAsB,EAAE,CACtD,YAAI,CAAC,GAAG,CAAC,CAAkB,CAAC,CAAA;AADjB,QAAA,MAAM,UACW;AAiD9B,4CAA4C;AAC/B,QAAA,IAAI,GAAG,IAAI,GAAG,CAA+B;IACxD,CAAC,GAAG,EAAE,MAAM,CAAC;IACb,eAAe;IACf,CAAC,EAAE,EAAE,SAAS,CAAC;IACf,CAAC,GAAG,EAAE,MAAM,CAAC;IACb,CAAC,GAAG,EAAE,cAAc,CAAC;IACrB,2CAA2C;IAC3C,8CAA8C;IAC9C,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,aAAa,CAAC;IACpB,CAAC,GAAG,EAAE,WAAW,CAAC;IAClB,CAAC,GAAG,EAAE,MAAM,CAAC;IACb,eAAe;IACf,CAAC,GAAG,EAAE,gBAAgB,CAAC;IACvB,cAAc;IACd,CAAC,GAAG,EAAE,sBAAsB,CAAC;IAC7B,CAAC,GAAG,EAAE,gBAAgB,CAAC;IACvB,wBAAwB;IACxB,OAAO;IACP,CAAC,GAAG,EAAE,YAAY,CAAC;IACnB,iDAAiD;IACjD,CAAC,GAAG,EAAE,YAAY,CAAC;IACnB,sBAAsB;IACtB,CAAC,GAAG,EAAE,OAAO,CAAC;IACd,gCAAgC;IAChC,CAAC,GAAG,EAAE,yBAAyB,CAAC;IAChC,2BAA2B;IAC3B,CAAC,GAAG,EAAE,qBAAqB,CAAC;IAC5B,OAAO;IACP,CAAC,GAAG,EAAE,kBAAkB,CAAC;IACzB,SAAS;IACT,CAAC,GAAG,EAAE,gBAAgB,CAAC;IACvB,OAAO;IACP,CAAC,GAAG,EAAE,YAAY,CAAC;IACnB,OAAO;IACP,CAAC,GAAG,EAAE,kBAAkB,CAAC;IACzB,SAAS;IACT,CAAC,GAAG,EAAE,mBAAmB,CAAC;CAC3B,CAAC,CAAA;AAEF,0BAA0B;AACb,QAAA,IAAI,GAAG,IAAI,GAAG,CACzB,KAAK,CAAC,IAAI,CAAC,YAAI,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAC3C,CAAA","sourcesContent":["export const isCode = (c: string): c is EntryTypeCode =>\n name.has(c as EntryTypeCode)\n\nexport const isName = (c: string): c is EntryTypeName =>\n code.has(c as EntryTypeName)\n\nexport type EntryTypeCode =\n | '0'\n | ''\n | '1'\n | '2'\n | '3'\n | '4'\n | '5'\n | '6'\n | '7'\n | 'g'\n | 'x'\n | 'A'\n | 'D'\n | 'I'\n | 'K'\n | 'L'\n | 'M'\n | 'N'\n | 'S'\n | 'V'\n | 'X'\n\nexport type EntryTypeName =\n | 'File'\n | 'OldFile'\n | 'Link'\n | 'SymbolicLink'\n | 'CharacterDevice'\n | 'BlockDevice'\n | 'Directory'\n | 'FIFO'\n | 'ContiguousFile'\n | 
'GlobalExtendedHeader'\n | 'ExtendedHeader'\n | 'SolarisACL'\n | 'GNUDumpDir'\n | 'Inode'\n | 'NextFileHasLongLinkpath'\n | 'NextFileHasLongPath'\n | 'ContinuationFile'\n | 'OldGnuLongPath'\n | 'SparseFile'\n | 'TapeVolumeHeader'\n | 'OldExtendedHeader'\n | 'Unsupported'\n\n// map types from key to human-friendly name\nexport const name = new Map<EntryTypeCode, EntryTypeName>([\n ['0', 'File'],\n // same as File\n ['', 'OldFile'],\n ['1', 'Link'],\n ['2', 'SymbolicLink'],\n // Devices and FIFOs aren't fully supported\n // they are parsed, but skipped when unpacking\n ['3', 'CharacterDevice'],\n ['4', 'BlockDevice'],\n ['5', 'Directory'],\n ['6', 'FIFO'],\n // same as File\n ['7', 'ContiguousFile'],\n // pax headers\n ['g', 'GlobalExtendedHeader'],\n ['x', 'ExtendedHeader'],\n // vendor-specific stuff\n // skip\n ['A', 'SolarisACL'],\n // like 5, but with data, which should be skipped\n ['D', 'GNUDumpDir'],\n // metadata only, skip\n ['I', 'Inode'],\n // data = link path of next file\n ['K', 'NextFileHasLongLinkpath'],\n // data = path of next file\n ['L', 'NextFileHasLongPath'],\n // skip\n ['M', 'ContinuationFile'],\n // like L\n ['N', 'OldGnuLongPath'],\n // skip\n ['S', 'SparseFile'],\n // skip\n ['V', 'TapeVolumeHeader'],\n // like x\n ['X', 'OldExtendedHeader'],\n])\n\n// map the other direction\nexport const code = new Map<EntryTypeName, EntryTypeCode>(\n Array.from(name).map(kv => [kv[1], kv[0]]),\n)\n"]}

99
node_modules/tar/dist/commonjs/unpack.d.ts generated vendored Normal file
View File

@@ -0,0 +1,99 @@
/// <reference types="node" />
import { type Stats } from 'node:fs';
import { MkdirError } from './mkdir.js';
import { Parser } from './parse.js';
import { TarOptions } from './options.js';
import { PathReservations } from './path-reservations.js';
import { ReadEntry } from './read-entry.js';
import { WarnData } from './warn-method.js';
// Private symbol keys for Unpack's internal state and methods; these
// mirror the Symbol() constants defined in unpack.js.
declare const ONENTRY: unique symbol;
declare const CHECKFS: unique symbol;
declare const CHECKFS2: unique symbol;
declare const PRUNECACHE: unique symbol;
declare const ISREUSABLE: unique symbol;
declare const MAKEFS: unique symbol;
declare const FILE: unique symbol;
declare const DIRECTORY: unique symbol;
declare const LINK: unique symbol;
declare const SYMLINK: unique symbol;
declare const HARDLINK: unique symbol;
declare const UNSUPPORTED: unique symbol;
declare const CHECKPATH: unique symbol;
declare const MKDIR: unique symbol;
declare const ONERROR: unique symbol;
declare const PENDING: unique symbol;
declare const PEND: unique symbol;
declare const UNPEND: unique symbol;
declare const ENDED: unique symbol;
declare const MAYBECLOSE: unique symbol;
declare const SKIP: unique symbol;
declare const DOCHOWN: unique symbol;
declare const UID: unique symbol;
declare const GID: unique symbol;
declare const CHECKED_CWD: unique symbol;
/**
 * Streaming unpacker: a writable (not readable) Parser subclass that
 * extracts each parsed entry to disk. Options are normalized in the
 * constructor; see unpack.js for the exact defaulting rules.
 */
export declare class Unpack extends Parser {
    [ENDED]: boolean;
    [CHECKED_CWD]: boolean;
    [PENDING]: number;
    reservations: PathReservations;
    transform?: TarOptions['transform'];
    writable: true;
    readable: false;
    dirCache: Exclude<TarOptions['dirCache'], undefined>;
    uid?: number;
    gid?: number;
    setOwner: boolean;
    preserveOwner: boolean;
    processGid?: number;
    processUid?: number;
    /** cap on entry path depth; defaults to 1024, Infinity disables the check */
    maxDepth: number;
    forceChown: boolean;
    win32: boolean;
    newer: boolean;
    keep: boolean;
    noMtime: boolean;
    preservePaths: boolean;
    unlink: boolean;
    /** extraction target, resolved to an absolute normalized path */
    cwd: string;
    /** number of leading path components stripped from each entry */
    strip: number;
    processUmask: number;
    umask: number;
    /** default modes for created directories and files (after umask) */
    dmode: number;
    fmode: number;
    chmod: boolean;
    constructor(opt?: TarOptions);
    warn(code: string, msg: string | Error, data?: WarnData): void;
    // symbol-keyed internal steps of the extraction pipeline
    [MAYBECLOSE](): void;
    [CHECKPATH](entry: ReadEntry): boolean;
    [ONENTRY](entry: ReadEntry): void;
    [ONERROR](er: Error, entry: ReadEntry): void;
    [MKDIR](dir: string, mode: number, cb: (er?: null | MkdirError, made?: string) => void): void;
    [DOCHOWN](entry: ReadEntry): boolean;
    [UID](entry: ReadEntry): number | undefined;
    [GID](entry: ReadEntry): number | undefined;
    [FILE](entry: ReadEntry, fullyDone: () => void): void;
    [DIRECTORY](entry: ReadEntry, fullyDone: () => void): void;
    [UNSUPPORTED](entry: ReadEntry): void;
    [SYMLINK](entry: ReadEntry, done: () => void): void;
    [HARDLINK](entry: ReadEntry, done: () => void): void;
    [PEND](): void;
    [UNPEND](): void;
    [SKIP](entry: ReadEntry): void;
    [ISREUSABLE](entry: ReadEntry, st: Stats): boolean;
    [CHECKFS](entry: ReadEntry): void;
    [PRUNECACHE](entry: ReadEntry): void;
    [CHECKFS2](entry: ReadEntry, fullyDone: (er?: Error) => void): void;
    [MAKEFS](er: null | undefined | Error, entry: ReadEntry, done: () => void): void;
    [LINK](entry: ReadEntry, linkpath: string, link: 'link' | 'symlink', done: () => void): void;
}
/**
 * Synchronous variant of Unpack, overriding the fs-touching steps.
 * NOTE(review): implementation not shown here -- presumably uses the
 * blocking *Sync fs APIs; confirm in unpack.js.
 */
export declare class UnpackSync extends Unpack {
    sync: true;
    [MAKEFS](er: null | Error | undefined, entry: ReadEntry): void;
    [CHECKFS](entry: ReadEntry): void;
    [FILE](entry: ReadEntry, done: () => void): void;
    [DIRECTORY](entry: ReadEntry, done: () => void): void;
    [MKDIR](dir: string, mode: number): unknown;
    [LINK](entry: ReadEntry, linkpath: string, link: 'link' | 'symlink', done: () => void): void;
}
export {};
//# sourceMappingURL=unpack.d.ts.map

1
node_modules/tar/dist/commonjs/unpack.d.ts.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"unpack.d.ts","sourceRoot":"","sources":["../../src/unpack.ts"],"names":[],"mappings":";AASA,OAAW,EAAE,KAAK,KAAK,EAAE,MAAM,SAAS,CAAA;AAGxC,OAAO,EAAS,UAAU,EAAa,MAAM,YAAY,CAAA;AAGzD,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAKnC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AACzC,OAAO,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAA;AACzD,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAA;AAE3C,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,WAAW,eAAwB,CAAA;AACzC,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,GAAG,eAAgB,CAAA;AACzB,QAAA,MAAM,GAAG,eAAgB,CAAA;AACzB,QAAA,MAAM,WAAW,eAAuB,CAAA;AA6FxC,qBAAa,MAAO,SAAQ,MAAM;IAChC,CAAC,KAAK,CAAC,EAAE,OAAO,CAAS;IACzB,CAAC,WAAW,CAAC,EAAE,OAAO,CAAS;IAC/B,CAAC,OAAO,CAAC,EAAE,MAAM,CAAI;IAErB,YAAY,EAAE,gBAAgB,CAAyB;IACvD,SAAS,CAAC,EAAE,UAAU,CAAC,WAAW,CAAC,CAAA;IACnC,QAAQ,EAAE,IAAI,CAAO;IACrB,QAAQ,EAAE,KAAK,CAAQ;IACvB,QAAQ,EAAE,OAAO,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,SAAS,CAAC,CAAA;IACpD,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,QAAQ,EAAE,OAAO,CAAA;IACjB,aAAa,EAAE,OAAO,CAAA;IACtB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,QAAQ,EAAE,MAAM,CAAA;IAChB,UAAU,EAAE,OAAO,CAAA;IACnB,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,IAAI,EAAE,OAAO,CAAA;IACb,OAAO,EAAE,OAAO,CAAA;IAChB,aAAa,EAAE,OAAO,CAAA;IACtB,MAAM,EAAE,OAAO,CAAA;IACf,GAAG,EAAE,MAAM,CAAA;IACX,KAAK,EAAE,MAAM,CAAA;IACb,YAAY,EAAE,MAAM,CAAA;IACpB,KAAK
,EAAE,MAAM,CAAA;IACb,KAAK,EAAE,MAAM,CAAA;IACb,KAAK,EAAE,MAAM,CAAA;IACb,KAAK,EAAE,OAAO,CAAA;gBAEF,GAAG,GAAE,UAAe;IAgHhC,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,EAAE,IAAI,GAAE,QAAa;IAO3D,CAAC,UAAU,CAAC;IAQZ,CAAC,SAAS,CAAC,CAAC,KAAK,EAAE,SAAS;IA8G5B,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,SAAS;IA8B1B,CAAC,OAAO,CAAC,CAAC,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,SAAS;IAarC,CAAC,KAAK,CAAC,CACL,GAAG,EAAE,MAAM,EACX,IAAI,EAAE,MAAM,EACZ,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,GAAG,UAAU,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,IAAI;IAoBrD,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,SAAS;IAgB1B,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,SAAS;IAItB,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,SAAS;IAItB,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,SAAS,EAAE,SAAS,EAAE,MAAM,IAAI;IAiG9C,CAAC,SAAS,CAAC,CAAC,KAAK,EAAE,SAAS,EAAE,SAAS,EAAE,MAAM,IAAI;IA6CnD,CAAC,WAAW,CAAC,CAAC,KAAK,EAAE,SAAS;IAU9B,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,MAAM,IAAI;IAI5C,CAAC,QAAQ,CAAC,CAAC,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,MAAM,IAAI;IAO7C,CAAC,IAAI,CAAC;IAIN,CAAC,MAAM,CAAC;IAKR,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,SAAS;IAQvB,CAAC,UAAU,CAAC,CAAC,KAAK,EAAE,SAAS,EAAE,EAAE,EAAE,KAAK;IAWxC,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,SAAS;IAW1B,CAAC,UAAU,CAAC,CAAC,KAAK,EAAE,SAAS;IAkB7B,CAAC,QAAQ,CAAC,CAAC,KAAK,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,KAAK,IAAI;IA2G5D,CAAC,MAAM,CAAC,CACN,EAAE,EAAE,IAAI,GAAG,SAAS,GAAG,KAAK,EAC5B,KAAK,EAAE,SAAS,EAChB,IAAI,EAAE,MAAM,IAAI;IA0BlB,CAAC,IAAI,CAAC,CACJ,KAAK,EAAE,SAAS,EAChB,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,MAAM,GAAG,SAAS,EACxB,IAAI,EAAE,MAAM,IAAI;CAanB;AAUD,qBAAa,UAAW,SAAQ,MAAM;IACpC,IAAI,EAAE,IAAI,CAAQ;IAElB,CAAC,MAAM,CAAC,CAAC,EAAE,EAAE,IAAI,GAAG,KAAK,GAAG,SAAS,EAAE,KAAK,EAAE,SAAS;IAIvD,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,SAAS;IAuE1B,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,MAAM,IAAI;IAoFzC,CAAC,SAAS,CAAC,CAAC,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,MAAM,IAAI;IAkC9C,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM;IAmBjC,CAAC,IAAI,CAAC,CACJ,KAAK,EAAE,SAAS,EAChB,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,MAAM,GAAG,SAAS,EACxB,IAAI,EAAE,MAAM,IAAI;CAWnB"}

919
node_modules/tar/dist/commonjs/unpack.js generated vendored Normal file
View File

@@ -0,0 +1,919 @@
"use strict";
// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
// but the path reservations are required to avoid race conditions where
// parallelized unpack ops may mess with one another, due to dependencies
// (like a Link depending on its target) or destructive operations (like
// clobbering an fs object to create one of a different type.)
// Compiler-emitted CommonJS/ESM interop helpers (tsc's esModuleInterop
// shims). Generated code -- do not edit by hand.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.UnpackSync = exports.Unpack = void 0;
const fsm = __importStar(require("@isaacs/fs-minipass"));
const node_assert_1 = __importDefault(require("node:assert"));
const node_crypto_1 = require("node:crypto");
const node_fs_1 = __importDefault(require("node:fs"));
const node_path_1 = __importDefault(require("node:path"));
const get_write_flag_js_1 = require("./get-write-flag.js");
const mkdir_js_1 = require("./mkdir.js");
const normalize_unicode_js_1 = require("./normalize-unicode.js");
const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
const parse_js_1 = require("./parse.js");
const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
const wc = __importStar(require("./winchars.js"));
const path_reservations_js_1 = require("./path-reservations.js");
// Private keys for internal state and methods, kept as Symbols so they
// cannot collide with (or be invoked by) userland code.
const ONENTRY = Symbol('onEntry');
const CHECKFS = Symbol('checkFs');
const CHECKFS2 = Symbol('checkFs2');
const PRUNECACHE = Symbol('pruneCache');
const ISREUSABLE = Symbol('isReusable');
const MAKEFS = Symbol('makeFs');
const FILE = Symbol('file');
const DIRECTORY = Symbol('directory');
const LINK = Symbol('link');
const SYMLINK = Symbol('symlink');
const HARDLINK = Symbol('hardlink');
const UNSUPPORTED = Symbol('unsupported');
const CHECKPATH = Symbol('checkPath');
const MKDIR = Symbol('mkdir');
const ONERROR = Symbol('onError');
const PENDING = Symbol('pending');
const PEND = Symbol('pend');
const UNPEND = Symbol('unpend');
const ENDED = Symbol('ended');
const MAYBECLOSE = Symbol('maybeClose');
const SKIP = Symbol('skip');
const DOCHOWN = Symbol('doChown');
const UID = Symbol('uid');
const GID = Symbol('gid');
const CHECKED_CWD = Symbol('checkedCwd');
// TESTING_TAR_FAKE_PLATFORM lets tests exercise win32 code paths anywhere
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
const isWindows = platform === 'win32';
// default cap on entry path depth (see the maxDepth option)
const DEFAULT_MAX_DEPTH = 1024;
// Windows does not unlink atomically.
//
// If a file entry is followed by another entry with the same name, and
// the existing file cannot be reused (it's a hardlink, or unlink:true is
// set, or we're on Windows, whose nlink values are not useful), the
// unlink can be committed to disk AFTER the replacement file has been
// written, deleting the new file.
//
// To work around that, on Windows we first rename the victim to a random
// <path>.DELETE.<hex> name and then unlink the renamed file. It's a
// sloppy kludge, but no better approach is known given windows'
// non-atomic unlink semantics.
//
// See: https://github.com/npm/node-tar/issues/183
/* c8 ignore start */
const unlinkFile = (path, cb) => {
    if (isWindows) {
        const tmp = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
        node_fs_1.default.rename(path, tmp, er => er ? cb(er) : node_fs_1.default.unlink(tmp, cb));
    }
    else {
        node_fs_1.default.unlink(path, cb);
    }
};
/* c8 ignore stop */
/* c8 ignore start */
// synchronous twin of unlinkFile: same rename-then-unlink dance on Windows
const unlinkFileSync = (path) => {
    if (isWindows) {
        const tmp = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
        node_fs_1.default.renameSync(path, tmp);
        node_fs_1.default.unlinkSync(tmp);
    }
    else {
        node_fs_1.default.unlinkSync(path);
    }
};
/* c8 ignore stop */
// this.gid, entry.gid, this.processUid
// Return the first argument that is a valid uint32 (n === n >>> 0 rejects
// undefined, negatives, fractions, NaN, and values >= 2**32), falling
// back to `c` unconditionally.
const uint32 = (a, b, c) => {
    const valid = (n) => n !== undefined && n === n >>> 0;
    return valid(a) ? a : valid(b) ? b : c;
};
// Normalize a path into its cache-key form: unicode-squashed, windows
// slashes normalized, trailing slashes stripped, lowercased. We cannot
// know whether the underlying fs is case-sensitive or fully
// unicode-aware, so we compare the maximally compatible representation
// and err on the side of pruning -- a false positive only costs an
// lstat. (On windows the entire cache is dropped whenever a symlink is
// seen, since 8.3 names make collisions a hazard, not just a failure.)
const cacheKeyNormalize = (path) => {
    const squashed = (0, normalize_unicode_js_1.normalizeUnicode)(path);
    const slashed = (0, normalize_windows_path_js_1.normalizeWindowsPath)(squashed);
    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)(slashed).toLowerCase();
};
// remove all cache entries matching ${abs}/** (and ${abs} itself)
const pruneCache = (cache, abs) => {
    const prefix = cacheKeyNormalize(abs);
    for (const key of cache.keys()) {
        const norm = cacheKeyNormalize(key);
        if (norm === prefix || norm.startsWith(prefix + '/')) {
            cache.delete(key);
        }
    }
};
// empty the cache entirely
const dropCache = (cache) => {
    cache.clear();
};
class Unpack extends parse_js_1.Parser {
[ENDED] = false;
[CHECKED_CWD] = false;
[PENDING] = 0;
reservations = new path_reservations_js_1.PathReservations();
transform;
writable = true;
readable = false;
dirCache;
uid;
gid;
setOwner;
preserveOwner;
processGid;
processUid;
maxDepth;
forceChown;
win32;
newer;
keep;
noMtime;
preservePaths;
unlink;
cwd;
strip;
processUmask;
umask;
dmode;
fmode;
chmod;
constructor(opt = {}) {
opt.ondone = () => {
this[ENDED] = true;
this[MAYBECLOSE]();
};
super(opt);
this.transform = opt.transform;
this.dirCache = opt.dirCache || new Map();
this.chmod = !!opt.chmod;
if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
// need both or neither
if (typeof opt.uid !== 'number' ||
typeof opt.gid !== 'number') {
throw new TypeError('cannot set owner without number uid and gid');
}
if (opt.preserveOwner) {
throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
}
this.uid = opt.uid;
this.gid = opt.gid;
this.setOwner = true;
}
else {
this.uid = undefined;
this.gid = undefined;
this.setOwner = false;
}
// default true for root
if (opt.preserveOwner === undefined &&
typeof opt.uid !== 'number') {
this.preserveOwner = !!(process.getuid && process.getuid() === 0);
}
else {
this.preserveOwner = !!opt.preserveOwner;
}
this.processUid =
(this.preserveOwner || this.setOwner) && process.getuid ?
process.getuid()
: undefined;
this.processGid =
(this.preserveOwner || this.setOwner) && process.getgid ?
process.getgid()
: undefined;
// prevent excessively deep nesting of subfolders
// set to `Infinity` to remove this restriction
this.maxDepth =
typeof opt.maxDepth === 'number' ?
opt.maxDepth
: DEFAULT_MAX_DEPTH;
// mostly just for testing, but useful in some cases.
// Forcibly trigger a chown on every entry, no matter what
this.forceChown = opt.forceChown === true;
// turn ><?| in filenames into 0xf000-higher encoded forms
this.win32 = !!opt.win32 || isWindows;
// do not unpack over files that are newer than what's in the archive
this.newer = !!opt.newer;
// do not unpack over ANY files
this.keep = !!opt.keep;
// do not set mtime/atime of extracted entries
this.noMtime = !!opt.noMtime;
// allow .., absolute path entries, and unpacking through symlinks
// without this, warn and skip .., relativize absolutes, and error
// on symlinks in extraction path
this.preservePaths = !!opt.preservePaths;
// unlink files and links before writing. This breaks existing hard
// links, and removes symlink directories rather than erroring
this.unlink = !!opt.unlink;
this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(opt.cwd || process.cwd()));
this.strip = Number(opt.strip) || 0;
// if we're not chmodding, then we don't need the process umask
this.processUmask =
!this.chmod ? 0
: typeof opt.processUmask === 'number' ? opt.processUmask
: process.umask();
this.umask =
typeof opt.umask === 'number' ? opt.umask : this.processUmask;
// default mode for dirs created as parents
this.dmode = opt.dmode || 0o0777 & ~this.umask;
this.fmode = opt.fmode || 0o0666 & ~this.umask;
this.on('entry', entry => this[ONENTRY](entry));
}
// a bad or damaged archive is a warning for Parser, but an error
// when extracting. Mark those errors as unrecoverable, because
// the Unpack contract cannot be met.
warn(code, msg, data = {}) {
if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
data.recoverable = false;
}
return super.warn(code, msg, data);
}
[MAYBECLOSE]() {
if (this[ENDED] && this[PENDING] === 0) {
this.emit('prefinish');
this.emit('finish');
this.emit('end');
}
}
    // Validate and rewrite entry.path before any filesystem access:
    //  - apply `strip` to the path (and to hardlink targets, which are
    //    archive-relative)
    //  - enforce maxDepth
    //  - unless preservePaths, reject '..' components and strip absolute
    //    roots (the core path-traversal defense)
    //  - compute entry.absolute and reject anything that would escape cwd,
    //    or that would replace cwd itself with a non-directory
    //  - on win32, encode reserved ':' chars outside the drive-letter root
    // Returns false when the entry must be skipped.
    [CHECKPATH](entry) {
        const p = (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path);
        const parts = p.split('/');
        if (this.strip) {
            // not enough components to strip: drop the entry entirely
            if (parts.length < this.strip) {
                return false;
            }
            // hardlink targets are archive-relative paths, so strip them too
            if (entry.type === 'Link') {
                const linkparts = (0, normalize_windows_path_js_1.normalizeWindowsPath)(String(entry.linkpath)).split('/');
                if (linkparts.length >= this.strip) {
                    entry.linkpath = linkparts.slice(this.strip).join('/');
                }
                else {
                    return false;
                }
            }
            parts.splice(0, this.strip);
            entry.path = parts.join('/');
        }
        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
                entry,
                path: p,
                depth: parts.length,
                maxDepth: this.maxDepth,
            });
            return false;
        }
        if (!this.preservePaths) {
            // '..' anywhere in the path (or a windows drive-relative '..')
            // could climb out of the extraction target
            if (parts.includes('..') ||
                /* c8 ignore next */
                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
                    entry,
                    path: p,
                });
                return false;
            }
            // strip off the root
            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(p);
            if (root) {
                entry.path = String(stripped);
                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
                    entry,
                    path: p,
                });
            }
        }
        if (node_path_1.default.isAbsolute(entry.path)) {
            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(entry.path));
        }
        else {
            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, entry.path));
        }
        // if we somehow ended up with a path that escapes the cwd, and we are
        // not in preservePaths mode, then something is fishy! This should have
        // been prevented above, so ignore this for coverage.
        /* c8 ignore start - defense in depth */
        if (!this.preservePaths &&
            typeof entry.absolute === 'string' &&
            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
            entry.absolute !== this.cwd) {
            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
                entry,
                path: (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path),
                resolvedPath: entry.absolute,
                cwd: this.cwd,
            });
            return false;
        }
        /* c8 ignore stop */
        // an archive can set properties on the extraction directory, but it
        // may not replace the cwd with a different kind of thing entirely.
        if (entry.absolute === this.cwd &&
            entry.type !== 'Directory' &&
            entry.type !== 'GNUDumpDir') {
            return false;
        }
        // only encode : chars that aren't drive letter indicators
        if (this.win32) {
            const { root: aRoot } = node_path_1.default.win32.parse(String(entry.absolute));
            entry.absolute =
                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
            const { root: pRoot } = node_path_1.default.win32.parse(entry.path);
            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
        }
        return true;
    }
[ONENTRY](entry) {
if (!this[CHECKPATH](entry)) {
return entry.resume();
}
node_assert_1.default.equal(typeof entry.absolute, 'string');
switch (entry.type) {
case 'Directory':
case 'GNUDumpDir':
if (entry.mode) {
entry.mode = entry.mode | 0o700;
}
// eslint-disable-next-line no-fallthrough
case 'File':
case 'OldFile':
case 'ContiguousFile':
case 'Link':
case 'SymbolicLink':
return this[CHECKFS](entry);
case 'CharacterDevice':
case 'BlockDevice':
case 'FIFO':
default:
return this[UNSUPPORTED](entry);
}
}
[ONERROR](er, entry) {
// Cwd has to exist, or else nothing works. That's serious.
// Other errors are warnings, which raise the error in strict
// mode, but otherwise continue on.
if (er.name === 'CwdError') {
this.emit('error', er);
}
else {
this.warn('TAR_ENTRY_ERROR', er, { entry });
this[UNPEND]();
entry.resume();
}
}
[MKDIR](dir, mode, cb) {
(0, mkdir_js_1.mkdir)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
processGid: this.processGid,
umask: this.processUmask,
preserve: this.preservePaths,
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
mode: mode,
}, cb);
}
[DOCHOWN](entry) {
// in preserve owner mode, chown if the entry doesn't match process
// in set owner mode, chown if setting doesn't match process
return (this.forceChown ||
(this.preserveOwner &&
((typeof entry.uid === 'number' &&
entry.uid !== this.processUid) ||
(typeof entry.gid === 'number' &&
entry.gid !== this.processGid))) ||
(typeof this.uid === 'number' &&
this.uid !== this.processUid) ||
(typeof this.gid === 'number' && this.gid !== this.processGid));
}
    // Effective uid for chown: explicit option wins, then the entry's
    // recorded uid, then the current process uid.
    [UID](entry) {
        return uint32(this.uid, entry.uid, this.processUid);
    }
    // Effective gid for chown: explicit option wins, then the entry's
    // recorded gid, then the current process gid.
    [GID](entry) {
        return uint32(this.gid, entry.gid, this.processGid);
    }
    // Write a regular file: open a write stream at entry.absolute, pipe the
    // (possibly transformed) entry data into it, then set mtime/ownership on
    // the still-open fd before closing. The `actions` refcount tracks the
    // stream 'finish' plus any in-flight futimes/fchown so completion fires
    // exactly once.
    [FILE](entry, fullyDone) {
        const mode = typeof entry.mode === 'number' ?
            entry.mode & 0o7777
            : this.fmode;
        const stream = new fsm.WriteStream(String(entry.absolute), {
            // slight lie, but it can be numeric flags
            flags: (0, get_write_flag_js_1.getWriteFlag)(entry.size),
            mode: mode,
            autoClose: false,
        });
        stream.on('error', (er) => {
            if (stream.fd) {
                node_fs_1.default.close(stream.fd, () => { });
            }
            // flush all the data out so that we aren't left hanging
            // if the error wasn't actually fatal. otherwise the parse
            // is blocked, and we never proceed.
            stream.write = () => true;
            this[ONERROR](er, entry);
            fullyDone();
        });
        let actions = 1;
        const done = (er) => {
            if (er) {
                /* c8 ignore start - we should always have a fd by now */
                if (stream.fd) {
                    node_fs_1.default.close(stream.fd, () => { });
                }
                /* c8 ignore stop */
                this[ONERROR](er, entry);
                fullyDone();
                return;
            }
            // all metadata operations finished: close the fd, then release
            // the pending slot (or report the close error)
            if (--actions === 0) {
                if (stream.fd !== undefined) {
                    node_fs_1.default.close(stream.fd, er => {
                        if (er) {
                            this[ONERROR](er, entry);
                        }
                        else {
                            this[UNPEND]();
                        }
                        fullyDone();
                    });
                }
            }
        };
        stream.on('finish', () => {
            // if futimes fails, try utimes
            // if utimes fails, fail with the original error
            // same for fchown/chown
            const abs = String(entry.absolute);
            const fd = stream.fd;
            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
                actions++;
                const atime = entry.atime || new Date();
                const mtime = entry.mtime;
                node_fs_1.default.futimes(fd, atime, mtime, er => er ?
                    node_fs_1.default.utimes(abs, atime, mtime, er2 => done(er2 && er))
                    : done());
            }
            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
                actions++;
                const uid = this[UID](entry);
                const gid = this[GID](entry);
                if (typeof uid === 'number' && typeof gid === 'number') {
                    node_fs_1.default.fchown(fd, uid, gid, er => er ?
                        node_fs_1.default.chown(abs, uid, gid, er2 => done(er2 && er))
                        : done());
                }
            }
            done();
        });
        const tx = this.transform ? this.transform(entry) || entry : entry;
        if (tx !== entry) {
            tx.on('error', (er) => {
                this[ONERROR](er, entry);
                fullyDone();
            });
            entry.pipe(tx);
        }
        tx.pipe(stream);
    }
    // Create a directory entry: mkdir, then (ref-counted, like [FILE]) apply
    // mtime and ownership before releasing the pending slot and draining
    // the entry's data.
    [DIRECTORY](entry, fullyDone) {
        const mode = typeof entry.mode === 'number' ?
            entry.mode & 0o7777
            : this.dmode;
        this[MKDIR](String(entry.absolute), mode, er => {
            if (er) {
                this[ONERROR](er, entry);
                fullyDone();
                return;
            }
            // starts at 1 so the trailing done() below acts as the
            // "no more metadata ops will be queued" signal
            let actions = 1;
            const done = () => {
                if (--actions === 0) {
                    fullyDone();
                    this[UNPEND]();
                    entry.resume();
                }
            };
            if (entry.mtime && !this.noMtime) {
                actions++;
                node_fs_1.default.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
            }
            if (this[DOCHOWN](entry)) {
                actions++;
                node_fs_1.default.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
            }
            done();
        });
    }
[UNSUPPORTED](entry) {
entry.unsupported = true;
this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
entry.resume();
}
    // Create a symlink at entry.absolute pointing at the literal
    // (un-resolved) linkpath recorded in the archive.
    [SYMLINK](entry, done) {
        this[LINK](entry, String(entry.linkpath), 'symlink', done);
    }
    // Hardlink targets are archive-relative, so resolve against cwd before
    // creating the link.
    [HARDLINK](entry, done) {
        const linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, String(entry.linkpath)));
        this[LINK](entry, linkpath, 'link', done);
    }
[PEND]() {
this[PENDING]++;
}
[UNPEND]() {
this[PENDING]--;
this[MAYBECLOSE]();
}
    // Skip extraction of this entry entirely: release its pending slot and
    // drain its data.
    [SKIP](entry) {
        this[UNPEND]();
        entry.resume();
    }
// Check if we can reuse an existing filesystem entry safely and
// overwrite it, rather than unlinking and recreating
// Windows doesn't report a useful nlink, so we just never reuse entries
[ISREUSABLE](entry, st) {
return (entry.type === 'File' &&
!this.unlink &&
st.isFile() &&
st.nlink <= 1 &&
!isWindows);
}
// check if a thing is there, and if so, try to clobber it
[CHECKFS](entry) {
this[PEND]();
const paths = [entry.path];
if (entry.linkpath) {
paths.push(entry.linkpath);
}
this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
}
[PRUNECACHE](entry) {
// if we are not creating a directory, and the path is in the dirCache,
// then that means we are about to delete the directory we created
// previously, and it is no longer going to be a directory, and neither
// is any of its children.
// If a symbolic link is encountered, all bets are off. There is no
// reasonable way to sanitize the cache in such a way we will be able to
// avoid having filesystem collisions. If this happens with a non-symlink
// entry, it'll just fail to unpack, but a symlink to a directory, using an
// 8.3 shortname or certain unicode attacks, can evade detection and lead
// to arbitrary writes to anywhere on the system.
if (entry.type === 'SymbolicLink') {
dropCache(this.dirCache);
}
else if (entry.type !== 'Directory') {
pruneCache(this.dirCache, String(entry.absolute));
}
}
    // The guts of the async clobber-and-extract check, run while holding
    // the path reservation. Ensures the cwd and the entry's parent dir
    // exist, then decides whether to skip, reuse, chmod, rmdir, or unlink
    // the existing target before handing off to [MAKEFS].
    [CHECKFS2](entry, fullyDone) {
        this[PRUNECACHE](entry);
        const done = (er) => {
            this[PRUNECACHE](entry);
            fullyDone(er);
        };
        // make the cwd first (only on the first entry to get here)
        const checkCwd = () => {
            this[MKDIR](this.cwd, this.dmode, er => {
                if (er) {
                    this[ONERROR](er, entry);
                    done();
                    return;
                }
                this[CHECKED_CWD] = true;
                start();
            });
        };
        // ensure the entry's parent directory exists
        const start = () => {
            if (entry.absolute !== this.cwd) {
                const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
                if (parent !== this.cwd) {
                    return this[MKDIR](parent, this.dmode, er => {
                        if (er) {
                            this[ONERROR](er, entry);
                            done();
                            return;
                        }
                        afterMakeParent();
                    });
                }
            }
            afterMakeParent();
        };
        const afterMakeParent = () => {
            node_fs_1.default.lstat(String(entry.absolute), (lstatEr, st) => {
                // in keep/newer mode, an up-to-date existing file wins
                if (st &&
                    (this.keep ||
                        /* c8 ignore next */
                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
                    this[SKIP](entry);
                    done();
                    return;
                }
                // nothing there, or a safely overwritable plain file
                if (lstatEr || this[ISREUSABLE](entry, st)) {
                    return this[MAKEFS](null, entry, done);
                }
                if (st.isDirectory()) {
                    if (entry.type === 'Directory') {
                        const needChmod = this.chmod &&
                            entry.mode &&
                            (st.mode & 0o7777) !== entry.mode;
                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
                        if (!needChmod) {
                            return afterChmod();
                        }
                        return node_fs_1.default.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
                    }
                    // Not a dir entry, have to remove it.
                    // NB: the only way to end up with an entry that is the cwd
                    // itself, in such a way that == does not detect, is a
                    // tricky windows absolute path with UNC or 8.3 parts (and
                    // preservePaths:true, or else it will have been stripped).
                    // In that case, the user has opted out of path protections
                    // explicitly, so if they blow away the cwd, c'est la vie.
                    if (entry.absolute !== this.cwd) {
                        return node_fs_1.default.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
                    }
                }
                // not a dir, and not reusable
                // don't remove if the cwd, we want that error
                if (entry.absolute === this.cwd) {
                    return this[MAKEFS](null, entry, done);
                }
                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
            });
        };
        if (this[CHECKED_CWD]) {
            start();
        }
        else {
            checkCwd();
        }
    }
[MAKEFS](er, entry, done) {
if (er) {
this[ONERROR](er, entry);
done();
return;
}
switch (entry.type) {
case 'File':
case 'OldFile':
case 'ContiguousFile':
return this[FILE](entry, done);
case 'Link':
return this[HARDLINK](entry, done);
case 'SymbolicLink':
return this[SYMLINK](entry, done);
case 'Directory':
case 'GNUDumpDir':
return this[DIRECTORY](entry, done);
}
}
[LINK](entry, linkpath, link, done) {
// XXX: get the type ('symlink' or 'junction') for windows
node_fs_1.default[link](linkpath, String(entry.absolute), er => {
if (er) {
this[ONERROR](er, entry);
}
else {
this[UNPEND]();
entry.resume();
}
done();
});
}
}
// Public async/streaming extraction class.
exports.Unpack = Unpack;
// Run fn() and normalize the outcome into an [error, result] tuple so the
// sync code paths can branch on failure without try/catch at every call
// site. Exactly one element is non-null.
const callSync = (fn) => {
    let result;
    try {
        result = fn();
    }
    catch (er) {
        return [er, null];
    }
    return [null, result];
};
// Synchronous variant of Unpack. There is no pending/unpend bookkeeping:
// every filesystem operation completes inline, so the completion callbacks
// passed to the shared [MAKEFS] logic are no-ops.
class UnpackSync extends Unpack {
    sync = true;
    // Adapt the base-class three-arg [MAKEFS] to the sync flow, where the
    // done callback has nothing to do.
    [MAKEFS](er, entry) {
        return super[MAKEFS](er, entry, () => { });
    }
    // check if a thing is there, and if so, try to clobber it
    [CHECKFS](entry) {
        this[PRUNECACHE](entry);
        // the cwd must exist before anything can be written into it
        if (!this[CHECKED_CWD]) {
            const er = this[MKDIR](this.cwd, this.dmode);
            if (er) {
                return this[ONERROR](er, entry);
            }
            this[CHECKED_CWD] = true;
        }
        // don't bother to make the parent if the current entry is the cwd,
        // we've already checked it.
        if (entry.absolute !== this.cwd) {
            const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
            if (parent !== this.cwd) {
                const mkParent = this[MKDIR](parent, this.dmode);
                if (mkParent) {
                    return this[ONERROR](mkParent, entry);
                }
            }
        }
        const [lstatEr, st] = callSync(() => node_fs_1.default.lstatSync(String(entry.absolute)));
        // in keep/newer mode, an up-to-date existing file wins
        if (st &&
            (this.keep ||
                /* c8 ignore next */
                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
            return this[SKIP](entry);
        }
        // nothing there, or a safely overwritable plain file
        if (lstatEr || this[ISREUSABLE](entry, st)) {
            return this[MAKEFS](null, entry);
        }
        if (st.isDirectory()) {
            if (entry.type === 'Directory') {
                const needChmod = this.chmod &&
                    entry.mode &&
                    (st.mode & 0o7777) !== entry.mode;
                const [er] = needChmod ?
                    callSync(() => {
                        node_fs_1.default.chmodSync(String(entry.absolute), Number(entry.mode));
                    })
                    : [];
                return this[MAKEFS](er, entry);
            }
            // not a dir entry, have to remove it.
            // BUGFIX: return here, as the async [CHECKFS2] does, so we do
            // not fall through and run [MAKEFS] a second time (which would
            // unlink and recreate the entry we just extracted).
            const [er] = callSync(() => node_fs_1.default.rmdirSync(String(entry.absolute)));
            return this[MAKEFS](er, entry);
        }
        // not a dir, and not reusable.
        // don't remove if it's the cwd, since we want that error.
        const [er] = entry.absolute === this.cwd ?
            []
            : callSync(() => unlinkFileSync(String(entry.absolute)));
        this[MAKEFS](er, entry);
    }
    // Write a regular file synchronously: open, stream the (possibly
    // transformed) entry data through writeSync, then apply mtime and
    // ownership on the open fd before closing.
    [FILE](entry, done) {
        const mode = typeof entry.mode === 'number' ?
            entry.mode & 0o7777
            : this.fmode;
        // single exit point: always close the fd, and report whichever
        // error happened first (a write/time/chown error beats a close error)
        const oner = (er) => {
            let closeError;
            try {
                node_fs_1.default.closeSync(fd);
            }
            catch (e) {
                closeError = e;
            }
            if (er || closeError) {
                this[ONERROR](er || closeError, entry);
            }
            done();
        };
        let fd;
        try {
            fd = node_fs_1.default.openSync(String(entry.absolute), (0, get_write_flag_js_1.getWriteFlag)(entry.size), mode);
        }
        catch (er) {
            return oner(er);
        }
        const tx = this.transform ? this.transform(entry) || entry : entry;
        if (tx !== entry) {
            tx.on('error', (er) => this[ONERROR](er, entry));
            entry.pipe(tx);
        }
        tx.on('data', (chunk) => {
            try {
                node_fs_1.default.writeSync(fd, chunk, 0, chunk.length);
            }
            catch (er) {
                oner(er);
            }
        });
        tx.on('end', () => {
            let er = null;
            // try both, falling futimes back to utimes
            // if either fails, handle the first error
            if (entry.mtime && !this.noMtime) {
                const atime = entry.atime || new Date();
                const mtime = entry.mtime;
                try {
                    node_fs_1.default.futimesSync(fd, atime, mtime);
                }
                catch (futimeser) {
                    try {
                        node_fs_1.default.utimesSync(String(entry.absolute), atime, mtime);
                    }
                    catch (utimeser) {
                        er = futimeser;
                    }
                }
            }
            // same fallback dance for ownership: fchown, then chown
            if (this[DOCHOWN](entry)) {
                const uid = this[UID](entry);
                const gid = this[GID](entry);
                try {
                    node_fs_1.default.fchownSync(fd, Number(uid), Number(gid));
                }
                catch (fchowner) {
                    try {
                        node_fs_1.default.chownSync(String(entry.absolute), Number(uid), Number(gid));
                    }
                    catch (chowner) {
                        er = er || fchowner;
                    }
                }
            }
            oner(er);
        });
    }
    // Create a directory, then best-effort apply times and ownership
    // (failures of those are deliberately ignored).
    [DIRECTORY](entry, done) {
        const mode = typeof entry.mode === 'number' ?
            entry.mode & 0o7777
            : this.dmode;
        const er = this[MKDIR](String(entry.absolute), mode);
        if (er) {
            this[ONERROR](er, entry);
            done();
            return;
        }
        if (entry.mtime && !this.noMtime) {
            try {
                node_fs_1.default.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
                /* c8 ignore next */
            }
            catch (er) { }
        }
        if (this[DOCHOWN](entry)) {
            try {
                node_fs_1.default.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
            }
            catch (er) { }
        }
        done();
        entry.resume();
    }
    // Synchronous mkdir wrapper: returns the error (if any) rather than
    // throwing, mirroring the callback signature of the async [MKDIR].
    [MKDIR](dir, mode) {
        try {
            return (0, mkdir_js_1.mkdirSync)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
                uid: this.uid,
                gid: this.gid,
                processUid: this.processUid,
                processGid: this.processGid,
                umask: this.processUmask,
                preserve: this.preservePaths,
                unlink: this.unlink,
                cache: this.dirCache,
                cwd: this.cwd,
                mode: mode,
            });
        }
        catch (er) {
            return er;
        }
    }
    // Create a hard- or symlink synchronously; `link` is 'link' or
    // 'symlink' and selects linkSync/symlinkSync.
    [LINK](entry, linkpath, link, done) {
        const ls = `${link}Sync`;
        try {
            node_fs_1.default[ls](linkpath, String(entry.absolute));
            done();
            entry.resume();
        }
        catch (er) {
            return this[ONERROR](er, entry);
        }
    }
}
// Public synchronous extraction class.
exports.UnpackSync = UnpackSync;
//# sourceMappingURL=unpack.js.map

Some files were not shown because too many files have changed in this diff Show More