node_modules/execa/lib/arguments/command.js (generated, vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
import {logCommand} from '../verbose/start.js';
import {getVerboseInfo} from '../verbose/info.js';
import {getStartTime} from '../return/duration.js';
import {joinCommand} from './escape.js';
import {normalizeFdSpecificOption} from './specific.js';

// Compute `result.command`, `result.escapedCommand` and `verbose`-related information
export const handleCommand = (filePath, rawArguments, rawOptions) => {
  const startTime = getStartTime();
  const {command, escapedCommand} = joinCommand(filePath, rawArguments);
  const verbose = normalizeFdSpecificOption(rawOptions, 'verbose');
  const verboseInfo = getVerboseInfo(verbose, escapedCommand, {...rawOptions});
  logCommand(escapedCommand, verboseInfo);
  return {
    command,
    escapedCommand,
    startTime,
    verboseInfo,
  };
};
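This entry point ties the `arguments/` helpers together. A minimal sketch of what it returns, importing straight from the vendored file (an assumption for illustration; application code would use the public `execa` API) and assuming `NODE_DEBUG=execa` is unset so `verbose` defaults to `'none'`:

import {handleCommand} from './node_modules/execa/lib/arguments/command.js';

const {command, escapedCommand, startTime, verboseInfo} = handleCommand('node', ['--version'], {});
// command === 'node --version' and escapedCommand === 'node --version'
// (both tokens are plain words, so no quoting or escaping is applied)
// startTime is captured up front and later used to compute the result's duration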
node_modules/execa/lib/arguments/cwd.js (generated, vendored, new file, 39 lines)
@@ -0,0 +1,39 @@
import {statSync} from 'node:fs';
import path from 'node:path';
import process from 'node:process';
import {safeNormalizeFileUrl} from './file-url.js';

// Normalize `cwd` option
export const normalizeCwd = (cwd = getDefaultCwd()) => {
  const cwdString = safeNormalizeFileUrl(cwd, 'The "cwd" option');
  return path.resolve(cwdString);
};

const getDefaultCwd = () => {
  try {
    return process.cwd();
  } catch (error) {
    error.message = `The current directory does not exist.\n${error.message}`;
    throw error;
  }
};

// When the `cwd` option has an invalid value, provide a better error message
export const fixCwdError = (originalMessage, cwd) => {
  if (cwd === getDefaultCwd()) {
    return originalMessage;
  }

  let cwdStat;
  try {
    cwdStat = statSync(cwd);
  } catch (error) {
    return `The "cwd" option is invalid: ${cwd}.\n${error.message}\n${originalMessage}`;
  }

  if (!cwdStat.isDirectory()) {
    return `The "cwd" option is not a directory: ${cwd}.\n${originalMessage}`;
  }

  return originalMessage;
};
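The error-enrichment path is the interesting part. A hedged sketch of `fixCwdError` (vendored import path assumed; the exact ENOENT text comes from Node.js):

import {fixCwdError} from './node_modules/execa/lib/arguments/cwd.js';

// When `cwd` points at a missing directory, the original spawn error gets prefixed:
const message = fixCwdError('spawn echo ENOENT', '/does/not/exist');
// → 'The "cwd" option is invalid: /does/not/exist.\nENOENT: no such file or directory, ...\nspawn echo ENOENT'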
node_modules/execa/lib/arguments/encoding-option.js (generated, vendored, new file, 50 lines)
@@ -0,0 +1,50 @@
// Validate `encoding` option
export const validateEncoding = ({encoding}) => {
  if (ENCODINGS.has(encoding)) {
    return;
  }

  const correctEncoding = getCorrectEncoding(encoding);
  if (correctEncoding !== undefined) {
    throw new TypeError(`Invalid option \`encoding: ${serializeEncoding(encoding)}\`.
Please rename it to ${serializeEncoding(correctEncoding)}.`);
  }

  const correctEncodings = [...ENCODINGS].map(correctEncoding => serializeEncoding(correctEncoding)).join(', ');
  throw new TypeError(`Invalid option \`encoding: ${serializeEncoding(encoding)}\`.
Please rename it to one of: ${correctEncodings}.`);
};

const TEXT_ENCODINGS = new Set(['utf8', 'utf16le']);
export const BINARY_ENCODINGS = new Set(['buffer', 'hex', 'base64', 'base64url', 'latin1', 'ascii']);
const ENCODINGS = new Set([...TEXT_ENCODINGS, ...BINARY_ENCODINGS]);

const getCorrectEncoding = encoding => {
  if (encoding === null) {
    return 'buffer';
  }

  if (typeof encoding !== 'string') {
    return;
  }

  const lowerEncoding = encoding.toLowerCase();
  if (lowerEncoding in ENCODING_ALIASES) {
    return ENCODING_ALIASES[lowerEncoding];
  }

  if (ENCODINGS.has(lowerEncoding)) {
    return lowerEncoding;
  }
};

const ENCODING_ALIASES = {
  // eslint-disable-next-line unicorn/text-encoding-identifier-case
  'utf-8': 'utf8',
  'utf-16le': 'utf16le',
  'ucs-2': 'utf16le',
  ucs2: 'utf16le',
  binary: 'latin1',
};

const serializeEncoding = encoding => typeof encoding === 'string' ? `"${encoding}"` : String(encoding);
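The alias table drives friendlier error messages. A small sketch (vendored import path assumed):

import {validateEncoding} from './node_modules/execa/lib/arguments/encoding-option.js';

validateEncoding({encoding: 'utf8'}); // valid: returns undefined
validateEncoding({encoding: null}); // throws: Please rename it to "buffer".
validateEncoding({encoding: 'utf-8'}); // throws: Please rename it to "utf8".
validateEncoding({encoding: 'nope'}); // throws: Please rename it to one of: "utf8", "utf16le", "buffer", ...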
node_modules/execa/lib/arguments/escape.js (generated, vendored, new file, 88 lines)
@@ -0,0 +1,88 @@
import {platform} from 'node:process';
import {stripVTControlCharacters} from 'node:util';

// Compute `result.command` and `result.escapedCommand`
export const joinCommand = (filePath, rawArguments) => {
  const fileAndArguments = [filePath, ...rawArguments];
  const command = fileAndArguments.join(' ');
  const escapedCommand = fileAndArguments
    .map(fileAndArgument => quoteString(escapeControlCharacters(fileAndArgument)))
    .join(' ');
  return {command, escapedCommand};
};

// Remove ANSI sequences and escape control characters and newlines
export const escapeLines = lines => stripVTControlCharacters(lines)
  .split('\n')
  .map(line => escapeControlCharacters(line))
  .join('\n');

const escapeControlCharacters = line => line.replaceAll(SPECIAL_CHAR_REGEXP, character => escapeControlCharacter(character));

const escapeControlCharacter = character => {
  const commonEscape = COMMON_ESCAPES[character];
  if (commonEscape !== undefined) {
    return commonEscape;
  }

  const codepoint = character.codePointAt(0);
  const codepointHex = codepoint.toString(16);
  return codepoint <= ASTRAL_START
    ? `\\u${codepointHex.padStart(4, '0')}`
    : `\\U${codepointHex}`;
};

// Characters that would create issues when printed are escaped using the \u or \U notation.
// Those include control characters and newlines.
// The \u and \U notation is Bash specific, but there is no way to do this in a shell-agnostic way.
// Some shells do not even have a way to print those characters in an escaped fashion.
// Therefore, we prioritize printing those safely, instead of allowing those to be copy-pasted.
// List of Unicode character categories: https://www.fileformat.info/info/unicode/category/index.htm
const getSpecialCharRegExp = () => {
  try {
    // This throws when using Node.js without ICU support.
    // When using a RegExp literal, this would throw at parsing-time, instead of runtime.
    // eslint-disable-next-line prefer-regex-literals
    return new RegExp('\\p{Separator}|\\p{Other}', 'gu');
  } catch {
    // Similar to the above RegExp, but works even when Node.js has been built without ICU support.
    // Unlike the above RegExp, it only covers whitespaces and C0/C1 control characters.
    // It does not cover some edge cases, such as Unicode reserved characters.
    // See https://github.com/sindresorhus/execa/issues/1143
    // eslint-disable-next-line no-control-regex
    return /[\s\u0000-\u001F\u007F-\u009F\u00AD]/g;
  }
};

const SPECIAL_CHAR_REGEXP = getSpecialCharRegExp();

// Accepted by $'...' in Bash.
// Exclude \a \e \v which are accepted in Bash but not in JavaScript (except \v) and JSON.
const COMMON_ESCAPES = {
  ' ': ' ',
  '\b': '\\b',
  '\f': '\\f',
  '\n': '\\n',
  '\r': '\\r',
  '\t': '\\t',
};

// Up until that codepoint, \u notation can be used instead of \U
const ASTRAL_START = 65_535;

// Some characters are shell-specific, i.e. need to be escaped when the command is copy-pasted then run.
// Escaping is shell-specific. We cannot know which shell is used: `process.platform` detection is not enough.
// For example, Windows users could be using `cmd.exe`, Powershell or Bash for Windows which all use different escaping.
// We use '...' on Unix, which is POSIX shell compliant and escapes all characters but ', so this is fairly safe.
// On Windows, we assume cmd.exe is used and escape with "...", which also works with Powershell.
const quoteString = escapedArgument => {
  if (NO_ESCAPE_REGEXP.test(escapedArgument)) {
    return escapedArgument;
  }

  return platform === 'win32'
    ? `"${escapedArgument.replaceAll('"', '""')}"`
    : `'${escapedArgument.replaceAll('\'', '\'\\\'\'')}'`;
};

const NO_ESCAPE_REGEXP = /^[\w./-]+$/;
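The escaping is easiest to see with a concrete call. A sketch on a Unix platform (vendored import path assumed):

import {joinCommand} from './node_modules/execa/lib/arguments/escape.js';

const {command, escapedCommand} = joinCommand('echo', ['hello world', 'line\nbreak']);
// command: 'echo hello world line\nbreak' (plain join, contains a real newline)
// escapedCommand: "echo 'hello world' 'line\\nbreak'"
// 'echo' matches NO_ESCAPE_REGEXP so it stays unquoted; the space forces quoting;
// the newline is rewritten to the two characters \n before quoting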
node_modules/execa/lib/arguments/fd-options.js (generated, vendored, new file, 108 lines)
@@ -0,0 +1,108 @@
import {parseFd} from './specific.js';

// Retrieve stream targeted by the `to` option
export const getToStream = (destination, to = 'stdin') => {
  const isWritable = true;
  const {options, fileDescriptors} = SUBPROCESS_OPTIONS.get(destination);
  const fdNumber = getFdNumber(fileDescriptors, to, isWritable);
  const destinationStream = destination.stdio[fdNumber];

  if (destinationStream === null) {
    throw new TypeError(getInvalidStdioOptionMessage(fdNumber, to, options, isWritable));
  }

  return destinationStream;
};

// Retrieve stream targeted by the `from` option
export const getFromStream = (source, from = 'stdout') => {
  const isWritable = false;
  const {options, fileDescriptors} = SUBPROCESS_OPTIONS.get(source);
  const fdNumber = getFdNumber(fileDescriptors, from, isWritable);
  const sourceStream = fdNumber === 'all' ? source.all : source.stdio[fdNumber];

  if (sourceStream === null || sourceStream === undefined) {
    throw new TypeError(getInvalidStdioOptionMessage(fdNumber, from, options, isWritable));
  }

  return sourceStream;
};

// Keeps track of the options passed to each Execa call
export const SUBPROCESS_OPTIONS = new WeakMap();

const getFdNumber = (fileDescriptors, fdName, isWritable) => {
  const fdNumber = parseFdNumber(fdName, isWritable);
  validateFdNumber(fdNumber, fdName, isWritable, fileDescriptors);
  return fdNumber;
};

const parseFdNumber = (fdName, isWritable) => {
  const fdNumber = parseFd(fdName);
  if (fdNumber !== undefined) {
    return fdNumber;
  }

  const {validOptions, defaultValue} = isWritable
    ? {validOptions: '"stdin"', defaultValue: 'stdin'}
    : {validOptions: '"stdout", "stderr", "all"', defaultValue: 'stdout'};
  throw new TypeError(`"${getOptionName(isWritable)}" must not be "${fdName}".
It must be ${validOptions} or "fd3", "fd4" (and so on).
It is optional and defaults to "${defaultValue}".`);
};

const validateFdNumber = (fdNumber, fdName, isWritable, fileDescriptors) => {
  const fileDescriptor = fileDescriptors[getUsedDescriptor(fdNumber)];
  if (fileDescriptor === undefined) {
    throw new TypeError(`"${getOptionName(isWritable)}" must not be ${fdName}. That file descriptor does not exist.
Please set the "stdio" option to ensure that file descriptor exists.`);
  }

  if (fileDescriptor.direction === 'input' && !isWritable) {
    throw new TypeError(`"${getOptionName(isWritable)}" must not be ${fdName}. It must be a readable stream, not writable.`);
  }

  if (fileDescriptor.direction !== 'input' && isWritable) {
    throw new TypeError(`"${getOptionName(isWritable)}" must not be ${fdName}. It must be a writable stream, not readable.`);
  }
};

const getInvalidStdioOptionMessage = (fdNumber, fdName, options, isWritable) => {
  if (fdNumber === 'all' && !options.all) {
    return 'The "all" option must be true to use "from: \'all\'".';
  }

  const {optionName, optionValue} = getInvalidStdioOption(fdNumber, options);
  return `The "${optionName}: ${serializeOptionValue(optionValue)}" option is incompatible with using "${getOptionName(isWritable)}: ${serializeOptionValue(fdName)}".
Please set this option with "pipe" instead.`;
};

const getInvalidStdioOption = (fdNumber, {stdin, stdout, stderr, stdio}) => {
  const usedDescriptor = getUsedDescriptor(fdNumber);

  if (usedDescriptor === 0 && stdin !== undefined) {
    return {optionName: 'stdin', optionValue: stdin};
  }

  if (usedDescriptor === 1 && stdout !== undefined) {
    return {optionName: 'stdout', optionValue: stdout};
  }

  if (usedDescriptor === 2 && stderr !== undefined) {
    return {optionName: 'stderr', optionValue: stderr};
  }

  return {optionName: `stdio[${usedDescriptor}]`, optionValue: stdio[usedDescriptor]};
};

const getUsedDescriptor = fdNumber => fdNumber === 'all' ? 1 : fdNumber;

const getOptionName = isWritable => isWritable ? 'to' : 'from';

export const serializeOptionValue = value => {
  if (typeof value === 'string') {
    return `'${value}'`;
  }

  return typeof value === 'number' ? `${value}` : 'Stream';
};
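All `from`/`to` names normalize to numeric file descriptors before any stream lookup. A sketch, assuming `STANDARD_STREAMS_ALIASES` (imported from `../utils/standard-stream.js`, not shown in this excerpt) is `['stdin', 'stdout', 'stderr']`:

import {parseFd} from './node_modules/execa/lib/arguments/specific.js';

parseFd('stdout'); // → 1
parseFd('stderr'); // → 2
parseFd('fd3'); // → 3
parseFd('all'); // → 'all' (kept as-is; mapped to fd 1 during validation)
parseFd('nope'); // → undefined (triggers the TypeError in parseFdNumber)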
node_modules/execa/lib/arguments/file-url.js (generated, vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
import {fileURLToPath} from 'node:url';

// Allow some arguments/options to be either a file path string or a file URL
export const safeNormalizeFileUrl = (file, name) => {
  const fileString = normalizeFileUrl(normalizeDenoExecPath(file));

  if (typeof fileString !== 'string') {
    throw new TypeError(`${name} must be a string or a file URL: ${fileString}.`);
  }

  return fileString;
};

// In Deno, `node:process`'s `execPath` is a special object, not just a string:
// https://github.com/denoland/deno/blob/f460188e583f00144000aa0d8ade08218d47c3c1/ext/node/polyfills/process.ts#L344
const normalizeDenoExecPath = file => isDenoExecPath(file)
  ? file.toString()
  : file;

export const isDenoExecPath = file => typeof file !== 'string'
  && file
  && Object.getPrototypeOf(file) === String.prototype;

// Same but also allows other values, e.g. `boolean` for the `shell` option
export const normalizeFileUrl = file => file instanceof URL ? fileURLToPath(file) : file;
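These helpers let options accept `file://` URLs interchangeably with path strings. A quick sketch (Unix paths assumed):

import {normalizeFileUrl} from './node_modules/execa/lib/arguments/file-url.js';

normalizeFileUrl(new URL('file:///usr/bin/node')); // → '/usr/bin/node'
normalizeFileUrl('/usr/bin/node'); // → '/usr/bin/node' (strings pass through)
normalizeFileUrl(true); // → true (non-URL values pass through, e.g. the `shell` option)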
node_modules/execa/lib/arguments/options.js (generated, vendored, new file, 96 lines)
@@ -0,0 +1,96 @@
import path from 'node:path';
import process from 'node:process';
import crossSpawn from 'cross-spawn';
import {npmRunPathEnv} from 'npm-run-path';
import {normalizeForceKillAfterDelay} from '../terminate/kill.js';
import {normalizeKillSignal} from '../terminate/signal.js';
import {validateCancelSignal} from '../terminate/cancel.js';
import {validateGracefulCancel} from '../terminate/graceful.js';
import {validateTimeout} from '../terminate/timeout.js';
import {handleNodeOption} from '../methods/node.js';
import {validateIpcInputOption} from '../ipc/ipc-input.js';
import {validateEncoding, BINARY_ENCODINGS} from './encoding-option.js';
import {normalizeCwd} from './cwd.js';
import {normalizeFileUrl} from './file-url.js';
import {normalizeFdSpecificOptions} from './specific.js';

// Normalize the options object, and sometimes also the file paths and arguments.
// Applies default values, validates allowed options, and normalizes them.
export const normalizeOptions = (filePath, rawArguments, rawOptions) => {
  rawOptions.cwd = normalizeCwd(rawOptions.cwd);
  const [processedFile, processedArguments, processedOptions] = handleNodeOption(filePath, rawArguments, rawOptions);

  const {command: file, args: commandArguments, options: initialOptions} = crossSpawn._parse(processedFile, processedArguments, processedOptions);

  const fdOptions = normalizeFdSpecificOptions(initialOptions);
  const options = addDefaultOptions(fdOptions);
  validateTimeout(options);
  validateEncoding(options);
  validateIpcInputOption(options);
  validateCancelSignal(options);
  validateGracefulCancel(options);
  options.shell = normalizeFileUrl(options.shell);
  options.env = getEnv(options);
  options.killSignal = normalizeKillSignal(options.killSignal);
  options.forceKillAfterDelay = normalizeForceKillAfterDelay(options.forceKillAfterDelay);
  options.lines = options.lines.map((lines, fdNumber) => lines && !BINARY_ENCODINGS.has(options.encoding) && options.buffer[fdNumber]);

  if (process.platform === 'win32' && path.basename(file, '.exe') === 'cmd') {
    // #116
    commandArguments.unshift('/q');
  }

  return {file, commandArguments, options};
};

const addDefaultOptions = ({
  extendEnv = true,
  preferLocal = false,
  cwd,
  localDir: localDirectory = cwd,
  encoding = 'utf8',
  reject = true,
  cleanup = true,
  all = false,
  windowsHide = true,
  killSignal = 'SIGTERM',
  forceKillAfterDelay = true,
  gracefulCancel = false,
  ipcInput,
  ipc = ipcInput !== undefined || gracefulCancel,
  serialization = 'advanced',
  ...options
}) => ({
  ...options,
  extendEnv,
  preferLocal,
  cwd,
  localDirectory,
  encoding,
  reject,
  cleanup,
  all,
  windowsHide,
  killSignal,
  forceKillAfterDelay,
  gracefulCancel,
  ipcInput,
  ipc,
  serialization,
});

const getEnv = ({env: envOption, extendEnv, preferLocal, node, localDirectory, nodePath}) => {
  const env = extendEnv ? {...process.env, ...envOption} : envOption;

  if (preferLocal || node) {
    return npmRunPathEnv({
      env,
      cwd: localDirectory,
      execPath: nodePath,
      preferLocal,
      addExecPath: node,
    });
  }

  return env;
};
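One subtlety in `addDefaultOptions`: in a destructuring pattern, a later default can reference earlier bindings, which is how `ipc` turns on automatically when `ipcInput` or `gracefulCancel` is used. A reduced, hypothetical sketch of just that pattern (not the vendored function itself):

// Defaults are evaluated left to right, so `ipc` can read `ipcInput` and `gracefulCancel`
const addDefaults = ({gracefulCancel = false, ipcInput, ipc = ipcInput !== undefined || gracefulCancel}) =>
  ({gracefulCancel, ipcInput, ipc});

addDefaults({}); // → {gracefulCancel: false, ipcInput: undefined, ipc: false}
addDefaults({ipcInput: 'ping'}); // → ipc: true, without the caller setting it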
node_modules/execa/lib/arguments/specific.js (generated, vendored, new file, 111 lines)
@@ -0,0 +1,111 @@
import {debuglog} from 'node:util';
import isPlainObject from 'is-plain-obj';
import {STANDARD_STREAMS_ALIASES} from '../utils/standard-stream.js';

// Some options can have different values for `stdout`/`stderr`/`fd3`.
// This normalizes those to an array of values.
// For example, `{verbose: {stdout: 'none', stderr: 'full'}}` becomes `{verbose: ['none', 'none', 'full']}`
export const normalizeFdSpecificOptions = options => {
  const optionsCopy = {...options};

  for (const optionName of FD_SPECIFIC_OPTIONS) {
    optionsCopy[optionName] = normalizeFdSpecificOption(options, optionName);
  }

  return optionsCopy;
};

export const normalizeFdSpecificOption = (options, optionName) => {
  const optionBaseArray = Array.from({length: getStdioLength(options) + 1});
  const optionArray = normalizeFdSpecificValue(options[optionName], optionBaseArray, optionName);
  return addDefaultValue(optionArray, optionName);
};

const getStdioLength = ({stdio}) => Array.isArray(stdio)
  ? Math.max(stdio.length, STANDARD_STREAMS_ALIASES.length)
  : STANDARD_STREAMS_ALIASES.length;

const normalizeFdSpecificValue = (optionValue, optionArray, optionName) => isPlainObject(optionValue)
  ? normalizeOptionObject(optionValue, optionArray, optionName)
  : optionArray.fill(optionValue);

const normalizeOptionObject = (optionValue, optionArray, optionName) => {
  for (const fdName of Object.keys(optionValue).sort(compareFdName)) {
    for (const fdNumber of parseFdName(fdName, optionName, optionArray)) {
      optionArray[fdNumber] = optionValue[fdName];
    }
  }

  return optionArray;
};

// Ensure priority order when setting both `stdout`/`stderr`, `fd1`/`fd2`, and `all`
const compareFdName = (fdNameA, fdNameB) => getFdNameOrder(fdNameA) < getFdNameOrder(fdNameB) ? 1 : -1;

const getFdNameOrder = fdName => {
  if (fdName === 'stdout' || fdName === 'stderr') {
    return 0;
  }

  return fdName === 'all' ? 2 : 1;
};

const parseFdName = (fdName, optionName, optionArray) => {
  if (fdName === 'ipc') {
    return [optionArray.length - 1];
  }

  const fdNumber = parseFd(fdName);
  if (fdNumber === undefined || fdNumber === 0) {
    throw new TypeError(`"${optionName}.${fdName}" is invalid.
It must be "${optionName}.stdout", "${optionName}.stderr", "${optionName}.all", "${optionName}.ipc", or "${optionName}.fd3", "${optionName}.fd4" (and so on).`);
  }

  if (fdNumber >= optionArray.length) {
    throw new TypeError(`"${optionName}.${fdName}" is invalid: that file descriptor does not exist.
Please set the "stdio" option to ensure that file descriptor exists.`);
  }

  return fdNumber === 'all' ? [1, 2] : [fdNumber];
};

// Use the same syntax for fd-specific options and the `from`/`to` options
export const parseFd = fdName => {
  if (fdName === 'all') {
    return fdName;
  }

  if (STANDARD_STREAMS_ALIASES.includes(fdName)) {
    return STANDARD_STREAMS_ALIASES.indexOf(fdName);
  }

  const regexpResult = FD_REGEXP.exec(fdName);
  if (regexpResult !== null) {
    return Number(regexpResult[1]);
  }
};

const FD_REGEXP = /^fd(\d+)$/;

const addDefaultValue = (optionArray, optionName) => optionArray.map(optionValue => optionValue === undefined
  ? DEFAULT_OPTIONS[optionName]
  : optionValue);

// Default value for the `verbose` option
const verboseDefault = debuglog('execa').enabled ? 'full' : 'none';

const DEFAULT_OPTIONS = {
  lines: false,
  buffer: true,
  maxBuffer: 1000 * 1000 * 100,
  verbose: verboseDefault,
  stripFinalNewline: true,
};

// List of options which can have different values for `stdout`/`stderr`
export const FD_SPECIFIC_OPTIONS = ['lines', 'buffer', 'maxBuffer', 'verbose', 'stripFinalNewline'];

// Retrieve fd-specific option
export const getFdSpecificValue = (optionArray, fdNumber) => fdNumber === 'ipc'
  ? optionArray.at(-1)
  : optionArray[fdNumber];
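The transformation in the header comment can be exercised directly. A sketch, assuming the default three standard file descriptors plus the trailing IPC slot, and `NODE_DEBUG=execa` unset (so missing entries default to `'none'`):

import {normalizeFdSpecificOption} from './node_modules/execa/lib/arguments/specific.js';

normalizeFdSpecificOption({verbose: {stdout: 'none', stderr: 'full'}}, 'verbose');
// → ['none', 'none', 'full', 'none']
// index 0 is stdin, 1 is stdout, 2 is stderr; the extra last slot is the `ipc` channel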
node_modules/execa/lib/convert/add.js (generated, vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
import {initializeConcurrentStreams} from './concurrent.js';
import {createReadable} from './readable.js';
import {createWritable} from './writable.js';
import {createDuplex} from './duplex.js';
import {createIterable} from './iterable.js';

// Add methods to convert the subprocess to a stream or iterable
export const addConvertedStreams = (subprocess, {encoding}) => {
  const concurrentStreams = initializeConcurrentStreams();
  subprocess.readable = createReadable.bind(undefined, {subprocess, concurrentStreams, encoding});
  subprocess.writable = createWritable.bind(undefined, {subprocess, concurrentStreams});
  subprocess.duplex = createDuplex.bind(undefined, {subprocess, concurrentStreams, encoding});
  subprocess.iterable = createIterable.bind(undefined, subprocess, encoding);
  subprocess[Symbol.asyncIterator] = createIterable.bind(undefined, subprocess, encoding, {});
};
node_modules/execa/lib/convert/concurrent.js (generated, vendored, new file, 33 lines)
@@ -0,0 +1,33 @@
import {createDeferred} from '../utils/deferred.js';

// When using multiple `.readable()`/`.writable()`/`.duplex()`, `final` and `destroy` should wait for other streams
export const initializeConcurrentStreams = () => ({
  readableDestroy: new WeakMap(),
  writableFinal: new WeakMap(),
  writableDestroy: new WeakMap(),
});

// Each file descriptor + `waitName` has its own array of promises.
// Each promise is a single `.readable()`/`.writable()`/`.duplex()` call.
export const addConcurrentStream = (concurrentStreams, stream, waitName) => {
  const weakMap = concurrentStreams[waitName];
  if (!weakMap.has(stream)) {
    weakMap.set(stream, []);
  }

  const promises = weakMap.get(stream);
  const promise = createDeferred();
  promises.push(promise);
  const resolve = promise.resolve.bind(promise);
  return {resolve, promises};
};

// Wait for other streams, but stop waiting when subprocess ends
export const waitForConcurrentStreams = async ({resolve, promises}, subprocess) => {
  resolve();
  const [isSubprocessExit] = await Promise.race([
    Promise.allSettled([true, subprocess]),
    Promise.all([false, ...promises]),
  ]);
  return !isSubprocessExit;
};
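The `Promise.race` in `waitForConcurrentStreams` encodes which side finished first in the first array element: `Promise.allSettled([true, subprocess])` resolves to an array whose first entry is a truthy settlement record (and it never rejects, even when the subprocess fails), while `Promise.all([false, ...promises])` resolves with a literal `false` first, and only once every sibling stream has resolved. A hypothetical sketch of the trick in isolation:

const demo = async () => {
  const subprocess = Promise.reject(new Error('subprocess failed')); // stand-in for the real subprocess
  const siblingStreams = [new Promise(() => {})]; // other `.writable()` calls, still pending

  const [first] = await Promise.race([
    Promise.allSettled([true, subprocess]), // → [{status: 'fulfilled', value: true}, ...]
    Promise.all([false, ...siblingStreams]), // → [false, ...], only if all siblings finish first
  ]);
  return !first; // false here: the subprocess settled first, so the caller stops waiting
};

So the function returns `true` (proceed with teardown) only when all sibling streams finished before the subprocess did.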
node_modules/execa/lib/convert/duplex.js (generated, vendored, new file, 69 lines)
@@ -0,0 +1,69 @@
import {Duplex} from 'node:stream';
import {callbackify} from 'node:util';
import {BINARY_ENCODINGS} from '../arguments/encoding-option.js';
import {
  getSubprocessStdout,
  getReadableOptions,
  getReadableMethods,
  onStdoutFinished,
  onReadableDestroy,
} from './readable.js';
import {
  getSubprocessStdin,
  getWritableMethods,
  onStdinFinished,
  onWritableDestroy,
} from './writable.js';

// Create a `Duplex` stream combining both `subprocess.readable()` and `subprocess.writable()`
export const createDuplex = ({subprocess, concurrentStreams, encoding}, {from, to, binary: binaryOption = true, preserveNewlines = true} = {}) => {
  const binary = binaryOption || BINARY_ENCODINGS.has(encoding);
  const {subprocessStdout, waitReadableDestroy} = getSubprocessStdout(subprocess, from, concurrentStreams);
  const {subprocessStdin, waitWritableFinal, waitWritableDestroy} = getSubprocessStdin(subprocess, to, concurrentStreams);
  const {readableEncoding, readableObjectMode, readableHighWaterMark} = getReadableOptions(subprocessStdout, binary);
  const {read, onStdoutDataDone} = getReadableMethods({
    subprocessStdout,
    subprocess,
    binary,
    encoding,
    preserveNewlines,
  });
  const duplex = new Duplex({
    read,
    ...getWritableMethods(subprocessStdin, subprocess, waitWritableFinal),
    destroy: callbackify(onDuplexDestroy.bind(undefined, {
      subprocessStdout,
      subprocessStdin,
      subprocess,
      waitReadableDestroy,
      waitWritableFinal,
      waitWritableDestroy,
    })),
    readableHighWaterMark,
    writableHighWaterMark: subprocessStdin.writableHighWaterMark,
    readableObjectMode,
    writableObjectMode: subprocessStdin.writableObjectMode,
    encoding: readableEncoding,
  });
  onStdoutFinished({
    subprocessStdout,
    onStdoutDataDone,
    readable: duplex,
    subprocess,
    subprocessStdin,
  });
  onStdinFinished(subprocessStdin, duplex, subprocessStdout);
  return duplex;
};

const onDuplexDestroy = async ({subprocessStdout, subprocessStdin, subprocess, waitReadableDestroy, waitWritableFinal, waitWritableDestroy}, error) => {
  await Promise.all([
    onReadableDestroy({subprocessStdout, subprocess, waitReadableDestroy}, error),
    onWritableDestroy({
      subprocessStdin,
      subprocess,
      waitWritableFinal,
      waitWritableDestroy,
    }, error),
  ]);
};
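At the public level this backs `subprocess.duplex()`. A typical usage sketch, assuming a Unix `tr` binary is available:

import {pipeline} from 'node:stream/promises';
import {createReadStream, createWriteStream} from 'node:fs';
import {execa} from 'execa';

// Uppercase a file by streaming it through a subprocess
await pipeline(
  createReadStream('input.txt'),
  execa('tr', ['a-z', 'A-Z']).duplex(),
  createWriteStream('output.txt'),
);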
node_modules/execa/lib/convert/iterable.js (generated, vendored, new file, 34 lines)
@@ -0,0 +1,34 @@
import {BINARY_ENCODINGS} from '../arguments/encoding-option.js';
import {getFromStream} from '../arguments/fd-options.js';
import {iterateOnSubprocessStream} from '../io/iterate.js';

// Convert the subprocess to an async iterable
export const createIterable = (subprocess, encoding, {
  from,
  binary: binaryOption = false,
  preserveNewlines = false,
} = {}) => {
  const binary = binaryOption || BINARY_ENCODINGS.has(encoding);
  const subprocessStdout = getFromStream(subprocess, from);
  const onStdoutData = iterateOnSubprocessStream({
    subprocessStdout,
    subprocess,
    binary,
    shouldEncode: true,
    encoding,
    preserveNewlines,
  });
  return iterateOnStdoutData(onStdoutData, subprocessStdout, subprocess);
};

const iterateOnStdoutData = async function * (onStdoutData, subprocessStdout, subprocess) {
  try {
    yield * onStdoutData;
  } finally {
    if (subprocessStdout.readable) {
      subprocessStdout.destroy();
    }

    await subprocess;
  }
};
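Because `addConvertedStreams` also wires `Symbol.asyncIterator` to this function, a subprocess can be consumed with `for await` directly, with lines arriving progressively rather than after exit. A sketch (Unix `ls` assumed):

import {execa} from 'execa';

// Iterates over stdout line by line; binary: false and line splitting are the defaults here
for await (const line of execa('ls', ['-l'])) {
  console.log(line);
}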
node_modules/execa/lib/convert/readable.js (generated, vendored, new file, 113 lines)
@@ -0,0 +1,113 @@
import {Readable} from 'node:stream';
import {callbackify} from 'node:util';
import {BINARY_ENCODINGS} from '../arguments/encoding-option.js';
import {getFromStream} from '../arguments/fd-options.js';
import {iterateOnSubprocessStream, DEFAULT_OBJECT_HIGH_WATER_MARK} from '../io/iterate.js';
import {createDeferred} from '../utils/deferred.js';
import {addConcurrentStream, waitForConcurrentStreams} from './concurrent.js';
import {
  safeWaitForSubprocessStdin,
  waitForSubprocessStdout,
  waitForSubprocess,
  destroyOtherStream,
} from './shared.js';

// Create a `Readable` stream that forwards from `stdout` and awaits the subprocess
export const createReadable = ({subprocess, concurrentStreams, encoding}, {from, binary: binaryOption = true, preserveNewlines = true} = {}) => {
  const binary = binaryOption || BINARY_ENCODINGS.has(encoding);
  const {subprocessStdout, waitReadableDestroy} = getSubprocessStdout(subprocess, from, concurrentStreams);
  const {readableEncoding, readableObjectMode, readableHighWaterMark} = getReadableOptions(subprocessStdout, binary);
  const {read, onStdoutDataDone} = getReadableMethods({
    subprocessStdout,
    subprocess,
    binary,
    encoding,
    preserveNewlines,
  });
  const readable = new Readable({
    read,
    destroy: callbackify(onReadableDestroy.bind(undefined, {subprocessStdout, subprocess, waitReadableDestroy})),
    highWaterMark: readableHighWaterMark,
    objectMode: readableObjectMode,
    encoding: readableEncoding,
  });
  onStdoutFinished({
    subprocessStdout,
    onStdoutDataDone,
    readable,
    subprocess,
  });
  return readable;
};

// Retrieve `stdout` (or other stream depending on `from`)
export const getSubprocessStdout = (subprocess, from, concurrentStreams) => {
  const subprocessStdout = getFromStream(subprocess, from);
  const waitReadableDestroy = addConcurrentStream(concurrentStreams, subprocessStdout, 'readableDestroy');
  return {subprocessStdout, waitReadableDestroy};
};

export const getReadableOptions = ({readableEncoding, readableObjectMode, readableHighWaterMark}, binary) => binary
  ? {readableEncoding, readableObjectMode, readableHighWaterMark}
  : {readableEncoding, readableObjectMode: true, readableHighWaterMark: DEFAULT_OBJECT_HIGH_WATER_MARK};

export const getReadableMethods = ({subprocessStdout, subprocess, binary, encoding, preserveNewlines}) => {
  const onStdoutDataDone = createDeferred();
  const onStdoutData = iterateOnSubprocessStream({
    subprocessStdout,
    subprocess,
    binary,
    shouldEncode: !binary,
    encoding,
    preserveNewlines,
  });

  return {
    read() {
      onRead(this, onStdoutData, onStdoutDataDone);
    },
    onStdoutDataDone,
  };
};

// Forwards data from `stdout` to `readable`
const onRead = async (readable, onStdoutData, onStdoutDataDone) => {
  try {
    const {value, done} = await onStdoutData.next();
    if (done) {
      onStdoutDataDone.resolve();
    } else {
      readable.push(value);
    }
  } catch {}
};

// When `subprocess.stdout` ends/aborts/errors, do the same on `readable`.
// Await the subprocess, for the same reason as above.
export const onStdoutFinished = async ({subprocessStdout, onStdoutDataDone, readable, subprocess, subprocessStdin}) => {
  try {
    await waitForSubprocessStdout(subprocessStdout);
    await subprocess;
    await safeWaitForSubprocessStdin(subprocessStdin);
    await onStdoutDataDone;

    if (readable.readable) {
      readable.push(null);
    }
  } catch (error) {
    await safeWaitForSubprocessStdin(subprocessStdin);
    destroyOtherReadable(readable, error);
  }
};

// When `readable` aborts/errors, do the same on `subprocess.stdout`
export const onReadableDestroy = async ({subprocessStdout, subprocess, waitReadableDestroy}, error) => {
  if (await waitForConcurrentStreams(waitReadableDestroy, subprocess)) {
    destroyOtherReadable(subprocessStdout, error);
    await waitForSubprocess(subprocess, error);
  }
};

const destroyOtherReadable = (stream, error) => {
  destroyOtherStream(stream, stream.readable, error);
};
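This backs `subprocess.readable()`. A sketch:

import process from 'node:process';
import {execa} from 'execa';

const subprocess = execa('node', ['--version']);
const readable = subprocess.readable(); // defaults: from 'stdout', binary chunks
readable.pipe(process.stdout);
// A subprocess failure surfaces as an 'error' event on `readable`,
// so consumers do not need to separately await the subprocess.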
node_modules/execa/lib/convert/shared.js (generated, vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
import {finished} from 'node:stream/promises';
import {isStreamAbort} from '../resolve/wait-stream.js';

export const safeWaitForSubprocessStdin = async subprocessStdin => {
  if (subprocessStdin === undefined) {
    return;
  }

  try {
    await waitForSubprocessStdin(subprocessStdin);
  } catch {}
};

export const safeWaitForSubprocessStdout = async subprocessStdout => {
  if (subprocessStdout === undefined) {
    return;
  }

  try {
    await waitForSubprocessStdout(subprocessStdout);
  } catch {}
};

export const waitForSubprocessStdin = async subprocessStdin => {
  await finished(subprocessStdin, {cleanup: true, readable: false, writable: true});
};

export const waitForSubprocessStdout = async subprocessStdout => {
  await finished(subprocessStdout, {cleanup: true, readable: true, writable: false});
};

// When `readable` or `writable` aborts/errors, awaits the subprocess, for the reason mentioned above
export const waitForSubprocess = async (subprocess, error) => {
  await subprocess;
  if (error) {
    throw error;
  }
};

export const destroyOtherStream = (stream, isOpen, error) => {
  if (error && !isStreamAbort(error)) {
    stream.destroy(error);
  } else if (isOpen) {
    stream.destroy();
  }
};
node_modules/execa/lib/convert/writable.js (generated, vendored, new file, 90 lines)
@@ -0,0 +1,90 @@
import {Writable} from 'node:stream';
import {callbackify} from 'node:util';
import {getToStream} from '../arguments/fd-options.js';
import {addConcurrentStream, waitForConcurrentStreams} from './concurrent.js';
import {
  safeWaitForSubprocessStdout,
  waitForSubprocessStdin,
  waitForSubprocess,
  destroyOtherStream,
} from './shared.js';

// Create a `Writable` stream that forwards to `stdin` and awaits the subprocess
export const createWritable = ({subprocess, concurrentStreams}, {to} = {}) => {
  const {subprocessStdin, waitWritableFinal, waitWritableDestroy} = getSubprocessStdin(subprocess, to, concurrentStreams);
  const writable = new Writable({
    ...getWritableMethods(subprocessStdin, subprocess, waitWritableFinal),
    destroy: callbackify(onWritableDestroy.bind(undefined, {
      subprocessStdin,
      subprocess,
      waitWritableFinal,
      waitWritableDestroy,
    })),
    highWaterMark: subprocessStdin.writableHighWaterMark,
    objectMode: subprocessStdin.writableObjectMode,
  });
  onStdinFinished(subprocessStdin, writable);
  return writable;
};

// Retrieve `stdin` (or other stream depending on `to`)
export const getSubprocessStdin = (subprocess, to, concurrentStreams) => {
  const subprocessStdin = getToStream(subprocess, to);
  const waitWritableFinal = addConcurrentStream(concurrentStreams, subprocessStdin, 'writableFinal');
  const waitWritableDestroy = addConcurrentStream(concurrentStreams, subprocessStdin, 'writableDestroy');
  return {subprocessStdin, waitWritableFinal, waitWritableDestroy};
};

export const getWritableMethods = (subprocessStdin, subprocess, waitWritableFinal) => ({
  write: onWrite.bind(undefined, subprocessStdin),
  final: callbackify(onWritableFinal.bind(undefined, subprocessStdin, subprocess, waitWritableFinal)),
});

// Forwards data from `writable` to `stdin`
const onWrite = (subprocessStdin, chunk, encoding, done) => {
  if (subprocessStdin.write(chunk, encoding)) {
    done();
  } else {
    subprocessStdin.once('drain', done);
  }
};

// Ensures that the writable `final` and readable `end` events await the subprocess.
// This way, any subprocess failure is propagated as a stream `error` event, instead of being lost.
// The user does not need to `await` the subprocess anymore, but now needs to await the stream completion or error.
// When multiple writables are targeting the same stream, they wait for each other, unless the subprocess ends first.
const onWritableFinal = async (subprocessStdin, subprocess, waitWritableFinal) => {
  if (await waitForConcurrentStreams(waitWritableFinal, subprocess)) {
    if (subprocessStdin.writable) {
      subprocessStdin.end();
    }

    await subprocess;
  }
};

// When `subprocess.stdin` ends/aborts/errors, do the same on `writable`.
export const onStdinFinished = async (subprocessStdin, writable, subprocessStdout) => {
  try {
    await waitForSubprocessStdin(subprocessStdin);
    if (writable.writable) {
      writable.end();
    }
  } catch (error) {
    await safeWaitForSubprocessStdout(subprocessStdout);
    destroyOtherWritable(writable, error);
  }
};

// When `writable` aborts/errors, do the same on `subprocess.stdin`
export const onWritableDestroy = async ({subprocessStdin, subprocess, waitWritableFinal, waitWritableDestroy}, error) => {
  await waitForConcurrentStreams(waitWritableFinal, subprocess);
  if (await waitForConcurrentStreams(waitWritableDestroy, subprocess)) {
    destroyOtherWritable(subprocessStdin, error);
    await waitForSubprocess(subprocess, error);
  }
};

const destroyOtherWritable = (stream, error) => {
  destroyOtherStream(stream, stream.writable, error);
};
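This backs `subprocess.writable()`. A sketch, assuming a Unix `sort` binary and a local `names.txt`:

import {createReadStream} from 'node:fs';
import {execa} from 'execa';

const subprocess = execa('sort');
createReadStream('names.txt').pipe(subprocess.writable()); // `to` defaults to 'stdin'
const {stdout} = await subprocess; // sorted contents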
node_modules/execa/lib/io/contents.js (generated, vendored, new file, 116 lines)
@@ -0,0 +1,116 @@
import {setImmediate} from 'node:timers/promises';
import getStream, {getStreamAsArrayBuffer, getStreamAsArray} from 'get-stream';
import {isArrayBuffer} from '../utils/uint-array.js';
import {shouldLogOutput, logLines} from '../verbose/output.js';
import {iterateForResult} from './iterate.js';
import {handleMaxBuffer} from './max-buffer.js';
import {getStripFinalNewline} from './strip-newline.js';

// Retrieve `result.stdout|stderr|all|stdio[*]`
export const getStreamOutput = async ({stream, onStreamEnd, fdNumber, encoding, buffer, maxBuffer, lines, allMixed, stripFinalNewline, verboseInfo, streamInfo}) => {
  const logPromise = logOutputAsync({
    stream,
    onStreamEnd,
    fdNumber,
    encoding,
    allMixed,
    verboseInfo,
    streamInfo,
  });

  if (!buffer) {
    await Promise.all([resumeStream(stream), logPromise]);
    return;
  }

  const stripFinalNewlineValue = getStripFinalNewline(stripFinalNewline, fdNumber);
  const iterable = iterateForResult({
    stream,
    onStreamEnd,
    lines,
    encoding,
    stripFinalNewline: stripFinalNewlineValue,
    allMixed,
  });
  const [output] = await Promise.all([
    getStreamContents({
      stream,
      iterable,
      fdNumber,
      encoding,
      maxBuffer,
      lines,
    }),
    logPromise,
  ]);
  return output;
};

const logOutputAsync = async ({stream, onStreamEnd, fdNumber, encoding, allMixed, verboseInfo, streamInfo: {fileDescriptors}}) => {
  if (!shouldLogOutput({
    stdioItems: fileDescriptors[fdNumber]?.stdioItems,
    encoding,
    verboseInfo,
    fdNumber,
  })) {
    return;
  }

  const linesIterable = iterateForResult({
    stream,
    onStreamEnd,
    lines: true,
    encoding,
    stripFinalNewline: true,
    allMixed,
  });
  await logLines(linesIterable, stream, fdNumber, verboseInfo);
};

// When using `buffer: false`, users need to read `subprocess.stdout|stderr|all` right away
// See https://github.com/sindresorhus/execa/issues/730 and https://github.com/sindresorhus/execa/pull/729#discussion_r1465496310
const resumeStream = async stream => {
  await setImmediate();
  if (stream.readableFlowing === null) {
    stream.resume();
  }
};

const getStreamContents = async ({stream, stream: {readableObjectMode}, iterable, fdNumber, encoding, maxBuffer, lines}) => {
  try {
    if (readableObjectMode || lines) {
      return await getStreamAsArray(iterable, {maxBuffer});
    }

    if (encoding === 'buffer') {
      return new Uint8Array(await getStreamAsArrayBuffer(iterable, {maxBuffer}));
    }

    return await getStream(iterable, {maxBuffer});
  } catch (error) {
    return handleBufferedData(handleMaxBuffer({
      error,
      stream,
      readableObjectMode,
      lines,
      encoding,
      fdNumber,
    }));
  }
};

// On failure, `result.stdout|stderr|all` should contain the currently buffered stream
// They are automatically closed and flushed by Node.js when the subprocess exits
// When `buffer` is `false`, `streamPromise` is `undefined` and there is no buffered data to retrieve
export const getBufferedData = async streamPromise => {
  try {
    return await streamPromise;
  } catch (error) {
    return handleBufferedData(error);
  }
};

// Ensure we are returning Uint8Arrays when using `encoding: 'buffer'`
const handleBufferedData = ({bufferedData}) => isArrayBuffer(bufferedData)
  ? new Uint8Array(bufferedData)
  : bufferedData;
node_modules/execa/lib/io/input-sync.js (generated, vendored, new file, 44 lines)
@@ -0,0 +1,44 @@
import {runGeneratorsSync} from '../transform/generator.js';
import {joinToUint8Array, isUint8Array} from '../utils/uint-array.js';
import {TYPE_TO_MESSAGE} from '../stdio/type.js';

// Apply `stdin`/`input`/`inputFile` options, before spawning, in sync mode, by converting it to the `input` option
export const addInputOptionsSync = (fileDescriptors, options) => {
  for (const fdNumber of getInputFdNumbers(fileDescriptors)) {
    addInputOptionSync(fileDescriptors, fdNumber, options);
  }
};

const getInputFdNumbers = fileDescriptors => new Set(Object.entries(fileDescriptors)
  .filter(([, {direction}]) => direction === 'input')
  .map(([fdNumber]) => Number(fdNumber)));

const addInputOptionSync = (fileDescriptors, fdNumber, options) => {
  const {stdioItems} = fileDescriptors[fdNumber];
  const allStdioItems = stdioItems.filter(({contents}) => contents !== undefined);
  if (allStdioItems.length === 0) {
    return;
  }

  if (fdNumber !== 0) {
    const [{type, optionName}] = allStdioItems;
    throw new TypeError(`Only the \`stdin\` option, not \`${optionName}\`, can be ${TYPE_TO_MESSAGE[type]} with synchronous methods.`);
  }

  const allContents = allStdioItems.map(({contents}) => contents);
  const transformedContents = allContents.map(contents => applySingleInputGeneratorsSync(contents, stdioItems));
  options.input = joinToUint8Array(transformedContents);
};

const applySingleInputGeneratorsSync = (contents, stdioItems) => {
  const newContents = runGeneratorsSync(contents, stdioItems, 'utf8', true);
  validateSerializable(newContents);
  return joinToUint8Array(newContents);
};

const validateSerializable = newContents => {
  const invalidItem = newContents.find(item => typeof item !== 'string' && !isUint8Array(item));
  if (invalidItem !== undefined) {
    throw new TypeError(`The \`stdin\` option is invalid: when passing objects as input, a transform must be used to serialize them to strings or Uint8Arrays: ${invalidItem}.`);
  }
};
node_modules/execa/lib/io/iterate.js (generated, vendored, new file, 110 lines)
@@ -0,0 +1,110 @@
import {on} from 'node:events';
import {getDefaultHighWaterMark} from 'node:stream';
import {getEncodingTransformGenerator} from '../transform/encoding-transform.js';
import {getSplitLinesGenerator} from '../transform/split.js';
import {transformChunkSync, finalChunksSync} from '../transform/run-sync.js';

// Iterate over lines of `subprocess.stdout`, used by `subprocess.readable|duplex|iterable()`
export const iterateOnSubprocessStream = ({subprocessStdout, subprocess, binary, shouldEncode, encoding, preserveNewlines}) => {
  const controller = new AbortController();
  stopReadingOnExit(subprocess, controller);
  return iterateOnStream({
    stream: subprocessStdout,
    controller,
    binary,
    shouldEncode: !subprocessStdout.readableObjectMode && shouldEncode,
    encoding,
    shouldSplit: !subprocessStdout.readableObjectMode,
    preserveNewlines,
  });
};

const stopReadingOnExit = async (subprocess, controller) => {
  try {
    await subprocess;
  } catch {} finally {
    controller.abort();
  }
};

// Iterate over lines of `subprocess.stdout`, used by `result.stdout` and the `verbose: 'full'` option.
// Applies the `lines` and `encoding` options.
export const iterateForResult = ({stream, onStreamEnd, lines, encoding, stripFinalNewline, allMixed}) => {
  const controller = new AbortController();
  stopReadingOnStreamEnd(onStreamEnd, controller, stream);
  const objectMode = stream.readableObjectMode && !allMixed;
  return iterateOnStream({
    stream,
    controller,
    binary: encoding === 'buffer',
    shouldEncode: !objectMode,
    encoding,
    shouldSplit: !objectMode && lines,
    preserveNewlines: !stripFinalNewline,
  });
};

const stopReadingOnStreamEnd = async (onStreamEnd, controller, stream) => {
  try {
    await onStreamEnd;
  } catch {
    stream.destroy();
  } finally {
    controller.abort();
  }
};

const iterateOnStream = ({stream, controller, binary, shouldEncode, encoding, shouldSplit, preserveNewlines}) => {
  const onStdoutChunk = on(stream, 'data', {
    signal: controller.signal,
    highWaterMark: HIGH_WATER_MARK,
    // Backward compatibility with older name for this option
    // See https://github.com/nodejs/node/pull/52080#discussion_r1525227861
    // @todo Remove after removing support for Node 21
    highWatermark: HIGH_WATER_MARK,
  });
  return iterateOnData({
    onStdoutChunk,
    controller,
    binary,
    shouldEncode,
    encoding,
    shouldSplit,
    preserveNewlines,
  });
};

export const DEFAULT_OBJECT_HIGH_WATER_MARK = getDefaultHighWaterMark(true);

// The `highWaterMark` of `events.on()` is measured in number of events, not in bytes.
// Not knowing the average amount of bytes per `data` event, we use the same heuristic as streams in objectMode, since they have the same issue.
// Therefore, we use the value of `getDefaultHighWaterMark(true)`.
// Note: this option does not exist on Node 18, but this is ok since the logic works without it. It just consumes more memory.
const HIGH_WATER_MARK = DEFAULT_OBJECT_HIGH_WATER_MARK;

const iterateOnData = async function * ({onStdoutChunk, controller, binary, shouldEncode, encoding, shouldSplit, preserveNewlines}) {
  const generators = getGenerators({
    binary,
    shouldEncode,
    encoding,
    shouldSplit,
    preserveNewlines,
  });

  try {
    for await (const [chunk] of onStdoutChunk) {
      yield * transformChunkSync(chunk, generators, 0);
    }
  } catch (error) {
    if (!controller.signal.aborted) {
      throw error;
    }
  } finally {
    yield * finalChunksSync(generators);
  }
};

const getGenerators = ({binary, shouldEncode, encoding, shouldSplit, preserveNewlines}) => [
  getEncodingTransformGenerator(binary, encoding, !shouldEncode),
  getSplitLinesGenerator(binary, preserveNewlines, !shouldSplit, {}),
].filter(Boolean);
node_modules/execa/lib/io/max-buffer.js (generated, vendored, new file, 89 lines)
@@ -0,0 +1,89 @@
import {MaxBufferError} from 'get-stream';
import {getStreamName} from '../utils/standard-stream.js';
import {getFdSpecificValue} from '../arguments/specific.js';

// When the `maxBuffer` option is hit, a MaxBufferError is thrown.
// The stream is aborted, then specific information is kept for the error message.
export const handleMaxBuffer = ({error, stream, readableObjectMode, lines, encoding, fdNumber}) => {
  if (!(error instanceof MaxBufferError)) {
    throw error;
  }

  if (fdNumber === 'all') {
    return error;
  }

  const unit = getMaxBufferUnit(readableObjectMode, lines, encoding);
  error.maxBufferInfo = {fdNumber, unit};
  stream.destroy();
  throw error;
};

const getMaxBufferUnit = (readableObjectMode, lines, encoding) => {
  if (readableObjectMode) {
    return 'objects';
  }

  if (lines) {
    return 'lines';
  }

  if (encoding === 'buffer') {
    return 'bytes';
  }

  return 'characters';
};

// Check the `maxBuffer` option with `result.ipcOutput`
export const checkIpcMaxBuffer = (subprocess, ipcOutput, maxBuffer) => {
  if (ipcOutput.length !== maxBuffer) {
    return;
  }

  const error = new MaxBufferError();
  error.maxBufferInfo = {fdNumber: 'ipc'};
  throw error;
};

// Error message when `maxBuffer` is hit
export const getMaxBufferMessage = (error, maxBuffer) => {
  const {streamName, threshold, unit} = getMaxBufferInfo(error, maxBuffer);
  return `Command's ${streamName} was larger than ${threshold} ${unit}`;
};

const getMaxBufferInfo = (error, maxBuffer) => {
  if (error?.maxBufferInfo === undefined) {
    return {streamName: 'output', threshold: maxBuffer[1], unit: 'bytes'};
  }

  const {maxBufferInfo: {fdNumber, unit}} = error;
  delete error.maxBufferInfo;

  const threshold = getFdSpecificValue(maxBuffer, fdNumber);
  if (fdNumber === 'ipc') {
    return {streamName: 'IPC output', threshold, unit: 'messages'};
  }

  return {streamName: getStreamName(fdNumber), threshold, unit};
};

// The only way to apply `maxBuffer` with `spawnSync()` is to use the native `maxBuffer` option Node.js provides.
// However, this has multiple limitations, and cannot behave the exact same way as the async behavior.
// When the `maxBuffer` is hit, an `ENOBUFS` error is thrown.
export const isMaxBufferSync = (resultError, output, maxBuffer) => resultError?.code === 'ENOBUFS'
  && output !== null
  && output.some(result => result !== null && result.length > getMaxBufferSync(maxBuffer));

// When `maxBuffer` is hit, ensure the result is truncated
export const truncateMaxBufferSync = (result, isMaxBuffer, maxBuffer) => {
  if (!isMaxBuffer) {
    return result;
  }

  const maxBufferValue = getMaxBufferSync(maxBuffer);
  return result.length > maxBufferValue ? result.slice(0, maxBufferValue) : result;
};

// `spawnSync()` does not allow differentiating `maxBuffer` per file descriptor, so we always use `stdout`
export const getMaxBufferSync = ([, stdoutMaxBuffer]) => stdoutMaxBuffer;
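How the pieces combine into the user-facing message, as a sketch:

import {execa} from 'execa';

try {
  await execa('node', ['-e', 'console.log("x".repeat(1e6))'], {maxBuffer: 1000});
} catch (error) {
  // error.message includes: Command's stdout was larger than 1000 characters
  // ('characters' because the default utf8 encoding is neither objectMode, lines, nor buffer)
  console.log(error.message);
}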
80
node_modules/execa/lib/io/output-async.js
generated
vendored
Normal file
@@ -0,0 +1,80 @@
import mergeStreams from '@sindresorhus/merge-streams';
import {isStandardStream} from '../utils/standard-stream.js';
import {incrementMaxListeners} from '../utils/max-listeners.js';
import {TRANSFORM_TYPES} from '../stdio/type.js';
import {pipeStreams} from './pipeline.js';

// Handle `input`, `inputFile`, `stdin`, `stdout` and `stderr` options, after spawning, in async mode
// When multiple input streams are used, we merge them to ensure the output stream ends only once each input stream has ended
export const pipeOutputAsync = (subprocess, fileDescriptors, controller) => {
	const pipeGroups = new Map();

	for (const [fdNumber, {stdioItems, direction}] of Object.entries(fileDescriptors)) {
		for (const {stream} of stdioItems.filter(({type}) => TRANSFORM_TYPES.has(type))) {
			pipeTransform(subprocess, stream, direction, fdNumber);
		}

		for (const {stream} of stdioItems.filter(({type}) => !TRANSFORM_TYPES.has(type))) {
			pipeStdioItem({
				subprocess,
				stream,
				direction,
				fdNumber,
				pipeGroups,
				controller,
			});
		}
	}

	for (const [outputStream, inputStreams] of pipeGroups.entries()) {
		const inputStream = inputStreams.length === 1 ? inputStreams[0] : mergeStreams(inputStreams);
		pipeStreams(inputStream, outputStream);
	}
};

// When using transforms, `subprocess.stdin|stdout|stderr|stdio` is directly mutated
const pipeTransform = (subprocess, stream, direction, fdNumber) => {
	if (direction === 'output') {
		pipeStreams(subprocess.stdio[fdNumber], stream);
	} else {
		pipeStreams(stream, subprocess.stdio[fdNumber]);
	}

	const streamProperty = SUBPROCESS_STREAM_PROPERTIES[fdNumber];
	if (streamProperty !== undefined) {
		subprocess[streamProperty] = stream;
	}

	subprocess.stdio[fdNumber] = stream;
};

const SUBPROCESS_STREAM_PROPERTIES = ['stdin', 'stdout', 'stderr'];

// Most `std*` option values involve piping `subprocess.std*` to a stream.
// The stream is either passed by the user or created internally.
const pipeStdioItem = ({subprocess, stream, direction, fdNumber, pipeGroups, controller}) => {
	if (stream === undefined) {
		return;
	}

	setStandardStreamMaxListeners(stream, controller);

	const [inputStream, outputStream] = direction === 'output'
		? [stream, subprocess.stdio[fdNumber]]
		: [subprocess.stdio[fdNumber], stream];
	const outputStreams = pipeGroups.get(inputStream) ?? [];
	pipeGroups.set(inputStream, [...outputStreams, outputStream]);
};

// Multiple subprocesses might be piping from/to `process.std*` at the same time.
// This is not necessarily an error and should not print a `maxListeners` warning.
const setStandardStreamMaxListeners = (stream, {signal}) => {
	if (isStandardStream(stream)) {
		incrementMaxListeners(stream, MAX_LISTENERS_INCREMENT, signal);
	}
};

// `source.pipe(destination)` adds at most 1 listener for each event.
// If the `stdin` option is an array, the values might be combined with `merge-streams`.
// That library also listens for `source` end, which adds 1 more listener.
const MAX_LISTENERS_INCREMENT = 2;
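As a usage sketch of the merging performed by pipeOutputAsync() above (the file names are hypothetical; the array form of the `stdin` option is taken from execa's documented API):

import {execa} from 'execa';

// Both inputs are merged into one stream, so `cat` sees end-of-input
// only once every source has ended.
const {stdout} = await execa('cat', {stdin: [{file: 'first.txt'}, {file: 'second.txt'}]});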
135
node_modules/execa/lib/io/output-sync.js
generated
vendored
Normal file
@@ -0,0 +1,135 @@
import {writeFileSync, appendFileSync} from 'node:fs';
import {shouldLogOutput, logLinesSync} from '../verbose/output.js';
import {runGeneratorsSync} from '../transform/generator.js';
import {splitLinesSync} from '../transform/split.js';
import {joinToString, joinToUint8Array, bufferToUint8Array} from '../utils/uint-array.js';
import {FILE_TYPES} from '../stdio/type.js';
import {truncateMaxBufferSync} from './max-buffer.js';

// Apply `stdout`/`stderr` options, after spawning, in sync mode
export const transformOutputSync = ({fileDescriptors, syncResult: {output}, options, isMaxBuffer, verboseInfo}) => {
	if (output === null) {
		return {output: Array.from({length: 3})};
	}

	const state = {};
	const outputFiles = new Set([]);
	const transformedOutput = output.map((result, fdNumber) => transformOutputResultSync({
		result,
		fileDescriptors,
		fdNumber,
		state,
		outputFiles,
		isMaxBuffer,
		verboseInfo,
	}, options));
	return {output: transformedOutput, ...state};
};

const transformOutputResultSync = (
	{result, fileDescriptors, fdNumber, state, outputFiles, isMaxBuffer, verboseInfo},
	{buffer, encoding, lines, stripFinalNewline, maxBuffer},
) => {
	if (result === null) {
		return;
	}

	const truncatedResult = truncateMaxBufferSync(result, isMaxBuffer, maxBuffer);
	const uint8ArrayResult = bufferToUint8Array(truncatedResult);
	const {stdioItems, objectMode} = fileDescriptors[fdNumber];
	const chunks = runOutputGeneratorsSync([uint8ArrayResult], stdioItems, encoding, state);
	const {serializedResult, finalResult = serializedResult} = serializeChunks({
		chunks,
		objectMode,
		encoding,
		lines,
		stripFinalNewline,
		fdNumber,
	});

	logOutputSync({
		serializedResult,
		fdNumber,
		state,
		verboseInfo,
		encoding,
		stdioItems,
		objectMode,
	});

	const returnedResult = buffer[fdNumber] ? finalResult : undefined;

	try {
		if (state.error === undefined) {
			writeToFiles(serializedResult, stdioItems, outputFiles);
		}

		return returnedResult;
	} catch (error) {
		state.error = error;
		return returnedResult;
	}
};

// Applies transform generators to `stdout`/`stderr`
const runOutputGeneratorsSync = (chunks, stdioItems, encoding, state) => {
	try {
		return runGeneratorsSync(chunks, stdioItems, encoding, false);
	} catch (error) {
		state.error = error;
		return chunks;
	}
};

// The contents go through multiple conversion stages:
// - serializedResult: used when the target is a file path/URL or a file descriptor (including 'inherit')
// - finalResult/returnedResult: returned as `result.std*`
const serializeChunks = ({chunks, objectMode, encoding, lines, stripFinalNewline, fdNumber}) => {
	if (objectMode) {
		return {serializedResult: chunks};
	}

	if (encoding === 'buffer') {
		return {serializedResult: joinToUint8Array(chunks)};
	}

	const serializedResult = joinToString(chunks, encoding);
	if (lines[fdNumber]) {
		return {serializedResult, finalResult: splitLinesSync(serializedResult, !stripFinalNewline[fdNumber], objectMode)};
	}

	return {serializedResult};
};

const logOutputSync = ({serializedResult, fdNumber, state, verboseInfo, encoding, stdioItems, objectMode}) => {
	if (!shouldLogOutput({
		stdioItems,
		encoding,
		verboseInfo,
		fdNumber,
	})) {
		return;
	}

	const linesArray = splitLinesSync(serializedResult, false, objectMode);

	try {
		logLinesSync(linesArray, fdNumber, verboseInfo);
	} catch (error) {
		state.error ??= error;
	}
};

// When the `std*` target is a file path/URL or a file descriptor
const writeToFiles = (serializedResult, stdioItems, outputFiles) => {
	for (const {path, append} of stdioItems.filter(({type}) => FILE_TYPES.has(type))) {
		const pathString = typeof path === 'string' ? path : path.toString();
		if (append || outputFiles.has(pathString)) {
			appendFileSync(path, serializedResult);
		} else {
			outputFiles.add(pathString);
			writeFileSync(path, serializedResult);
		}
	}
};
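A small sketch of the `lines` path through serializeChunks() above, assuming execaSync's documented `lines` option:

import {execaSync} from 'execa';

const {stdout} = execaSync('node', ['-e', 'console.log("a\\nb")'], {lines: true});
// splitLinesSync() produced an array: ['a', 'b']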
48
node_modules/execa/lib/io/pipeline.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
import {finished} from 'node:stream/promises';
import {isStandardStream} from '../utils/standard-stream.js';

// Similar to `Stream.pipeline(source, destination)`, but does not destroy standard streams
export const pipeStreams = (source, destination) => {
	source.pipe(destination);
	onSourceFinish(source, destination);
	onDestinationFinish(source, destination);
};

// `source.pipe(destination)` makes `destination` end when `source` ends.
// But it does not propagate aborts or errors. This function does it.
const onSourceFinish = async (source, destination) => {
	if (isStandardStream(source) || isStandardStream(destination)) {
		return;
	}

	try {
		await finished(source, {cleanup: true, readable: true, writable: false});
	} catch {}

	endDestinationStream(destination);
};

export const endDestinationStream = destination => {
	if (destination.writable) {
		destination.end();
	}
};

// We do the same thing in the other direction as well.
const onDestinationFinish = async (source, destination) => {
	if (isStandardStream(source) || isStandardStream(destination)) {
		return;
	}

	try {
		await finished(destination, {cleanup: true, readable: false, writable: true});
	} catch {}

	abortSourceStream(source);
};

export const abortSourceStream = source => {
	if (source.readable) {
		source.destroy();
	}
};
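The same propagation idea, as a standalone sketch using plain Node.js streams (illustrative only; this is not execa's code path):

import {PassThrough} from 'node:stream';
import {finished} from 'node:stream/promises';

const source = new PassThrough();
const destination = new PassThrough();
source.pipe(destination);

// Mirror onSourceFinish(): when the source ends, errors or aborts, end the destination
finished(source, {cleanup: true, readable: true, writable: false})
	.catch(() => {})
	.finally(() => {
		if (destination.writable) {
			destination.end();
		}
	});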
12
node_modules/execa/lib/io/strip-newline.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
import stripFinalNewlineFunction from 'strip-final-newline';

// Apply `stripFinalNewline` option, which applies to `result.stdout|stderr|all|stdio[*]`.
// If the `lines` option is used, it is applied on each line, but using a different function.
export const stripNewline = (value, {stripFinalNewline}, fdNumber) => getStripFinalNewline(stripFinalNewline, fdNumber) && value !== undefined && !Array.isArray(value)
	? stripFinalNewlineFunction(value)
	: value;

// Retrieve `stripFinalNewline` option value, including with `subprocess.all`
export const getStripFinalNewline = (stripFinalNewline, fdNumber) => fdNumber === 'all'
	? stripFinalNewline[1] || stripFinalNewline[2]
	: stripFinalNewline[fdNumber];
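For illustration, the observable effect of stripNewline(), using execa's documented `stripFinalNewline` option:

import {execa} from 'execa';

const {stdout} = await execa('echo', ['hello']);
// stdout === 'hello' (the trailing newline was stripped)
const {stdout: raw} = await execa('echo', ['hello'], {stripFinalNewline: false});
// raw === 'hello\n'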
4
node_modules/execa/lib/ipc/array.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
// The `ipc` option adds an `ipc` item to the `stdio` option
export const normalizeIpcStdioArray = (stdioArray, ipc) => ipc && !stdioArray.includes('ipc')
	? [...stdioArray, 'ipc']
	: stdioArray;
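A quick sketch of the normalization above (importing directly from the vendored file, purely for illustration):

import {normalizeIpcStdioArray} from './node_modules/execa/lib/ipc/array.js';

normalizeIpcStdioArray(['pipe', 'pipe', 'pipe'], true);
// ['pipe', 'pipe', 'pipe', 'ipc']
normalizeIpcStdioArray(['pipe', 'pipe', 'pipe', 'ipc'], true);
// unchanged: 'ipc' is not added twice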
47
node_modules/execa/lib/ipc/buffer-messages.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
import {checkIpcMaxBuffer} from '../io/max-buffer.js';
import {shouldLogIpc, logIpcOutput} from '../verbose/ipc.js';
import {getFdSpecificValue} from '../arguments/specific.js';
import {loopOnMessages} from './get-each.js';

// Iterate through IPC messages sent by the subprocess
export const waitForIpcOutput = async ({
	subprocess,
	buffer: bufferArray,
	maxBuffer: maxBufferArray,
	ipc,
	ipcOutput,
	verboseInfo,
}) => {
	if (!ipc) {
		return ipcOutput;
	}

	const isVerbose = shouldLogIpc(verboseInfo);
	const buffer = getFdSpecificValue(bufferArray, 'ipc');
	const maxBuffer = getFdSpecificValue(maxBufferArray, 'ipc');

	for await (const message of loopOnMessages({
		anyProcess: subprocess,
		channel: subprocess.channel,
		isSubprocess: false,
		ipc,
		shouldAwait: false,
		reference: true,
	})) {
		if (buffer) {
			checkIpcMaxBuffer(subprocess, ipcOutput, maxBuffer);
			ipcOutput.push(message);
		}

		if (isVerbose) {
			logIpcOutput(message, verboseInfo);
		}
	}

	return ipcOutput;
};

export const getBufferedIpcOutput = async (ipcOutputPromise, ipcOutput) => {
	await Promise.allSettled([ipcOutputPromise]);
	return ipcOutput;
};
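A usage sketch of the buffering above ('child.js' is a hypothetical script that calls sendMessage()):

import {execa} from 'execa';

const {ipcOutput} = await execa('node', ['child.js'], {ipc: true});
// Every message the child sent is collected, in order, by waitForIpcOutput()
console.log(ipcOutput);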
56
node_modules/execa/lib/ipc/forward.js
generated
vendored
Normal file
@@ -0,0 +1,56 @@
import {EventEmitter} from 'node:events';
import {onMessage, onDisconnect} from './incoming.js';
import {undoAddedReferences} from './reference.js';

// Forward the `message` and `disconnect` events from the process and subprocess to a proxy emitter.
// This prevents the `error` event from stopping IPC.
// This also allows debouncing the `message` event.
export const getIpcEmitter = (anyProcess, channel, isSubprocess) => {
	if (IPC_EMITTERS.has(anyProcess)) {
		return IPC_EMITTERS.get(anyProcess);
	}

	// Use an `EventEmitter`, like the `process` that is being proxied
	// eslint-disable-next-line unicorn/prefer-event-target
	const ipcEmitter = new EventEmitter();
	ipcEmitter.connected = true;
	IPC_EMITTERS.set(anyProcess, ipcEmitter);
	forwardEvents({
		ipcEmitter,
		anyProcess,
		channel,
		isSubprocess,
	});
	return ipcEmitter;
};

const IPC_EMITTERS = new WeakMap();

// The `message` and `disconnect` events are buffered in the subprocess until the first listener is set up.
// However, unbuffering happens after one tick, so this gives the caller enough time to set up the listener on the proxy emitter first.
// See https://github.com/nodejs/node/blob/2aaeaa863c35befa2ebaa98fb7737ec84df4d8e9/lib/internal/child_process.js#L721
const forwardEvents = ({ipcEmitter, anyProcess, channel, isSubprocess}) => {
	const boundOnMessage = onMessage.bind(undefined, {
		anyProcess,
		channel,
		isSubprocess,
		ipcEmitter,
	});
	anyProcess.on('message', boundOnMessage);
	anyProcess.once('disconnect', onDisconnect.bind(undefined, {
		anyProcess,
		channel,
		isSubprocess,
		ipcEmitter,
		boundOnMessage,
	}));
	undoAddedReferences(channel, isSubprocess);
};

// Check whether there might still be some `message` events to receive
export const isConnected = anyProcess => {
	const ipcEmitter = IPC_EMITTERS.get(anyProcess);
	return ipcEmitter === undefined
		? anyProcess.channel !== null
		: ipcEmitter.connected;
};
89
node_modules/execa/lib/ipc/get-each.js
generated
vendored
Normal file
@@ -0,0 +1,89 @@
import {once, on} from 'node:events';
import {validateIpcMethod, disconnect, getStrictResponseError} from './validation.js';
import {getIpcEmitter, isConnected} from './forward.js';
import {addReference, removeReference} from './reference.js';

// Like `[sub]process.on('message')` but promise-based
export const getEachMessage = ({anyProcess, channel, isSubprocess, ipc}, {reference = true} = {}) => loopOnMessages({
	anyProcess,
	channel,
	isSubprocess,
	ipc,
	shouldAwait: !isSubprocess,
	reference,
});

// Same but used internally
export const loopOnMessages = ({anyProcess, channel, isSubprocess, ipc, shouldAwait, reference}) => {
	validateIpcMethod({
		methodName: 'getEachMessage',
		isSubprocess,
		ipc,
		isConnected: isConnected(anyProcess),
	});

	addReference(channel, reference);
	const ipcEmitter = getIpcEmitter(anyProcess, channel, isSubprocess);
	const controller = new AbortController();
	const state = {};
	stopOnDisconnect(anyProcess, ipcEmitter, controller);
	abortOnStrictError({
		ipcEmitter,
		isSubprocess,
		controller,
		state,
	});
	return iterateOnMessages({
		anyProcess,
		channel,
		ipcEmitter,
		isSubprocess,
		shouldAwait,
		controller,
		state,
		reference,
	});
};

const stopOnDisconnect = async (anyProcess, ipcEmitter, controller) => {
	try {
		await once(ipcEmitter, 'disconnect', {signal: controller.signal});
		controller.abort();
	} catch {}
};

const abortOnStrictError = async ({ipcEmitter, isSubprocess, controller, state}) => {
	try {
		const [error] = await once(ipcEmitter, 'strict:error', {signal: controller.signal});
		state.error = getStrictResponseError(error, isSubprocess);
		controller.abort();
	} catch {}
};

const iterateOnMessages = async function * ({anyProcess, channel, ipcEmitter, isSubprocess, shouldAwait, controller, state, reference}) {
	try {
		for await (const [message] of on(ipcEmitter, 'message', {signal: controller.signal})) {
			throwIfStrictError(state);
			yield message;
		}
	} catch {
		throwIfStrictError(state);
	} finally {
		controller.abort();
		removeReference(channel, reference);

		if (!isSubprocess) {
			disconnect(anyProcess);
		}

		if (shouldAwait) {
			await anyProcess;
		}
	}
};

const throwIfStrictError = ({error}) => {
	if (error) {
		throw error;
	}
};
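A sketch of the promise-based iteration above, using execa's documented subprocess.getEachMessage():

import {execa} from 'execa';

const subprocess = execa('node', ['child.js'], {ipc: true}); // 'child.js' is hypothetical

for await (const message of subprocess.getEachMessage()) {
	// The loop ends when the IPC channel disconnects
	console.log(message);
}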
69
node_modules/execa/lib/ipc/get-one.js
generated
vendored
Normal file
@@ -0,0 +1,69 @@
import {once, on} from 'node:events';
import {
	validateIpcMethod,
	throwOnEarlyDisconnect,
	disconnect,
	getStrictResponseError,
} from './validation.js';
import {getIpcEmitter, isConnected} from './forward.js';
import {addReference, removeReference} from './reference.js';

// Like `[sub]process.once('message')` but promise-based
export const getOneMessage = ({anyProcess, channel, isSubprocess, ipc}, {reference = true, filter} = {}) => {
	validateIpcMethod({
		methodName: 'getOneMessage',
		isSubprocess,
		ipc,
		isConnected: isConnected(anyProcess),
	});

	return getOneMessageAsync({
		anyProcess,
		channel,
		isSubprocess,
		filter,
		reference,
	});
};

const getOneMessageAsync = async ({anyProcess, channel, isSubprocess, filter, reference}) => {
	addReference(channel, reference);
	const ipcEmitter = getIpcEmitter(anyProcess, channel, isSubprocess);
	const controller = new AbortController();
	try {
		return await Promise.race([
			getMessage(ipcEmitter, filter, controller),
			throwOnDisconnect(ipcEmitter, isSubprocess, controller),
			throwOnStrictError(ipcEmitter, isSubprocess, controller),
		]);
	} catch (error) {
		disconnect(anyProcess);
		throw error;
	} finally {
		controller.abort();
		removeReference(channel, reference);
	}
};

const getMessage = async (ipcEmitter, filter, {signal}) => {
	if (filter === undefined) {
		const [message] = await once(ipcEmitter, 'message', {signal});
		return message;
	}

	for await (const [message] of on(ipcEmitter, 'message', {signal})) {
		if (filter(message)) {
			return message;
		}
	}
};

const throwOnDisconnect = async (ipcEmitter, isSubprocess, {signal}) => {
	await once(ipcEmitter, 'disconnect', {signal});
	throwOnEarlyDisconnect(isSubprocess);
};

const throwOnStrictError = async (ipcEmitter, isSubprocess, {signal}) => {
	const [error] = await once(ipcEmitter, 'strict:error', {signal});
	throw getStrictResponseError(error, isSubprocess);
};
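A sketch of the `filter` path above, using execa's documented getOneMessage() option (the message shape is hypothetical):

import {execa} from 'execa';

const subprocess = execa('node', ['child.js'], {ipc: true}); // 'child.js' is hypothetical
// Resolves with the first message matching the filter; earlier
// non-matching messages are skipped by the `for await` loop above.
const message = await subprocess.getOneMessage({
	filter: receivedMessage => receivedMessage?.type === 'ready',
});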
72
node_modules/execa/lib/ipc/graceful.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
import {scheduler} from 'node:timers/promises';
import {sendOneMessage} from './send.js';
import {getIpcEmitter} from './forward.js';
import {validateConnection, getAbortDisconnectError, throwOnMissingParent} from './validation.js';

// Send an IPC message so the subprocess performs a graceful termination
export const sendAbort = (subprocess, message) => {
	const methodName = 'cancelSignal';
	validateConnection(methodName, false, subprocess.connected);
	return sendOneMessage({
		anyProcess: subprocess,
		methodName,
		isSubprocess: false,
		wrappedMessage: {type: GRACEFUL_CANCEL_TYPE, message},
		message,
	});
};

// When the signal is being used, start listening for incoming messages.
// Unbuffering messages takes one microtask to complete, so this must be async.
export const getCancelSignal = async ({anyProcess, channel, isSubprocess, ipc}) => {
	await startIpc({
		anyProcess,
		channel,
		isSubprocess,
		ipc,
	});
	return cancelController.signal;
};

const startIpc = async ({anyProcess, channel, isSubprocess, ipc}) => {
	if (cancelListening) {
		return;
	}

	cancelListening = true;

	if (!ipc) {
		throwOnMissingParent();
		return;
	}

	if (channel === null) {
		abortOnDisconnect();
		return;
	}

	getIpcEmitter(anyProcess, channel, isSubprocess);
	await scheduler.yield();
};

let cancelListening = false;

// Reception of IPC message to perform a graceful termination
export const handleAbort = wrappedMessage => {
	if (wrappedMessage?.type !== GRACEFUL_CANCEL_TYPE) {
		return false;
	}

	cancelController.abort(wrappedMessage.message);
	return true;
};

const GRACEFUL_CANCEL_TYPE = 'execa:ipc:cancel';

// When the current process disconnects early, the subprocess `cancelSignal` is aborted.
// Otherwise, the signal would never be able to be aborted later on.
export const abortOnDisconnect = () => {
	cancelController.abort(getAbortDisconnectError());
};

const cancelController = new AbortController();
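A sketch of graceful termination wired through the code above, as two separate files; the `gracefulCancel` option and `getCancelSignal()` export are assumptions taken from execa's documented API, and 'child.js' is hypothetical:

// parent.js
import {execa} from 'execa';

const controller = new AbortController();
const subprocess = execa('node', ['child.js'], {
	cancelSignal: controller.signal,
	gracefulCancel: true,
	ipc: true,
});
controller.abort('cleanup please'); // sendAbort() forwards this as an IPC message

// child.js
import {getCancelSignal} from 'execa';

const cancelSignal = await getCancelSignal();
cancelSignal.addEventListener('abort', () => {
	// cancelSignal.reason === 'cleanup please'
	process.exit(0);
});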
79
node_modules/execa/lib/ipc/incoming.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
import {once} from 'node:events';
import {scheduler} from 'node:timers/promises';
import {waitForOutgoingMessages} from './outgoing.js';
import {redoAddedReferences} from './reference.js';
import {handleStrictRequest, handleStrictResponse} from './strict.js';
import {handleAbort, abortOnDisconnect} from './graceful.js';

// By default, Node.js buffers `message` events.
// - Buffering happens when a `message` event is emitted but there is no handler.
// - As soon as a `message` event handler is set, all buffered `message` events are emitted, emptying the buffer.
// - This happens both in the current process and the subprocess.
// - See https://github.com/nodejs/node/blob/501546e8f37059cd577041e23941b640d0d4d406/lib/internal/child_process.js#L719
// This is helpful. Notably, this allows sending messages to a subprocess that's still initializing.
// However, it has several problems.
// - This works with `events.on()` but not `events.once()` since all buffered messages are emitted at once.
//   For example, users cannot call `await getOneMessage()`/`getEachMessage()` multiple times in a row.
// - When a user intentionally starts listening to `message` at a specific point in time, past `message` events are replayed, which might be unexpected.
// - Buffering is unlimited, which might lead to an out-of-memory crash.
// - This does not work well with multiple consumers.
//   For example, Execa consumes events with both `result.ipcOutput` and manual IPC calls like `getOneMessage()`.
//   Since `result.ipcOutput` reads all incoming messages, no buffering happens for manual IPC calls.
// - Forgetting to set up a `message` listener, or setting it up too late, is a programming mistake.
//   The default behavior does not allow users to realize they made that mistake.
// To solve those problems, instead of buffering messages, we debounce them:
// the `message` event is emitted at most once per macrotask.
export const onMessage = async ({anyProcess, channel, isSubprocess, ipcEmitter}, wrappedMessage) => {
	if (handleStrictResponse(wrappedMessage) || handleAbort(wrappedMessage)) {
		return;
	}

	if (!INCOMING_MESSAGES.has(anyProcess)) {
		INCOMING_MESSAGES.set(anyProcess, []);
	}

	const incomingMessages = INCOMING_MESSAGES.get(anyProcess);
	incomingMessages.push(wrappedMessage);

	if (incomingMessages.length > 1) {
		return;
	}

	while (incomingMessages.length > 0) {
		// eslint-disable-next-line no-await-in-loop
		await waitForOutgoingMessages(anyProcess, ipcEmitter, wrappedMessage);
		// eslint-disable-next-line no-await-in-loop
		await scheduler.yield();

		// eslint-disable-next-line no-await-in-loop
		const message = await handleStrictRequest({
			wrappedMessage: incomingMessages[0],
			anyProcess,
			channel,
			isSubprocess,
			ipcEmitter,
		});

		incomingMessages.shift();
		ipcEmitter.emit('message', message);
		ipcEmitter.emit('message:done');
	}
};

// If the `message` event is currently debounced, the `disconnect` event must wait for it
export const onDisconnect = async ({anyProcess, channel, isSubprocess, ipcEmitter, boundOnMessage}) => {
	abortOnDisconnect();

	const incomingMessages = INCOMING_MESSAGES.get(anyProcess);
	while (incomingMessages?.length > 0) {
		// eslint-disable-next-line no-await-in-loop
		await once(ipcEmitter, 'message:done');
	}

	anyProcess.removeListener('message', boundOnMessage);
	redoAddedReferences(channel, isSubprocess);
	ipcEmitter.connected = false;
	ipcEmitter.emit('disconnect');
};

const INCOMING_MESSAGES = new WeakMap();
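The practical consequence of debouncing rather than buffering, as a sketch ('child.js' is hypothetical):

import {execa} from 'execa';

const subprocess = execa('node', ['child.js'], {ipc: true});
// With Node.js' default buffering, all pending messages would be replayed to the
// first listener at once; debouncing lets each call observe one message.
const first = await subprocess.getOneMessage();
const second = await subprocess.getOneMessage();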
44
node_modules/execa/lib/ipc/ipc-input.js
generated
vendored
Normal file
@@ -0,0 +1,44 @@
import {serialize} from 'node:v8';

// Validate the `ipcInput` option
export const validateIpcInputOption = ({ipcInput, ipc, serialization}) => {
	if (ipcInput === undefined) {
		return;
	}

	if (!ipc) {
		throw new Error('The `ipcInput` option cannot be set unless the `ipc` option is `true`.');
	}

	validateIpcInput[serialization](ipcInput);
};

const validateAdvancedInput = ipcInput => {
	try {
		serialize(ipcInput);
	} catch (error) {
		throw new Error('The `ipcInput` option is not serializable with a structured clone.', {cause: error});
	}
};

const validateJsonInput = ipcInput => {
	try {
		JSON.stringify(ipcInput);
	} catch (error) {
		throw new Error('The `ipcInput` option is not serializable with JSON.', {cause: error});
	}
};

const validateIpcInput = {
	advanced: validateAdvancedInput,
	json: validateJsonInput,
};

// When the `ipcInput` option is set, it is sent as an initial IPC message to the subprocess
export const sendIpcInput = async (subprocess, ipcInput) => {
	if (ipcInput === undefined) {
		return;
	}

	await subprocess.sendMessage(ipcInput);
};
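A sketch of the `ipcInput` flow validated above ('child.js' is hypothetical):

import {execa} from 'execa';

// The value is validated as serializable, then sent as the first IPC message
await execa('node', ['child.js'], {ipc: true, ipcInput: {environment: 'test'}});

// In the child, it arrives like any other message:
// const input = await getOneMessage();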
49
node_modules/execa/lib/ipc/methods.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
import process from 'node:process';
import {sendMessage} from './send.js';
import {getOneMessage} from './get-one.js';
import {getEachMessage} from './get-each.js';
import {getCancelSignal} from './graceful.js';

// Add promise-based IPC methods in current process
export const addIpcMethods = (subprocess, {ipc}) => {
	Object.assign(subprocess, getIpcMethods(subprocess, false, ipc));
};

// Get promise-based IPC methods in the subprocess
export const getIpcExport = () => {
	const anyProcess = process;
	const isSubprocess = true;
	const ipc = process.channel !== undefined;

	return {
		...getIpcMethods(anyProcess, isSubprocess, ipc),
		getCancelSignal: getCancelSignal.bind(undefined, {
			anyProcess,
			channel: anyProcess.channel,
			isSubprocess,
			ipc,
		}),
	};
};

// Retrieve the IPC methods shared by both the current process and the subprocess
const getIpcMethods = (anyProcess, isSubprocess, ipc) => ({
	sendMessage: sendMessage.bind(undefined, {
		anyProcess,
		channel: anyProcess.channel,
		isSubprocess,
		ipc,
	}),
	getOneMessage: getOneMessage.bind(undefined, {
		anyProcess,
		channel: anyProcess.channel,
		isSubprocess,
		ipc,
	}),
	getEachMessage: getEachMessage.bind(undefined, {
		anyProcess,
		channel: anyProcess.channel,
		isSubprocess,
		ipc,
	}),
});
47
node_modules/execa/lib/ipc/outgoing.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
import {createDeferred} from '../utils/deferred.js';
import {getFdSpecificValue} from '../arguments/specific.js';
import {SUBPROCESS_OPTIONS} from '../arguments/fd-options.js';
import {validateStrictDeadlock} from './strict.js';

// When `sendMessage()` is ongoing, any `message` being received waits before being emitted.
// This allows calling one or multiple `await sendMessage()` followed by `await getOneMessage()`/`await getEachMessage()`
// without running into a race condition when the other process sends a response too fast, before the current process has set up a listener.
export const startSendMessage = (anyProcess, wrappedMessage, strict) => {
	if (!OUTGOING_MESSAGES.has(anyProcess)) {
		OUTGOING_MESSAGES.set(anyProcess, new Set());
	}

	const outgoingMessages = OUTGOING_MESSAGES.get(anyProcess);
	const onMessageSent = createDeferred();
	const id = strict ? wrappedMessage.id : undefined;
	const outgoingMessage = {onMessageSent, id};
	outgoingMessages.add(outgoingMessage);
	return {outgoingMessages, outgoingMessage};
};

export const endSendMessage = ({outgoingMessages, outgoingMessage}) => {
	outgoingMessages.delete(outgoingMessage);
	outgoingMessage.onMessageSent.resolve();
};

// Await while `sendMessage()` is ongoing, unless there is already a `message` listener
export const waitForOutgoingMessages = async (anyProcess, ipcEmitter, wrappedMessage) => {
	while (!hasMessageListeners(anyProcess, ipcEmitter) && OUTGOING_MESSAGES.get(anyProcess)?.size > 0) {
		const outgoingMessages = [...OUTGOING_MESSAGES.get(anyProcess)];
		validateStrictDeadlock(outgoingMessages, wrappedMessage);
		// eslint-disable-next-line no-await-in-loop
		await Promise.all(outgoingMessages.map(({onMessageSent}) => onMessageSent));
	}
};

const OUTGOING_MESSAGES = new WeakMap();

// Whether any `message` listener is set up
export const hasMessageListeners = (anyProcess, ipcEmitter) => ipcEmitter.listenerCount('message') > getMinListenerCount(anyProcess);

// When `buffer` is `false`, we set up a `message` listener that should be ignored.
// That listener is only meant to intercept `strict` acknowledgement responses.
const getMinListenerCount = anyProcess => SUBPROCESS_OPTIONS.has(anyProcess)
	&& !getFdSpecificValue(SUBPROCESS_OPTIONS.get(anyProcess).options.buffer, 'ipc')
	? 1
	: 0;
44
node_modules/execa/lib/ipc/reference.js
generated
vendored
Normal file
@@ -0,0 +1,44 @@
// By default, Node.js keeps the subprocess alive while it has a `message` or `disconnect` listener.
// We replicate the same logic for the events that we proxy.
// This ensures the subprocess is kept alive while `getOneMessage()` and `getEachMessage()` are ongoing.
// This is not a problem with `sendMessage()` since Node.js handles that method automatically.
// We do not use `anyProcess.channel.ref()` since this would prevent the automatic `.channel.refCounted()` that Node.js is doing.
// We keep a reference to `anyProcess.channel` since it might be `null` while `getOneMessage()` or `getEachMessage()` is still processing debounced messages.
// See https://github.com/nodejs/node/blob/2aaeaa863c35befa2ebaa98fb7737ec84df4d8e9/lib/internal/child_process.js#L547
export const addReference = (channel, reference) => {
	if (reference) {
		addReferenceCount(channel);
	}
};

const addReferenceCount = channel => {
	channel.refCounted();
};

export const removeReference = (channel, reference) => {
	if (reference) {
		removeReferenceCount(channel);
	}
};

const removeReferenceCount = channel => {
	channel.unrefCounted();
};

// To proxy events, we set up some global listeners on the `message` and `disconnect` events.
// Those should not keep the subprocess alive, so we remove the automatic counting that Node.js is doing.
// See https://github.com/nodejs/node/blob/1b965270a9c273d4cf70e8808e9d28b9ada7844f/lib/child_process.js#L180
export const undoAddedReferences = (channel, isSubprocess) => {
	if (isSubprocess) {
		removeReferenceCount(channel);
		removeReferenceCount(channel);
	}
};

// Reverse it during `disconnect`
export const redoAddedReferences = (channel, isSubprocess) => {
	if (isSubprocess) {
		addReferenceCount(channel);
		addReferenceCount(channel);
	}
};
91
node_modules/execa/lib/ipc/send.js
generated
vendored
Normal file
@@ -0,0 +1,91 @@
import {promisify} from 'node:util';
import {
	validateIpcMethod,
	handleEpipeError,
	handleSerializationError,
	disconnect,
} from './validation.js';
import {startSendMessage, endSendMessage} from './outgoing.js';
import {handleSendStrict, waitForStrictResponse} from './strict.js';

// Like `[sub]process.send()` but promise-based.
// We do not `await subprocess` during `.sendMessage()` nor `.getOneMessage()` since those methods are transient.
// Users would still need to `await subprocess` after the method is done.
// Also, this would prevent the `unhandledRejection` event from being emitted, making it silent.
export const sendMessage = ({anyProcess, channel, isSubprocess, ipc}, message, {strict = false} = {}) => {
	const methodName = 'sendMessage';
	validateIpcMethod({
		methodName,
		isSubprocess,
		ipc,
		isConnected: anyProcess.connected,
	});

	return sendMessageAsync({
		anyProcess,
		channel,
		methodName,
		isSubprocess,
		message,
		strict,
	});
};

const sendMessageAsync = async ({anyProcess, channel, methodName, isSubprocess, message, strict}) => {
	const wrappedMessage = handleSendStrict({
		anyProcess,
		channel,
		isSubprocess,
		message,
		strict,
	});
	const outgoingMessagesState = startSendMessage(anyProcess, wrappedMessage, strict);
	try {
		await sendOneMessage({
			anyProcess,
			methodName,
			isSubprocess,
			wrappedMessage,
			message,
		});
	} catch (error) {
		disconnect(anyProcess);
		throw error;
	} finally {
		endSendMessage(outgoingMessagesState);
	}
};

// Used internally by `cancelSignal`
export const sendOneMessage = async ({anyProcess, methodName, isSubprocess, wrappedMessage, message}) => {
	const sendMethod = getSendMethod(anyProcess);

	try {
		await Promise.all([
			waitForStrictResponse(wrappedMessage, anyProcess, isSubprocess),
			sendMethod(wrappedMessage),
		]);
	} catch (error) {
		handleEpipeError({error, methodName, isSubprocess});
		handleSerializationError({
			error,
			methodName,
			isSubprocess,
			message,
		});
		throw error;
	}
};

// [sub]process.send() promisified, memoized
const getSendMethod = anyProcess => {
	if (PROCESS_SEND_METHODS.has(anyProcess)) {
		return PROCESS_SEND_METHODS.get(anyProcess);
	}

	const sendMethod = promisify(anyProcess.send.bind(anyProcess));
	PROCESS_SEND_METHODS.set(anyProcess, sendMethod);
	return sendMethod;
};

const PROCESS_SEND_METHODS = new WeakMap();
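A usage sketch of sendMessage() and the serialization errors handled above ('child.js' is hypothetical):

import {execa} from 'execa';

const subprocess = execa('node', ['child.js'], {ipc: true});
await subprocess.sendMessage({status: 'ok'});

// Values that cannot be cloned trigger handleSerializationError():
// await subprocess.sendMessage(Symbol('nope')); // throws a descriptive error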
113
node_modules/execa/lib/ipc/strict.js
generated
vendored
Normal file
@@ -0,0 +1,113 @@
import {once} from 'node:events';
import {createDeferred} from '../utils/deferred.js';
import {incrementMaxListeners} from '../utils/max-listeners.js';
import {sendMessage} from './send.js';
import {throwOnMissingStrict, throwOnStrictDisconnect, throwOnStrictDeadlockError} from './validation.js';
import {getIpcEmitter} from './forward.js';
import {hasMessageListeners} from './outgoing.js';

// When using the `strict` option, wrap the message with metadata during `sendMessage()`
export const handleSendStrict = ({anyProcess, channel, isSubprocess, message, strict}) => {
	if (!strict) {
		return message;
	}

	const ipcEmitter = getIpcEmitter(anyProcess, channel, isSubprocess);
	const hasListeners = hasMessageListeners(anyProcess, ipcEmitter);
	return {
		id: count++,
		type: REQUEST_TYPE,
		message,
		hasListeners,
	};
};

let count = 0n;

// Handles when both processes are calling `sendMessage()` with `strict` at the same time.
// If neither process is listening, this would create a deadlock. We detect it and throw.
export const validateStrictDeadlock = (outgoingMessages, wrappedMessage) => {
	if (wrappedMessage?.type !== REQUEST_TYPE || wrappedMessage.hasListeners) {
		return;
	}

	for (const {id} of outgoingMessages) {
		if (id !== undefined) {
			STRICT_RESPONSES[id].resolve({isDeadlock: true, hasListeners: false});
		}
	}
};

// The other process then sends the acknowledgment back as a response
export const handleStrictRequest = async ({wrappedMessage, anyProcess, channel, isSubprocess, ipcEmitter}) => {
	if (wrappedMessage?.type !== REQUEST_TYPE || !anyProcess.connected) {
		return wrappedMessage;
	}

	const {id, message} = wrappedMessage;
	const response = {id, type: RESPONSE_TYPE, message: hasMessageListeners(anyProcess, ipcEmitter)};

	try {
		await sendMessage({
			anyProcess,
			channel,
			isSubprocess,
			ipc: true,
		}, response);
	} catch (error) {
		ipcEmitter.emit('strict:error', error);
	}

	return message;
};

// Reception of the acknowledgment response
export const handleStrictResponse = wrappedMessage => {
	if (wrappedMessage?.type !== RESPONSE_TYPE) {
		return false;
	}

	const {id, message: hasListeners} = wrappedMessage;
	STRICT_RESPONSES[id]?.resolve({isDeadlock: false, hasListeners});
	return true;
};

// Wait for the other process to receive the message from `sendMessage()`
export const waitForStrictResponse = async (wrappedMessage, anyProcess, isSubprocess) => {
	if (wrappedMessage?.type !== REQUEST_TYPE) {
		return;
	}

	const deferred = createDeferred();
	STRICT_RESPONSES[wrappedMessage.id] = deferred;
	const controller = new AbortController();

	try {
		const {isDeadlock, hasListeners} = await Promise.race([
			deferred,
			throwOnDisconnect(anyProcess, isSubprocess, controller),
		]);

		if (isDeadlock) {
			throwOnStrictDeadlockError(isSubprocess);
		}

		if (!hasListeners) {
			throwOnMissingStrict(isSubprocess);
		}
	} finally {
		controller.abort();
		delete STRICT_RESPONSES[wrappedMessage.id];
	}
};

const STRICT_RESPONSES = {};

const throwOnDisconnect = async (anyProcess, isSubprocess, {signal}) => {
	incrementMaxListeners(anyProcess, 1, signal);
	await once(anyProcess, 'disconnect', {signal});
	throwOnStrictDisconnect(isSubprocess);
};

const REQUEST_TYPE = 'execa:ipc:request';
const RESPONSE_TYPE = 'execa:ipc:response';
111
node_modules/execa/lib/ipc/validation.js
generated
vendored
Normal file
@@ -0,0 +1,111 @@
// Validate the IPC channel is connected before receiving/sending messages
export const validateIpcMethod = ({methodName, isSubprocess, ipc, isConnected}) => {
	validateIpcOption(methodName, isSubprocess, ipc);
	validateConnection(methodName, isSubprocess, isConnected);
};

// Better error message when forgetting to set `ipc: true` and using the IPC methods
const validateIpcOption = (methodName, isSubprocess, ipc) => {
	if (!ipc) {
		throw new Error(`${getMethodName(methodName, isSubprocess)} can only be used if the \`ipc\` option is \`true\`.`);
	}
};

// Better error message when one process sends/receives messages after the other process has disconnected.
// This also makes it clear that any buffered messages are lost once either process has disconnected.
// Also when aborting `cancelSignal` after disconnecting the IPC.
export const validateConnection = (methodName, isSubprocess, isConnected) => {
	if (!isConnected) {
		throw new Error(`${getMethodName(methodName, isSubprocess)} cannot be used: the ${getOtherProcessName(isSubprocess)} has already exited or disconnected.`);
	}
};

// When `getOneMessage()` could not complete due to an early disconnection
export const throwOnEarlyDisconnect = isSubprocess => {
	throw new Error(`${getMethodName('getOneMessage', isSubprocess)} could not complete: the ${getOtherProcessName(isSubprocess)} exited or disconnected.`);
};

// When both processes use `sendMessage()` with `strict` at the same time
export const throwOnStrictDeadlockError = isSubprocess => {
	throw new Error(`${getMethodName('sendMessage', isSubprocess)} failed: the ${getOtherProcessName(isSubprocess)} is sending a message too, instead of listening to incoming messages.
This can be fixed by both sending a message and listening to incoming messages at the same time:

const [receivedMessage] = await Promise.all([
	${getMethodName('getOneMessage', isSubprocess)},
	${getMethodName('sendMessage', isSubprocess, 'message, {strict: true}')},
]);`);
};

// When the other process used `strict` but the current process had an I/O error calling `sendMessage()` for the response
export const getStrictResponseError = (error, isSubprocess) => new Error(`${getMethodName('sendMessage', isSubprocess)} failed when sending an acknowledgment response to the ${getOtherProcessName(isSubprocess)}.`, {cause: error});

// When using `strict` but the other process was not listening for messages
export const throwOnMissingStrict = isSubprocess => {
	throw new Error(`${getMethodName('sendMessage', isSubprocess)} failed: the ${getOtherProcessName(isSubprocess)} is not listening to incoming messages.`);
};

// When using `strict` but the other process disconnected before receiving the message
export const throwOnStrictDisconnect = isSubprocess => {
	throw new Error(`${getMethodName('sendMessage', isSubprocess)} failed: the ${getOtherProcessName(isSubprocess)} exited without listening to incoming messages.`);
};

// When the current process disconnects while the subprocess is listening to `cancelSignal`
export const getAbortDisconnectError = () => new Error(`\`cancelSignal\` aborted: the ${getOtherProcessName(true)} disconnected.`);

// When the subprocess uses `cancelSignal` but not the current process
export const throwOnMissingParent = () => {
	throw new Error('`getCancelSignal()` cannot be used without setting the `cancelSignal` subprocess option.');
};

// EPIPE can happen when sending a message to a subprocess that is closing but has not disconnected yet
export const handleEpipeError = ({error, methodName, isSubprocess}) => {
	if (error.code === 'EPIPE') {
		throw new Error(`${getMethodName(methodName, isSubprocess)} cannot be used: the ${getOtherProcessName(isSubprocess)} is disconnecting.`, {cause: error});
	}
};

// Better error message when sending messages which cannot be serialized.
// Works with both `serialization: 'advanced'` and `serialization: 'json'`.
export const handleSerializationError = ({error, methodName, isSubprocess, message}) => {
	if (isSerializationError(error)) {
		throw new Error(`${getMethodName(methodName, isSubprocess)}'s argument type is invalid: the message cannot be serialized: ${String(message)}.`, {cause: error});
	}
};

const isSerializationError = ({code, message}) => SERIALIZATION_ERROR_CODES.has(code)
	|| SERIALIZATION_ERROR_MESSAGES.some(serializationErrorMessage => message.includes(serializationErrorMessage));

// `error.code` set by Node.js when it failed to serialize the message
const SERIALIZATION_ERROR_CODES = new Set([
	// Message is `undefined`
	'ERR_MISSING_ARGS',
	// Message is a function, a bigint, a symbol
	'ERR_INVALID_ARG_TYPE',
]);

// `error.message` set by Node.js when it failed to serialize the message
const SERIALIZATION_ERROR_MESSAGES = [
	// Message is a promise or a proxy, with `serialization: 'advanced'`
	'could not be cloned',
	// Message has cycles, with `serialization: 'json'`
	'circular structure',
	// Message has cycles inside toJSON(), with `serialization: 'json'`
	'call stack size exceeded',
];

const getMethodName = (methodName, isSubprocess, parameters = '') => methodName === 'cancelSignal'
	? '`cancelSignal`\'s `controller.abort()`'
	: `${getNamespaceName(isSubprocess)}${methodName}(${parameters})`;

const getNamespaceName = isSubprocess => isSubprocess ? '' : 'subprocess.';

const getOtherProcessName = isSubprocess => isSubprocess ? 'parent process' : 'subprocess';

// When any error arises, we disconnect the IPC.
// Otherwise, it is likely that one of the processes will stop sending/receiving messages.
// This would leave the other process hanging.
export const disconnect = anyProcess => {
	if (anyProcess.connected) {
		anyProcess.disconnect();
	}
};
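The fix suggested by throwOnStrictDeadlockError() above, spelled out as a runnable sketch on the parent side ('child.js' is hypothetical):

import {execa} from 'execa';

const subprocess = execa('node', ['child.js'], {ipc: true});

// Send and listen at the same time, so two `strict` sends cannot deadlock
const [receivedMessage] = await Promise.all([
	subprocess.getOneMessage(),
	subprocess.sendMessage('ping', {strict: true}),
]);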
23
node_modules/execa/lib/methods/bind.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
import isPlainObject from 'is-plain-obj';
import {FD_SPECIFIC_OPTIONS} from '../arguments/specific.js';

// Deep merge specific options like `env`. Shallow merge the other ones.
export const mergeOptions = (boundOptions, options) => {
	const newOptions = Object.fromEntries(
		Object.entries(options).map(([optionName, optionValue]) => [
			optionName,
			mergeOption(optionName, boundOptions[optionName], optionValue),
		]),
	);
	return {...boundOptions, ...newOptions};
};

const mergeOption = (optionName, boundOptionValue, optionValue) => {
	if (DEEP_OPTIONS.has(optionName) && isPlainObject(boundOptionValue) && isPlainObject(optionValue)) {
		return {...boundOptionValue, ...optionValue};
	}

	return optionValue;
};

const DEEP_OPTIONS = new Set(['env', ...FD_SPECIFIC_OPTIONS]);
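A sketch of the merge semantics above: `env` (and fd-specific options) merge deeply, other options shallowly; the chained-binding call form is taken from execa's documented API:

import {execa} from 'execa';

const bound = execa({env: {TOKEN: 'abc'}, reject: false});
const doubleBound = bound({env: {DEBUG: '1'}, reject: true});
// env is deep-merged: the subprocess sees both TOKEN and DEBUG.
// reject is shallow-merged: the later value (true) wins.
await doubleBound`node --version`;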
43
node_modules/execa/lib/methods/command.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
// Main logic for `execaCommand()`
export const mapCommandAsync = ({file, commandArguments}) => parseCommand(file, commandArguments);

// Main logic for `execaCommandSync()`
export const mapCommandSync = ({file, commandArguments}) => ({...parseCommand(file, commandArguments), isSync: true});

// Convert `execaCommand(command)` into `execa(file, ...commandArguments)`
const parseCommand = (command, unusedArguments) => {
	if (unusedArguments.length > 0) {
		throw new TypeError(`The command and its arguments must be passed as a single string: ${command} ${unusedArguments}.`);
	}

	const [file, ...commandArguments] = parseCommandString(command);
	return {file, commandArguments};
};

// Convert `command` string into an array of file or arguments to pass to $`${...fileOrCommandArguments}`
export const parseCommandString = command => {
	if (typeof command !== 'string') {
		throw new TypeError(`The command must be a string: ${String(command)}.`);
	}

	const trimmedCommand = command.trim();
	if (trimmedCommand === '') {
		return [];
	}

	const tokens = [];
	for (const token of trimmedCommand.split(SPACES_REGEXP)) {
		// Allow spaces to be escaped by a backslash if not meant as a delimiter
		const previousToken = tokens.at(-1);
		if (previousToken && previousToken.endsWith('\\')) {
			// Merge previous token with current one
			tokens[tokens.length - 1] = `${previousToken.slice(0, -1)} ${token}`;
		} else {
			tokens.push(token);
		}
	}

	return tokens;
};

const SPACES_REGEXP = / +/g;
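A usage sketch of the parsing above, via execa's documented parseCommandString() export:

import {parseCommandString} from 'execa';

parseCommandString('npm run build --watch');
// ['npm', 'run', 'build', '--watch']

parseCommandString('echo Hello\\ World');
// The escaped space is kept: ['echo', 'Hello World']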
65
node_modules/execa/lib/methods/create.js
generated
vendored
Normal file
@@ -0,0 +1,65 @@
import isPlainObject from 'is-plain-obj';
import {normalizeParameters} from './parameters.js';
import {isTemplateString, parseTemplates} from './template.js';
import {execaCoreSync} from './main-sync.js';
import {execaCoreAsync} from './main-async.js';
import {mergeOptions} from './bind.js';

// Wraps every exported method to provide the following features:
// - template string syntax: execa`command argument`
// - options binding: boundExeca = execa(options)
// - optional argument/options: execa(file), execa(file, args), execa(file, options), execa(file, args, options)
// `mapArguments()` and `setBoundExeca()` allow for method-specific logic.
export const createExeca = (mapArguments, boundOptions, deepOptions, setBoundExeca) => {
	const createNested = (mapArguments, boundOptions, setBoundExeca) => createExeca(mapArguments, boundOptions, deepOptions, setBoundExeca);
	const boundExeca = (...execaArguments) => callBoundExeca({
		mapArguments,
		deepOptions,
		boundOptions,
		setBoundExeca,
		createNested,
	}, ...execaArguments);

	if (setBoundExeca !== undefined) {
		setBoundExeca(boundExeca, createNested, boundOptions);
	}

	return boundExeca;
};

const callBoundExeca = ({mapArguments, deepOptions = {}, boundOptions = {}, setBoundExeca, createNested}, firstArgument, ...nextArguments) => {
	if (isPlainObject(firstArgument)) {
		return createNested(mapArguments, mergeOptions(boundOptions, firstArgument), setBoundExeca);
	}

	const {file, commandArguments, options, isSync} = parseArguments({
		mapArguments,
		firstArgument,
		nextArguments,
		deepOptions,
		boundOptions,
	});
	return isSync
		? execaCoreSync(file, commandArguments, options)
		: execaCoreAsync(file, commandArguments, options, createNested);
};

const parseArguments = ({mapArguments, firstArgument, nextArguments, deepOptions, boundOptions}) => {
	const callArguments = isTemplateString(firstArgument)
		? parseTemplates(firstArgument, nextArguments)
		: [firstArgument, ...nextArguments];
	const [initialFile, initialArguments, initialOptions] = normalizeParameters(...callArguments);
	const mergedOptions = mergeOptions(mergeOptions(deepOptions, boundOptions), initialOptions);
	const {
		file = initialFile,
		commandArguments = initialArguments,
		options = mergedOptions,
		isSync = false,
	} = mapArguments({file: initialFile, commandArguments: initialArguments, options: mergedOptions});
	return {
		file,
		commandArguments,
		options,
		isSync,
	};
};
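The call forms the wrapper above accepts, shown together (all taken from execa's documented API):

import {execa} from 'execa';

await execa('echo', ['hi']);              // file + arguments
await execa('echo', ['hi'], {all: true}); // + options
await execa`echo hi`;                     // template string syntax
const bound = execa({reject: false});     // options binding
await bound`echo hi`;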
193
node_modules/execa/lib/methods/main-async.js
generated
vendored
Normal file
193
node_modules/execa/lib/methods/main-async.js
generated
vendored
Normal file
@@ -0,0 +1,193 @@
|
||||
import {setMaxListeners} from 'node:events';
import {spawn} from 'node:child_process';
import {MaxBufferError} from 'get-stream';
import {handleCommand} from '../arguments/command.js';
import {normalizeOptions} from '../arguments/options.js';
import {SUBPROCESS_OPTIONS} from '../arguments/fd-options.js';
import {addIpcMethods} from '../ipc/methods.js';
import {makeError, makeSuccessResult} from '../return/result.js';
import {handleResult} from '../return/reject.js';
import {handleEarlyError} from '../return/early-error.js';
import {handleStdioAsync} from '../stdio/handle-async.js';
import {stripNewline} from '../io/strip-newline.js';
import {pipeOutputAsync} from '../io/output-async.js';
import {subprocessKill} from '../terminate/kill.js';
import {cleanupOnExit} from '../terminate/cleanup.js';
import {pipeToSubprocess} from '../pipe/setup.js';
import {makeAllStream} from '../resolve/all-async.js';
import {waitForSubprocessResult} from '../resolve/wait-subprocess.js';
import {addConvertedStreams} from '../convert/add.js';
import {createDeferred} from '../utils/deferred.js';
import {mergePromise} from './promise.js';

// Main shared logic for all async methods: `execa()`, `$`, `execaNode()`
export const execaCoreAsync = (rawFile, rawArguments, rawOptions, createNested) => {
	const {file, commandArguments, command, escapedCommand, startTime, verboseInfo, options, fileDescriptors} = handleAsyncArguments(rawFile, rawArguments, rawOptions);
	const {subprocess, promise} = spawnSubprocessAsync({
		file,
		commandArguments,
		options,
		startTime,
		verboseInfo,
		command,
		escapedCommand,
		fileDescriptors,
	});
	subprocess.pipe = pipeToSubprocess.bind(undefined, {
		source: subprocess,
		sourcePromise: promise,
		boundOptions: {},
		createNested,
	});
	mergePromise(subprocess, promise);
	SUBPROCESS_OPTIONS.set(subprocess, {options, fileDescriptors});
	return subprocess;
};

// Compute arguments to pass to `child_process.spawn()`
const handleAsyncArguments = (rawFile, rawArguments, rawOptions) => {
	const {command, escapedCommand, startTime, verboseInfo} = handleCommand(rawFile, rawArguments, rawOptions);
	const {file, commandArguments, options: normalizedOptions} = normalizeOptions(rawFile, rawArguments, rawOptions);
	const options = handleAsyncOptions(normalizedOptions);
	const fileDescriptors = handleStdioAsync(options, verboseInfo);
	return {
		file,
		commandArguments,
		command,
		escapedCommand,
		startTime,
		verboseInfo,
		options,
		fileDescriptors,
	};
};

// Options normalization logic specific to async methods.
// Prevent passing the `timeout` option directly to `child_process.spawn()`.
const handleAsyncOptions = ({timeout, signal, ...options}) => {
	if (signal !== undefined) {
		throw new TypeError('The "signal" option has been renamed to "cancelSignal" instead.');
	}

	return {...options, timeoutDuration: timeout};
};

const spawnSubprocessAsync = ({file, commandArguments, options, startTime, verboseInfo, command, escapedCommand, fileDescriptors}) => {
	let subprocess;
	try {
		subprocess = spawn(file, commandArguments, options);
	} catch (error) {
		return handleEarlyError({
			error,
			command,
			escapedCommand,
			fileDescriptors,
			options,
			startTime,
			verboseInfo,
		});
	}

	const controller = new AbortController();
	setMaxListeners(Number.POSITIVE_INFINITY, controller.signal);

	const originalStreams = [...subprocess.stdio];
	pipeOutputAsync(subprocess, fileDescriptors, controller);
	cleanupOnExit(subprocess, options, controller);

	const context = {};
	const onInternalError = createDeferred();
	subprocess.kill = subprocessKill.bind(undefined, {
		kill: subprocess.kill.bind(subprocess),
		options,
		onInternalError,
		context,
		controller,
	});
	subprocess.all = makeAllStream(subprocess, options);
	addConvertedStreams(subprocess, options);
	addIpcMethods(subprocess, options);

	const promise = handlePromise({
		subprocess,
		options,
		startTime,
		verboseInfo,
		fileDescriptors,
		originalStreams,
		command,
		escapedCommand,
		context,
		onInternalError,
		controller,
	});
	return {subprocess, promise};
};

// Asynchronous logic, as opposed to the previous logic which can be run synchronously, i.e. can be returned to user right away
const handlePromise = async ({subprocess, options, startTime, verboseInfo, fileDescriptors, originalStreams, command, escapedCommand, context, onInternalError, controller}) => {
	const [
		errorInfo,
		[exitCode, signal],
		stdioResults,
		allResult,
		ipcOutput,
	] = await waitForSubprocessResult({
		subprocess,
		options,
		context,
		verboseInfo,
		fileDescriptors,
		originalStreams,
		onInternalError,
		controller,
	});
	controller.abort();
	onInternalError.resolve();

	const stdio = stdioResults.map((stdioResult, fdNumber) => stripNewline(stdioResult, options, fdNumber));
	const all = stripNewline(allResult, options, 'all');
	const result = getAsyncResult({
		errorInfo,
		exitCode,
		signal,
		stdio,
		all,
		ipcOutput,
		context,
		options,
		command,
		escapedCommand,
		startTime,
	});
	return handleResult(result, verboseInfo, options);
};

const getAsyncResult = ({errorInfo, exitCode, signal, stdio, all, ipcOutput, context, options, command, escapedCommand, startTime}) => 'error' in errorInfo
	? makeError({
		error: errorInfo.error,
		command,
		escapedCommand,
		timedOut: context.terminationReason === 'timeout',
		isCanceled: context.terminationReason === 'cancel' || context.terminationReason === 'gracefulCancel',
		isGracefullyCanceled: context.terminationReason === 'gracefulCancel',
		isMaxBuffer: errorInfo.error instanceof MaxBufferError,
		isForcefullyTerminated: context.isForcefullyTerminated,
		exitCode,
		signal,
		stdio,
		all,
		ipcOutput,
		options,
		startTime,
		isSync: false,
	})
	: makeSuccessResult({
		command,
		escapedCommand,
		stdio,
		all,
		ipcOutput,
		options,
		startTime,
	});
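A sketch of how these pieces surface to callers: the return value of `execaCoreAsync` is awaitable, and failures carry the flags assembled by `getAsyncResult` (the command is illustrative):

import {execa, ExecaError} from 'execa';

try {
	const {stdout, exitCode} = await execa('node', ['--version']);
	console.log(stdout, exitCode);
} catch (error) {
	if (error instanceof ExecaError) {
		// Flags set by `getAsyncResult()` above
		console.error(error.timedOut, error.isCanceled, error.isMaxBuffer);
	}
}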
162
node_modules/execa/lib/methods/main-sync.js
generated
vendored
Normal file
@@ -0,0 +1,162 @@
import {spawnSync} from 'node:child_process';
import {handleCommand} from '../arguments/command.js';
import {normalizeOptions} from '../arguments/options.js';
import {makeError, makeEarlyError, makeSuccessResult} from '../return/result.js';
import {handleResult} from '../return/reject.js';
import {handleStdioSync} from '../stdio/handle-sync.js';
import {stripNewline} from '../io/strip-newline.js';
import {addInputOptionsSync} from '../io/input-sync.js';
import {transformOutputSync} from '../io/output-sync.js';
import {getMaxBufferSync} from '../io/max-buffer.js';
import {getAllSync} from '../resolve/all-sync.js';
import {getExitResultSync} from '../resolve/exit-sync.js';

// Main shared logic for all sync methods: `execaSync()`, `$.sync()`
export const execaCoreSync = (rawFile, rawArguments, rawOptions) => {
	const {file, commandArguments, command, escapedCommand, startTime, verboseInfo, options, fileDescriptors} = handleSyncArguments(rawFile, rawArguments, rawOptions);
	const result = spawnSubprocessSync({
		file,
		commandArguments,
		options,
		command,
		escapedCommand,
		verboseInfo,
		fileDescriptors,
		startTime,
	});
	return handleResult(result, verboseInfo, options);
};

// Compute arguments to pass to `child_process.spawnSync()`
const handleSyncArguments = (rawFile, rawArguments, rawOptions) => {
	const {command, escapedCommand, startTime, verboseInfo} = handleCommand(rawFile, rawArguments, rawOptions);
	const syncOptions = normalizeSyncOptions(rawOptions);
	const {file, commandArguments, options} = normalizeOptions(rawFile, rawArguments, syncOptions);
	validateSyncOptions(options);
	const fileDescriptors = handleStdioSync(options, verboseInfo);
	return {
		file,
		commandArguments,
		command,
		escapedCommand,
		startTime,
		verboseInfo,
		options,
		fileDescriptors,
	};
};

// Options normalization logic specific to sync methods
const normalizeSyncOptions = options => options.node && !options.ipc ? {...options, ipc: false} : options;

// Options validation logic specific to sync methods
const validateSyncOptions = ({ipc, ipcInput, detached, cancelSignal}) => {
	if (ipcInput) {
		throwInvalidSyncOption('ipcInput');
	}

	if (ipc) {
		throwInvalidSyncOption('ipc: true');
	}

	if (detached) {
		throwInvalidSyncOption('detached: true');
	}

	if (cancelSignal) {
		throwInvalidSyncOption('cancelSignal');
	}
};

const throwInvalidSyncOption = value => {
	throw new TypeError(`The "${value}" option cannot be used with synchronous methods.`);
};

const spawnSubprocessSync = ({file, commandArguments, options, command, escapedCommand, verboseInfo, fileDescriptors, startTime}) => {
	const syncResult = runSubprocessSync({
		file,
		commandArguments,
		options,
		command,
		escapedCommand,
		fileDescriptors,
		startTime,
	});
	if (syncResult.failed) {
		return syncResult;
	}

	const {resultError, exitCode, signal, timedOut, isMaxBuffer} = getExitResultSync(syncResult, options);
	const {output, error = resultError} = transformOutputSync({
		fileDescriptors,
		syncResult,
		options,
		isMaxBuffer,
		verboseInfo,
	});
	const stdio = output.map((stdioOutput, fdNumber) => stripNewline(stdioOutput, options, fdNumber));
	const all = stripNewline(getAllSync(output, options), options, 'all');
	return getSyncResult({
		error,
		exitCode,
		signal,
		timedOut,
		isMaxBuffer,
		stdio,
		all,
		options,
		command,
		escapedCommand,
		startTime,
	});
};

const runSubprocessSync = ({file, commandArguments, options, command, escapedCommand, fileDescriptors, startTime}) => {
	try {
		addInputOptionsSync(fileDescriptors, options);
		const normalizedOptions = normalizeSpawnSyncOptions(options);
		return spawnSync(file, commandArguments, normalizedOptions);
	} catch (error) {
		return makeEarlyError({
			error,
			command,
			escapedCommand,
			fileDescriptors,
			options,
			startTime,
			isSync: true,
		});
	}
};

// The `encoding` option is handled by Execa, not by `child_process.spawnSync()`
const normalizeSpawnSyncOptions = ({encoding, maxBuffer, ...options}) => ({...options, encoding: 'buffer', maxBuffer: getMaxBufferSync(maxBuffer)});

const getSyncResult = ({error, exitCode, signal, timedOut, isMaxBuffer, stdio, all, options, command, escapedCommand, startTime}) => error === undefined
	? makeSuccessResult({
		command,
		escapedCommand,
		stdio,
		all,
		ipcOutput: [],
		options,
		startTime,
	})
	: makeError({
		error,
		command,
		escapedCommand,
		timedOut,
		isCanceled: false,
		isGracefullyCanceled: false,
		isMaxBuffer,
		isForcefullyTerminated: false,
		exitCode,
		signal,
		stdio,
		all,
		ipcOutput: [],
		options,
		startTime,
		isSync: true,
	});
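The sync path behaves the same from the caller's side, except that the options rejected by `validateSyncOptions` throw before anything is spawned (sketch):

import {execaSync} from 'execa';

const {stdout} = execaSync('node', ['--version']);

// Throws: The "ipc: true" option cannot be used with synchronous methods.
// execaSync('node', ['child.js'], {ipc: true});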
51
node_modules/execa/lib/methods/node.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
import {execPath, execArgv} from 'node:process';
import path from 'node:path';
import {safeNormalizeFileUrl} from '../arguments/file-url.js';

// `execaNode()` is a shortcut for `execa(..., {node: true})`
export const mapNode = ({options}) => {
	if (options.node === false) {
		throw new TypeError('The "node" option cannot be false with `execaNode()`.');
	}

	return {options: {...options, node: true}};
};

// Applies the `node: true` option, and the related `nodePath`/`nodeOptions` options.
// Modifies the file commands/arguments to ensure the same Node binary and flags are re-used.
// Also adds `ipc: true` and `shell: false`.
export const handleNodeOption = (file, commandArguments, {
	node: shouldHandleNode = false,
	nodePath = execPath,
	nodeOptions = execArgv.filter(nodeOption => !nodeOption.startsWith('--inspect')),
	cwd,
	execPath: formerNodePath,
	...options
}) => {
	if (formerNodePath !== undefined) {
		throw new TypeError('The "execPath" option has been removed. Please use the "nodePath" option instead.');
	}

	const normalizedNodePath = safeNormalizeFileUrl(nodePath, 'The "nodePath" option');
	const resolvedNodePath = path.resolve(cwd, normalizedNodePath);
	const newOptions = {
		...options,
		nodePath: resolvedNodePath,
		node: shouldHandleNode,
		cwd,
	};

	if (!shouldHandleNode) {
		return [file, commandArguments, newOptions];
	}

	if (path.basename(file, '.exe') === 'node') {
		throw new TypeError('When the "node" option is true, the first argument does not need to be "node".');
	}

	return [
		resolvedNodePath,
		[...nodeOptions, file, ...commandArguments],
		{ipc: true, ...newOptions, shell: false},
	];
};
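A usage sketch for the `node` option handling above ('child.js' is a hypothetical script): the current Node.js binary and CLI flags are re-used by default, minus `--inspect*`:

import {execa, execaNode} from 'execa';

await execaNode('child.js', ['some-argument']);

// Equivalent, using the `node` option directly:
await execa('child.js', ['some-argument'], {node: true, nodeOptions: ['--no-warnings']});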
31
node_modules/execa/lib/methods/parameters.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
import isPlainObject from 'is-plain-obj';
import {safeNormalizeFileUrl} from '../arguments/file-url.js';

// The command `arguments` and `options` are both optional.
// This also does basic validation on them and on the command file.
export const normalizeParameters = (rawFile, rawArguments = [], rawOptions = {}) => {
	const filePath = safeNormalizeFileUrl(rawFile, 'First argument');
	const [commandArguments, options] = isPlainObject(rawArguments)
		? [[], rawArguments]
		: [rawArguments, rawOptions];

	if (!Array.isArray(commandArguments)) {
		throw new TypeError(`Second argument must be either an array of arguments or an options object: ${commandArguments}`);
	}

	if (commandArguments.some(commandArgument => typeof commandArgument === 'object' && commandArgument !== null)) {
		throw new TypeError(`Second argument must be an array of strings: ${commandArguments}`);
	}

	const normalizedArguments = commandArguments.map(String);
	const nullByteArgument = normalizedArguments.find(normalizedArgument => normalizedArgument.includes('\0'));
	if (nullByteArgument !== undefined) {
		throw new TypeError(`Arguments cannot contain null bytes ("\\0"): ${nullByteArgument}`);
	}

	if (!isPlainObject(options)) {
		throw new TypeError(`Last argument must be an options object: ${options}`);
	}

	return [filePath, normalizedArguments, options];
};
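Because both trailing parameters are optional, the following call shapes all normalize to the same `[file, arguments, options]` triple (sketch):

import {execa} from 'execa';

await execa('echo');
await execa('echo', ['hello']);
await execa('echo', {cwd: '/tmp'});
await execa('echo', ['hello'], {cwd: '/tmp'});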
15
node_modules/execa/lib/methods/promise.js
generated
vendored
Normal file
@@ -0,0 +1,15 @@
// The return value is a mixin of `subprocess` and `Promise`
export const mergePromise = (subprocess, promise) => {
	for (const [property, descriptor] of descriptors) {
		const value = descriptor.value.bind(promise);
		Reflect.defineProperty(subprocess, property, {...descriptor, value});
	}
};

// eslint-disable-next-line unicorn/prefer-top-level-await
const nativePromisePrototype = (async () => {})().constructor.prototype;

const descriptors = ['then', 'catch', 'finally'].map(property => [
	property,
	Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property),
]);
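Only `then`, `catch` and `finally` are copied, so the return value can be awaited directly while still being a regular subprocess object (sketch):

import process from 'node:process';
import {execa} from 'execa';

const subprocess = execa('node', ['--version']);
subprocess.stdout.pipe(process.stdout); // Still a subprocess...
const result = await subprocess; // ...and also a thenable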
22
node_modules/execa/lib/methods/script.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
// Sets `$.sync` and `$.s`
export const setScriptSync = (boundExeca, createNested, boundOptions) => {
	boundExeca.sync = createNested(mapScriptSync, boundOptions);
	boundExeca.s = boundExeca.sync;
};

// Main logic for `$`
export const mapScriptAsync = ({options}) => getScriptOptions(options);

// Main logic for `$.sync`
const mapScriptSync = ({options}) => ({...getScriptOptions(options), isSync: true});

// `$` is like `execa` but with script-friendly options: `{stdin: 'inherit', preferLocal: true}`
const getScriptOptions = options => ({options: {...getScriptStdinOption(options), ...options}});

const getScriptStdinOption = ({input, inputFile, stdio}) => input === undefined && inputFile === undefined && stdio === undefined
	? {stdin: 'inherit'}
	: {};

// When using $(...).pipe(...), most script-friendly options should apply to both commands.
// However, some options (like `stdin: 'inherit'`) would create issues with piping, i.e. cannot be deep.
export const deepScriptOptions = {preferLocal: true};
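A sketch of the script-friendly defaults set above: `$` inherits `stdin` and prefers locally installed binaries, and `$.sync` (aliased as `$.s`) does the same synchronously:

import {$} from 'execa';

const {stdout} = await $`node --version`;
const syncResult = $.sync`node --version`;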
153
node_modules/execa/lib/methods/template.js
generated
vendored
Normal file
@@ -0,0 +1,153 @@
import {ChildProcess} from 'node:child_process';
import isPlainObject from 'is-plain-obj';
import {isUint8Array, uint8ArrayToString} from '../utils/uint-array.js';

// Check whether the template string syntax is being used
export const isTemplateString = templates => Array.isArray(templates) && Array.isArray(templates.raw);

// Convert execa`file ...commandArguments` to execa(file, commandArguments)
export const parseTemplates = (templates, expressions) => {
	let tokens = [];

	for (const [index, template] of templates.entries()) {
		tokens = parseTemplate({
			templates,
			expressions,
			tokens,
			index,
			template,
		});
	}

	if (tokens.length === 0) {
		throw new TypeError('Template script must not be empty');
	}

	const [file, ...commandArguments] = tokens;
	return [file, commandArguments, {}];
};

const parseTemplate = ({templates, expressions, tokens, index, template}) => {
	if (template === undefined) {
		throw new TypeError(`Invalid backslash sequence: ${templates.raw[index]}`);
	}

	const {nextTokens, leadingWhitespaces, trailingWhitespaces} = splitByWhitespaces(template, templates.raw[index]);
	const newTokens = concatTokens(tokens, nextTokens, leadingWhitespaces);

	if (index === expressions.length) {
		return newTokens;
	}

	const expression = expressions[index];
	const expressionTokens = Array.isArray(expression)
		? expression.map(expression => parseExpression(expression))
		: [parseExpression(expression)];
	return concatTokens(newTokens, expressionTokens, trailingWhitespaces);
};

// Like `string.split(/[ \t\r\n]+/)` except newlines and tabs are:
//  - ignored when input as a backslash sequence like: `echo foo\n bar`
//  - not ignored when input directly
// The only way to distinguish those in JavaScript is to use a tagged template and compare:
//  - the first array argument, which does not escape backslash sequences
//  - its `raw` property, which escapes them
const splitByWhitespaces = (template, rawTemplate) => {
	if (rawTemplate.length === 0) {
		return {nextTokens: [], leadingWhitespaces: false, trailingWhitespaces: false};
	}

	const nextTokens = [];
	let templateStart = 0;
	const leadingWhitespaces = DELIMITERS.has(rawTemplate[0]);

	for (
		let templateIndex = 0, rawIndex = 0;
		templateIndex < template.length;
		templateIndex += 1, rawIndex += 1
	) {
		const rawCharacter = rawTemplate[rawIndex];
		if (DELIMITERS.has(rawCharacter)) {
			if (templateStart !== templateIndex) {
				nextTokens.push(template.slice(templateStart, templateIndex));
			}

			templateStart = templateIndex + 1;
		} else if (rawCharacter === '\\') {
			const nextRawCharacter = rawTemplate[rawIndex + 1];
			if (nextRawCharacter === '\n') {
				// Handles escaped newlines in templates
				templateIndex -= 1;
				rawIndex += 1;
			} else if (nextRawCharacter === 'u' && rawTemplate[rawIndex + 2] === '{') {
				rawIndex = rawTemplate.indexOf('}', rawIndex + 3);
			} else {
				rawIndex += ESCAPE_LENGTH[nextRawCharacter] ?? 1;
			}
		}
	}

	const trailingWhitespaces = templateStart === template.length;
	if (!trailingWhitespaces) {
		nextTokens.push(template.slice(templateStart));
	}

	return {nextTokens, leadingWhitespaces, trailingWhitespaces};
};

const DELIMITERS = new Set([' ', '\t', '\r', '\n']);

// Number of characters in backslash escape sequences: \0 \xXX or \uXXXX
// \cX is allowed in RegExps but not in strings
// Octal sequences are not allowed in strict mode
const ESCAPE_LENGTH = {x: 3, u: 5};

const concatTokens = (tokens, nextTokens, isSeparated) => isSeparated
	|| tokens.length === 0
	|| nextTokens.length === 0
	? [...tokens, ...nextTokens]
	: [
		...tokens.slice(0, -1),
		`${tokens.at(-1)}${nextTokens[0]}`,
		...nextTokens.slice(1),
	];

// Handle `${expression}` inside the template string syntax
const parseExpression = expression => {
	const typeOfExpression = typeof expression;

	if (typeOfExpression === 'string') {
		return expression;
	}

	if (typeOfExpression === 'number') {
		return String(expression);
	}

	if (isPlainObject(expression) && ('stdout' in expression || 'isMaxBuffer' in expression)) {
		return getSubprocessResult(expression);
	}

	if (expression instanceof ChildProcess || Object.prototype.toString.call(expression) === '[object Promise]') {
		// eslint-disable-next-line no-template-curly-in-string
		throw new TypeError('Unexpected subprocess in template expression. Please use ${await subprocess} instead of ${subprocess}.');
	}

	throw new TypeError(`Unexpected "${typeOfExpression}" in template expression`);
};

const getSubprocessResult = ({stdout}) => {
	if (typeof stdout === 'string') {
		return stdout;
	}

	if (isUint8Array(stdout)) {
		return uint8ArrayToString(stdout);
	}

	if (stdout === undefined) {
		throw new TypeError('Missing result.stdout in template expression. This is probably due to the previous subprocess\' "stdout" option.');
	}

	throw new TypeError(`Unexpected "${typeof stdout}" stdout in template expression`);
};
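A sketch of what `parseExpression` accepts: strings, numbers, arrays of either, and awaited results, whose `stdout` is re-injected as a single token. Unawaited subprocesses throw (the commands are illustrative):

import {execa} from 'execa';

await execa`echo ${'one two'}`; // Passed as a single argument, unlike a shell

const result = await execa`node --version`;
await execa`echo ${result}`; // `result.stdout` is interpolated

// TypeError: use ${await subprocess} instead of ${subprocess}
// await execa`echo ${execa`node --version`}`;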
20
node_modules/execa/lib/pipe/abort.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
import {aborted} from 'node:util';
import {createNonCommandError} from './throw.js';

// When passing an `unpipeSignal` option, abort piping when the signal is aborted.
// However, do not terminate the subprocesses.
export const unpipeOnAbort = (unpipeSignal, unpipeContext) => unpipeSignal === undefined
	? []
	: [unpipeOnSignalAbort(unpipeSignal, unpipeContext)];

const unpipeOnSignalAbort = async (unpipeSignal, {sourceStream, mergedStream, fileDescriptors, sourceOptions, startTime}) => {
	await aborted(unpipeSignal, sourceStream);
	await mergedStream.remove(sourceStream);
	const error = new Error('Pipe canceled by `unpipeSignal` option.');
	throw createNonCommandError({
		error,
		fileDescriptors,
		sourceOptions,
		startTime,
	});
};
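A sketch of the `unpipeSignal` behavior above (script names are hypothetical): aborting the signal stops the piping but leaves both subprocesses running, and the pipe promise then rejects with the error created here:

import {execa} from 'execa';

const abortController = new AbortController();
const source = execa('node', ['produce.js']);
const pipePromise = source.pipe('node', ['consume.js'], {unpipeSignal: abortController.signal});

abortController.abort(); // Stops piping only
await pipePromise.catch(error => console.error(error.message));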
91
node_modules/execa/lib/pipe/pipe-arguments.js
generated
vendored
Normal file
@@ -0,0 +1,91 @@
import {normalizeParameters} from '../methods/parameters.js';
import {getStartTime} from '../return/duration.js';
import {SUBPROCESS_OPTIONS, getToStream, getFromStream} from '../arguments/fd-options.js';
import {isDenoExecPath} from '../arguments/file-url.js';

// Normalize and validate arguments passed to `source.pipe(destination)`
export const normalizePipeArguments = ({source, sourcePromise, boundOptions, createNested}, ...pipeArguments) => {
	const startTime = getStartTime();
	const {
		destination,
		destinationStream,
		destinationError,
		from,
		unpipeSignal,
	} = getDestinationStream(boundOptions, createNested, pipeArguments);
	const {sourceStream, sourceError} = getSourceStream(source, from);
	const {options: sourceOptions, fileDescriptors} = SUBPROCESS_OPTIONS.get(source);
	return {
		sourcePromise,
		sourceStream,
		sourceOptions,
		sourceError,
		destination,
		destinationStream,
		destinationError,
		unpipeSignal,
		fileDescriptors,
		startTime,
	};
};

const getDestinationStream = (boundOptions, createNested, pipeArguments) => {
	try {
		const {
			destination,
			pipeOptions: {from, to, unpipeSignal} = {},
		} = getDestination(boundOptions, createNested, ...pipeArguments);
		const destinationStream = getToStream(destination, to);
		return {
			destination,
			destinationStream,
			from,
			unpipeSignal,
		};
	} catch (error) {
		return {destinationError: error};
	}
};

// Piping subprocesses can use three syntaxes:
//  - source.pipe('command', commandArguments, pipeOptionsOrDestinationOptions)
//  - source.pipe`command commandArgument` or source.pipe(pipeOptionsOrDestinationOptions)`command commandArgument`
//  - source.pipe(execa(...), pipeOptions)
const getDestination = (boundOptions, createNested, firstArgument, ...pipeArguments) => {
	if (Array.isArray(firstArgument)) {
		const destination = createNested(mapDestinationArguments, boundOptions)(firstArgument, ...pipeArguments);
		return {destination, pipeOptions: boundOptions};
	}

	if (typeof firstArgument === 'string' || firstArgument instanceof URL || isDenoExecPath(firstArgument)) {
		if (Object.keys(boundOptions).length > 0) {
			throw new TypeError('Please use .pipe("file", ..., options) or .pipe(execa("file", ..., options)) instead of .pipe(options)("file", ...).');
		}

		const [rawFile, rawArguments, rawOptions] = normalizeParameters(firstArgument, ...pipeArguments);
		const destination = createNested(mapDestinationArguments)(rawFile, rawArguments, rawOptions);
		return {destination, pipeOptions: rawOptions};
	}

	if (SUBPROCESS_OPTIONS.has(firstArgument)) {
		if (Object.keys(boundOptions).length > 0) {
			throw new TypeError('Please use .pipe(options)`command` or .pipe($(options)`command`) instead of .pipe(options)($`command`).');
		}

		return {destination: firstArgument, pipeOptions: pipeArguments[0]};
	}

	throw new TypeError(`The first argument must be a template string, an options object, or an Execa subprocess: ${firstArgument}`);
};

// Force `stdin: 'pipe'` with the destination subprocess
const mapDestinationArguments = ({options}) => ({options: {...options, stdin: 'pipe', piped: true}});

const getSourceStream = (source, from) => {
	try {
		const sourceStream = getFromStream(source, from);
		return {sourceStream};
	} catch (error) {
		return {sourceError: error};
	}
};
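The three call shapes recognized by `getDestination` above, as a sketch (the commands are illustrative):

import {execa} from 'execa';

// 1. File and arguments
await execa('npm', ['run', 'build']).pipe('sort', ['--reverse']);

// 2. Template string, optionally after binding pipe options
await execa`npm run build`.pipe`sort --reverse`;

// 3. An existing Execa subprocess
await execa`npm run build`.pipe(execa`sort --reverse`);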
24
node_modules/execa/lib/pipe/sequence.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
// Like Bash, we await both subprocesses. This is unlike some other shells which only await the destination subprocess.
// Like Bash with the `pipefail` option, if either subprocess fails, the whole pipe fails.
// Like Bash, if both subprocesses fail, we return the failure of the destination.
// This ensures both subprocesses' errors are present, using `error.pipedFrom`.
export const waitForBothSubprocesses = async subprocessPromises => {
	const [
		{status: sourceStatus, reason: sourceReason, value: sourceResult = sourceReason},
		{status: destinationStatus, reason: destinationReason, value: destinationResult = destinationReason},
	] = await subprocessPromises;

	if (!destinationResult.pipedFrom.includes(sourceResult)) {
		destinationResult.pipedFrom.push(sourceResult);
	}

	if (destinationStatus === 'rejected') {
		throw destinationResult;
	}

	if (sourceStatus === 'rejected') {
		throw sourceResult;
	}

	return destinationResult;
};
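Since the destination's failure wins, the source's failure remains reachable through `error.pipedFrom` (sketch; script names are hypothetical):

import {execa} from 'execa';

try {
	await execa('node', ['produce.js']).pipe('node', ['consume.js']);
} catch (error) {
	// Results/errors of the subprocesses that piped into the failing one
	console.error(error.pipedFrom);
}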
72
node_modules/execa/lib/pipe/setup.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
import isPlainObject from 'is-plain-obj';
import {normalizePipeArguments} from './pipe-arguments.js';
import {handlePipeArgumentsError} from './throw.js';
import {waitForBothSubprocesses} from './sequence.js';
import {pipeSubprocessStream} from './streaming.js';
import {unpipeOnAbort} from './abort.js';

// Pipe a subprocess' `stdout`/`stderr`/`stdio` into another subprocess' `stdin`
export const pipeToSubprocess = (sourceInfo, ...pipeArguments) => {
	if (isPlainObject(pipeArguments[0])) {
		return pipeToSubprocess.bind(undefined, {
			...sourceInfo,
			boundOptions: {...sourceInfo.boundOptions, ...pipeArguments[0]},
		});
	}

	const {destination, ...normalizedInfo} = normalizePipeArguments(sourceInfo, ...pipeArguments);
	const promise = handlePipePromise({...normalizedInfo, destination});
	promise.pipe = pipeToSubprocess.bind(undefined, {
		...sourceInfo,
		source: destination,
		sourcePromise: promise,
		boundOptions: {},
	});
	return promise;
};

// Asynchronous logic when piping subprocesses
const handlePipePromise = async ({
	sourcePromise,
	sourceStream,
	sourceOptions,
	sourceError,
	destination,
	destinationStream,
	destinationError,
	unpipeSignal,
	fileDescriptors,
	startTime,
}) => {
	const subprocessPromises = getSubprocessPromises(sourcePromise, destination);
	handlePipeArgumentsError({
		sourceStream,
		sourceError,
		destinationStream,
		destinationError,
		fileDescriptors,
		sourceOptions,
		startTime,
	});
	const maxListenersController = new AbortController();
	try {
		const mergedStream = pipeSubprocessStream(sourceStream, destinationStream, maxListenersController);
		return await Promise.race([
			waitForBothSubprocesses(subprocessPromises),
			...unpipeOnAbort(unpipeSignal, {
				sourceStream,
				mergedStream,
				sourceOptions,
				fileDescriptors,
				startTime,
			}),
		]);
	} finally {
		maxListenersController.abort();
	}
};

// `.pipe()` awaits the subprocess promises.
// When invalid arguments are passed to `.pipe()`, we throw an error, which prevents awaiting them.
// We need to ensure this does not create unhandled rejections.
const getSubprocessPromises = (sourcePromise, destination) => Promise.allSettled([sourcePromise, destination]);
51
node_modules/execa/lib/pipe/streaming.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
import {finished} from 'node:stream/promises';
import mergeStreams from '@sindresorhus/merge-streams';
import {incrementMaxListeners} from '../utils/max-listeners.js';
import {pipeStreams} from '../io/pipeline.js';

// The piping behavior is like Bash.
// In particular, when one subprocess exits, the other is not terminated by a signal.
// Instead, its stdout (for the source) or stdin (for the destination) closes.
// If the subprocess uses it, it will make it error with SIGPIPE or EPIPE (for the source) or end (for the destination).
// If it does not use it, it will continue running.
// This allows for subprocesses to gracefully exit and lower the coupling between subprocesses.
export const pipeSubprocessStream = (sourceStream, destinationStream, maxListenersController) => {
	const mergedStream = MERGED_STREAMS.has(destinationStream)
		? pipeMoreSubprocessStream(sourceStream, destinationStream)
		: pipeFirstSubprocessStream(sourceStream, destinationStream);
	incrementMaxListeners(sourceStream, SOURCE_LISTENERS_PER_PIPE, maxListenersController.signal);
	incrementMaxListeners(destinationStream, DESTINATION_LISTENERS_PER_PIPE, maxListenersController.signal);
	cleanupMergedStreamsMap(destinationStream);
	return mergedStream;
};

// We use `merge-streams` to allow for multiple sources to pipe to the same destination.
const pipeFirstSubprocessStream = (sourceStream, destinationStream) => {
	const mergedStream = mergeStreams([sourceStream]);
	pipeStreams(mergedStream, destinationStream);
	MERGED_STREAMS.set(destinationStream, mergedStream);
	return mergedStream;
};

const pipeMoreSubprocessStream = (sourceStream, destinationStream) => {
	const mergedStream = MERGED_STREAMS.get(destinationStream);
	mergedStream.add(sourceStream);
	return mergedStream;
};

const cleanupMergedStreamsMap = async destinationStream => {
	try {
		await finished(destinationStream, {cleanup: true, readable: false, writable: true});
	} catch {}

	MERGED_STREAMS.delete(destinationStream);
};

const MERGED_STREAMS = new WeakMap();

// Number of listeners set up on `sourceStream` by each `sourceStream.pipe(destinationStream)`
// Those are added by `merge-streams`
const SOURCE_LISTENERS_PER_PIPE = 2;
// Number of listeners set up on `destinationStream` by each `sourceStream.pipe(destinationStream)`
// Those are added by `finished()` in `cleanupMergedStreamsMap()`
const DESTINATION_LISTENERS_PER_PIPE = 1;
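Because destinations are wrapped in a merged stream, several sources can pipe into the same destination; a sketch (script names are hypothetical):

import {execa} from 'execa';

const destination = execa('node', ['consume.js']);
await Promise.all([
	execa('node', ['produce.js']).pipe(destination),
	execa('node', ['produce-more.js']).pipe(destination),
]);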
58
node_modules/execa/lib/pipe/throw.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
import {makeEarlyError} from '../return/result.js';
import {abortSourceStream, endDestinationStream} from '../io/pipeline.js';

// When passing invalid arguments to `source.pipe()`, throw asynchronously.
// We also abort both subprocesses.
export const handlePipeArgumentsError = ({
	sourceStream,
	sourceError,
	destinationStream,
	destinationError,
	fileDescriptors,
	sourceOptions,
	startTime,
}) => {
	const error = getPipeArgumentsError({
		sourceStream,
		sourceError,
		destinationStream,
		destinationError,
	});
	if (error !== undefined) {
		throw createNonCommandError({
			error,
			fileDescriptors,
			sourceOptions,
			startTime,
		});
	}
};

const getPipeArgumentsError = ({sourceStream, sourceError, destinationStream, destinationError}) => {
	if (sourceError !== undefined && destinationError !== undefined) {
		return destinationError;
	}

	if (destinationError !== undefined) {
		abortSourceStream(sourceStream);
		return destinationError;
	}

	if (sourceError !== undefined) {
		endDestinationStream(destinationStream);
		return sourceError;
	}
};

// Specific error return value when passing invalid arguments to `subprocess.pipe()` or when using `unpipeSignal`
export const createNonCommandError = ({error, fileDescriptors, sourceOptions, startTime}) => makeEarlyError({
	error,
	command: PIPE_COMMAND_MESSAGE,
	escapedCommand: PIPE_COMMAND_MESSAGE,
	fileDescriptors,
	options: sourceOptions,
	startTime,
	isSync: false,
});

const PIPE_COMMAND_MESSAGE = 'source.pipe(destination)';
46
node_modules/execa/lib/resolve/all-async.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
import mergeStreams from '@sindresorhus/merge-streams';
import {waitForSubprocessStream} from './stdio.js';

// `all` interleaves `stdout` and `stderr`
export const makeAllStream = ({stdout, stderr}, {all}) => all && (stdout || stderr)
	? mergeStreams([stdout, stderr].filter(Boolean))
	: undefined;

// Read the contents of `subprocess.all` and|or wait for its completion
export const waitForAllStream = ({subprocess, encoding, buffer, maxBuffer, lines, stripFinalNewline, verboseInfo, streamInfo}) => waitForSubprocessStream({
	...getAllStream(subprocess, buffer),
	fdNumber: 'all',
	encoding,
	maxBuffer: maxBuffer[1] + maxBuffer[2],
	lines: lines[1] || lines[2],
	allMixed: getAllMixed(subprocess),
	stripFinalNewline,
	verboseInfo,
	streamInfo,
});

const getAllStream = ({stdout, stderr, all}, [, bufferStdout, bufferStderr]) => {
	const buffer = bufferStdout || bufferStderr;
	if (!buffer) {
		return {stream: all, buffer};
	}

	if (!bufferStdout) {
		return {stream: stderr, buffer};
	}

	if (!bufferStderr) {
		return {stream: stdout, buffer};
	}

	return {stream: all, buffer};
};

// When `subprocess.stdout` is in objectMode but not `subprocess.stderr` (or the opposite), we need to use both:
//  - `getStreamAsArray()` for the chunks in objectMode, to return as an array without changing each chunk
//  - `getStreamAsArrayBuffer()` or `getStream()` for the chunks not in objectMode, to convert them from Buffers to string or Uint8Array
// We do this by emulating the Buffer -> string|Uint8Array conversion performed by `get-stream` with our own, which is identical.
const getAllMixed = ({all, stdout, stderr}) => all
	&& stdout
	&& stderr
	&& stdout.readableObjectMode !== stderr.readableObjectMode;
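A sketch of the `all` option these helpers implement: `stdout` and `stderr` are interleaved in arrival order into `result.all` ('example.js' is hypothetical):

import {execa} from 'execa';

const {all} = await execa('node', ['example.js'], {all: true});
console.log(all); // stdout and stderr, interleaved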
33
node_modules/execa/lib/resolve/all-sync.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
import {isUint8Array, concatUint8Arrays} from '../utils/uint-array.js';
import {stripNewline} from '../io/strip-newline.js';

// Retrieve `result.all` with synchronous methods
export const getAllSync = ([, stdout, stderr], options) => {
	if (!options.all) {
		return;
	}

	if (stdout === undefined) {
		return stderr;
	}

	if (stderr === undefined) {
		return stdout;
	}

	if (Array.isArray(stdout)) {
		return Array.isArray(stderr)
			? [...stdout, ...stderr]
			: [...stdout, stripNewline(stderr, options, 'all')];
	}

	if (Array.isArray(stderr)) {
		return [stripNewline(stdout, options, 'all'), ...stderr];
	}

	if (isUint8Array(stdout) && isUint8Array(stderr)) {
		return concatUint8Arrays([stdout, stderr]);
	}

	return `${stdout}${stderr}`;
};
54
node_modules/execa/lib/resolve/exit-async.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
import {once} from 'node:events';
import {DiscardedError} from '../return/final-error.js';

// If `error` is emitted before `spawn`, `exit` will never be emitted.
// However, `error` might be emitted after `spawn`.
// In that case, `exit` will still be emitted.
// Since the `exit` event contains the signal name, we want to make sure we are listening for it.
// This function also takes into account the following unlikely cases:
//  - `exit` being emitted in the same microtask as `spawn`
//  - `error` being emitted multiple times
export const waitForExit = async (subprocess, context) => {
	const [exitCode, signal] = await waitForExitOrError(subprocess);
	context.isForcefullyTerminated ??= false;
	return [exitCode, signal];
};

const waitForExitOrError = async subprocess => {
	const [spawnPayload, exitPayload] = await Promise.allSettled([
		once(subprocess, 'spawn'),
		once(subprocess, 'exit'),
	]);

	if (spawnPayload.status === 'rejected') {
		return [];
	}

	return exitPayload.status === 'rejected'
		? waitForSubprocessExit(subprocess)
		: exitPayload.value;
};

const waitForSubprocessExit = async subprocess => {
	try {
		return await once(subprocess, 'exit');
	} catch {
		return waitForSubprocessExit(subprocess);
	}
};

// Retrieve the final exit code and|or signal name
export const waitForSuccessfulExit = async exitPromise => {
	const [exitCode, signal] = await exitPromise;

	if (!isSubprocessErrorExit(exitCode, signal) && isFailedExit(exitCode, signal)) {
		throw new DiscardedError();
	}

	return [exitCode, signal];
};

// When the subprocess fails due to an `error` event
const isSubprocessErrorExit = (exitCode, signal) => exitCode === undefined && signal === undefined;
// When the subprocess fails due to a non-0 exit code or to a signal termination
export const isFailedExit = (exitCode, signal) => exitCode !== 0 || signal !== null;
25
node_modules/execa/lib/resolve/exit-sync.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
import {DiscardedError} from '../return/final-error.js';
import {isMaxBufferSync} from '../io/max-buffer.js';
import {isFailedExit} from './exit-async.js';

// Retrieve exit code, signal name and error information, with synchronous methods
export const getExitResultSync = ({error, status: exitCode, signal, output}, {maxBuffer}) => {
	const resultError = getResultError(error, exitCode, signal);
	const timedOut = resultError?.code === 'ETIMEDOUT';
	const isMaxBuffer = isMaxBufferSync(resultError, output, maxBuffer);
	return {
		resultError,
		exitCode,
		signal,
		timedOut,
		isMaxBuffer,
	};
};

const getResultError = (error, exitCode, signal) => {
	if (error !== undefined) {
		return error;
	}

	return isFailedExit(exitCode, signal) ? new DiscardedError() : undefined;
};
47
node_modules/execa/lib/resolve/stdio.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
import {getStreamOutput} from '../io/contents.js';
import {waitForStream, isInputFileDescriptor} from './wait-stream.js';

// Read the contents of `subprocess.std*` and|or wait for its completion
export const waitForStdioStreams = ({subprocess, encoding, buffer, maxBuffer, lines, stripFinalNewline, verboseInfo, streamInfo}) => subprocess.stdio.map((stream, fdNumber) => waitForSubprocessStream({
	stream,
	fdNumber,
	encoding,
	buffer: buffer[fdNumber],
	maxBuffer: maxBuffer[fdNumber],
	lines: lines[fdNumber],
	allMixed: false,
	stripFinalNewline,
	verboseInfo,
	streamInfo,
}));

// Read the contents of `subprocess.std*` or `subprocess.all` and|or wait for its completion
export const waitForSubprocessStream = async ({stream, fdNumber, encoding, buffer, maxBuffer, lines, allMixed, stripFinalNewline, verboseInfo, streamInfo}) => {
	if (!stream) {
		return;
	}

	const onStreamEnd = waitForStream(stream, fdNumber, streamInfo);
	if (isInputFileDescriptor(streamInfo, fdNumber)) {
		await onStreamEnd;
		return;
	}

	const [output] = await Promise.all([
		getStreamOutput({
			stream,
			onStreamEnd,
			fdNumber,
			encoding,
			buffer,
			maxBuffer,
			lines,
			allMixed,
			stripFinalNewline,
			verboseInfo,
			streamInfo,
		}),
		onStreamEnd,
	]);
	return output;
};
96
node_modules/execa/lib/resolve/wait-stream.js
generated
vendored
Normal file
@@ -0,0 +1,96 @@
import {finished} from 'node:stream/promises';

// Wraps `finished(stream)` to handle the following case:
//  - When the subprocess exits, Node.js automatically calls `subprocess.stdin.destroy()`, which we need to ignore.
//  - However, we still need to throw if `subprocess.stdin.destroy()` is called before subprocess exit.
export const waitForStream = async (stream, fdNumber, streamInfo, {isSameDirection, stopOnExit = false} = {}) => {
	const state = handleStdinDestroy(stream, streamInfo);
	const abortController = new AbortController();
	try {
		await Promise.race([
			...(stopOnExit ? [streamInfo.exitPromise] : []),
			finished(stream, {cleanup: true, signal: abortController.signal}),
		]);
	} catch (error) {
		if (!state.stdinCleanedUp) {
			handleStreamError(error, fdNumber, streamInfo, isSameDirection);
		}
	} finally {
		abortController.abort();
	}
};

// If `subprocess.stdin` is destroyed before being fully written to, it is considered aborted and should throw an error.
// This can happen for example when the user calls `subprocess.stdin.destroy()` before `subprocess.stdin.end()`.
// However, Node.js calls `subprocess.stdin.destroy()` on exit for cleanup purposes.
// https://github.com/nodejs/node/blob/0b4cdb4b42956cbd7019058e409e06700a199e11/lib/internal/child_process.js#L278
// This is normal and should not throw an error.
// Therefore, we need to differentiate between both situations to know whether to throw an error.
// Unfortunately, events (`close`, `error`, `end`, `exit`) cannot be used because `.destroy()` can take an arbitrary amount of time.
// For example, `stdin: 'pipe'` is implemented as a TCP socket, and its `.destroy()` method waits for TCP disconnection.
// Therefore `.destroy()` might end before or after subprocess exit, based on OS speed and load.
// The only way to detect this is to spy on `subprocess.stdin._destroy()` by wrapping it.
// If `subprocess.exitCode` or `subprocess.signalCode` is set, it means `.destroy()` is being called by Node.js itself.
const handleStdinDestroy = (stream, {originalStreams: [originalStdin], subprocess}) => {
	const state = {stdinCleanedUp: false};
	if (stream === originalStdin) {
		spyOnStdinDestroy(stream, subprocess, state);
	}

	return state;
};

const spyOnStdinDestroy = (subprocessStdin, subprocess, state) => {
	const {_destroy} = subprocessStdin;
	subprocessStdin._destroy = (...destroyArguments) => {
		setStdinCleanedUp(subprocess, state);
		_destroy.call(subprocessStdin, ...destroyArguments);
	};
};

const setStdinCleanedUp = ({exitCode, signalCode}, state) => {
	if (exitCode !== null || signalCode !== null) {
		state.stdinCleanedUp = true;
	}
};

// We ignore EPIPEs on writable streams and aborts on readable streams since those can happen normally.
// When one stream errors, the error is propagated to the other streams on the same file descriptor.
// Those other streams might have a different direction due to the above.
// When this happens, the direction of both the initial stream and the others should then be taken into account.
// Therefore, we keep track of whether a stream error is currently propagating.
const handleStreamError = (error, fdNumber, streamInfo, isSameDirection) => {
	if (!shouldIgnoreStreamError(error, fdNumber, streamInfo, isSameDirection)) {
		throw error;
	}
};

const shouldIgnoreStreamError = (error, fdNumber, streamInfo, isSameDirection = true) => {
	if (streamInfo.propagating) {
		return isStreamEpipe(error) || isStreamAbort(error);
	}

	streamInfo.propagating = true;
	return isInputFileDescriptor(streamInfo, fdNumber) === isSameDirection
		? isStreamEpipe(error)
		: isStreamAbort(error);
};

// Unfortunately, we cannot use the stream's class or properties to know whether it is readable or writable.
// For example, `subprocess.stdin` is technically a Duplex, but can only be used as a writable.
// Therefore, we need to use the file descriptor's direction (`stdin` is input, `stdout` is output, etc.).
// However, while `subprocess.std*` and transforms follow that direction, any stream passed to the `std*` option has the opposite direction.
// For example, `subprocess.stdin` is a writable, but the `stdin` option is a readable.
export const isInputFileDescriptor = ({fileDescriptors}, fdNumber) => fdNumber !== 'all' && fileDescriptors[fdNumber].direction === 'input';

// When `stream.destroy()` is called without an `error` argument, the stream is aborted.
// This is the only way to abort a readable stream, which can be useful in some instances.
// Therefore, we ignore this error on readable streams.
export const isStreamAbort = error => error?.code === 'ERR_STREAM_PREMATURE_CLOSE';

// When `stream.write()` is called but the underlying source has been closed, `EPIPE` is emitted.
// When piping subprocesses, the source subprocess usually decides when to stop piping.
// However, there are some instances when the destination does instead, such as `... | head -n1`.
// It notifies the source by using `EPIPE`.
// Therefore, we ignore this error on writable streams.
const isStreamEpipe = error => error?.code === 'EPIPE';
146
node_modules/execa/lib/resolve/wait-subprocess.js
generated
vendored
Normal file
@@ -0,0 +1,146 @@
import {once} from 'node:events';
import {isStream as isNodeStream} from 'is-stream';
import {throwOnTimeout} from '../terminate/timeout.js';
import {throwOnCancel} from '../terminate/cancel.js';
import {throwOnGracefulCancel} from '../terminate/graceful.js';
import {isStandardStream} from '../utils/standard-stream.js';
import {TRANSFORM_TYPES} from '../stdio/type.js';
import {getBufferedData} from '../io/contents.js';
import {waitForIpcOutput, getBufferedIpcOutput} from '../ipc/buffer-messages.js';
import {sendIpcInput} from '../ipc/ipc-input.js';
import {waitForAllStream} from './all-async.js';
import {waitForStdioStreams} from './stdio.js';
import {waitForExit, waitForSuccessfulExit} from './exit-async.js';
import {waitForStream} from './wait-stream.js';

// Retrieve result of subprocess: exit code, signal, error, streams (stdout/stderr/all)
export const waitForSubprocessResult = async ({
	subprocess,
	options: {
		encoding,
		buffer,
		maxBuffer,
		lines,
		timeoutDuration: timeout,
		cancelSignal,
		gracefulCancel,
		forceKillAfterDelay,
		stripFinalNewline,
		ipc,
		ipcInput,
	},
	context,
	verboseInfo,
	fileDescriptors,
	originalStreams,
	onInternalError,
	controller,
}) => {
	const exitPromise = waitForExit(subprocess, context);
	const streamInfo = {
		originalStreams,
		fileDescriptors,
		subprocess,
		exitPromise,
		propagating: false,
	};

	const stdioPromises = waitForStdioStreams({
		subprocess,
		encoding,
		buffer,
		maxBuffer,
		lines,
		stripFinalNewline,
		verboseInfo,
		streamInfo,
	});
	const allPromise = waitForAllStream({
		subprocess,
		encoding,
		buffer,
		maxBuffer,
		lines,
		stripFinalNewline,
		verboseInfo,
		streamInfo,
	});
	const ipcOutput = [];
	const ipcOutputPromise = waitForIpcOutput({
		subprocess,
		buffer,
		maxBuffer,
		ipc,
		ipcOutput,
		verboseInfo,
	});
	const originalPromises = waitForOriginalStreams(originalStreams, subprocess, streamInfo);
	const customStreamsEndPromises = waitForCustomStreamsEnd(fileDescriptors, streamInfo);

	try {
		return await Promise.race([
			Promise.all([
				{},
				waitForSuccessfulExit(exitPromise),
				Promise.all(stdioPromises),
				allPromise,
				ipcOutputPromise,
				sendIpcInput(subprocess, ipcInput),
				...originalPromises,
				...customStreamsEndPromises,
			]),
			onInternalError,
			throwOnSubprocessError(subprocess, controller),
			...throwOnTimeout(subprocess, timeout, context, controller),
			...throwOnCancel({
				subprocess,
				cancelSignal,
				gracefulCancel,
				context,
				controller,
			}),
			...throwOnGracefulCancel({
				subprocess,
				cancelSignal,
				gracefulCancel,
				forceKillAfterDelay,
				context,
				controller,
			}),
		]);
	} catch (error) {
		context.terminationReason ??= 'other';
		return Promise.all([
			{error},
			exitPromise,
			Promise.all(stdioPromises.map(stdioPromise => getBufferedData(stdioPromise))),
			getBufferedData(allPromise),
			getBufferedIpcOutput(ipcOutputPromise, ipcOutput),
			Promise.allSettled(originalPromises),
			Promise.allSettled(customStreamsEndPromises),
		]);
	}
};

// Transforms replace `subprocess.std*`, which means they are not exposed to users.
// However, we still want to wait for their completion.
const waitForOriginalStreams = (originalStreams, subprocess, streamInfo) =>
	originalStreams.map((stream, fdNumber) => stream === subprocess.stdio[fdNumber]
		? undefined
		: waitForStream(stream, fdNumber, streamInfo));

// Some `stdin`/`stdout`/`stderr` options create a stream, e.g. when passing a file path.
// The `.pipe()` method automatically ends that stream when `subprocess` ends.
// This makes sure we wait for the completion of those streams, in order to catch any error.
const waitForCustomStreamsEnd = (fileDescriptors, streamInfo) => fileDescriptors.flatMap(({stdioItems}, fdNumber) => stdioItems
	.filter(({value, stream = value}) => isNodeStream(stream, {checkOpen: false}) && !isStandardStream(stream))
	.map(({type, value, stream = value}) => waitForStream(stream, fdNumber, streamInfo, {
		isSameDirection: TRANSFORM_TYPES.has(type),
		stopOnExit: type === 'native',
	})));

// Fails when the subprocess emits an `error` event
const throwOnSubprocessError = async (subprocess, {signal}) => {
	const [error] = await once(subprocess, 'error', {signal});
	throw error;
};
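A sketch of the termination races set up above: `timeout` and `cancelSignal` each reject the result promise with the matching flag set ('slow.js' is hypothetical):

import {execa} from 'execa';

const abortController = new AbortController();
setTimeout(() => abortController.abort(), 5000);

try {
	await execa('node', ['slow.js'], {timeout: 60_000, cancelSignal: abortController.signal});
} catch (error) {
	console.error(error.timedOut, error.isCanceled);
}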
8
node_modules/execa/lib/return/duration.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
import {hrtime} from 'node:process';
|
||||
|
||||
// Start counting time before spawning the subprocess
|
||||
export const getStartTime = () => hrtime.bigint();
|
||||
|
||||
// Compute duration after the subprocess ended.
|
||||
// Printed by the `verbose` option.
|
||||
export const getDurationMs = startTime => Number(hrtime.bigint() - startTime) / 1e6;
|
||||
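`getDurationMs()` uses the monotonic `process.hrtime.bigint()` clock rather than `Date.now()`, so wall-clock adjustments cannot skew `durationMs`. A quick standalone check of the same arithmetic:

import {hrtime} from 'node:process';
import {setTimeout as delay} from 'node:timers/promises';

const startTime = hrtime.bigint();
await delay(100);
// Nanosecond difference converted to milliseconds, prints roughly 100
console.log(Number(hrtime.bigint() - startTime) / 1e6);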
60
node_modules/execa/lib/return/early-error.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
import {ChildProcess} from 'node:child_process';
import {
	PassThrough,
	Readable,
	Writable,
	Duplex,
} from 'node:stream';
import {cleanupCustomStreams} from '../stdio/handle.js';
import {makeEarlyError} from './result.js';
import {handleResult} from './reject.js';

// When the subprocess fails to spawn,
// we ensure the returned value is still both a promise and a subprocess.
export const handleEarlyError = ({error, command, escapedCommand, fileDescriptors, options, startTime, verboseInfo}) => {
	cleanupCustomStreams(fileDescriptors);

	const subprocess = new ChildProcess();
	createDummyStreams(subprocess, fileDescriptors);
	Object.assign(subprocess, {readable, writable, duplex});

	const earlyError = makeEarlyError({
		error,
		command,
		escapedCommand,
		fileDescriptors,
		options,
		startTime,
		isSync: false,
	});
	const promise = handleDummyPromise(earlyError, verboseInfo, options);
	return {subprocess, promise};
};

const createDummyStreams = (subprocess, fileDescriptors) => {
	const stdin = createDummyStream();
	const stdout = createDummyStream();
	const stderr = createDummyStream();
	const extraStdio = Array.from({length: fileDescriptors.length - 3}, createDummyStream);
	const all = createDummyStream();
	const stdio = [stdin, stdout, stderr, ...extraStdio];
	Object.assign(subprocess, {
		stdin,
		stdout,
		stderr,
		all,
		stdio,
	});
};

const createDummyStream = () => {
	const stream = new PassThrough();
	stream.end();
	return stream;
};

const readable = () => new Readable({read() {}});
const writable = () => new Writable({write() {}});
const duplex = () => new Duplex({read() {}, write() {}});

const handleDummyPromise = async (error, verboseInfo, options) => handleResult(error, verboseInfo, options);
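The dummy streams matter because callers may attach listeners before noticing the spawn failure; an already-ended `PassThrough` lets reads finish immediately instead of hanging. The core trick in isolation:

import {PassThrough} from 'node:stream';

const stream = new PassThrough();
stream.end();

stream.on('data', () => {});
stream.on('end', () => {
	// Fires on the next tick since the stream is already finished
	console.log('dummy stream ended immediately');
});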
40
node_modules/execa/lib/return/final-error.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
// When the subprocess fails, this is the error instance being returned.
// If another error instance is being thrown, it is kept as `error.cause`.
export const getFinalError = (originalError, message, isSync) => {
	const ErrorClass = isSync ? ExecaSyncError : ExecaError;
	const options = originalError instanceof DiscardedError ? {} : {cause: originalError};
	return new ErrorClass(message, options);
};

// Indicates that the error is used only to interrupt control flow, but not in the return value
export class DiscardedError extends Error {}

// Proper way to set `error.name`: it should be inherited and non-enumerable
const setErrorName = (ErrorClass, value) => {
	Object.defineProperty(ErrorClass.prototype, 'name', {
		value,
		writable: true,
		enumerable: false,
		configurable: true,
	});
	Object.defineProperty(ErrorClass.prototype, execaErrorSymbol, {
		value: true,
		writable: false,
		enumerable: false,
		configurable: false,
	});
};

// Unlike `instanceof`, this works across realms
export const isExecaError = error => isErrorInstance(error) && execaErrorSymbol in error;

const execaErrorSymbol = Symbol('isExecaError');

export const isErrorInstance = value => Object.prototype.toString.call(value) === '[object Error]';

// We use two different Error classes for async/sync methods since they have slightly different shape and types
export class ExecaError extends Error {}
setErrorName(ExecaError, ExecaError.name);

export class ExecaSyncError extends Error {}
setErrorName(ExecaSyncError, ExecaSyncError.name);
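The symbol-branding approach matters because `instanceof` fails across realms (e.g. `node:vm` contexts), while `Object.prototype.toString` tagging does not. A standalone demonstration of the difference:

import vm from 'node:vm';

const foreignError = vm.runInNewContext('new Error("boom")');

// `instanceof` misses errors created in another realm...
console.log(foreignError instanceof Error); // false

// ...but the tag check used above still recognizes them
console.log(Object.prototype.toString.call(foreignError) === '[object Error]'); // true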
157
node_modules/execa/lib/return/message.js
generated
vendored
Normal file
@@ -0,0 +1,157 @@
import {inspect} from 'node:util';
import stripFinalNewline from 'strip-final-newline';
import {isUint8Array, uint8ArrayToString} from '../utils/uint-array.js';
import {fixCwdError} from '../arguments/cwd.js';
import {escapeLines} from '../arguments/escape.js';
import {getMaxBufferMessage} from '../io/max-buffer.js';
import {getSignalDescription} from '../terminate/signal.js';
import {DiscardedError, isExecaError} from './final-error.js';

// Computes `error.message`, `error.shortMessage` and `error.originalMessage`
export const createMessages = ({
	stdio,
	all,
	ipcOutput,
	originalError,
	signal,
	signalDescription,
	exitCode,
	escapedCommand,
	timedOut,
	isCanceled,
	isGracefullyCanceled,
	isMaxBuffer,
	isForcefullyTerminated,
	forceKillAfterDelay,
	killSignal,
	maxBuffer,
	timeout,
	cwd,
}) => {
	const errorCode = originalError?.code;
	const prefix = getErrorPrefix({
		originalError,
		timedOut,
		timeout,
		isMaxBuffer,
		maxBuffer,
		errorCode,
		signal,
		signalDescription,
		exitCode,
		isCanceled,
		isGracefullyCanceled,
		isForcefullyTerminated,
		forceKillAfterDelay,
		killSignal,
	});
	const originalMessage = getOriginalMessage(originalError, cwd);
	const suffix = originalMessage === undefined ? '' : `\n${originalMessage}`;
	const shortMessage = `${prefix}: ${escapedCommand}${suffix}`;
	const messageStdio = all === undefined ? [stdio[2], stdio[1]] : [all];
	const message = [
		shortMessage,
		...messageStdio,
		...stdio.slice(3),
		ipcOutput.map(ipcMessage => serializeIpcMessage(ipcMessage)).join('\n'),
	]
		.map(messagePart => escapeLines(stripFinalNewline(serializeMessagePart(messagePart))))
		.filter(Boolean)
		.join('\n\n');
	return {originalMessage, shortMessage, message};
};

const getErrorPrefix = ({
	originalError,
	timedOut,
	timeout,
	isMaxBuffer,
	maxBuffer,
	errorCode,
	signal,
	signalDescription,
	exitCode,
	isCanceled,
	isGracefullyCanceled,
	isForcefullyTerminated,
	forceKillAfterDelay,
	killSignal,
}) => {
	const forcefulSuffix = getForcefulSuffix(isForcefullyTerminated, forceKillAfterDelay);

	if (timedOut) {
		return `Command timed out after ${timeout} milliseconds${forcefulSuffix}`;
	}

	if (isGracefullyCanceled) {
		if (signal === undefined) {
			return `Command was gracefully canceled with exit code ${exitCode}`;
		}

		return isForcefullyTerminated
			? `Command was gracefully canceled${forcefulSuffix}`
			: `Command was gracefully canceled with ${signal} (${signalDescription})`;
	}

	if (isCanceled) {
		return `Command was canceled${forcefulSuffix}`;
	}

	if (isMaxBuffer) {
		return `${getMaxBufferMessage(originalError, maxBuffer)}${forcefulSuffix}`;
	}

	if (errorCode !== undefined) {
		return `Command failed with ${errorCode}${forcefulSuffix}`;
	}

	if (isForcefullyTerminated) {
		return `Command was killed with ${killSignal} (${getSignalDescription(killSignal)})${forcefulSuffix}`;
	}

	if (signal !== undefined) {
		return `Command was killed with ${signal} (${signalDescription})`;
	}

	if (exitCode !== undefined) {
		return `Command failed with exit code ${exitCode}`;
	}

	return 'Command failed';
};

const getForcefulSuffix = (isForcefullyTerminated, forceKillAfterDelay) => isForcefullyTerminated
	? ` and was forcefully terminated after ${forceKillAfterDelay} milliseconds`
	: '';

const getOriginalMessage = (originalError, cwd) => {
	if (originalError instanceof DiscardedError) {
		return;
	}

	const originalMessage = isExecaError(originalError)
		? originalError.originalMessage
		: String(originalError?.message ?? originalError);
	const escapedOriginalMessage = escapeLines(fixCwdError(originalMessage, cwd));
	return escapedOriginalMessage === '' ? undefined : escapedOriginalMessage;
};

const serializeIpcMessage = ipcMessage => typeof ipcMessage === 'string'
	? ipcMessage
	: inspect(ipcMessage);

const serializeMessagePart = messagePart => Array.isArray(messagePart)
	? messagePart.map(messageItem => stripFinalNewline(serializeMessageItem(messageItem))).filter(Boolean).join('\n')
	: serializeMessageItem(messagePart);

const serializeMessageItem = messageItem => {
	if (typeof messageItem === 'string') {
		return messageItem;
	}

	if (isUint8Array(messageItem)) {
		return uint8ArrayToString(messageItem);
	}

	return '';
};
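Concretely, `shortMessage` is the one-line prefix plus the escaped command, and `message` appends each captured output (stderr before stdout, per `messageStdio` above). A hedged sketch with execa's public API (the inline script is illustrative):

import {execa} from 'execa';

try {
	await execa('node', ['-e', 'console.error("oops"); process.exit(3)']);
} catch (error) {
	console.log(error.shortMessage); // 'Command failed with exit code 3: node -e ...'
	console.log(error.message); // shortMessage, then stderr ('oops'), then stdout
}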
13
node_modules/execa/lib/return/reject.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
import {logResult} from '../verbose/complete.js';

// Applies the `reject` option.
// Also prints the final log line with `verbose`.
export const handleResult = (result, verboseInfo, {reject}) => {
	logResult(result, verboseInfo);

	if (result.failed && reject) {
		throw result;
	}

	return result;
};
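So a failed result is thrown by default but returned as a plain value with `reject: false`. Public-API usage (inline script illustrative):

import {execa} from 'execa';

const result = await execa('node', ['-e', 'process.exit(2)'], {reject: false});
console.log(result.failed, result.exitCode); // true 2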
186
node_modules/execa/lib/return/result.js
generated
vendored
Normal file
@@ -0,0 +1,186 @@
import {getSignalDescription} from '../terminate/signal.js';
import {getDurationMs} from './duration.js';
import {getFinalError} from './final-error.js';
import {createMessages} from './message.js';

// Object returned on subprocess success
export const makeSuccessResult = ({
	command,
	escapedCommand,
	stdio,
	all,
	ipcOutput,
	options: {cwd},
	startTime,
}) => omitUndefinedProperties({
	command,
	escapedCommand,
	cwd,
	durationMs: getDurationMs(startTime),
	failed: false,
	timedOut: false,
	isCanceled: false,
	isGracefullyCanceled: false,
	isTerminated: false,
	isMaxBuffer: false,
	isForcefullyTerminated: false,
	exitCode: 0,
	stdout: stdio[1],
	stderr: stdio[2],
	all,
	stdio,
	ipcOutput,
	pipedFrom: [],
});

// Object returned on subprocess failure before spawning
export const makeEarlyError = ({
	error,
	command,
	escapedCommand,
	fileDescriptors,
	options,
	startTime,
	isSync,
}) => makeError({
	error,
	command,
	escapedCommand,
	startTime,
	timedOut: false,
	isCanceled: false,
	isGracefullyCanceled: false,
	isMaxBuffer: false,
	isForcefullyTerminated: false,
	stdio: Array.from({length: fileDescriptors.length}),
	ipcOutput: [],
	options,
	isSync,
});

// Object returned on subprocess failure
export const makeError = ({
	error: originalError,
	command,
	escapedCommand,
	startTime,
	timedOut,
	isCanceled,
	isGracefullyCanceled,
	isMaxBuffer,
	isForcefullyTerminated,
	exitCode: rawExitCode,
	signal: rawSignal,
	stdio,
	all,
	ipcOutput,
	options: {
		timeoutDuration,
		timeout = timeoutDuration,
		forceKillAfterDelay,
		killSignal,
		cwd,
		maxBuffer,
	},
	isSync,
}) => {
	const {exitCode, signal, signalDescription} = normalizeExitPayload(rawExitCode, rawSignal);
	const {originalMessage, shortMessage, message} = createMessages({
		stdio,
		all,
		ipcOutput,
		originalError,
		signal,
		signalDescription,
		exitCode,
		escapedCommand,
		timedOut,
		isCanceled,
		isGracefullyCanceled,
		isMaxBuffer,
		isForcefullyTerminated,
		forceKillAfterDelay,
		killSignal,
		maxBuffer,
		timeout,
		cwd,
	});
	const error = getFinalError(originalError, message, isSync);
	Object.assign(error, getErrorProperties({
		error,
		command,
		escapedCommand,
		startTime,
		timedOut,
		isCanceled,
		isGracefullyCanceled,
		isMaxBuffer,
		isForcefullyTerminated,
		exitCode,
		signal,
		signalDescription,
		stdio,
		all,
		ipcOutput,
		cwd,
		originalMessage,
		shortMessage,
	}));
	return error;
};

const getErrorProperties = ({
	error,
	command,
	escapedCommand,
	startTime,
	timedOut,
	isCanceled,
	isGracefullyCanceled,
	isMaxBuffer,
	isForcefullyTerminated,
	exitCode,
	signal,
	signalDescription,
	stdio,
	all,
	ipcOutput,
	cwd,
	originalMessage,
	shortMessage,
}) => omitUndefinedProperties({
	shortMessage,
	originalMessage,
	command,
	escapedCommand,
	cwd,
	durationMs: getDurationMs(startTime),
	failed: true,
	timedOut,
	isCanceled,
	isGracefullyCanceled,
	isTerminated: signal !== undefined,
	isMaxBuffer,
	isForcefullyTerminated,
	exitCode,
	signal,
	signalDescription,
	code: error.cause?.code,
	stdout: stdio[1],
	stderr: stdio[2],
	all,
	stdio,
	ipcOutput,
	pipedFrom: [],
});

const omitUndefinedProperties = result => Object.fromEntries(Object.entries(result).filter(([, value]) => value !== undefined));

// `signal` and `exitCode` emitted on `subprocess.on('exit')` event can be `null`.
// We normalize them to `undefined`
const normalizeExitPayload = (rawExitCode, rawSignal) => {
	const exitCode = rawExitCode === null ? undefined : rawExitCode;
	const signal = rawSignal === null ? undefined : rawSignal;
	const signalDescription = signal === undefined ? undefined : getSignalDescription(rawSignal);
	return {exitCode, signal, signalDescription};
};
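On the `exit` event, exactly one of `exitCode`/`signal` is set and the other is `null`; mapping `null` to `undefined` lets `omitUndefinedProperties()` drop the unset one from the result. The same normalization in isolation:

const normalize = (rawExitCode, rawSignal) => ({
	exitCode: rawExitCode === null ? undefined : rawExitCode,
	signal: rawSignal === null ? undefined : rawSignal,
});

console.log(normalize(0, null)); // { exitCode: 0, signal: undefined }
console.log(normalize(null, 'SIGTERM')); // { exitCode: undefined, signal: 'SIGTERM' }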
76
node_modules/execa/lib/stdio/direction.js
generated
vendored
Normal file
@@ -0,0 +1,76 @@
import process from 'node:process';
import {
	isStream as isNodeStream,
	isReadableStream as isNodeReadableStream,
	isWritableStream as isNodeWritableStream,
} from 'is-stream';
import {isWritableStream} from './type.js';

// For `stdio[fdNumber]` beyond stdin/stdout/stderr, we need to guess whether the value passed is intended for inputs or outputs.
// This allows us to know whether to pipe _into_ or _from_ the stream.
// When `stdio[fdNumber]` is a single value, this guess is fairly straightforward.
// However, when it is an array instead, we also need to make sure the different values are not incompatible with each other.
export const getStreamDirection = (stdioItems, fdNumber, optionName) => {
	const directions = stdioItems.map(stdioItem => getStdioItemDirection(stdioItem, fdNumber));

	if (directions.includes('input') && directions.includes('output')) {
		throw new TypeError(`The \`${optionName}\` option must not be an array of both readable and writable values.`);
	}

	return directions.find(Boolean) ?? DEFAULT_DIRECTION;
};

const getStdioItemDirection = ({type, value}, fdNumber) => KNOWN_DIRECTIONS[fdNumber] ?? guessStreamDirection[type](value);

// `stdin`/`stdout`/`stderr` have a known direction
const KNOWN_DIRECTIONS = ['input', 'output', 'output'];

const anyDirection = () => undefined;
const alwaysInput = () => 'input';

// `string` can only be added through the `input` option, i.e. does not need to be handled here
const guessStreamDirection = {
	generator: anyDirection,
	asyncGenerator: anyDirection,
	fileUrl: anyDirection,
	filePath: anyDirection,
	iterable: alwaysInput,
	asyncIterable: alwaysInput,
	uint8Array: alwaysInput,
	webStream: value => isWritableStream(value) ? 'output' : 'input',
	nodeStream(value) {
		if (!isNodeReadableStream(value, {checkOpen: false})) {
			return 'output';
		}

		return isNodeWritableStream(value, {checkOpen: false}) ? undefined : 'input';
	},
	webTransform: anyDirection,
	duplex: anyDirection,
	native(value) {
		const standardStreamDirection = getStandardStreamDirection(value);
		if (standardStreamDirection !== undefined) {
			return standardStreamDirection;
		}

		if (isNodeStream(value, {checkOpen: false})) {
			return guessStreamDirection.nodeStream(value);
		}
	},
};

const getStandardStreamDirection = value => {
	if ([0, process.stdin].includes(value)) {
		return 'input';
	}

	if ([1, 2, process.stdout, process.stderr].includes(value)) {
		return 'output';
	}
};

// When ambiguous, we initially keep the direction as `undefined`.
// This allows arrays of `stdio` values to resolve the ambiguity.
// For example, `stdio[3]: DuplexStream` is ambiguous, but `stdio[3]: [DuplexStream, WritableStream]` is not.
// When the ambiguity remains, we default to `output` since it is the most common use case for additional file descriptors.
const DEFAULT_DIRECTION = 'output';
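In practice this means a lone Writable on an extra file descriptor is guessed as `output` and piped from the subprocess. A hedged public-API sketch (file name and inline script are illustrative; assumes a Unix-like platform where the child can write to fd 3):

import {execa} from 'execa';
import {createWriteStream} from 'node:fs';

// fd 3 is guessed as 'output': execa pipes from the subprocess into the sink
const sink = createWriteStream('fd3-output.txt');
await execa('node', ['-e', 'require("fs").writeSync(3, "extra output")'], {
	stdio: ['inherit', 'inherit', 'inherit', sink],
});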
116
node_modules/execa/lib/stdio/duplicate.js
generated
vendored
Normal file
@@ -0,0 +1,116 @@
import {
	SPECIAL_DUPLICATE_TYPES_SYNC,
	SPECIAL_DUPLICATE_TYPES,
	FORBID_DUPLICATE_TYPES,
	TYPE_TO_MESSAGE,
} from './type.js';

// Duplicates in the same file descriptor are most likely an error.
// However, this can be useful with generators.
export const filterDuplicates = stdioItems => stdioItems.filter((stdioItemOne, indexOne) =>
	stdioItems.every((stdioItemTwo, indexTwo) => stdioItemOne.value !== stdioItemTwo.value
		|| indexOne >= indexTwo
		|| stdioItemOne.type === 'generator'
		|| stdioItemOne.type === 'asyncGenerator'));

// Check if two file descriptors are sharing the same target.
// For example `{stdout: {file: './output.txt'}, stderr: {file: './output.txt'}}`.
export const getDuplicateStream = ({stdioItem: {type, value, optionName}, direction, fileDescriptors, isSync}) => {
	const otherStdioItems = getOtherStdioItems(fileDescriptors, type);
	if (otherStdioItems.length === 0) {
		return;
	}

	if (isSync) {
		validateDuplicateStreamSync({
			otherStdioItems,
			type,
			value,
			optionName,
			direction,
		});
		return;
	}

	if (SPECIAL_DUPLICATE_TYPES.has(type)) {
		return getDuplicateStreamInstance({
			otherStdioItems,
			type,
			value,
			optionName,
			direction,
		});
	}

	if (FORBID_DUPLICATE_TYPES.has(type)) {
		validateDuplicateTransform({
			otherStdioItems,
			type,
			value,
			optionName,
		});
	}
};

// Values shared by multiple file descriptors
const getOtherStdioItems = (fileDescriptors, type) => fileDescriptors
	.flatMap(({direction, stdioItems}) => stdioItems
		.filter(stdioItem => stdioItem.type === type)
		.map((stdioItem => ({...stdioItem, direction}))));

// With `execaSync()`, do not allow setting a file path both in input and output
const validateDuplicateStreamSync = ({otherStdioItems, type, value, optionName, direction}) => {
	if (SPECIAL_DUPLICATE_TYPES_SYNC.has(type)) {
		getDuplicateStreamInstance({
			otherStdioItems,
			type,
			value,
			optionName,
			direction,
		});
	}
};

// When two file descriptors share the file or stream, we need to re-use the same underlying stream.
// Otherwise, the stream would be closed twice when piping ends.
// This is only an issue with output file descriptors.
// This is not a problem with generator functions since those create a new instance for each file descriptor.
// We also forbid input and output file descriptors sharing the same file or stream, since that does not make sense.
const getDuplicateStreamInstance = ({otherStdioItems, type, value, optionName, direction}) => {
	const duplicateStdioItems = otherStdioItems.filter(stdioItem => hasSameValue(stdioItem, value));
	if (duplicateStdioItems.length === 0) {
		return;
	}

	const differentStdioItem = duplicateStdioItems.find(stdioItem => stdioItem.direction !== direction);
	throwOnDuplicateStream(differentStdioItem, optionName, type);

	return direction === 'output' ? duplicateStdioItems[0].stream : undefined;
};

const hasSameValue = ({type, value}, secondValue) => {
	if (type === 'filePath') {
		return value.file === secondValue.file;
	}

	if (type === 'fileUrl') {
		return value.href === secondValue.href;
	}

	return value === secondValue;
};

// We do not allow two file descriptors to share the same Duplex or TransformStream.
// This is because those are set directly to `subprocess.std*`.
// For example, this could result in `subprocess.stdout` and `subprocess.stderr` being the same value.
// This means reading from either would get data from both stdout and stderr.
const validateDuplicateTransform = ({otherStdioItems, type, value, optionName}) => {
	const duplicateStdioItem = otherStdioItems.find(({value: {transform}}) => transform === value.transform);
	throwOnDuplicateStream(duplicateStdioItem, optionName, type);
};

const throwOnDuplicateStream = (stdioItem, optionName, type) => {
	if (stdioItem !== undefined) {
		throw new TypeError(`The \`${stdioItem.optionName}\` and \`${optionName}\` options must not target ${TYPE_TO_MESSAGE[type]} that is the same.`);
	}
};
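So pointing `stdout` and `stderr` at the same file works: the duplicate is detected and a single write stream is shared instead of being opened and closed twice. Public-API example (path illustrative):

import {execa} from 'execa';

await execa('node', ['-e', 'console.log("out"); console.error("err")'], {
	stdout: {file: 'combined.txt'},
	stderr: {file: 'combined.txt'},
});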
52
node_modules/execa/lib/stdio/handle-async.js
generated
vendored
Normal file
@@ -0,0 +1,52 @@
import {createReadStream, createWriteStream} from 'node:fs';
import {Buffer} from 'node:buffer';
import {Readable, Writable, Duplex} from 'node:stream';
import {generatorToStream} from '../transform/generator.js';
import {handleStdio} from './handle.js';
import {TYPE_TO_MESSAGE} from './type.js';

// Handle `input`, `inputFile`, `stdin`, `stdout` and `stderr` options, before spawning, in async mode
export const handleStdioAsync = (options, verboseInfo) => handleStdio(addPropertiesAsync, options, verboseInfo, false);

const forbiddenIfAsync = ({type, optionName}) => {
	throw new TypeError(`The \`${optionName}\` option cannot be ${TYPE_TO_MESSAGE[type]}.`);
};

// Create streams used internally for piping when using specific values for the `std*` options, in async mode.
// For example, `stdout: {file}` creates a file stream, which is piped from/to.
const addProperties = {
	fileNumber: forbiddenIfAsync,
	generator: generatorToStream,
	asyncGenerator: generatorToStream,
	nodeStream: ({value}) => ({stream: value}),
	webTransform({value: {transform, writableObjectMode, readableObjectMode}}) {
		const objectMode = writableObjectMode || readableObjectMode;
		const stream = Duplex.fromWeb(transform, {objectMode});
		return {stream};
	},
	duplex: ({value: {transform}}) => ({stream: transform}),
	native() {},
};

const addPropertiesAsync = {
	input: {
		...addProperties,
		fileUrl: ({value}) => ({stream: createReadStream(value)}),
		filePath: ({value: {file}}) => ({stream: createReadStream(file)}),
		webStream: ({value}) => ({stream: Readable.fromWeb(value)}),
		iterable: ({value}) => ({stream: Readable.from(value)}),
		asyncIterable: ({value}) => ({stream: Readable.from(value)}),
		string: ({value}) => ({stream: Readable.from(value)}),
		uint8Array: ({value}) => ({stream: Readable.from(Buffer.from(value))}),
	},
	output: {
		...addProperties,
		fileUrl: ({value}) => ({stream: createWriteStream(value)}),
		filePath: ({value: {file, append}}) => ({stream: createWriteStream(file, append ? {flags: 'a'} : {})}),
		webStream: ({value}) => ({stream: Writable.fromWeb(value)}),
		iterable: forbiddenIfAsync,
		asyncIterable: forbiddenIfAsync,
		string: forbiddenIfAsync,
		uint8Array: forbiddenIfAsync,
	},
};
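For instance, an (async) iterable on `stdin` is wrapped with `Readable.from()` and piped into the subprocess. A hedged public-API sketch (assumes a Unix-like `cat`):

import {execa} from 'execa';

const lines = (async function * () {
	yield 'hello\n';
	yield 'world';
})();

const {stdout} = await execa('cat', {stdin: lines});
console.log(stdout); // 'hello\nworld'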
57
node_modules/execa/lib/stdio/handle-sync.js
generated
vendored
Normal file
@@ -0,0 +1,57 @@
import {readFileSync} from 'node:fs';
import {bufferToUint8Array} from '../utils/uint-array.js';
import {handleStdio} from './handle.js';
import {TYPE_TO_MESSAGE} from './type.js';

// Normalize `input`, `inputFile`, `stdin`, `stdout` and `stderr` options, before spawning, in sync mode
export const handleStdioSync = (options, verboseInfo) => handleStdio(addPropertiesSync, options, verboseInfo, true);

const forbiddenIfSync = ({type, optionName}) => {
	throwInvalidSyncValue(optionName, TYPE_TO_MESSAGE[type]);
};

const forbiddenNativeIfSync = ({optionName, value}) => {
	if (value === 'ipc' || value === 'overlapped') {
		throwInvalidSyncValue(optionName, `"${value}"`);
	}

	return {};
};

const throwInvalidSyncValue = (optionName, value) => {
	throw new TypeError(`The \`${optionName}\` option cannot be ${value} with synchronous methods.`);
};

// Create streams used internally for redirecting when using specific values for the `std*` options, in sync mode.
// For example, `stdin: {file}` reads the file synchronously, then passes it as the `input` option.
const addProperties = {
	generator() {},
	asyncGenerator: forbiddenIfSync,
	webStream: forbiddenIfSync,
	nodeStream: forbiddenIfSync,
	webTransform: forbiddenIfSync,
	duplex: forbiddenIfSync,
	asyncIterable: forbiddenIfSync,
	native: forbiddenNativeIfSync,
};

const addPropertiesSync = {
	input: {
		...addProperties,
		fileUrl: ({value}) => ({contents: [bufferToUint8Array(readFileSync(value))]}),
		filePath: ({value: {file}}) => ({contents: [bufferToUint8Array(readFileSync(file))]}),
		fileNumber: forbiddenIfSync,
		iterable: ({value}) => ({contents: [...value]}),
		string: ({value}) => ({contents: [value]}),
		uint8Array: ({value}) => ({contents: [value]}),
	},
	output: {
		...addProperties,
		fileUrl: ({value}) => ({path: value}),
		filePath: ({value: {file, append}}) => ({path: file, append}),
		fileNumber: ({value}) => ({path: value}),
		iterable: forbiddenIfSync,
		string: forbiddenIfSync,
		uint8Array: forbiddenIfSync,
	},
};
214
node_modules/execa/lib/stdio/handle.js
generated
vendored
Normal file
@@ -0,0 +1,214 @@
import {getStreamName, isStandardStream} from '../utils/standard-stream.js';
import {normalizeTransforms} from '../transform/normalize.js';
import {getFdObjectMode} from '../transform/object-mode.js';
import {
	getStdioItemType,
	isRegularUrl,
	isUnknownStdioString,
	FILE_TYPES,
} from './type.js';
import {getStreamDirection} from './direction.js';
import {normalizeStdioOption} from './stdio-option.js';
import {handleNativeStream} from './native.js';
import {handleInputOptions} from './input-option.js';
import {filterDuplicates, getDuplicateStream} from './duplicate.js';

// Handle `input`, `inputFile`, `stdin`, `stdout` and `stderr` options, before spawning, in async/sync mode
// They are converted into an array of `fileDescriptors`.
// Each `fileDescriptor` is normalized, validated and contains all information necessary for further handling.
export const handleStdio = (addProperties, options, verboseInfo, isSync) => {
	const stdio = normalizeStdioOption(options, verboseInfo, isSync);
	const initialFileDescriptors = stdio.map((stdioOption, fdNumber) => getFileDescriptor({
		stdioOption,
		fdNumber,
		options,
		isSync,
	}));
	const fileDescriptors = getFinalFileDescriptors({
		initialFileDescriptors,
		addProperties,
		options,
		isSync,
	});
	options.stdio = fileDescriptors.map(({stdioItems}) => forwardStdio(stdioItems));
	return fileDescriptors;
};

const getFileDescriptor = ({stdioOption, fdNumber, options, isSync}) => {
	const optionName = getStreamName(fdNumber);
	const {stdioItems: initialStdioItems, isStdioArray} = initializeStdioItems({
		stdioOption,
		fdNumber,
		options,
		optionName,
	});
	const direction = getStreamDirection(initialStdioItems, fdNumber, optionName);
	const stdioItems = initialStdioItems.map(stdioItem => handleNativeStream({
		stdioItem,
		isStdioArray,
		fdNumber,
		direction,
		isSync,
	}));
	const normalizedStdioItems = normalizeTransforms(stdioItems, optionName, direction, options);
	const objectMode = getFdObjectMode(normalizedStdioItems, direction);
	validateFileObjectMode(normalizedStdioItems, objectMode);
	return {direction, objectMode, stdioItems: normalizedStdioItems};
};

// We make sure passing an array with a single item behaves the same as passing that item without an array.
// This is what users would expect.
// For example, `stdout: ['ignore']` behaves the same as `stdout: 'ignore'`.
const initializeStdioItems = ({stdioOption, fdNumber, options, optionName}) => {
	const values = Array.isArray(stdioOption) ? stdioOption : [stdioOption];
	const initialStdioItems = [
		...values.map(value => initializeStdioItem(value, optionName)),
		...handleInputOptions(options, fdNumber),
	];

	const stdioItems = filterDuplicates(initialStdioItems);
	const isStdioArray = stdioItems.length > 1;
	validateStdioArray(stdioItems, isStdioArray, optionName);
	validateStreams(stdioItems);
	return {stdioItems, isStdioArray};
};

const initializeStdioItem = (value, optionName) => ({
	type: getStdioItemType(value, optionName),
	value,
	optionName,
});

const validateStdioArray = (stdioItems, isStdioArray, optionName) => {
	if (stdioItems.length === 0) {
		throw new TypeError(`The \`${optionName}\` option must not be an empty array.`);
	}

	if (!isStdioArray) {
		return;
	}

	for (const {value, optionName} of stdioItems) {
		if (INVALID_STDIO_ARRAY_OPTIONS.has(value)) {
			throw new Error(`The \`${optionName}\` option must not include \`${value}\`.`);
		}
	}
};

// Using those `stdio` values together with others for the same stream does not make sense, so we make it fail.
// However, we do allow it if the array has a single item.
const INVALID_STDIO_ARRAY_OPTIONS = new Set(['ignore', 'ipc']);

const validateStreams = stdioItems => {
	for (const stdioItem of stdioItems) {
		validateFileStdio(stdioItem);
	}
};

const validateFileStdio = ({type, value, optionName}) => {
	if (isRegularUrl(value)) {
		throw new TypeError(`The \`${optionName}: URL\` option must use the \`file:\` scheme.
For example, you can use the \`pathToFileURL()\` method of the \`url\` core module.`);
	}

	if (isUnknownStdioString(type, value)) {
		throw new TypeError(`The \`${optionName}: { file: '...' }\` option must be used instead of \`${optionName}: '...'\`.`);
	}
};

const validateFileObjectMode = (stdioItems, objectMode) => {
	if (!objectMode) {
		return;
	}

	const fileStdioItem = stdioItems.find(({type}) => FILE_TYPES.has(type));
	if (fileStdioItem !== undefined) {
		throw new TypeError(`The \`${fileStdioItem.optionName}\` option cannot use both files and transforms in objectMode.`);
	}
};

// Some `stdio` values require Execa to create streams.
// For example, file paths create file read/write streams.
// Those transformations are specified in `addProperties`, which is both direction-specific and type-specific.
const getFinalFileDescriptors = ({initialFileDescriptors, addProperties, options, isSync}) => {
	const fileDescriptors = [];

	try {
		for (const fileDescriptor of initialFileDescriptors) {
			fileDescriptors.push(getFinalFileDescriptor({
				fileDescriptor,
				fileDescriptors,
				addProperties,
				options,
				isSync,
			}));
		}

		return fileDescriptors;
	} catch (error) {
		cleanupCustomStreams(fileDescriptors);
		throw error;
	}
};

const getFinalFileDescriptor = ({
	fileDescriptor: {direction, objectMode, stdioItems},
	fileDescriptors,
	addProperties,
	options,
	isSync,
}) => {
	const finalStdioItems = stdioItems.map(stdioItem => addStreamProperties({
		stdioItem,
		addProperties,
		direction,
		options,
		fileDescriptors,
		isSync,
	}));
	return {direction, objectMode, stdioItems: finalStdioItems};
};

const addStreamProperties = ({stdioItem, addProperties, direction, options, fileDescriptors, isSync}) => {
	const duplicateStream = getDuplicateStream({
		stdioItem,
		direction,
		fileDescriptors,
		isSync,
	});

	if (duplicateStream !== undefined) {
		return {...stdioItem, stream: duplicateStream};
	}

	return {
		...stdioItem,
		...addProperties[direction][stdioItem.type](stdioItem, options),
	};
};

// The stream error handling is performed by the piping logic above, which cannot be performed before subprocess spawning.
// If the subprocess spawning fails (e.g. due to an invalid command), the streams need to be manually destroyed.
// We need to create those streams before subprocess spawning, in case their creation fails, e.g. when passing an invalid generator as argument.
// Like this, an exception would be thrown, which would prevent spawning a subprocess.
export const cleanupCustomStreams = fileDescriptors => {
	for (const {stdioItems} of fileDescriptors) {
		for (const {stream} of stdioItems) {
			if (stream !== undefined && !isStandardStream(stream)) {
				stream.destroy();
			}
		}
	}
};

// When the `std*: Iterable | WebStream | URL | filePath`, `input` or `inputFile` option is used, we pipe to `subprocess.std*`.
// When the `std*: Array` option is used, we emulate some of the native values ('inherit', Node.js stream and file descriptor integer). To do so, we also need to pipe to `subprocess.std*`.
// Therefore the `std*` options must be either `pipe` or `overlapped`. Other values do not set `subprocess.std*`.
const forwardStdio = stdioItems => {
	if (stdioItems.length > 1) {
		return stdioItems.some(({value}) => value === 'overlapped') ? 'overlapped' : 'pipe';
	}

	const [{type, value}] = stdioItems;
	return type === 'native' ? value : 'pipe';
};
50
node_modules/execa/lib/stdio/input-option.js
generated
vendored
Normal file
@@ -0,0 +1,50 @@
import {isReadableStream} from 'is-stream';
import {isUint8Array} from '../utils/uint-array.js';
import {isUrl, isFilePathString} from './type.js';

// Append the `stdin` option with the `input` and `inputFile` options
export const handleInputOptions = ({input, inputFile}, fdNumber) => fdNumber === 0
	? [
		...handleInputOption(input),
		...handleInputFileOption(inputFile),
	]
	: [];

const handleInputOption = input => input === undefined ? [] : [{
	type: getInputType(input),
	value: input,
	optionName: 'input',
}];

const getInputType = input => {
	if (isReadableStream(input, {checkOpen: false})) {
		return 'nodeStream';
	}

	if (typeof input === 'string') {
		return 'string';
	}

	if (isUint8Array(input)) {
		return 'uint8Array';
	}

	throw new Error('The `input` option must be a string, a Uint8Array or a Node.js Readable stream.');
};

const handleInputFileOption = inputFile => inputFile === undefined ? [] : [{
	...getInputFileType(inputFile),
	optionName: 'inputFile',
}];

const getInputFileType = inputFile => {
	if (isUrl(inputFile)) {
		return {type: 'fileUrl', value: inputFile};
	}

	if (isFilePathString(inputFile)) {
		return {type: 'filePath', value: {file: inputFile}};
	}

	throw new Error('The `inputFile` option must be a file path string or a file URL.');
};
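Since `input`/`inputFile` are appended to fd 0's stdio items, they behave exactly like an extra array entry on `stdin`. Public-API usage (assumes a Unix-like `cat`; `input.txt` is illustrative):

import {execa} from 'execa';

// The `input` string becomes a 'string' stdio item on fd 0
const {stdout} = await execa('cat', {input: 'hello'});
console.log(stdout); // 'hello'

// A path string would go through `inputFile` instead:
// await execa('cat', {inputFile: 'input.txt'});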
106
node_modules/execa/lib/stdio/native.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
import {readFileSync} from 'node:fs';
import tty from 'node:tty';
import {isStream as isNodeStream} from 'is-stream';
import {STANDARD_STREAMS} from '../utils/standard-stream.js';
import {bufferToUint8Array} from '../utils/uint-array.js';
import {serializeOptionValue} from '../arguments/fd-options.js';

// When we use multiple `stdio` values for the same streams, we pass 'pipe' to `child_process.spawn()`.
// We then emulate the piping done by core Node.js.
// To do so, we transform the following values:
//  - Node.js streams are marked as `type: nodeStream`
//  - 'inherit' becomes `process.stdin|stdout|stderr`
//  - any file descriptor integer becomes `process.stdio[fdNumber]`
// All of the above transformations tell Execa to perform manual piping.
export const handleNativeStream = ({stdioItem, stdioItem: {type}, isStdioArray, fdNumber, direction, isSync}) => {
	if (!isStdioArray || type !== 'native') {
		return stdioItem;
	}

	return isSync
		? handleNativeStreamSync({stdioItem, fdNumber, direction})
		: handleNativeStreamAsync({stdioItem, fdNumber});
};

// Synchronous methods use a different logic.
// 'inherit', file descriptors and process.std* are handled by readFileSync()/writeFileSync().
const handleNativeStreamSync = ({stdioItem, stdioItem: {value, optionName}, fdNumber, direction}) => {
	const targetFd = getTargetFd({
		value,
		optionName,
		fdNumber,
		direction,
	});
	if (targetFd !== undefined) {
		return targetFd;
	}

	if (isNodeStream(value, {checkOpen: false})) {
		throw new TypeError(`The \`${optionName}: Stream\` option cannot both be an array and include a stream with synchronous methods.`);
	}

	return stdioItem;
};

const getTargetFd = ({value, optionName, fdNumber, direction}) => {
	const targetFdNumber = getTargetFdNumber(value, fdNumber);
	if (targetFdNumber === undefined) {
		return;
	}

	if (direction === 'output') {
		return {type: 'fileNumber', value: targetFdNumber, optionName};
	}

	if (tty.isatty(targetFdNumber)) {
		throw new TypeError(`The \`${optionName}: ${serializeOptionValue(value)}\` option is invalid: it cannot be a TTY with synchronous methods.`);
	}

	return {type: 'uint8Array', value: bufferToUint8Array(readFileSync(targetFdNumber)), optionName};
};

const getTargetFdNumber = (value, fdNumber) => {
	if (value === 'inherit') {
		return fdNumber;
	}

	if (typeof value === 'number') {
		return value;
	}

	const standardStreamIndex = STANDARD_STREAMS.indexOf(value);
	if (standardStreamIndex !== -1) {
		return standardStreamIndex;
	}
};

const handleNativeStreamAsync = ({stdioItem, stdioItem: {value, optionName}, fdNumber}) => {
	if (value === 'inherit') {
		return {type: 'nodeStream', value: getStandardStream(fdNumber, value, optionName), optionName};
	}

	if (typeof value === 'number') {
		return {type: 'nodeStream', value: getStandardStream(value, value, optionName), optionName};
	}

	if (isNodeStream(value, {checkOpen: false})) {
		return {type: 'nodeStream', value, optionName};
	}

	return stdioItem;
};

// Node.js does not provide an easy way to retrieve file descriptors beyond stdin/stdout/stderr as streams.
//  - `fs.createReadStream()`/`fs.createWriteStream()` with the `fd` option do not work with character devices that use blocking reads/writes (such as interactive TTYs).
//  - Using a TCP `Socket` would work but be rather complex to implement.
// Since this is an edge case, we simply throw an error message.
// See https://github.com/sindresorhus/execa/pull/643#discussion_r1435905707
const getStandardStream = (fdNumber, value, optionName) => {
	const standardStream = STANDARD_STREAMS[fdNumber];

	if (standardStream === undefined) {
		throw new TypeError(`The \`${optionName}: ${value}\` option is invalid: no such standard stream.`);
	}

	return standardStream;
};
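The manual piping is what makes array values like `['pipe', 'inherit']` possible: output is simultaneously captured and forwarded to the parent terminal, which plain `'inherit'` cannot do. Public-API example:

import {execa} from 'execa';

const {stdout} = await execa('node', ['--version'], {stdout: ['pipe', 'inherit']});
console.log('captured:', stdout);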
60
node_modules/execa/lib/stdio/stdio-option.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
import {STANDARD_STREAMS_ALIASES} from '../utils/standard-stream.js';
import {normalizeIpcStdioArray} from '../ipc/array.js';
import {isFullVerbose} from '../verbose/values.js';

// Add support for `stdin`/`stdout`/`stderr` as an alias for `stdio`.
// Also normalize the `stdio` option.
export const normalizeStdioOption = ({stdio, ipc, buffer, ...options}, verboseInfo, isSync) => {
	const stdioArray = getStdioArray(stdio, options).map((stdioOption, fdNumber) => addDefaultValue(stdioOption, fdNumber));
	return isSync
		? normalizeStdioSync(stdioArray, buffer, verboseInfo)
		: normalizeIpcStdioArray(stdioArray, ipc);
};

const getStdioArray = (stdio, options) => {
	if (stdio === undefined) {
		return STANDARD_STREAMS_ALIASES.map(alias => options[alias]);
	}

	if (hasAlias(options)) {
		throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${STANDARD_STREAMS_ALIASES.map(alias => `\`${alias}\``).join(', ')}`);
	}

	if (typeof stdio === 'string') {
		return [stdio, stdio, stdio];
	}

	if (!Array.isArray(stdio)) {
		throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
	}

	const length = Math.max(stdio.length, STANDARD_STREAMS_ALIASES.length);
	return Array.from({length}, (_, fdNumber) => stdio[fdNumber]);
};

const hasAlias = options => STANDARD_STREAMS_ALIASES.some(alias => options[alias] !== undefined);

const addDefaultValue = (stdioOption, fdNumber) => {
	if (Array.isArray(stdioOption)) {
		return stdioOption.map(item => addDefaultValue(item, fdNumber));
	}

	if (stdioOption === null || stdioOption === undefined) {
		return fdNumber >= STANDARD_STREAMS_ALIASES.length ? 'ignore' : 'pipe';
	}

	return stdioOption;
};

// Using `buffer: false` with synchronous methods implies `stdout`/`stderr`: `ignore`.
// Unless the output is needed, e.g. due to `verbose: 'full'` or to redirecting to a file.
const normalizeStdioSync = (stdioArray, buffer, verboseInfo) => stdioArray.map((stdioOption, fdNumber) =>
	!buffer[fdNumber]
	&& fdNumber !== 0
	&& !isFullVerbose(verboseInfo, fdNumber)
	&& isOutputPipeOnly(stdioOption)
		? 'ignore'
		: stdioOption);

const isOutputPipeOnly = stdioOption => stdioOption === 'pipe'
	|| (Array.isArray(stdioOption) && stdioOption.every(item => item === 'pipe'));
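So a string `stdio` fans out to the three standard file descriptors, and these two calls normalize identically:

import {execa} from 'execa';

await execa('node', ['--version'], {stdio: 'ignore'});
await execa('node', ['--version'], {stdin: 'ignore', stdout: 'ignore', stderr: 'ignore'});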
173
node_modules/execa/lib/stdio/type.js
generated
vendored
Normal file
@@ -0,0 +1,173 @@
import {isStream as isNodeStream, isDuplexStream} from 'is-stream';
import isPlainObj from 'is-plain-obj';
import {isUint8Array} from '../utils/uint-array.js';

// The `stdin`/`stdout`/`stderr` option can be of many types. This detects it.
export const getStdioItemType = (value, optionName) => {
	if (isAsyncGenerator(value)) {
		return 'asyncGenerator';
	}

	if (isSyncGenerator(value)) {
		return 'generator';
	}

	if (isUrl(value)) {
		return 'fileUrl';
	}

	if (isFilePathObject(value)) {
		return 'filePath';
	}

	if (isWebStream(value)) {
		return 'webStream';
	}

	if (isNodeStream(value, {checkOpen: false})) {
		return 'native';
	}

	if (isUint8Array(value)) {
		return 'uint8Array';
	}

	if (isAsyncIterableObject(value)) {
		return 'asyncIterable';
	}

	if (isIterableObject(value)) {
		return 'iterable';
	}

	if (isTransformStream(value)) {
		return getTransformStreamType({transform: value}, optionName);
	}

	if (isTransformOptions(value)) {
		return getTransformObjectType(value, optionName);
	}

	return 'native';
};

const getTransformObjectType = (value, optionName) => {
	if (isDuplexStream(value.transform, {checkOpen: false})) {
		return getDuplexType(value, optionName);
	}

	if (isTransformStream(value.transform)) {
		return getTransformStreamType(value, optionName);
	}

	return getGeneratorObjectType(value, optionName);
};

const getDuplexType = (value, optionName) => {
	validateNonGeneratorType(value, optionName, 'Duplex stream');
	return 'duplex';
};

const getTransformStreamType = (value, optionName) => {
	validateNonGeneratorType(value, optionName, 'web TransformStream');
	return 'webTransform';
};

const validateNonGeneratorType = ({final, binary, objectMode}, optionName, typeName) => {
	checkUndefinedOption(final, `${optionName}.final`, typeName);
	checkUndefinedOption(binary, `${optionName}.binary`, typeName);
	checkBooleanOption(objectMode, `${optionName}.objectMode`);
};

const checkUndefinedOption = (value, optionName, typeName) => {
	if (value !== undefined) {
		throw new TypeError(`The \`${optionName}\` option can only be defined when using a generator, not a ${typeName}.`);
	}
};

const getGeneratorObjectType = ({transform, final, binary, objectMode}, optionName) => {
	if (transform !== undefined && !isGenerator(transform)) {
		throw new TypeError(`The \`${optionName}.transform\` option must be a generator, a Duplex stream or a web TransformStream.`);
	}

	if (isDuplexStream(final, {checkOpen: false})) {
		throw new TypeError(`The \`${optionName}.final\` option must not be a Duplex stream.`);
	}

	if (isTransformStream(final)) {
		throw new TypeError(`The \`${optionName}.final\` option must not be a web TransformStream.`);
	}

	if (final !== undefined && !isGenerator(final)) {
		throw new TypeError(`The \`${optionName}.final\` option must be a generator.`);
	}

	checkBooleanOption(binary, `${optionName}.binary`);
	checkBooleanOption(objectMode, `${optionName}.objectMode`);

	return isAsyncGenerator(transform) || isAsyncGenerator(final) ? 'asyncGenerator' : 'generator';
};

const checkBooleanOption = (value, optionName) => {
	if (value !== undefined && typeof value !== 'boolean') {
		throw new TypeError(`The \`${optionName}\` option must use a boolean.`);
	}
};

const isGenerator = value => isAsyncGenerator(value) || isSyncGenerator(value);
export const isAsyncGenerator = value => Object.prototype.toString.call(value) === '[object AsyncGeneratorFunction]';
const isSyncGenerator = value => Object.prototype.toString.call(value) === '[object GeneratorFunction]';
const isTransformOptions = value => isPlainObj(value)
	&& (value.transform !== undefined || value.final !== undefined);

export const isUrl = value => Object.prototype.toString.call(value) === '[object URL]';
export const isRegularUrl = value => isUrl(value) && value.protocol !== 'file:';

const isFilePathObject = value => isPlainObj(value)
	&& Object.keys(value).length > 0
	&& Object.keys(value).every(key => FILE_PATH_KEYS.has(key))
	&& isFilePathString(value.file);
const FILE_PATH_KEYS = new Set(['file', 'append']);
export const isFilePathString = file => typeof file === 'string';

export const isUnknownStdioString = (type, value) => type === 'native'
	&& typeof value === 'string'
	&& !KNOWN_STDIO_STRINGS.has(value);
const KNOWN_STDIO_STRINGS = new Set(['ipc', 'ignore', 'inherit', 'overlapped', 'pipe']);

const isReadableStream = value => Object.prototype.toString.call(value) === '[object ReadableStream]';
export const isWritableStream = value => Object.prototype.toString.call(value) === '[object WritableStream]';
const isWebStream = value => isReadableStream(value) || isWritableStream(value);
const isTransformStream = value => isReadableStream(value?.readable) && isWritableStream(value?.writable);

const isAsyncIterableObject = value => isObject(value) && typeof value[Symbol.asyncIterator] === 'function';
const isIterableObject = value => isObject(value) && typeof value[Symbol.iterator] === 'function';
const isObject = value => typeof value === 'object' && value !== null;

// Types which modify `subprocess.std*`
export const TRANSFORM_TYPES = new Set(['generator', 'asyncGenerator', 'duplex', 'webTransform']);
// Types which write to a file or a file descriptor
export const FILE_TYPES = new Set(['fileUrl', 'filePath', 'fileNumber']);
// When two file descriptors of this type share the same target, we need to do some special logic
export const SPECIAL_DUPLICATE_TYPES_SYNC = new Set(['fileUrl', 'filePath']);
export const SPECIAL_DUPLICATE_TYPES = new Set([...SPECIAL_DUPLICATE_TYPES_SYNC, 'webStream', 'nodeStream']);
// Do not allow two file descriptors of this type sharing the same target
export const FORBID_DUPLICATE_TYPES = new Set(['webTransform', 'duplex']);

// Convert types to human-friendly strings for error messages
export const TYPE_TO_MESSAGE = {
	generator: 'a generator',
	asyncGenerator: 'an async generator',
	fileUrl: 'a file URL',
	filePath: 'a file path string',
	fileNumber: 'a file descriptor number',
	webStream: 'a web stream',
	nodeStream: 'a Node.js stream',
	webTransform: 'a web TransformStream',
	duplex: 'a Duplex stream',
	native: 'any value',
	iterable: 'an iterable',
	asyncIterable: 'an async iterable',
	string: 'a string',
	uint8Array: 'a Uint8Array',
};
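The detection is duck-typed, so a plain generator function passed to `stdout` is classified as a `'generator'` transform. For example, with execa's public API:

import {execa} from 'execa';

const toUpperCase = function * (line) {
	yield line.toUpperCase();
};

const {stdout} = await execa('node', ['-e', 'console.log("hello")'], {stdout: toUpperCase});
console.log(stdout); // 'HELLO'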
20
node_modules/execa/lib/terminate/cancel.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
import {onAbortedSignal} from '../utils/abort-signal.js';

// Validate the `cancelSignal` option
export const validateCancelSignal = ({cancelSignal}) => {
	if (cancelSignal !== undefined && Object.prototype.toString.call(cancelSignal) !== '[object AbortSignal]') {
		throw new Error(`The \`cancelSignal\` option must be an AbortSignal: ${String(cancelSignal)}`);
	}
};

// Terminate the subprocess when aborting the `cancelSignal` option and `gracefulCancel` is `false`
export const throwOnCancel = ({subprocess, cancelSignal, gracefulCancel, context, controller}) => cancelSignal === undefined || gracefulCancel
	? []
	: [terminateOnCancel(subprocess, cancelSignal, context, controller)];

const terminateOnCancel = async (subprocess, cancelSignal, context, {signal}) => {
	await onAbortedSignal(cancelSignal, signal);
	context.terminationReason ??= 'cancel';
	subprocess.kill();
	throw cancelSignal.reason;
};
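`cancelSignal` accepts any standard `AbortSignal`; aborting it kills the subprocess and rejects with `isCanceled: true`. Public-API usage (inline script illustrative):

import {execa} from 'execa';

const controller = new AbortController();
setTimeout(() => {
	controller.abort();
}, 100);

try {
	await execa('node', ['-e', 'setTimeout(() => {}, 10_000)'], {cancelSignal: controller.signal});
} catch (error) {
	console.log(error.isCanceled); // true
}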
16
node_modules/execa/lib/terminate/cleanup.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
import {addAbortListener} from 'node:events';
import {onExit} from 'signal-exit';

// If the `cleanup` option is used, call `subprocess.kill()` when the parent process exits
export const cleanupOnExit = (subprocess, {cleanup, detached}, {signal}) => {
	if (!cleanup || detached) {
		return;
	}

	const removeExitHandler = onExit(() => {
		subprocess.kill();
	});
	addAbortListener(signal, () => {
		removeExitHandler();
	});
};
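`cleanup` ties the child's lifetime to the parent through `signal-exit`: if the parent exits first, the handler kills the child, unless `detached` disables it. Minimal usage (the option defaults to `true`):

import {execa} from 'execa';

// If this parent process dies before the child exits, the exit handler
// installed above kills the child instead of leaking it.
const subprocess = execa('node', ['-e', 'setTimeout(() => {}, 60_000)']);
subprocess.kill();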
71
node_modules/execa/lib/terminate/graceful.js
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
import {onAbortedSignal} from '../utils/abort-signal.js';
|
||||
import {sendAbort} from '../ipc/graceful.js';
|
||||
import {killOnTimeout} from './kill.js';
|
||||
|
||||
// Validate the `gracefulCancel` option
|
||||
export const validateGracefulCancel = ({gracefulCancel, cancelSignal, ipc, serialization}) => {
|
||||
if (!gracefulCancel) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (cancelSignal === undefined) {
|
||||
throw new Error('The `cancelSignal` option must be defined when setting the `gracefulCancel` option.');
|
||||
}
|
||||
|
||||
if (!ipc) {
|
||||
throw new Error('The `ipc` option cannot be false when setting the `gracefulCancel` option.');
|
||||
}
|
||||
|
||||
if (serialization === 'json') {
|
||||
throw new Error('The `serialization` option cannot be \'json\' when setting the `gracefulCancel` option.');
|
||||
}
|
||||
};
|
||||
|
||||
// Send abort reason to the subprocess when aborting the `cancelSignal` option and `gracefulCancel` is `true`
|
||||
export const throwOnGracefulCancel = ({
|
||||
subprocess,
|
||||
cancelSignal,
|
||||
gracefulCancel,
|
||||
forceKillAfterDelay,
|
||||
context,
|
||||
controller,
|
||||
}) => gracefulCancel
|
||||
? [sendOnAbort({
|
||||
subprocess,
|
||||
cancelSignal,
|
||||
forceKillAfterDelay,
|
||||
context,
|
||||
controller,
|
||||
})]
|
||||
: [];
|
||||
|
||||
const sendOnAbort = async ({subprocess, cancelSignal, forceKillAfterDelay, context, controller: {signal}}) => {
|
||||
await onAbortedSignal(cancelSignal, signal);
|
||||
const reason = getReason(cancelSignal);
|
||||
await sendAbort(subprocess, reason);
|
||||
killOnTimeout({
|
||||
kill: subprocess.kill,
|
||||
forceKillAfterDelay,
|
||||
context,
|
||||
controllerSignal: signal,
|
||||
});
|
||||
context.terminationReason ??= 'gracefulCancel';
|
||||
throw cancelSignal.reason;
|
||||
};
|
||||
|
||||
// The default `reason` is a DOMException, which is not serializable with V8
|
||||
// See https://github.com/nodejs/node/issues/53225
|
||||
const getReason = ({reason}) => {
|
||||
if (!(reason instanceof DOMException)) {
|
||||
return reason;
|
||||
}
|
||||
|
||||
const error = new Error(reason.message);
|
||||
Object.defineProperty(error, 'stack', {
|
||||
value: reason.stack,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
});
|
||||
return error;
|
||||
};
|
||||
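A sketch of graceful cancelation (editor's addition, not part of this commit), assuming execa's documented `gracefulCancel` and `getCancelSignal()` API; `worker.js` and the delay are hypothetical:

// parent.js (hypothetical)
import {execa} from 'execa';

const controller = new AbortController();
setTimeout(() => {
	controller.abort();
}, 5000);

// `gracefulCancel` requires `cancelSignal` and IPC, as enforced by
// `validateGracefulCancel()` above
await execa('node', ['worker.js'], {
	cancelSignal: controller.signal,
	gracefulCancel: true,
	ipc: true,
});

// worker.js (hypothetical)
import {getCancelSignal} from 'execa';

const cancelSignal = await getCancelSignal();
cancelSignal.addEventListener('abort', () => {
	// Flush state, then exit before `killOnTimeout()` sends SIGKILL
	process.exit(0);
});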
93
node_modules/execa/lib/terminate/kill.js
generated
vendored
Normal file
@@ -0,0 +1,93 @@
import {setTimeout} from 'node:timers/promises';
import {isErrorInstance} from '../return/final-error.js';
import {normalizeSignalArgument} from './signal.js';

// Normalize the `forceKillAfterDelay` option
export const normalizeForceKillAfterDelay = forceKillAfterDelay => {
	if (forceKillAfterDelay === false) {
		return forceKillAfterDelay;
	}

	if (forceKillAfterDelay === true) {
		return DEFAULT_FORCE_KILL_TIMEOUT;
	}

	if (!Number.isFinite(forceKillAfterDelay) || forceKillAfterDelay < 0) {
		throw new TypeError(`Expected the \`forceKillAfterDelay\` option to be a non-negative integer, got \`${forceKillAfterDelay}\` (${typeof forceKillAfterDelay})`);
	}

	return forceKillAfterDelay;
};

const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5;

// Monkey-patches `subprocess.kill()` to add `forceKillAfterDelay` behavior and `.kill(error)`
export const subprocessKill = (
	{kill, options: {forceKillAfterDelay, killSignal}, onInternalError, context, controller},
	signalOrError,
	errorArgument,
) => {
	const {signal, error} = parseKillArguments(signalOrError, errorArgument, killSignal);
	emitKillError(error, onInternalError);
	const killResult = kill(signal);
	setKillTimeout({
		kill,
		signal,
		forceKillAfterDelay,
		killSignal,
		killResult,
		context,
		controller,
	});
	return killResult;
};

const parseKillArguments = (signalOrError, errorArgument, killSignal) => {
	const [signal = killSignal, error] = isErrorInstance(signalOrError)
		? [undefined, signalOrError]
		: [signalOrError, errorArgument];

	if (typeof signal !== 'string' && !Number.isInteger(signal)) {
		throw new TypeError(`The first argument must be an error instance or a signal name string/integer: ${String(signal)}`);
	}

	if (error !== undefined && !isErrorInstance(error)) {
		throw new TypeError(`The second argument is optional. If specified, it must be an error instance: ${error}`);
	}

	return {signal: normalizeSignalArgument(signal), error};
};

// Fails right away when calling `subprocess.kill(error)`.
// Does not wait for actual signal termination.
// Uses a deferred promise instead of the `error` event on the subprocess, as this is less intrusive.
const emitKillError = (error, onInternalError) => {
	if (error !== undefined) {
		onInternalError.reject(error);
	}
};

const setKillTimeout = async ({kill, signal, forceKillAfterDelay, killSignal, killResult, context, controller}) => {
	if (signal === killSignal && killResult) {
		killOnTimeout({
			kill,
			forceKillAfterDelay,
			context,
			controllerSignal: controller.signal,
		});
	}
};

// Forcefully terminate a subprocess after a timeout
export const killOnTimeout = async ({kill, forceKillAfterDelay, context, controllerSignal}) => {
	if (forceKillAfterDelay === false) {
		return;
	}

	try {
		await setTimeout(forceKillAfterDelay, undefined, {signal: controllerSignal});
		if (kill('SIGKILL')) {
			context.isForcefullyTerminated ??= true;
		}
	} catch {}
};
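A usage sketch of the patched `subprocess.kill()` (editor's addition, not part of this commit); `server.js` is a placeholder, and the error-attachment semantics follow execa's documented behavior:

import {execa} from 'execa';

const subprocess = execa('node', ['server.js'], {forceKillAfterDelay: 2000});

// `subprocessKill()` above accepts either a signal or an error instance.
// Passing an error makes the subprocess fail right away, with the passed
// error attached to the final rejection.
subprocess.kill(new Error('Shutting down for redeploy'));

// With the default `killSignal` ('SIGTERM'), `setKillTimeout()` schedules a
// SIGKILL 2 seconds later if the subprocess has not exited yet.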
70
node_modules/execa/lib/terminate/signal.js
generated
vendored
Normal file
@@ -0,0 +1,70 @@
import {constants} from 'node:os';
import {signalsByName} from 'human-signals';

// Normalize signals for comparison purposes.
// Also validate the signal exists.
export const normalizeKillSignal = killSignal => {
	const optionName = 'option `killSignal`';
	if (killSignal === 0) {
		throw new TypeError(`Invalid ${optionName}: 0 cannot be used.`);
	}

	return normalizeSignal(killSignal, optionName);
};

export const normalizeSignalArgument = signal => signal === 0
	? signal
	: normalizeSignal(signal, '`subprocess.kill()`\'s argument');

const normalizeSignal = (signalNameOrInteger, optionName) => {
	if (Number.isInteger(signalNameOrInteger)) {
		return normalizeSignalInteger(signalNameOrInteger, optionName);
	}

	if (typeof signalNameOrInteger === 'string') {
		return normalizeSignalName(signalNameOrInteger, optionName);
	}

	throw new TypeError(`Invalid ${optionName} ${String(signalNameOrInteger)}: it must be a string or an integer.\n${getAvailableSignals()}`);
};

const normalizeSignalInteger = (signalInteger, optionName) => {
	if (signalsIntegerToName.has(signalInteger)) {
		return signalsIntegerToName.get(signalInteger);
	}

	throw new TypeError(`Invalid ${optionName} ${signalInteger}: this signal integer does not exist.\n${getAvailableSignals()}`);
};

const getSignalsIntegerToName = () => new Map(Object.entries(constants.signals)
	.reverse()
	.map(([signalName, signalInteger]) => [signalInteger, signalName]));

const signalsIntegerToName = getSignalsIntegerToName();

const normalizeSignalName = (signalName, optionName) => {
	if (signalName in constants.signals) {
		return signalName;
	}

	if (signalName.toUpperCase() in constants.signals) {
		throw new TypeError(`Invalid ${optionName} '${signalName}': please rename it to '${signalName.toUpperCase()}'.`);
	}

	throw new TypeError(`Invalid ${optionName} '${signalName}': this signal name does not exist.\n${getAvailableSignals()}`);
};

const getAvailableSignals = () => `Available signal names: ${getAvailableSignalNames()}.
Available signal numbers: ${getAvailableSignalIntegers()}.`;

const getAvailableSignalNames = () => Object.keys(constants.signals)
	.sort()
	.map(signalName => `'${signalName}'`)
	.join(', ');

const getAvailableSignalIntegers = () => [...new Set(Object.values(constants.signals)
	.sort((signalInteger, signalIntegerTwo) => signalInteger - signalIntegerTwo))]
	.join(', ');

// Human-friendly description of a signal
export const getSignalDescription = signal => signalsByName[signal].description;
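A short illustration of the validation above (editor's addition, not part of this commit); the behaviors shown follow directly from `normalizeKillSignal()` and `normalizeSignalName()`, and the integer mapping depends on the platform's `os.constants.signals`:

import {execa} from 'execa';

// Signal names are validated and case-checked:
await execa('node', ['task.js'], {killSignal: 'SIGINT'}); // valid
// {killSignal: 'sigint'} throws: "please rename it to 'SIGINT'"
// {killSignal: 0} throws: "0 cannot be used"
// {killSignal: 2} is accepted and mapped back to a name ('SIGINT' on POSIX)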
21
node_modules/execa/lib/terminate/timeout.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
import {setTimeout} from 'node:timers/promises';
import {DiscardedError} from '../return/final-error.js';

// Validate `timeout` option
export const validateTimeout = ({timeout}) => {
	if (timeout !== undefined && (!Number.isFinite(timeout) || timeout < 0)) {
		throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`);
	}
};

// Fails when the `timeout` option is exceeded
export const throwOnTimeout = (subprocess, timeout, context, controller) => timeout === 0 || timeout === undefined
	? []
	: [killAfterTimeout(subprocess, timeout, context, controller)];

const killAfterTimeout = async (subprocess, timeout, context, {signal}) => {
	await setTimeout(timeout, undefined, {signal});
	context.terminationReason ??= 'timeout';
	subprocess.kill();
	throw new DiscardedError();
};
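A minimal usage sketch of the `timeout` option (editor's addition, not part of this commit); `slow-task.js` is a placeholder:

import {execa} from 'execa';

try {
	await execa('node', ['slow-task.js'], {timeout: 1000});
} catch (error) {
	// `killAfterTimeout()` above records the termination reason, so the
	// final error reports `timedOut: true`
	console.log(error.timedOut);
}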
51
node_modules/execa/lib/transform/encoding-transform.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
import {Buffer} from 'node:buffer';
import {StringDecoder} from 'node:string_decoder';
import {isUint8Array, bufferToUint8Array} from '../utils/uint-array.js';

/*
When using binary encodings, add an internal generator that converts chunks from `Buffer` to `string` or `Uint8Array`.
Chunks might be Buffer, Uint8Array or strings since:
- `subprocess.stdout|stderr` emits Buffers
- `subprocess.stdin.write()` accepts Buffer, Uint8Array or string
- Previous generators might return Uint8Array or string

However, those are converted to Buffer:
- on writes: `Duplex.writable` `decodeStrings: true` default option
- on reads: `Duplex.readable` `readableEncoding: null` default option
*/
export const getEncodingTransformGenerator = (binary, encoding, skipped) => {
	if (skipped) {
		return;
	}

	if (binary) {
		return {transform: encodingUint8ArrayGenerator.bind(undefined, new TextEncoder())};
	}

	const stringDecoder = new StringDecoder(encoding);
	return {
		transform: encodingStringGenerator.bind(undefined, stringDecoder),
		final: encodingStringFinal.bind(undefined, stringDecoder),
	};
};

const encodingUint8ArrayGenerator = function * (textEncoder, chunk) {
	if (Buffer.isBuffer(chunk)) {
		yield bufferToUint8Array(chunk);
	} else if (typeof chunk === 'string') {
		yield textEncoder.encode(chunk);
	} else {
		yield chunk;
	}
};

const encodingStringGenerator = function * (stringDecoder, chunk) {
	yield isUint8Array(chunk) ? stringDecoder.write(chunk) : chunk;
};

const encodingStringFinal = function * (stringDecoder) {
	const lastChunk = stringDecoder.end();
	if (lastChunk !== '') {
		yield lastChunk;
	}
};
107
node_modules/execa/lib/transform/generator.js
generated
vendored
Normal file
@@ -0,0 +1,107 @@
import {Transform, getDefaultHighWaterMark} from 'node:stream';
import {isAsyncGenerator} from '../stdio/type.js';
import {getSplitLinesGenerator, getAppendNewlineGenerator} from './split.js';
import {getValidateTransformInput, getValidateTransformReturn} from './validate.js';
import {getEncodingTransformGenerator} from './encoding-transform.js';
import {
	pushChunks,
	transformChunk,
	finalChunks,
	destroyTransform,
} from './run-async.js';
import {
	pushChunksSync,
	transformChunkSync,
	finalChunksSync,
	runTransformSync,
} from './run-sync.js';

/*
Generators can be used to transform/filter standard streams.

Generators have a simple syntax, yet allow all of the following:
- Sharing `state` between chunks
- Flushing logic, by using a `final` function
- Asynchronous logic
- Emitting multiple chunks from a single source chunk, even if spaced in time, by using multiple `yield`
- Filtering, by using no `yield`

Therefore, there is no need to allow Node.js or web transform streams.

The `highWaterMark` is kept as the default value, since this is what `subprocess.std*` uses.

Chunks are currently processed serially. We could add a `concurrency` option to parallelize in the future.

Transform an array of generator functions into a `Transform` stream.
`Duplex.from(generator)` cannot be used because it does not allow setting the `objectMode` and `highWaterMark`.
*/
export const generatorToStream = ({
	value,
	value: {transform, final, writableObjectMode, readableObjectMode},
	optionName,
}, {encoding}) => {
	const state = {};
	const generators = addInternalGenerators(value, encoding, optionName);

	const transformAsync = isAsyncGenerator(transform);
	const finalAsync = isAsyncGenerator(final);
	const transformMethod = transformAsync
		? pushChunks.bind(undefined, transformChunk, state)
		: pushChunksSync.bind(undefined, transformChunkSync);
	const finalMethod = transformAsync || finalAsync
		? pushChunks.bind(undefined, finalChunks, state)
		: pushChunksSync.bind(undefined, finalChunksSync);
	const destroyMethod = transformAsync || finalAsync
		? destroyTransform.bind(undefined, state)
		: undefined;

	const stream = new Transform({
		writableObjectMode,
		writableHighWaterMark: getDefaultHighWaterMark(writableObjectMode),
		readableObjectMode,
		readableHighWaterMark: getDefaultHighWaterMark(readableObjectMode),
		transform(chunk, encoding, done) {
			transformMethod([chunk, generators, 0], this, done);
		},
		flush(done) {
			finalMethod([generators], this, done);
		},
		destroy: destroyMethod,
	});
	return {stream};
};

// Applies transform generators in sync mode
export const runGeneratorsSync = (chunks, stdioItems, encoding, isInput) => {
	const generators = stdioItems.filter(({type}) => type === 'generator');
	const reversedGenerators = isInput ? generators.reverse() : generators;

	for (const {value, optionName} of reversedGenerators) {
		const generators = addInternalGenerators(value, encoding, optionName);
		chunks = runTransformSync(generators, chunks);
	}

	return chunks;
};

// Generators used internally to convert the chunk type, validate it, and split into lines
const addInternalGenerators = (
	{transform, final, binary, writableObjectMode, readableObjectMode, preserveNewlines},
	encoding,
	optionName,
) => {
	const state = {};
	return [
		{transform: getValidateTransformInput(writableObjectMode, optionName)},
		getEncodingTransformGenerator(binary, encoding, writableObjectMode),
		getSplitLinesGenerator(binary, preserveNewlines, writableObjectMode, state),
		{transform, final},
		{transform: getValidateTransformReturn(readableObjectMode, optionName)},
		getAppendNewlineGenerator({
			binary,
			preserveNewlines,
			readableObjectMode,
			state,
		}),
	].filter(Boolean);
};
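A minimal sketch of the generator-transform feature described in the block comment above (editor's addition, not part of this commit):

import {execa} from 'execa';

// A sync generator transform: receives each output line (the newline is
// stripped by the internal split generator) and may yield zero or more lines
const toUpperCase = function * (line) {
	yield line.toUpperCase();
};

const {stdout} = await execa('echo', ['hello world'], {stdout: toUpperCase});
console.log(stdout); // 'HELLO WORLD'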
111
node_modules/execa/lib/transform/normalize.js
generated
vendored
Normal file
@@ -0,0 +1,111 @@
import isPlainObj from 'is-plain-obj';
import {BINARY_ENCODINGS} from '../arguments/encoding-option.js';
import {TRANSFORM_TYPES} from '../stdio/type.js';
import {getTransformObjectModes} from './object-mode.js';

// Transforms (generators/duplex/TransformStream) can have multiple shapes.
// This normalizes them and applies default values.
export const normalizeTransforms = (stdioItems, optionName, direction, options) => [
	...stdioItems.filter(({type}) => !TRANSFORM_TYPES.has(type)),
	...getTransforms(stdioItems, optionName, direction, options),
];

const getTransforms = (stdioItems, optionName, direction, {encoding}) => {
	const transforms = stdioItems.filter(({type}) => TRANSFORM_TYPES.has(type));
	const newTransforms = Array.from({length: transforms.length});

	for (const [index, stdioItem] of Object.entries(transforms)) {
		newTransforms[index] = normalizeTransform({
			stdioItem,
			index: Number(index),
			newTransforms,
			optionName,
			direction,
			encoding,
		});
	}

	return sortTransforms(newTransforms, direction);
};

const normalizeTransform = ({stdioItem, stdioItem: {type}, index, newTransforms, optionName, direction, encoding}) => {
	if (type === 'duplex') {
		return normalizeDuplex({stdioItem, optionName});
	}

	if (type === 'webTransform') {
		return normalizeTransformStream({
			stdioItem,
			index,
			newTransforms,
			direction,
		});
	}

	return normalizeGenerator({
		stdioItem,
		index,
		newTransforms,
		direction,
		encoding,
	});
};

const normalizeDuplex = ({
	stdioItem,
	stdioItem: {
		value: {
			transform,
			transform: {writableObjectMode, readableObjectMode},
			objectMode = readableObjectMode,
		},
	},
	optionName,
}) => {
	if (objectMode && !readableObjectMode) {
		throw new TypeError(`The \`${optionName}.objectMode\` option can only be \`true\` if \`new Duplex({objectMode: true})\` is used.`);
	}

	if (!objectMode && readableObjectMode) {
		throw new TypeError(`The \`${optionName}.objectMode\` option cannot be \`false\` if \`new Duplex({objectMode: true})\` is used.`);
	}

	return {
		...stdioItem,
		value: {transform, writableObjectMode, readableObjectMode},
	};
};

const normalizeTransformStream = ({stdioItem, stdioItem: {value}, index, newTransforms, direction}) => {
	const {transform, objectMode} = isPlainObj(value) ? value : {transform: value};
	const {writableObjectMode, readableObjectMode} = getTransformObjectModes(objectMode, index, newTransforms, direction);
	return ({
		...stdioItem,
		value: {transform, writableObjectMode, readableObjectMode},
	});
};

const normalizeGenerator = ({stdioItem, stdioItem: {value}, index, newTransforms, direction, encoding}) => {
	const {
		transform,
		final,
		binary: binaryOption = false,
		preserveNewlines = false,
		objectMode,
	} = isPlainObj(value) ? value : {transform: value};
	const binary = binaryOption || BINARY_ENCODINGS.has(encoding);
	const {writableObjectMode, readableObjectMode} = getTransformObjectModes(objectMode, index, newTransforms, direction);
	return {
		...stdioItem,
		value: {
			transform,
			final,
			binary,
			preserveNewlines,
			writableObjectMode,
			readableObjectMode,
		},
	};
};

const sortTransforms = (newTransforms, direction) => direction === 'input' ? newTransforms.reverse() : newTransforms;
41
node_modules/execa/lib/transform/object-mode.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
import {TRANSFORM_TYPES} from '../stdio/type.js';

/*
Retrieve the `objectMode`s of a single transform.
`objectMode` determines the return value's type, i.e. the `readableObjectMode`.
The chunk argument's type is based on the previous generator's return value, i.e. the `writableObjectMode` is based on the previous `readableObjectMode`.
The last input's generator is read by `subprocess.stdin` which:
- should not be in `objectMode` for performance reasons.
- can only be strings, Buffers and Uint8Arrays.
Therefore its `readableObjectMode` must be `false`.
The same applies to the first output's generator's `writableObjectMode`.
*/
export const getTransformObjectModes = (objectMode, index, newTransforms, direction) => direction === 'output'
	? getOutputObjectModes(objectMode, index, newTransforms)
	: getInputObjectModes(objectMode, index, newTransforms);

const getOutputObjectModes = (objectMode, index, newTransforms) => {
	const writableObjectMode = index !== 0 && newTransforms[index - 1].value.readableObjectMode;
	const readableObjectMode = objectMode ?? writableObjectMode;
	return {writableObjectMode, readableObjectMode};
};

const getInputObjectModes = (objectMode, index, newTransforms) => {
	const writableObjectMode = index === 0
		? objectMode === true
		: newTransforms[index - 1].value.readableObjectMode;
	const readableObjectMode = index !== newTransforms.length - 1 && (objectMode ?? writableObjectMode);
	return {writableObjectMode, readableObjectMode};
};

// Retrieve the `objectMode` of a file descriptor, e.g. `stdout` or `stderr`
export const getFdObjectMode = (stdioItems, direction) => {
	const lastTransform = stdioItems.findLast(({type}) => TRANSFORM_TYPES.has(type));
	if (lastTransform === undefined) {
		return false;
	}

	return direction === 'input'
		? lastTransform.value.writableObjectMode
		: lastTransform.value.readableObjectMode;
};
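A sketch of the `objectMode` semantics described above (editor's addition, not part of this commit); `emit-json-lines.js` is a placeholder for any program printing one JSON document per line:

import {execa} from 'execa';

// `objectMode: true` makes the transform's output `readableObjectMode`, so
// `result.stdout` becomes an array of the yielded values instead of a string
const parseJsonLines = {
	* transform(line) {
		yield JSON.parse(line);
	},
	objectMode: true,
};

const {stdout} = await execa('node', ['emit-json-lines.js'], {stdout: parseJsonLines});
console.log(Array.isArray(stdout)); // true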
60
node_modules/execa/lib/transform/run-async.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
import {callbackify} from 'node:util';

// Applies a series of generator functions asynchronously
export const pushChunks = callbackify(async (getChunks, state, getChunksArguments, transformStream) => {
	state.currentIterable = getChunks(...getChunksArguments);

	try {
		for await (const chunk of state.currentIterable) {
			transformStream.push(chunk);
		}
	} finally {
		delete state.currentIterable;
	}
});

// For each new chunk, apply each `transform()` method
export const transformChunk = async function * (chunk, generators, index) {
	if (index === generators.length) {
		yield chunk;
		return;
	}

	const {transform = identityGenerator} = generators[index];
	for await (const transformedChunk of transform(chunk)) {
		yield * transformChunk(transformedChunk, generators, index + 1);
	}
};

// At the end, apply each `final()` method, followed by the `transform()` method of the next transforms
export const finalChunks = async function * (generators) {
	for (const [index, {final}] of Object.entries(generators)) {
		yield * generatorFinalChunks(final, Number(index), generators);
	}
};

const generatorFinalChunks = async function * (final, index, generators) {
	if (final === undefined) {
		return;
	}

	for await (const finalChunk of final()) {
		yield * transformChunk(finalChunk, generators, index + 1);
	}
};

// Cancel any ongoing async generator when the Transform is destroyed, e.g. when the subprocess errors
export const destroyTransform = callbackify(async ({currentIterable}, error) => {
	if (currentIterable !== undefined) {
		await (error ? currentIterable.throw(error) : currentIterable.return());
		return;
	}

	if (error) {
		throw error;
	}
});

const identityGenerator = function * (chunk) {
	yield chunk;
};
50
node_modules/execa/lib/transform/run-sync.js
generated
vendored
Normal file
@@ -0,0 +1,50 @@
// Duplicates the code from `run-async.js`, but as synchronous functions
export const pushChunksSync = (getChunksSync, getChunksArguments, transformStream, done) => {
	try {
		for (const chunk of getChunksSync(...getChunksArguments)) {
			transformStream.push(chunk);
		}

		done();
	} catch (error) {
		done(error);
	}
};

// Run synchronous generators with `execaSync()`
export const runTransformSync = (generators, chunks) => [
	...chunks.flatMap(chunk => [...transformChunkSync(chunk, generators, 0)]),
	...finalChunksSync(generators),
];

export const transformChunkSync = function * (chunk, generators, index) {
	if (index === generators.length) {
		yield chunk;
		return;
	}

	const {transform = identityGenerator} = generators[index];
	for (const transformedChunk of transform(chunk)) {
		yield * transformChunkSync(transformedChunk, generators, index + 1);
	}
};

export const finalChunksSync = function * (generators) {
	for (const [index, {final}] of Object.entries(generators)) {
		yield * generatorFinalChunksSync(final, Number(index), generators);
	}
};

const generatorFinalChunksSync = function * (final, index, generators) {
	if (final === undefined) {
		return;
	}

	for (const finalChunk of final()) {
		yield * transformChunkSync(finalChunk, generators, index + 1);
	}
};

const identityGenerator = function * (chunk) {
	yield chunk;
};
110
node_modules/execa/lib/transform/split.js
generated
vendored
Normal file
@@ -0,0 +1,110 @@
// Split chunks line-wise for generators passed to the `std*` options
export const getSplitLinesGenerator = (binary, preserveNewlines, skipped, state) => binary || skipped
	? undefined
	: initializeSplitLines(preserveNewlines, state);

// Same but for synchronous methods
export const splitLinesSync = (chunk, preserveNewlines, objectMode) => objectMode
	? chunk.flatMap(item => splitLinesItemSync(item, preserveNewlines))
	: splitLinesItemSync(chunk, preserveNewlines);

const splitLinesItemSync = (chunk, preserveNewlines) => {
	const {transform, final} = initializeSplitLines(preserveNewlines, {});
	return [...transform(chunk), ...final()];
};

const initializeSplitLines = (preserveNewlines, state) => {
	state.previousChunks = '';
	return {
		transform: splitGenerator.bind(undefined, state, preserveNewlines),
		final: linesFinal.bind(undefined, state),
	};
};

// This imperative logic is much faster than using `String.split()` and uses very low memory.
const splitGenerator = function * (state, preserveNewlines, chunk) {
	if (typeof chunk !== 'string') {
		yield chunk;
		return;
	}

	let {previousChunks} = state;
	let start = -1;

	for (let end = 0; end < chunk.length; end += 1) {
		if (chunk[end] === '\n') {
			const newlineLength = getNewlineLength(chunk, end, preserveNewlines, state);
			let line = chunk.slice(start + 1, end + 1 - newlineLength);

			if (previousChunks.length > 0) {
				line = concatString(previousChunks, line);
				previousChunks = '';
			}

			yield line;
			start = end;
		}
	}

	if (start !== chunk.length - 1) {
		previousChunks = concatString(previousChunks, chunk.slice(start + 1));
	}

	state.previousChunks = previousChunks;
};

const getNewlineLength = (chunk, end, preserveNewlines, state) => {
	if (preserveNewlines) {
		return 0;
	}

	state.isWindowsNewline = end !== 0 && chunk[end - 1] === '\r';
	return state.isWindowsNewline ? 2 : 1;
};

const linesFinal = function * ({previousChunks}) {
	if (previousChunks.length > 0) {
		yield previousChunks;
	}
};

// Unless `preserveNewlines: true` is used, we strip the newline of each line.
// This re-adds them after the user `transform` code has run.
export const getAppendNewlineGenerator = ({binary, preserveNewlines, readableObjectMode, state}) => binary || preserveNewlines || readableObjectMode
	? undefined
	: {transform: appendNewlineGenerator.bind(undefined, state)};

const appendNewlineGenerator = function * ({isWindowsNewline = false}, chunk) {
	const {unixNewline, windowsNewline, LF, concatBytes} = typeof chunk === 'string' ? linesStringInfo : linesUint8ArrayInfo;

	if (chunk.at(-1) === LF) {
		yield chunk;
		return;
	}

	const newline = isWindowsNewline ? windowsNewline : unixNewline;
	yield concatBytes(chunk, newline);
};

const concatString = (firstChunk, secondChunk) => `${firstChunk}${secondChunk}`;

const linesStringInfo = {
	windowsNewline: '\r\n',
	unixNewline: '\n',
	LF: '\n',
	concatBytes: concatString,
};

const concatUint8Array = (firstChunk, secondChunk) => {
	const chunk = new Uint8Array(firstChunk.length + secondChunk.length);
	chunk.set(firstChunk, 0);
	chunk.set(secondChunk, firstChunk.length);
	return chunk;
};

const linesUint8ArrayInfo = {
	windowsNewline: new Uint8Array([0x0D, 0x0A]),
	unixNewline: new Uint8Array([0x0A]),
	LF: 0x0A,
	concatBytes: concatUint8Array,
};
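A sketch of the `preserveNewlines` behavior described above (editor's addition, not part of this commit); `print-lines.js` is a placeholder, and combining a transform with 'inherit' in an array follows execa's documented `stdout` option:

import {execa} from 'execa';

// With `preserveNewlines: true`, the split generator keeps each line's
// trailing newline, and `getAppendNewlineGenerator()` is skipped
const passThrough = {
	* transform(line) {
		yield line; // `line` ends with '\n' (or '\r\n' for Windows output)
	},
	preserveNewlines: true,
};

await execa('node', ['print-lines.js'], {stdout: [passThrough, 'inherit']});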
43
node_modules/execa/lib/transform/validate.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
import {Buffer} from 'node:buffer';
import {isUint8Array} from '../utils/uint-array.js';

// Validate the type of chunk argument passed to transform generators
export const getValidateTransformInput = (writableObjectMode, optionName) => writableObjectMode
	? undefined
	: validateStringTransformInput.bind(undefined, optionName);

const validateStringTransformInput = function * (optionName, chunk) {
	if (typeof chunk !== 'string' && !isUint8Array(chunk) && !Buffer.isBuffer(chunk)) {
		throw new TypeError(`The \`${optionName}\` option's transform must use "objectMode: true" to receive as input: ${typeof chunk}.`);
	}

	yield chunk;
};

// Validate the type of the value returned by transform generators
export const getValidateTransformReturn = (readableObjectMode, optionName) => readableObjectMode
	? validateObjectTransformReturn.bind(undefined, optionName)
	: validateStringTransformReturn.bind(undefined, optionName);

const validateObjectTransformReturn = function * (optionName, chunk) {
	validateEmptyReturn(optionName, chunk);
	yield chunk;
};

const validateStringTransformReturn = function * (optionName, chunk) {
	validateEmptyReturn(optionName, chunk);

	if (typeof chunk !== 'string' && !isUint8Array(chunk)) {
		throw new TypeError(`The \`${optionName}\` option's function must yield a string or an Uint8Array, not ${typeof chunk}.`);
	}

	yield chunk;
};

const validateEmptyReturn = (optionName, chunk) => {
	if (chunk === null || chunk === undefined) {
		throw new TypeError(`The \`${optionName}\` option's function must not call \`yield ${chunk}\`.
Instead, \`yield\` should either be called with a value, or not be called at all. For example:
  if (condition) { yield value; }`);
	}
};
8
node_modules/execa/lib/utils/abort-signal.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
import {once} from 'node:events';

// Combines `util.aborted()` and `events.addAbortListener()`: promise-based and cleaned up with a stop signal
export const onAbortedSignal = async (mainSignal, stopSignal) => {
	if (!mainSignal.aborted) {
		await once(mainSignal, 'abort', {signal: stopSignal});
	}
};
7
node_modules/execa/lib/utils/deferred.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
export const createDeferred = () => {
	const methods = {};
	const promise = new Promise((resolve, reject) => {
		Object.assign(methods, {resolve, reject});
	});
	return Object.assign(promise, methods);
};
14
node_modules/execa/lib/utils/max-listeners.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
import {addAbortListener} from 'node:events';

// Temporarily increase the maximum number of listeners on an eventEmitter
export const incrementMaxListeners = (eventEmitter, maxListenersIncrement, signal) => {
	const maxListeners = eventEmitter.getMaxListeners();
	if (maxListeners === 0 || maxListeners === Number.POSITIVE_INFINITY) {
		return;
	}

	eventEmitter.setMaxListeners(maxListeners + maxListenersIncrement);
	addAbortListener(signal, () => {
		eventEmitter.setMaxListeners(eventEmitter.getMaxListeners() - maxListenersIncrement);
	});
};
6
node_modules/execa/lib/utils/standard-stream.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
import process from 'node:process';

export const isStandardStream = stream => STANDARD_STREAMS.includes(stream);
export const STANDARD_STREAMS = [process.stdin, process.stdout, process.stderr];
export const STANDARD_STREAMS_ALIASES = ['stdin', 'stdout', 'stderr'];
export const getStreamName = fdNumber => STANDARD_STREAMS_ALIASES[fdNumber] ?? `stdio[${fdNumber}]`;
69
node_modules/execa/lib/utils/uint-array.js
generated
vendored
Normal file
@@ -0,0 +1,69 @@
import {StringDecoder} from 'node:string_decoder';

const {toString: objectToString} = Object.prototype;

export const isArrayBuffer = value => objectToString.call(value) === '[object ArrayBuffer]';

// Is either Uint8Array or Buffer
export const isUint8Array = value => objectToString.call(value) === '[object Uint8Array]';

export const bufferToUint8Array = buffer => new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength);

const textEncoder = new TextEncoder();
const stringToUint8Array = string => textEncoder.encode(string);

const textDecoder = new TextDecoder();
export const uint8ArrayToString = uint8Array => textDecoder.decode(uint8Array);

export const joinToString = (uint8ArraysOrStrings, encoding) => {
	const strings = uint8ArraysToStrings(uint8ArraysOrStrings, encoding);
	return strings.join('');
};

const uint8ArraysToStrings = (uint8ArraysOrStrings, encoding) => {
	if (encoding === 'utf8' && uint8ArraysOrStrings.every(uint8ArrayOrString => typeof uint8ArrayOrString === 'string')) {
		return uint8ArraysOrStrings;
	}

	const decoder = new StringDecoder(encoding);
	const strings = uint8ArraysOrStrings
		.map(uint8ArrayOrString => typeof uint8ArrayOrString === 'string'
			? stringToUint8Array(uint8ArrayOrString)
			: uint8ArrayOrString)
		.map(uint8Array => decoder.write(uint8Array));
	const finalString = decoder.end();
	return finalString === '' ? strings : [...strings, finalString];
};

export const joinToUint8Array = uint8ArraysOrStrings => {
	if (uint8ArraysOrStrings.length === 1 && isUint8Array(uint8ArraysOrStrings[0])) {
		return uint8ArraysOrStrings[0];
	}

	return concatUint8Arrays(stringsToUint8Arrays(uint8ArraysOrStrings));
};

const stringsToUint8Arrays = uint8ArraysOrStrings => uint8ArraysOrStrings.map(uint8ArrayOrString => typeof uint8ArrayOrString === 'string'
	? stringToUint8Array(uint8ArrayOrString)
	: uint8ArrayOrString);

export const concatUint8Arrays = uint8Arrays => {
	const result = new Uint8Array(getJoinLength(uint8Arrays));

	let index = 0;
	for (const uint8Array of uint8Arrays) {
		result.set(uint8Array, index);
		index += uint8Array.length;
	}

	return result;
};

const getJoinLength = uint8Arrays => {
	let joinLength = 0;
	for (const uint8Array of uint8Arrays) {
		joinLength += uint8Array.length;
	}

	return joinLength;
};
24
node_modules/execa/lib/verbose/complete.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
import prettyMs from 'pretty-ms';
import {isVerbose} from './values.js';
import {verboseLog} from './log.js';
import {logError} from './error.js';

// When `verbose` is `short|full|custom`, print each command's completion, duration and error
export const logResult = (result, verboseInfo) => {
	if (!isVerbose(verboseInfo)) {
		return;
	}

	logError(result, verboseInfo);
	logDuration(result, verboseInfo);
};

const logDuration = (result, verboseInfo) => {
	const verboseMessage = `(done in ${prettyMs(result.durationMs)})`;
	verboseLog({
		type: 'duration',
		verboseMessage,
		verboseInfo,
		result,
	});
};
26
node_modules/execa/lib/verbose/custom.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
import {getVerboseFunction} from './values.js';

// Apply the `verbose` function on each line
export const applyVerboseOnLines = (printedLines, verboseInfo, fdNumber) => {
	const verboseFunction = getVerboseFunction(verboseInfo, fdNumber);
	return printedLines
		.map(({verboseLine, verboseObject}) => applyVerboseFunction(verboseLine, verboseObject, verboseFunction))
		.filter(printedLine => printedLine !== undefined)
		.map(printedLine => appendNewline(printedLine))
		.join('');
};

const applyVerboseFunction = (verboseLine, verboseObject, verboseFunction) => {
	if (verboseFunction === undefined) {
		return verboseLine;
	}

	const printedLine = verboseFunction(verboseLine, verboseObject);
	if (typeof printedLine === 'string') {
		return printedLine;
	}
};

const appendNewline = printedLine => printedLine.endsWith('\n')
	? printedLine
	: `${printedLine}\n`;
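A sketch of a custom `verbose` function (editor's addition, not part of this commit), matching the `(verboseLine, verboseObject)` signature used by `applyVerboseFunction()` above:

import {execa} from 'execa';

// Returning a string prints it; returning `undefined` filters the line out,
// as implemented by `applyVerboseFunction()` above
const verbose = (verboseLine, {type}) => {
	if (type === 'command' || type === 'error') {
		return verboseLine;
	}
};

await execa('npm', ['run', 'build'], {verbose});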
54
node_modules/execa/lib/verbose/default.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
import figures from 'figures';
import {
	gray,
	bold,
	redBright,
	yellowBright,
} from 'yoctocolors';

// Default when `verbose` is not a function
export const defaultVerboseFunction = ({
	type,
	message,
	timestamp,
	piped,
	commandId,
	result: {failed = false} = {},
	options: {reject = true},
}) => {
	const timestampString = serializeTimestamp(timestamp);
	const icon = ICONS[type]({failed, reject, piped});
	const color = COLORS[type]({reject});
	return `${gray(`[${timestampString}]`)} ${gray(`[${commandId}]`)} ${color(icon)} ${color(message)}`;
};

// Prepending the timestamp allows debugging the slow paths of a subprocess
const serializeTimestamp = timestamp => `${padField(timestamp.getHours(), 2)}:${padField(timestamp.getMinutes(), 2)}:${padField(timestamp.getSeconds(), 2)}.${padField(timestamp.getMilliseconds(), 3)}`;

const padField = (field, padding) => String(field).padStart(padding, '0');

const getFinalIcon = ({failed, reject}) => {
	if (!failed) {
		return figures.tick;
	}

	return reject ? figures.cross : figures.warning;
};

const ICONS = {
	command: ({piped}) => piped ? '|' : '$',
	output: () => ' ',
	ipc: () => '*',
	error: getFinalIcon,
	duration: getFinalIcon,
};

const identity = string => string;

const COLORS = {
	command: () => bold,
	output: () => identity,
	ipc: () => identity,
	error: ({reject}) => reject ? redBright : yellowBright,
	duration: () => gray,
};
13
node_modules/execa/lib/verbose/error.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
import {verboseLog} from './log.js';

// When `verbose` is `short|full|custom`, print each command's error when it fails
export const logError = (result, verboseInfo) => {
	if (result.failed) {
		verboseLog({
			type: 'error',
			verboseMessage: result.shortMessage,
			verboseInfo,
			result,
		});
	}
};
39
node_modules/execa/lib/verbose/info.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
import {isVerbose, VERBOSE_VALUES, isVerboseFunction} from './values.js';

// Information computed before spawning, used by the `verbose` option
export const getVerboseInfo = (verbose, escapedCommand, rawOptions) => {
	validateVerbose(verbose);
	const commandId = getCommandId(verbose);
	return {
		verbose,
		escapedCommand,
		commandId,
		rawOptions,
	};
};

const getCommandId = verbose => isVerbose({verbose}) ? COMMAND_ID++ : undefined;

// Prepending the `pid` is useful when multiple commands print their output at the same time.
// However, we cannot use the real PID since this is not available with `child_process.spawnSync()`.
// Also, we cannot use the real PID if we want to print it before `child_process.spawn()` is run.
// As a pro, it is shorter than a normal PID and never re-uses the same id.
// As a con, it cannot be used to send signals.
let COMMAND_ID = 0n;

const validateVerbose = verbose => {
	for (const fdVerbose of verbose) {
		if (fdVerbose === false) {
			throw new TypeError('The "verbose: false" option was renamed to "verbose: \'none\'".');
		}

		if (fdVerbose === true) {
			throw new TypeError('The "verbose: true" option was renamed to "verbose: \'short\'".');
		}

		if (!VERBOSE_VALUES.includes(fdVerbose) && !isVerboseFunction(fdVerbose)) {
			const allowedValues = VERBOSE_VALUES.map(allowedValue => `'${allowedValue}'`).join(', ');
			throw new TypeError(`The "verbose" option must not be ${fdVerbose}. Allowed values are: ${allowedValues} or a function.`);
		}
	}
};
15
node_modules/execa/lib/verbose/ipc.js
generated
vendored
Normal file
@@ -0,0 +1,15 @@
import {verboseLog, serializeVerboseMessage} from './log.js';
import {isFullVerbose} from './values.js';

// When `verbose` is `'full'`, print IPC messages from the subprocess
export const shouldLogIpc = verboseInfo => isFullVerbose(verboseInfo, 'ipc');

export const logIpcOutput = (message, verboseInfo) => {
	const verboseMessage = serializeVerboseMessage(message);
	verboseLog({
		type: 'ipc',
		verboseMessage,
		fdNumber: 'ipc',
		verboseInfo,
	});
};
Some files were not shown because too many files have changed in this diff.