加入了node_modules

添加了新的功能项
This commit is contained in:
zyb
2025-05-13 21:23:41 +08:00
parent 8d53374568
commit 0c0b5d869c
3589 changed files with 893641 additions and 233 deletions

46
node_modules/execa/lib/resolve/all-async.js generated vendored Normal file
View File

@@ -0,0 +1,46 @@
import mergeStreams from '@sindresorhus/merge-streams';
import {waitForSubprocessStream} from './stdio.js';
// Create `subprocess.all`, which interleaves `stdout` and `stderr`.
// Only created when the `all` option is set and at least one of the two
// streams exists; returns `undefined` otherwise.
export const makeAllStream = ({stdout, stderr}, {all}) => {
	if (!all || (!stdout && !stderr)) {
		return undefined;
	}

	const outputStreams = [stdout, stderr].filter(Boolean);
	return mergeStreams(outputStreams);
};
// Read the contents of `subprocess.all` and/or wait for its completion.
// Delegates to the generic stream reader with "all"-specific settings:
// the max buffer is the sum of stdout's and stderr's, and line mode is on
// when either stdout or stderr uses it.
export const waitForAllStream = ({subprocess, encoding, buffer, maxBuffer, lines, stripFinalNewline, verboseInfo, streamInfo}) => {
	const {stream, buffer: shouldBuffer} = getAllStream(subprocess, buffer);
	return waitForSubprocessStream({
		stream,
		buffer: shouldBuffer,
		fdNumber: 'all',
		encoding,
		maxBuffer: maxBuffer[1] + maxBuffer[2],
		lines: lines[1] || lines[2],
		allMixed: getAllMixed(subprocess),
		stripFinalNewline,
		verboseInfo,
		streamInfo,
	});
};
// Pick which stream backs `result.all` and whether it should be buffered.
// When only one of stdout/stderr is buffered, read that single stream;
// when both (or neither) are, read the interleaved `all` stream.
const getAllStream = ({stdout, stderr, all}, [, bufferStdout, bufferStderr]) => {
	const buffer = bufferStdout || bufferStderr;
	let stream = all;

	if (buffer && !bufferStdout) {
		stream = stderr;
	} else if (buffer && !bufferStderr) {
		stream = stdout;
	}

	return {stream, buffer};
};
// When `subprocess.stdout` is in objectMode but not `subprocess.stderr` (or the opposite), we need to use both:
// - `getStreamAsArray()` for the chunks in objectMode, to return as an array without changing each chunk
// - `getStreamAsArrayBuffer()` or `getStream()` for the chunks not in objectMode, to convert them from Buffers to string or Uint8Array
// We do this by emulating the Buffer -> string|Uint8Array conversion performed by `get-stream` with our own, which is identical.
// The early returns mirror `&&` short-circuiting: the first missing stream is
// returned as-is, so falsy return values stay identical to the original chain.
const getAllMixed = ({all, stdout, stderr}) => {
	if (!all) {
		return all;
	}

	if (!stdout) {
		return stdout;
	}

	if (!stderr) {
		return stderr;
	}

	return stdout.readableObjectMode !== stderr.readableObjectMode;
};

33
node_modules/execa/lib/resolve/all-sync.js generated vendored Normal file
View File

@@ -0,0 +1,33 @@
import {isUint8Array, concatUint8Arrays} from '../utils/uint-array.js';
import {stripNewline} from '../io/strip-newline.js';
// Retrieve `result.all` with synchronous methods.
// Combines `stdout` and `stderr` into a single value, whose type depends on
// theirs: arrays are concatenated (stripping the final newline of any
// non-array side), Uint8Arrays are concatenated, and anything else is
// joined as strings.
export const getAllSync = ([, stdout, stderr], options) => {
	if (!options.all) {
		return;
	}

	if (stdout === undefined) {
		return stderr;
	}

	if (stderr === undefined) {
		return stdout;
	}

	const stdoutIsArray = Array.isArray(stdout);
	const stderrIsArray = Array.isArray(stderr);

	if (stdoutIsArray && stderrIsArray) {
		return [...stdout, ...stderr];
	}

	if (stdoutIsArray) {
		return [...stdout, stripNewline(stderr, options, 'all')];
	}

	if (stderrIsArray) {
		return [stripNewline(stdout, options, 'all'), ...stderr];
	}

	if (isUint8Array(stdout) && isUint8Array(stderr)) {
		return concatUint8Arrays([stdout, stderr]);
	}

	return `${stdout}${stderr}`;
};

54
node_modules/execa/lib/resolve/exit-async.js generated vendored Normal file
View File

@@ -0,0 +1,54 @@
import {once} from 'node:events';
import {DiscardedError} from '../return/final-error.js';
// If `error` is emitted before `spawn`, `exit` will never be emitted.
// However, `error` might be emitted after `spawn`, in which case `exit` is
// still emitted. Since the `exit` event carries the signal name, we make sure
// we keep listening for it. This also handles these unlikely cases:
// - `exit` emitted in the same microtask as `spawn`
// - `error` emitted multiple times
export const waitForExit = async (subprocess, context) => {
	const exitResult = await waitForExitOrError(subprocess);
	context.isForcefullyTerminated ??= false;
	const [exitCode, signal] = exitResult;
	return [exitCode, signal];
};

const waitForExitOrError = async subprocess => {
	const [spawnResult, exitResult] = await Promise.allSettled([
		once(subprocess, 'spawn'),
		once(subprocess, 'exit'),
	]);

	// `error` before `spawn`: `exit` will never fire, return no code/signal
	if (spawnResult.status === 'rejected') {
		return [];
	}

	// `error` after `spawn`: keep waiting until `exit` eventually fires
	if (exitResult.status === 'rejected') {
		return waitForSubprocessExit(subprocess);
	}

	return exitResult.value;
};

// Keeps listening until `exit` fires, ignoring any further `error` events
const waitForSubprocessExit = async subprocess => {
	// eslint-disable-next-line no-constant-condition
	while (true) {
		try {
			return await once(subprocess, 'exit');
		} catch {}
	}
};
// Retrieve the final exit code and/or signal name.
// Throws a `DiscardedError` when the subprocess failed with a non-0 exit code
// or a signal — unless it failed through an `error` event, in which case that
// error (handled elsewhere) takes precedence.
export const waitForSuccessfulExit = async exitPromise => {
	const [exitCode, signal] = await exitPromise;
	const erroredBeforeExit = isSubprocessErrorExit(exitCode, signal);

	if (!erroredBeforeExit && isFailedExit(exitCode, signal)) {
		throw new DiscardedError();
	}

	return [exitCode, signal];
};

// The subprocess failed due to an `error` event: neither exit code nor signal exists
const isSubprocessErrorExit = (exitCode, signal) => exitCode === undefined && signal === undefined;

// The subprocess failed with a non-0 exit code or was terminated by a signal
export const isFailedExit = (exitCode, signal) => signal !== null || exitCode !== 0;

25
node_modules/execa/lib/resolve/exit-sync.js generated vendored Normal file
View File

@@ -0,0 +1,25 @@
import {DiscardedError} from '../return/final-error.js';
import {isMaxBufferSync} from '../io/max-buffer.js';
import {isFailedExit} from './exit-async.js';
// Retrieve exit code, signal name and error information, with synchronous methods.
// Normalizes `child_process.spawnSync()`'s result into the fields the rest of
// the resolution logic expects.
export const getExitResultSync = ({error, status: exitCode, signal, output}, {maxBuffer}) => {
	const resultError = getResultError(error, exitCode, signal);
	return {
		resultError,
		exitCode,
		signal,
		// `spawnSync()` reports timeouts as an ETIMEDOUT error
		timedOut: resultError?.code === 'ETIMEDOUT',
		isMaxBuffer: isMaxBufferSync(resultError, output, maxBuffer),
	};
};

// Prefer the spawn error when present; otherwise synthesize a placeholder
// error for failed exits, so failures always carry an error object.
const getResultError = (error, exitCode, signal) => {
	if (error !== undefined) {
		return error;
	}

	if (isFailedExit(exitCode, signal)) {
		return new DiscardedError();
	}

	return undefined;
};

47
node_modules/execa/lib/resolve/stdio.js generated vendored Normal file
View File

@@ -0,0 +1,47 @@
import {getStreamOutput} from '../io/contents.js';
import {waitForStream, isInputFileDescriptor} from './wait-stream.js';
// Read the contents of each `subprocess.std*` and/or wait for its completion.
// Returns one promise per file descriptor, with per-fd buffering options.
export const waitForStdioStreams = ({subprocess, encoding, buffer, maxBuffer, lines, stripFinalNewline, verboseInfo, streamInfo}) => {
	return subprocess.stdio.map((stream, fdNumber) => waitForSubprocessStream({
		stream,
		fdNumber,
		encoding,
		buffer: buffer[fdNumber],
		maxBuffer: maxBuffer[fdNumber],
		lines: lines[fdNumber],
		allMixed: false,
		stripFinalNewline,
		verboseInfo,
		streamInfo,
	}));
};
// Read the contents of `subprocess.std*` or `subprocess.all` and/or wait for
// its completion. Returns `undefined` when the stream does not exist or is an
// input file descriptor (which produces no output to read).
export const waitForSubprocessStream = async ({stream, fdNumber, encoding, buffer, maxBuffer, lines, allMixed, stripFinalNewline, verboseInfo, streamInfo}) => {
	if (!stream) {
		return;
	}

	// Start watching for the stream's completion/errors right away
	const onStreamEnd = waitForStream(stream, fdNumber, streamInfo);

	// Input file descriptors have no output: only await completion
	if (isInputFileDescriptor(streamInfo, fdNumber)) {
		await onStreamEnd;
		return;
	}

	const outputPromise = getStreamOutput({
		stream,
		onStreamEnd,
		fdNumber,
		encoding,
		buffer,
		maxBuffer,
		lines,
		allMixed,
		stripFinalNewline,
		verboseInfo,
		streamInfo,
	});
	const [output] = await Promise.all([outputPromise, onStreamEnd]);
	return output;
};

96
node_modules/execa/lib/resolve/wait-stream.js generated vendored Normal file
View File

@@ -0,0 +1,96 @@
import {finished} from 'node:stream/promises';
// Wraps `finished(stream)` to handle the following case:
// - When the subprocess exits, Node.js automatically calls `subprocess.stdin.destroy()`, which we need to ignore.
// - However, we still need to throw if `subprocess.stdin.destroy()` is called before subprocess exit.
export const waitForStream = async (stream, fdNumber, streamInfo, {isSameDirection, stopOnExit = false} = {}) => {
	// Spy on stdin destruction so post-exit cleanup destroys are not errors
	const state = handleStdinDestroy(stream, streamInfo);
	const abortController = new AbortController();

	try {
		const finishedPromise = finished(stream, {cleanup: true, signal: abortController.signal});
		// With `stopOnExit`, stop waiting as soon as the subprocess exits
		const racers = stopOnExit
			? [streamInfo.exitPromise, finishedPromise]
			: [finishedPromise];
		await Promise.race(racers);
	} catch (error) {
		// Ignore errors caused by Node.js's own post-exit stdin cleanup
		if (!state.stdinCleanedUp) {
			handleStreamError(error, fdNumber, streamInfo, isSameDirection);
		}
	} finally {
		// Detach `finished()`'s listeners once the race settles
		abortController.abort();
	}
};
// If `subprocess.stdin` is destroyed before being fully written to, it is considered aborted and should throw an error.
// This can happen for example when user called `subprocess.stdin.destroy()` before `subprocess.stdin.end()`.
// However, Node.js calls `subprocess.stdin.destroy()` on exit for cleanup purposes.
// https://github.com/nodejs/node/blob/0b4cdb4b42956cbd7019058e409e06700a199e11/lib/internal/child_process.js#L278
// This is normal and should not throw an error.
// Therefore, we need to differentiate between both situations to know whether to throw an error.
// Unfortunately, events (`close`, `error`, `end`, `exit`) cannot be used because `.destroy()` can take an arbitrary amount of time.
// For example, `stdin: 'pipe'` is implemented as a TCP socket, and its `.destroy()` method waits for TCP disconnection.
// Therefore `.destroy()` might end before or after subprocess exit, based on OS speed and load.
// The only way to detect this is to spy on `subprocess.stdin._destroy()` by wrapping it.
// If `subprocess.exitCode` or `subprocess.signalCode` is set, it means `.destroy()` is being called by Node.js itself.
const handleStdinDestroy = (stream, {originalStreams: [originalStdin], subprocess}) => {
	const state = {stdinCleanedUp: false};
	const isOriginalStdin = stream === originalStdin;

	if (isOriginalStdin) {
		spyOnStdinDestroy(stream, subprocess, state);
	}

	return state;
};

// Wrap `_destroy()` so the cleanup detection runs before the real destroy logic
const spyOnStdinDestroy = (subprocessStdin, subprocess, state) => {
	const originalDestroy = subprocessStdin._destroy;
	subprocessStdin._destroy = (...destroyArguments) => {
		setStdinCleanedUp(subprocess, state);
		originalDestroy.call(subprocessStdin, ...destroyArguments);
	};
};

// An exit code or signal being set means the subprocess already exited,
// i.e. this destroy is Node.js's own cleanup, not a user abort
const setStdinCleanedUp = ({exitCode, signalCode}, state) => {
	const subprocessExited = exitCode !== null || signalCode !== null;

	if (subprocessExited) {
		state.stdinCleanedUp = true;
	}
};
// We ignore EPIPEs on writable streams and aborts on readable streams since those can happen normally.
// When one stream errors, the error is propagated to the other streams on the same file descriptor.
// Those other streams might have a different direction due to the above.
// When this happens, the direction of both the initial stream and the others should then be taken into account.
// Therefore, we keep track of whether a stream error is currently propagating.
const handleStreamError = (error, fdNumber, streamInfo, isSameDirection) => {
	const ignore = shouldIgnoreStreamError(error, fdNumber, streamInfo, isSameDirection);

	if (!ignore) {
		throw error;
	}
};

const shouldIgnoreStreamError = (error, fdNumber, streamInfo, isSameDirection = true) => {
	// While an error is propagating across streams, both kinds are benign
	if (streamInfo.propagating) {
		return isStreamEpipe(error) || isStreamAbort(error);
	}

	streamInfo.propagating = true;
	const isInput = isInputFileDescriptor(streamInfo, fdNumber);
	// Writable side tolerates EPIPE; readable side tolerates aborts
	return isInput === isSameDirection
		? isStreamEpipe(error)
		: isStreamAbort(error);
};

// Unfortunately, we cannot use the stream's class or properties to know whether it is readable or writable.
// For example, `subprocess.stdin` is technically a Duplex, but can only be used as a writable.
// Therefore, we need to use the file descriptor's direction (`stdin` is input, `stdout` is output, etc.).
// However, while `subprocess.std*` and transforms follow that direction, any stream passed the `std*` option has the opposite direction.
// For example, `subprocess.stdin` is a writable, but the `stdin` option is a readable.
export const isInputFileDescriptor = ({fileDescriptors}, fdNumber) => {
	if (fdNumber === 'all') {
		return false;
	}

	return fileDescriptors[fdNumber].direction === 'input';
};

// When `stream.destroy()` is called without an `error` argument, the stream is
// aborted. This is the only way to abort a readable stream, which can be
// useful, so we ignore this error on readable streams.
export const isStreamAbort = error => error?.code === 'ERR_STREAM_PREMATURE_CLOSE';

// When `stream.write()` is called but the underlying source has been closed,
// `EPIPE` is emitted. When piping subprocesses, the destination sometimes
// decides to stop (e.g. `... | head -n1`) and notifies the source with EPIPE,
// so we ignore this error on writable streams.
const isStreamEpipe = error => error?.code === 'EPIPE';

146
node_modules/execa/lib/resolve/wait-subprocess.js generated vendored Normal file
View File

@@ -0,0 +1,146 @@
import {once} from 'node:events';
import {isStream as isNodeStream} from 'is-stream';
import {throwOnTimeout} from '../terminate/timeout.js';
import {throwOnCancel} from '../terminate/cancel.js';
import {throwOnGracefulCancel} from '../terminate/graceful.js';
import {isStandardStream} from '../utils/standard-stream.js';
import {TRANSFORM_TYPES} from '../stdio/type.js';
import {getBufferedData} from '../io/contents.js';
import {waitForIpcOutput, getBufferedIpcOutput} from '../ipc/buffer-messages.js';
import {sendIpcInput} from '../ipc/ipc-input.js';
import {waitForAllStream} from './all-async.js';
import {waitForStdioStreams} from './stdio.js';
import {waitForExit, waitForSuccessfulExit} from './exit-async.js';
import {waitForStream} from './wait-stream.js';
// Retrieve result of subprocess: exit code, signal, error, streams (stdout/stderr/all).
// Races the "success" path (exit + all streams/IPC fully consumed) against
// every failure source (internal error, `error` event, timeout, cancellation).
export const waitForSubprocessResult = async ({
	subprocess,
	options: {
		encoding,
		buffer,
		maxBuffer,
		lines,
		timeoutDuration: timeout,
		cancelSignal,
		gracefulCancel,
		forceKillAfterDelay,
		stripFinalNewline,
		ipc,
		ipcInput,
	},
	context,
	verboseInfo,
	fileDescriptors,
	originalStreams,
	onInternalError,
	controller,
}) => {
	// Started first so `streamInfo.exitPromise` exists for the stream waiters below
	const exitPromise = waitForExit(subprocess, context);
	// Shared mutable state handed to every stream waiter; `propagating` tracks
	// whether a stream error is currently propagating across file descriptors
	const streamInfo = {
		originalStreams,
		fileDescriptors,
		subprocess,
		exitPromise,
		propagating: false,
	};
	// One promise per `subprocess.stdio[fdNumber]`, resolving with its buffered output
	const stdioPromises = waitForStdioStreams({
		subprocess,
		encoding,
		buffer,
		maxBuffer,
		lines,
		stripFinalNewline,
		verboseInfo,
		streamInfo,
	});
	// Same, for the interleaved `subprocess.all` stream
	const allPromise = waitForAllStream({
		subprocess,
		encoding,
		buffer,
		maxBuffer,
		lines,
		stripFinalNewline,
		verboseInfo,
		streamInfo,
	});
	// NOTE(review): `ipcOutput` appears to be filled in-place by
	// `waitForIpcOutput()` so partial IPC messages can be recovered in the
	// `catch` branch below — confirm against ipc/buffer-messages.js
	const ipcOutput = [];
	const ipcOutputPromise = waitForIpcOutput({
		subprocess,
		buffer,
		maxBuffer,
		ipc,
		ipcOutput,
		verboseInfo,
	});
	const originalPromises = waitForOriginalStreams(originalStreams, subprocess, streamInfo);
	const customStreamsEndPromises = waitForCustomStreamsEnd(fileDescriptors, streamInfo);
	try {
		// The first `Promise.all` element (`{}`) is the result-object slot: on
		// success it carries no `error` property, mirroring `{error}` in the
		// catch branch so both paths return the same tuple shape
		return await Promise.race([
			Promise.all([
				{},
				waitForSuccessfulExit(exitPromise),
				Promise.all(stdioPromises),
				allPromise,
				ipcOutputPromise,
				sendIpcInput(subprocess, ipcInput),
				...originalPromises,
				...customStreamsEndPromises,
			]),
			onInternalError,
			throwOnSubprocessError(subprocess, controller),
			...throwOnTimeout(subprocess, timeout, context, controller),
			...throwOnCancel({
				subprocess,
				cancelSignal,
				gracefulCancel,
				context,
				controller,
			}),
			...throwOnGracefulCancel({
				subprocess,
				cancelSignal,
				gracefulCancel,
				forceKillAfterDelay,
				context,
				controller,
			}),
		]);
	} catch (error) {
		// Keep any reason already set by timeout/cancel handlers; default to 'other'
		context.terminationReason ??= 'other';
		// Still wait for exit and collect whatever output was buffered so the
		// error result mirrors the shape of the success result
		return Promise.all([
			{error},
			exitPromise,
			Promise.all(stdioPromises.map(stdioPromise => getBufferedData(stdioPromise))),
			getBufferedData(allPromise),
			getBufferedIpcOutput(ipcOutputPromise, ipcOutput),
			Promise.allSettled(originalPromises),
			Promise.allSettled(customStreamsEndPromises),
		]);
	}
};
// Transforms replace `subprocess.std*`, which means they are not exposed to
// users — but we still want to wait for their completion. File descriptors
// whose stream is still the exposed one map to `undefined` (nothing extra to
// wait for).
const waitForOriginalStreams = (originalStreams, subprocess, streamInfo) =>
	originalStreams.map((stream, fdNumber) => {
		const isExposed = stream === subprocess.stdio[fdNumber];
		return isExposed ? undefined : waitForStream(stream, fdNumber, streamInfo);
	});
// Some `stdin`/`stdout`/`stderr` options create a stream, e.g. when passing a
// file path. The `.pipe()` method automatically ends that stream when
// `subprocess` ends. This waits for the completion of those streams, in order
// to catch any error.
const waitForCustomStreamsEnd = (fileDescriptors, streamInfo) => fileDescriptors.flatMap(({stdioItems}, fdNumber) => {
	const customStreamItems = stdioItems.filter(({value, stream = value}) =>
		isNodeStream(stream, {checkOpen: false}) && !isStandardStream(stream));
	return customStreamItems.map(({type, value, stream = value}) => waitForStream(stream, fdNumber, streamInfo, {
		isSameDirection: TRANSFORM_TYPES.has(type),
		stopOnExit: type === 'native',
	}));
});
// Fails when the subprocess emits an `error` event.
// `once(..., 'error')` resolves with the event's arguments, the first being
// the error itself; `signal` detaches the listener when the race settles.
const throwOnSubprocessError = async (subprocess, {signal}) => {
	const errorArguments = await once(subprocess, 'error', {signal});
	throw errorArguments[0];
};