Added node_modules
Added new feature items
node_modules/execa/lib/pipe/abort.js (+20, generated, vendored, normal file)
@@ -0,0 +1,20 @@
import {aborted} from 'node:util';
import {createNonCommandError} from './throw.js';

// When passing an `unpipeSignal` option, abort piping when the signal is aborted.
// However, do not terminate the subprocesses.
export const unpipeOnAbort = (unpipeSignal, unpipeContext) => unpipeSignal === undefined
	? []
	: [unpipeOnSignalAbort(unpipeSignal, unpipeContext)];

const unpipeOnSignalAbort = async (unpipeSignal, {sourceStream, mergedStream, fileDescriptors, sourceOptions, startTime}) => {
	await aborted(unpipeSignal, sourceStream);
	await mergedStream.remove(sourceStream);
	const error = new Error('Pipe canceled by `unpipeSignal` option.');
	throw createNonCommandError({
		error,
		fileDescriptors,
		sourceOptions,
		startTime,
	});
};
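
For context on the `unpipeSignal` option handled above, a minimal usage sketch (the script names are hypothetical; the pipe is canceled but neither subprocess is terminated):

import {execa} from 'execa';

const abortController = new AbortController();
const source = execa('node', ['produce.js']); // hypothetical producer script
const destination = execa('node', ['consume.js']); // hypothetical consumer script
const pipePromise = source.pipe(destination, {unpipeSignal: abortController.signal});

// Later: stop piping without terminating either subprocess.
abortController.abort();
// `pipePromise` rejects with "Pipe canceled by `unpipeSignal` option.",
// while `source` and `destination` keep running.
await pipePromise.catch(error => console.error(error.message));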

node_modules/execa/lib/pipe/pipe-arguments.js (+91, generated, vendored, normal file)
@@ -0,0 +1,91 @@
import {normalizeParameters} from '../methods/parameters.js';
import {getStartTime} from '../return/duration.js';
import {SUBPROCESS_OPTIONS, getToStream, getFromStream} from '../arguments/fd-options.js';
import {isDenoExecPath} from '../arguments/file-url.js';

// Normalize and validate arguments passed to `source.pipe(destination)`
export const normalizePipeArguments = ({source, sourcePromise, boundOptions, createNested}, ...pipeArguments) => {
	const startTime = getStartTime();
	const {
		destination,
		destinationStream,
		destinationError,
		from,
		unpipeSignal,
	} = getDestinationStream(boundOptions, createNested, pipeArguments);
	const {sourceStream, sourceError} = getSourceStream(source, from);
	const {options: sourceOptions, fileDescriptors} = SUBPROCESS_OPTIONS.get(source);
	return {
		sourcePromise,
		sourceStream,
		sourceOptions,
		sourceError,
		destination,
		destinationStream,
		destinationError,
		unpipeSignal,
		fileDescriptors,
		startTime,
	};
};

const getDestinationStream = (boundOptions, createNested, pipeArguments) => {
	try {
		const {
			destination,
			pipeOptions: {from, to, unpipeSignal} = {},
		} = getDestination(boundOptions, createNested, ...pipeArguments);
		const destinationStream = getToStream(destination, to);
		return {
			destination,
			destinationStream,
			from,
			unpipeSignal,
		};
	} catch (error) {
		return {destinationError: error};
	}
};

// Piping subprocesses can use three syntaxes:
// - source.pipe('command', commandArguments, pipeOptionsOrDestinationOptions)
// - source.pipe`command commandArgument` or source.pipe(pipeOptionsOrDestinationOptions)`command commandArgument`
// - source.pipe(execa(...), pipeOptions)
const getDestination = (boundOptions, createNested, firstArgument, ...pipeArguments) => {
	if (Array.isArray(firstArgument)) {
		const destination = createNested(mapDestinationArguments, boundOptions)(firstArgument, ...pipeArguments);
		return {destination, pipeOptions: boundOptions};
	}

	if (typeof firstArgument === 'string' || firstArgument instanceof URL || isDenoExecPath(firstArgument)) {
		if (Object.keys(boundOptions).length > 0) {
			throw new TypeError('Please use .pipe("file", ..., options) or .pipe(execa("file", ..., options)) instead of .pipe(options)("file", ...).');
		}

		const [rawFile, rawArguments, rawOptions] = normalizeParameters(firstArgument, ...pipeArguments);
		const destination = createNested(mapDestinationArguments)(rawFile, rawArguments, rawOptions);
		return {destination, pipeOptions: rawOptions};
	}

	if (SUBPROCESS_OPTIONS.has(firstArgument)) {
		if (Object.keys(boundOptions).length > 0) {
			throw new TypeError('Please use .pipe(options)`command` or .pipe($(options)`command`) instead of .pipe(options)($`command`).');
		}

		return {destination: firstArgument, pipeOptions: pipeArguments[0]};
	}

	throw new TypeError(`The first argument must be a template string, an options object, or an Execa subprocess: ${firstArgument}`);
};

// Force `stdin: 'pipe'` with the destination subprocess
const mapDestinationArguments = ({options}) => ({options: {...options, stdin: 'pipe', piped: true}});

const getSourceStream = (source, from) => {
	try {
		const sourceStream = getFromStream(source, from);
		return {sourceStream};
	} catch (error) {
		return {sourceError: error};
	}
};
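
The three syntaxes normalized above, as a short sketch (the commands are illustrative; `from` is one of the documented pipe options):

import {execa, $} from 'execa';

// 1. Command + arguments (+ destination/pipe options)
await execa('npm', ['run', 'build']).pipe('sort', ['-u']);

// 2. Template string, optionally with options bound first
await $`npm run build`.pipe({from: 'stderr'})`head -n 10`;

// 3. An existing Execa subprocess (+ pipe options)
const destination = execa('wc', ['-l']);
await execa('npm', ['run', 'build']).pipe(destination);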

node_modules/execa/lib/pipe/sequence.js (+24, generated, vendored, normal file)
@@ -0,0 +1,24 @@
// Like Bash, we await both subprocesses. This is unlike some other shells which only await the destination subprocess.
// Like Bash with the `pipefail` option, if either subprocess fails, the whole pipe fails.
// Like Bash, if both subprocesses fail, we return the failure of the destination.
// This ensures both subprocesses' errors are present, using `error.pipedFrom`.
export const waitForBothSubprocesses = async subprocessPromises => {
	const [
		{status: sourceStatus, reason: sourceReason, value: sourceResult = sourceReason},
		{status: destinationStatus, reason: destinationReason, value: destinationResult = destinationReason},
	] = await subprocessPromises;

	if (!destinationResult.pipedFrom.includes(sourceResult)) {
		destinationResult.pipedFrom.push(sourceResult);
	}

	if (destinationStatus === 'rejected') {
		throw destinationResult;
	}

	if (sourceStatus === 'rejected') {
		throw sourceResult;
	}

	return destinationResult;
};
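
A sketch of the Bash-like `pipefail` semantics implemented above (the failing scripts are hypothetical):

import {execa} from 'execa';

try {
	// Both subprocesses are awaited; if either one fails, the whole pipe fails.
	await execa('node', ['flaky-source.js']).pipe('node', ['flaky-destination.js']);
} catch (error) {
	// If the destination failed, the source's result or error was pushed onto
	// `error.pipedFrom`, so neither failure is lost.
	console.error(error.shortMessage, error.pipedFrom);
}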

node_modules/execa/lib/pipe/setup.js (+72, generated, vendored, normal file)
@@ -0,0 +1,72 @@
import isPlainObject from 'is-plain-obj';
import {normalizePipeArguments} from './pipe-arguments.js';
import {handlePipeArgumentsError} from './throw.js';
import {waitForBothSubprocesses} from './sequence.js';
import {pipeSubprocessStream} from './streaming.js';
import {unpipeOnAbort} from './abort.js';

// Pipe a subprocess' `stdout`/`stderr`/`stdio` into another subprocess' `stdin`
export const pipeToSubprocess = (sourceInfo, ...pipeArguments) => {
	if (isPlainObject(pipeArguments[0])) {
		return pipeToSubprocess.bind(undefined, {
			...sourceInfo,
			boundOptions: {...sourceInfo.boundOptions, ...pipeArguments[0]},
		});
	}

	const {destination, ...normalizedInfo} = normalizePipeArguments(sourceInfo, ...pipeArguments);
	const promise = handlePipePromise({...normalizedInfo, destination});
	promise.pipe = pipeToSubprocess.bind(undefined, {
		...sourceInfo,
		source: destination,
		sourcePromise: promise,
		boundOptions: {},
	});
	return promise;
};

// Asynchronous logic when piping subprocesses
const handlePipePromise = async ({
	sourcePromise,
	sourceStream,
	sourceOptions,
	sourceError,
	destination,
	destinationStream,
	destinationError,
	unpipeSignal,
	fileDescriptors,
	startTime,
}) => {
	const subprocessPromises = getSubprocessPromises(sourcePromise, destination);
	handlePipeArgumentsError({
		sourceStream,
		sourceError,
		destinationStream,
		destinationError,
		fileDescriptors,
		sourceOptions,
		startTime,
	});
	const maxListenersController = new AbortController();
	try {
		const mergedStream = pipeSubprocessStream(sourceStream, destinationStream, maxListenersController);
		return await Promise.race([
			waitForBothSubprocesses(subprocessPromises),
			...unpipeOnAbort(unpipeSignal, {
				sourceStream,
				mergedStream,
				sourceOptions,
				fileDescriptors,
				startTime,
			}),
		]);
	} finally {
		maxListenersController.abort();
	}
};

// `.pipe()` awaits the subprocess promises.
// When invalid arguments are passed to `.pipe()`, we throw an error, which prevents awaiting them.
// We need to ensure this does not create unhandled rejections.
const getSubprocessPromises = (sourcePromise, destination) => Promise.allSettled([sourcePromise, destination]);
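
Because the returned promise gets its own `.pipe()` method above, calls can be chained, and a plain-object first argument merges into `boundOptions` for the next call. A brief sketch with illustrative commands:

import {execa} from 'execa';

// Chained pipes: each `.pipe()` resolves with the last destination's result.
const {stdout} = await execa('ls', ['-1'])
	.pipe('sort', ['-r'])
	.pipe('head', ['-n', '3']);
console.log(stdout);

// A plain object binds options for the next `.pipe()` call.
await execa('npm', ['run', 'build']).pipe({from: 'stderr'})`head -n 5`;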

node_modules/execa/lib/pipe/streaming.js (+51, generated, vendored, normal file)
@@ -0,0 +1,51 @@
import {finished} from 'node:stream/promises';
import mergeStreams from '@sindresorhus/merge-streams';
import {incrementMaxListeners} from '../utils/max-listeners.js';
import {pipeStreams} from '../io/pipeline.js';

// The piping behavior is like Bash.
// In particular, when one subprocess exits, the other is not terminated by a signal.
// Instead, its stdout (for the source) or stdin (for the destination) closes.
// If the subprocess uses it, it will make it error with SIGPIPE or EPIPE (for the source) or end (for the destination).
// If it does not use it, it will continue running.
// This allows for subprocesses to gracefully exit and lower the coupling between subprocesses.
export const pipeSubprocessStream = (sourceStream, destinationStream, maxListenersController) => {
	const mergedStream = MERGED_STREAMS.has(destinationStream)
		? pipeMoreSubprocessStream(sourceStream, destinationStream)
		: pipeFirstSubprocessStream(sourceStream, destinationStream);
	incrementMaxListeners(sourceStream, SOURCE_LISTENERS_PER_PIPE, maxListenersController.signal);
	incrementMaxListeners(destinationStream, DESTINATION_LISTENERS_PER_PIPE, maxListenersController.signal);
	cleanupMergedStreamsMap(destinationStream);
	return mergedStream;
};

// We use `merge-streams` to allow for multiple sources to pipe to the same destination.
const pipeFirstSubprocessStream = (sourceStream, destinationStream) => {
	const mergedStream = mergeStreams([sourceStream]);
	pipeStreams(mergedStream, destinationStream);
	MERGED_STREAMS.set(destinationStream, mergedStream);
	return mergedStream;
};

const pipeMoreSubprocessStream = (sourceStream, destinationStream) => {
	const mergedStream = MERGED_STREAMS.get(destinationStream);
	mergedStream.add(sourceStream);
	return mergedStream;
};

const cleanupMergedStreamsMap = async destinationStream => {
	try {
		await finished(destinationStream, {cleanup: true, readable: false, writable: true});
	} catch {}

	MERGED_STREAMS.delete(destinationStream);
};

const MERGED_STREAMS = new WeakMap();

// Number of listeners set up on `sourceStream` by each `sourceStream.pipe(destinationStream)`
// Those are added by `merge-streams`
const SOURCE_LISTENERS_PER_PIPE = 2;
// Number of listeners set up on `destinationStream` by each `sourceStream.pipe(destinationStream)`
// Those are added by `finished()` in `cleanupMergedStreamsMap()`
const DESTINATION_LISTENERS_PER_PIPE = 1;
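
A sketch of the merged-stream behavior above: several sources can pipe into the same destination, and their output is merged into its `stdin` (illustrative commands):

import {execa} from 'execa';

const destination = execa('cat');
const [result] = await Promise.all([
	execa('echo', ['first']).pipe(destination),
	execa('echo', ['second']).pipe(destination),
]);
// Each pipe promise resolves with the destination's result; its stdout
// contains the lines from both sources (arrival order is not guaranteed).
console.log(result.stdout);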

node_modules/execa/lib/pipe/throw.js (+58, generated, vendored, normal file)
@@ -0,0 +1,58 @@
import {makeEarlyError} from '../return/result.js';
import {abortSourceStream, endDestinationStream} from '../io/pipeline.js';

// When passing invalid arguments to `source.pipe()`, throw asynchronously.
// We also abort both subprocesses.
export const handlePipeArgumentsError = ({
	sourceStream,
	sourceError,
	destinationStream,
	destinationError,
	fileDescriptors,
	sourceOptions,
	startTime,
}) => {
	const error = getPipeArgumentsError({
		sourceStream,
		sourceError,
		destinationStream,
		destinationError,
	});
	if (error !== undefined) {
		throw createNonCommandError({
			error,
			fileDescriptors,
			sourceOptions,
			startTime,
		});
	}
};

const getPipeArgumentsError = ({sourceStream, sourceError, destinationStream, destinationError}) => {
	if (sourceError !== undefined && destinationError !== undefined) {
		return destinationError;
	}

	if (destinationError !== undefined) {
		abortSourceStream(sourceStream);
		return destinationError;
	}

	if (sourceError !== undefined) {
		endDestinationStream(destinationStream);
		return sourceError;
	}
};

// Specific error return value when passing invalid arguments to `subprocess.pipe()` or when using `unpipeSignal`
export const createNonCommandError = ({error, fileDescriptors, sourceOptions, startTime}) => makeEarlyError({
	error,
	command: PIPE_COMMAND_MESSAGE,
	escapedCommand: PIPE_COMMAND_MESSAGE,
	fileDescriptors,
	options: sourceOptions,
	startTime,
	isSync: false,
});

const PIPE_COMMAND_MESSAGE = 'source.pipe(destination)';
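
A sketch of the asynchronous-throw behavior above: invalid `.pipe()` arguments reject the returned promise instead of throwing synchronously (the `42` destination is deliberately invalid; the exact error shape comes from `makeEarlyError()`, which is outside this diff):

import {execa} from 'execa';

try {
	// Not a valid destination: the promise rejects and the source stream
	// is aborted via `abortSourceStream()`.
	await execa('echo', ['hi']).pipe(42);
} catch (error) {
	// `command` is set to the PIPE_COMMAND_MESSAGE placeholder above.
	console.error(error.command, error.message);
}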