Delete node_modules

node_modules/execa/lib/io/contents.js (116 lines deleted; generated, vendored)
@@ -1,116 +0,0 @@
import {setImmediate} from 'node:timers/promises';
import getStream, {getStreamAsArrayBuffer, getStreamAsArray} from 'get-stream';
import {isArrayBuffer} from '../utils/uint-array.js';
import {shouldLogOutput, logLines} from '../verbose/output.js';
import {iterateForResult} from './iterate.js';
import {handleMaxBuffer} from './max-buffer.js';
import {getStripFinalNewline} from './strip-newline.js';

// Retrieve `result.stdout|stderr|all|stdio[*]`
export const getStreamOutput = async ({stream, onStreamEnd, fdNumber, encoding, buffer, maxBuffer, lines, allMixed, stripFinalNewline, verboseInfo, streamInfo}) => {
	const logPromise = logOutputAsync({
		stream,
		onStreamEnd,
		fdNumber,
		encoding,
		allMixed,
		verboseInfo,
		streamInfo,
	});

	if (!buffer) {
		await Promise.all([resumeStream(stream), logPromise]);
		return;
	}

	const stripFinalNewlineValue = getStripFinalNewline(stripFinalNewline, fdNumber);
	const iterable = iterateForResult({
		stream,
		onStreamEnd,
		lines,
		encoding,
		stripFinalNewline: stripFinalNewlineValue,
		allMixed,
	});
	const [output] = await Promise.all([
		getStreamContents({
			stream,
			iterable,
			fdNumber,
			encoding,
			maxBuffer,
			lines,
		}),
		logPromise,
	]);
	return output;
};

const logOutputAsync = async ({stream, onStreamEnd, fdNumber, encoding, allMixed, verboseInfo, streamInfo: {fileDescriptors}}) => {
	if (!shouldLogOutput({
		stdioItems: fileDescriptors[fdNumber]?.stdioItems,
		encoding,
		verboseInfo,
		fdNumber,
	})) {
		return;
	}

	const linesIterable = iterateForResult({
		stream,
		onStreamEnd,
		lines: true,
		encoding,
		stripFinalNewline: true,
		allMixed,
	});
	await logLines(linesIterable, stream, fdNumber, verboseInfo);
};

// When using `buffer: false`, users need to read `subprocess.stdout|stderr|all` right away
// See https://github.com/sindresorhus/execa/issues/730 and https://github.com/sindresorhus/execa/pull/729#discussion_r1465496310
const resumeStream = async stream => {
	await setImmediate();
	if (stream.readableFlowing === null) {
		stream.resume();
	}
};

const getStreamContents = async ({stream, stream: {readableObjectMode}, iterable, fdNumber, encoding, maxBuffer, lines}) => {
	try {
		if (readableObjectMode || lines) {
			return await getStreamAsArray(iterable, {maxBuffer});
		}

		if (encoding === 'buffer') {
			return new Uint8Array(await getStreamAsArrayBuffer(iterable, {maxBuffer}));
		}

		return await getStream(iterable, {maxBuffer});
	} catch (error) {
		return handleBufferedData(handleMaxBuffer({
			error,
			stream,
			readableObjectMode,
			lines,
			encoding,
			fdNumber,
		}));
	}
};

// On failure, `result.stdout|stderr|all` should contain the currently buffered stream
// They are automatically closed and flushed by Node.js when the subprocess exits
// When `buffer` is `false`, `streamPromise` is `undefined` and there is no buffered data to retrieve
export const getBufferedData = async streamPromise => {
	try {
		return await streamPromise;
	} catch (error) {
		return handleBufferedData(error);
	}
};

// Ensure we are returning Uint8Arrays when using `encoding: 'buffer'`
const handleBufferedData = ({bufferedData}) => isArrayBuffer(bufferedData)
	? new Uint8Array(bufferedData)
	: bufferedData;
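
The `resumeStream()` workaround above matters from the caller's side: with `buffer: false`, execa buffers nothing, so the consumer must read `subprocess.stdout|stderr` itself. A minimal usage sketch (assumes execa v9's public API; the command is just a placeholder):

import {execa} from 'execa';

// With `buffer: false`, `result.stdout` is undefined; read the stream directly.
const subprocess = execa('ls', ['-l'], {buffer: false});
subprocess.stdout.pipe(process.stdout);
await subprocess;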

node_modules/execa/lib/io/input-sync.js (44 lines deleted; generated, vendored)
@@ -1,44 +0,0 @@
import {runGeneratorsSync} from '../transform/generator.js';
import {joinToUint8Array, isUint8Array} from '../utils/uint-array.js';
import {TYPE_TO_MESSAGE} from '../stdio/type.js';

// Apply `stdin`/`input`/`inputFile` options, before spawning, in sync mode, by converting it to the `input` option
export const addInputOptionsSync = (fileDescriptors, options) => {
	for (const fdNumber of getInputFdNumbers(fileDescriptors)) {
		addInputOptionSync(fileDescriptors, fdNumber, options);
	}
};

const getInputFdNumbers = fileDescriptors => new Set(Object.entries(fileDescriptors)
	.filter(([, {direction}]) => direction === 'input')
	.map(([fdNumber]) => Number(fdNumber)));

const addInputOptionSync = (fileDescriptors, fdNumber, options) => {
	const {stdioItems} = fileDescriptors[fdNumber];
	const allStdioItems = stdioItems.filter(({contents}) => contents !== undefined);
	if (allStdioItems.length === 0) {
		return;
	}

	if (fdNumber !== 0) {
		const [{type, optionName}] = allStdioItems;
		throw new TypeError(`Only the \`stdin\` option, not \`${optionName}\`, can be ${TYPE_TO_MESSAGE[type]} with synchronous methods.`);
	}

	const allContents = allStdioItems.map(({contents}) => contents);
	const transformedContents = allContents.map(contents => applySingleInputGeneratorsSync(contents, stdioItems));
	options.input = joinToUint8Array(transformedContents);
};

const applySingleInputGeneratorsSync = (contents, stdioItems) => {
	const newContents = runGeneratorsSync(contents, stdioItems, 'utf8', true);
	validateSerializable(newContents);
	return joinToUint8Array(newContents);
};

const validateSerializable = newContents => {
	const invalidItem = newContents.find(item => typeof item !== 'string' && !isUint8Array(item));
	if (invalidItem !== undefined) {
		throw new TypeError(`The \`stdin\` option is invalid: when passing objects as input, a transform must be used to serialize them to strings or Uint8Arrays: ${invalidItem}.`);
	}
};
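
The `fdNumber !== 0` check above is why synchronous methods accept input only on `stdin`. A short sketch of the resulting behavior (assumes execa v9's public API):

import {execaSync} from 'execa';

// `input` is converted to a single Uint8Array and written to stdin before spawning.
const {stdout} = execaSync('cat', {input: 'hello\nworld'});
console.log(stdout); // 'hello\nworld'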

node_modules/execa/lib/io/iterate.js (110 lines deleted; generated, vendored)
@@ -1,110 +0,0 @@
import {on} from 'node:events';
import {getDefaultHighWaterMark} from 'node:stream';
import {getEncodingTransformGenerator} from '../transform/encoding-transform.js';
import {getSplitLinesGenerator} from '../transform/split.js';
import {transformChunkSync, finalChunksSync} from '../transform/run-sync.js';

// Iterate over lines of `subprocess.stdout`, used by `subprocess.readable|duplex|iterable()`
export const iterateOnSubprocessStream = ({subprocessStdout, subprocess, binary, shouldEncode, encoding, preserveNewlines}) => {
	const controller = new AbortController();
	stopReadingOnExit(subprocess, controller);
	return iterateOnStream({
		stream: subprocessStdout,
		controller,
		binary,
		shouldEncode: !subprocessStdout.readableObjectMode && shouldEncode,
		encoding,
		shouldSplit: !subprocessStdout.readableObjectMode,
		preserveNewlines,
	});
};

const stopReadingOnExit = async (subprocess, controller) => {
	try {
		await subprocess;
	} catch {} finally {
		controller.abort();
	}
};

// Iterate over lines of `subprocess.stdout`, used by `result.stdout` and the `verbose: 'full'` option.
// Applies the `lines` and `encoding` options.
export const iterateForResult = ({stream, onStreamEnd, lines, encoding, stripFinalNewline, allMixed}) => {
	const controller = new AbortController();
	stopReadingOnStreamEnd(onStreamEnd, controller, stream);
	const objectMode = stream.readableObjectMode && !allMixed;
	return iterateOnStream({
		stream,
		controller,
		binary: encoding === 'buffer',
		shouldEncode: !objectMode,
		encoding,
		shouldSplit: !objectMode && lines,
		preserveNewlines: !stripFinalNewline,
	});
};

const stopReadingOnStreamEnd = async (onStreamEnd, controller, stream) => {
	try {
		await onStreamEnd;
	} catch {
		stream.destroy();
	} finally {
		controller.abort();
	}
};

const iterateOnStream = ({stream, controller, binary, shouldEncode, encoding, shouldSplit, preserveNewlines}) => {
	const onStdoutChunk = on(stream, 'data', {
		signal: controller.signal,
		highWaterMark: HIGH_WATER_MARK,
		// Backward compatibility with older name for this option
		// See https://github.com/nodejs/node/pull/52080#discussion_r1525227861
		// @todo Remove after removing support for Node 21
		highWatermark: HIGH_WATER_MARK,
	});
	return iterateOnData({
		onStdoutChunk,
		controller,
		binary,
		shouldEncode,
		encoding,
		shouldSplit,
		preserveNewlines,
	});
};

export const DEFAULT_OBJECT_HIGH_WATER_MARK = getDefaultHighWaterMark(true);

// The `highWaterMark` of `events.on()` is measured in number of events, not in bytes.
// Not knowing the average amount of bytes per `data` event, we use the same heuristic as streams in objectMode, since they have the same issue.
// Therefore, we use the value of `getDefaultHighWaterMark(true)`.
// Note: this option does not exist on Node 18, but this is ok since the logic works without it. It just consumes more memory.
const HIGH_WATER_MARK = DEFAULT_OBJECT_HIGH_WATER_MARK;

const iterateOnData = async function * ({onStdoutChunk, controller, binary, shouldEncode, encoding, shouldSplit, preserveNewlines}) {
	const generators = getGenerators({
		binary,
		shouldEncode,
		encoding,
		shouldSplit,
		preserveNewlines,
	});

	try {
		for await (const [chunk] of onStdoutChunk) {
			yield * transformChunkSync(chunk, generators, 0);
		}
	} catch (error) {
		if (!controller.signal.aborted) {
			throw error;
		}
	} finally {
		yield * finalChunksSync(generators);
	}
};

const getGenerators = ({binary, shouldEncode, encoding, shouldSplit, preserveNewlines}) => [
	getEncodingTransformGenerator(binary, encoding, !shouldEncode),
	getSplitLinesGenerator(binary, preserveNewlines, !shouldSplit, {}),
].filter(Boolean);
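
`iterateOnSubprocessStream()` is the backing for the public line-iteration API. A sketch of how it surfaces to callers (assumes execa v9, where the subprocess itself is async-iterable over stdout lines):

import {execa} from 'execa';

// Yields one line at a time; the newline is stripped from each line.
for await (const line of execa('ls', ['-l'])) {
	console.log(line);
}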

node_modules/execa/lib/io/max-buffer.js (89 lines deleted; generated, vendored)
@@ -1,89 +0,0 @@
import {MaxBufferError} from 'get-stream';
import {getStreamName} from '../utils/standard-stream.js';
import {getFdSpecificValue} from '../arguments/specific.js';

// When the `maxBuffer` option is hit, a MaxBufferError is thrown.
// The stream is aborted, then specific information is kept for the error message.
export const handleMaxBuffer = ({error, stream, readableObjectMode, lines, encoding, fdNumber}) => {
	if (!(error instanceof MaxBufferError)) {
		throw error;
	}

	if (fdNumber === 'all') {
		return error;
	}

	const unit = getMaxBufferUnit(readableObjectMode, lines, encoding);
	error.maxBufferInfo = {fdNumber, unit};
	stream.destroy();
	throw error;
};

const getMaxBufferUnit = (readableObjectMode, lines, encoding) => {
	if (readableObjectMode) {
		return 'objects';
	}

	if (lines) {
		return 'lines';
	}

	if (encoding === 'buffer') {
		return 'bytes';
	}

	return 'characters';
};

// Check the `maxBuffer` option with `result.ipcOutput`
export const checkIpcMaxBuffer = (subprocess, ipcOutput, maxBuffer) => {
	if (ipcOutput.length !== maxBuffer) {
		return;
	}

	const error = new MaxBufferError();
	error.maxBufferInfo = {fdNumber: 'ipc'};
	throw error;
};

// Error message when `maxBuffer` is hit
export const getMaxBufferMessage = (error, maxBuffer) => {
	const {streamName, threshold, unit} = getMaxBufferInfo(error, maxBuffer);
	return `Command's ${streamName} was larger than ${threshold} ${unit}`;
};

const getMaxBufferInfo = (error, maxBuffer) => {
	if (error?.maxBufferInfo === undefined) {
		return {streamName: 'output', threshold: maxBuffer[1], unit: 'bytes'};
	}

	const {maxBufferInfo: {fdNumber, unit}} = error;
	delete error.maxBufferInfo;

	const threshold = getFdSpecificValue(maxBuffer, fdNumber);
	if (fdNumber === 'ipc') {
		return {streamName: 'IPC output', threshold, unit: 'messages'};
	}

	return {streamName: getStreamName(fdNumber), threshold, unit};
};

// The only way to apply `maxBuffer` with `spawnSync()` is to use the native `maxBuffer` option Node.js provides.
// However, this has multiple limitations, and cannot behave the exact same way as the async behavior.
// When the `maxBuffer` is hit, a `ENOBUFS` error is thrown.
export const isMaxBufferSync = (resultError, output, maxBuffer) => resultError?.code === 'ENOBUFS'
	&& output !== null
	&& output.some(result => result !== null && result.length > getMaxBufferSync(maxBuffer));

// When `maxBuffer` is hit, ensure the result is truncated
export const truncateMaxBufferSync = (result, isMaxBuffer, maxBuffer) => {
	if (!isMaxBuffer) {
		return result;
	}

	const maxBufferValue = getMaxBufferSync(maxBuffer);
	return result.length > maxBufferValue ? result.slice(0, maxBufferValue) : result;
};

// `spawnSync()` does not allow differentiating `maxBuffer` per file descriptor, so we always use `stdout`
export const getMaxBufferSync = ([, stdoutMaxBuffer]) => stdoutMaxBuffer;
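
The message assembled by `getMaxBufferMessage()` is what users see when the limit is hit. A hedged sketch of the caller-side behavior (assumes execa v9's public API; the command and threshold are illustrative):

import {execa} from 'execa';

try {
	await execa('node', ['-e', 'console.log("x".repeat(1e6))'], {maxBuffer: 1000});
} catch (error) {
	// The message reads like: "Command's stdout was larger than 1000 characters"
	console.error(error.message);
	// The buffered output is still available on the error, truncated near the limit.
	console.error(error.stdout.length);
}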

node_modules/execa/lib/io/output-async.js (80 lines deleted; generated, vendored)
@@ -1,80 +0,0 @@
import mergeStreams from '@sindresorhus/merge-streams';
import {isStandardStream} from '../utils/standard-stream.js';
import {incrementMaxListeners} from '../utils/max-listeners.js';
import {TRANSFORM_TYPES} from '../stdio/type.js';
import {pipeStreams} from './pipeline.js';

// Handle `input`, `inputFile`, `stdin`, `stdout` and `stderr` options, after spawning, in async mode
// When multiple input streams are used, we merge them to ensure the output stream ends only once each input stream has ended
export const pipeOutputAsync = (subprocess, fileDescriptors, controller) => {
	const pipeGroups = new Map();

	for (const [fdNumber, {stdioItems, direction}] of Object.entries(fileDescriptors)) {
		for (const {stream} of stdioItems.filter(({type}) => TRANSFORM_TYPES.has(type))) {
			pipeTransform(subprocess, stream, direction, fdNumber);
		}

		for (const {stream} of stdioItems.filter(({type}) => !TRANSFORM_TYPES.has(type))) {
			pipeStdioItem({
				subprocess,
				stream,
				direction,
				fdNumber,
				pipeGroups,
				controller,
			});
		}
	}

	for (const [outputStream, inputStreams] of pipeGroups.entries()) {
		const inputStream = inputStreams.length === 1 ? inputStreams[0] : mergeStreams(inputStreams);
		pipeStreams(inputStream, outputStream);
	}
};

// When using transforms, `subprocess.stdin|stdout|stderr|stdio` is directly mutated
const pipeTransform = (subprocess, stream, direction, fdNumber) => {
	if (direction === 'output') {
		pipeStreams(subprocess.stdio[fdNumber], stream);
	} else {
		pipeStreams(stream, subprocess.stdio[fdNumber]);
	}

	const streamProperty = SUBPROCESS_STREAM_PROPERTIES[fdNumber];
	if (streamProperty !== undefined) {
		subprocess[streamProperty] = stream;
	}

	subprocess.stdio[fdNumber] = stream;
};

const SUBPROCESS_STREAM_PROPERTIES = ['stdin', 'stdout', 'stderr'];

// Most `std*` option values involve piping `subprocess.std*` to a stream.
// The stream is either passed by the user or created internally.
const pipeStdioItem = ({subprocess, stream, direction, fdNumber, pipeGroups, controller}) => {
	if (stream === undefined) {
		return;
	}

	setStandardStreamMaxListeners(stream, controller);

	const [inputStream, outputStream] = direction === 'output'
		? [stream, subprocess.stdio[fdNumber]]
		: [subprocess.stdio[fdNumber], stream];
	const outputStreams = pipeGroups.get(inputStream) ?? [];
	pipeGroups.set(inputStream, [...outputStreams, outputStream]);
};

// Multiple subprocesses might be piping from/to `process.std*` at the same time.
// This is not necessarily an error and should not print a `maxListeners` warning.
const setStandardStreamMaxListeners = (stream, {signal}) => {
	if (isStandardStream(stream)) {
		incrementMaxListeners(stream, MAX_LISTENERS_INCREMENT, signal);
	}
};

// `source.pipe(destination)` adds at most 1 listener for each event.
// If `stdin` option is an array, the values might be combined with `merge-streams`.
// That library also listens for `source` end, which adds 1 more listener.
const MAX_LISTENERS_INCREMENT = 2;
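
`pipeOutputAsync()` is what lets one `std*` option carry several targets at once: each target becomes a pipe, and multiple inputs to one output stream are merged. A sketch of the option shape this supports (assumes execa v9's public API; the log path is illustrative):

import {createWriteStream} from 'node:fs';
import {execa} from 'execa';

// stdout is both buffered ('pipe') and copied into a user-provided stream.
const logStream = createWriteStream('/tmp/out.log');
const {stdout} = await execa('ls', ['-l'], {stdout: ['pipe', logStream]});
console.log(stdout);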

node_modules/execa/lib/io/output-sync.js (135 lines deleted; generated, vendored)
@@ -1,135 +0,0 @@
import {writeFileSync, appendFileSync} from 'node:fs';
import {shouldLogOutput, logLinesSync} from '../verbose/output.js';
import {runGeneratorsSync} from '../transform/generator.js';
import {splitLinesSync} from '../transform/split.js';
import {joinToString, joinToUint8Array, bufferToUint8Array} from '../utils/uint-array.js';
import {FILE_TYPES} from '../stdio/type.js';
import {truncateMaxBufferSync} from './max-buffer.js';

// Apply `stdout`/`stderr` options, after spawning, in sync mode
export const transformOutputSync = ({fileDescriptors, syncResult: {output}, options, isMaxBuffer, verboseInfo}) => {
	if (output === null) {
		return {output: Array.from({length: 3})};
	}

	const state = {};
	const outputFiles = new Set([]);
	const transformedOutput = output.map((result, fdNumber) =>
		transformOutputResultSync({
			result,
			fileDescriptors,
			fdNumber,
			state,
			outputFiles,
			isMaxBuffer,
			verboseInfo,
		}, options));
	return {output: transformedOutput, ...state};
};

const transformOutputResultSync = (
	{result, fileDescriptors, fdNumber, state, outputFiles, isMaxBuffer, verboseInfo},
	{buffer, encoding, lines, stripFinalNewline, maxBuffer},
) => {
	if (result === null) {
		return;
	}

	const truncatedResult = truncateMaxBufferSync(result, isMaxBuffer, maxBuffer);
	const uint8ArrayResult = bufferToUint8Array(truncatedResult);
	const {stdioItems, objectMode} = fileDescriptors[fdNumber];
	const chunks = runOutputGeneratorsSync([uint8ArrayResult], stdioItems, encoding, state);
	const {serializedResult, finalResult = serializedResult} = serializeChunks({
		chunks,
		objectMode,
		encoding,
		lines,
		stripFinalNewline,
		fdNumber,
	});

	logOutputSync({
		serializedResult,
		fdNumber,
		state,
		verboseInfo,
		encoding,
		stdioItems,
		objectMode,
	});

	const returnedResult = buffer[fdNumber] ? finalResult : undefined;

	try {
		if (state.error === undefined) {
			writeToFiles(serializedResult, stdioItems, outputFiles);
		}

		return returnedResult;
	} catch (error) {
		state.error = error;
		return returnedResult;
	}
};

// Applies transform generators to `stdout`/`stderr`
const runOutputGeneratorsSync = (chunks, stdioItems, encoding, state) => {
	try {
		return runGeneratorsSync(chunks, stdioItems, encoding, false);
	} catch (error) {
		state.error = error;
		return chunks;
	}
};

// The contents is converted to three stages:
//  - serializedResult: used when the target is a file path/URL or a file descriptor (including 'inherit')
//  - finalResult/returnedResult: returned as `result.std*`
const serializeChunks = ({chunks, objectMode, encoding, lines, stripFinalNewline, fdNumber}) => {
	if (objectMode) {
		return {serializedResult: chunks};
	}

	if (encoding === 'buffer') {
		return {serializedResult: joinToUint8Array(chunks)};
	}

	const serializedResult = joinToString(chunks, encoding);
	if (lines[fdNumber]) {
		return {serializedResult, finalResult: splitLinesSync(serializedResult, !stripFinalNewline[fdNumber], objectMode)};
	}

	return {serializedResult};
};

const logOutputSync = ({serializedResult, fdNumber, state, verboseInfo, encoding, stdioItems, objectMode}) => {
	if (!shouldLogOutput({
		stdioItems,
		encoding,
		verboseInfo,
		fdNumber,
	})) {
		return;
	}

	const linesArray = splitLinesSync(serializedResult, false, objectMode);

	try {
		logLinesSync(linesArray, fdNumber, verboseInfo);
	} catch (error) {
		state.error ??= error;
	}
};

// When the `std*` target is a file path/URL or a file descriptor
const writeToFiles = (serializedResult, stdioItems, outputFiles) => {
	for (const {path, append} of stdioItems.filter(({type}) => FILE_TYPES.has(type))) {
		const pathString = typeof path === 'string' ? path : path.toString();
		if (append || outputFiles.has(pathString)) {
			appendFileSync(path, serializedResult);
		} else {
			outputFiles.add(pathString);
			writeFileSync(path, serializedResult);
		}
	}
};
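
`writeToFiles()` implies a sync-mode subtlety: file targets are written only after the subprocess exits, with the first write to a given path truncating it and later writes within the same call appending. A sketch of the file-target option (assumes execa v9's public API; the path is illustrative):

import {execaSync} from 'execa';

// The file is written with writeFileSync() after the subprocess exits,
// not streamed while it runs.
execaSync('ls', ['-l'], {stdout: {file: '/tmp/ls.txt'}});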

node_modules/execa/lib/io/pipeline.js (48 lines deleted; generated, vendored)
@@ -1,48 +0,0 @@
import {finished} from 'node:stream/promises';
import {isStandardStream} from '../utils/standard-stream.js';

// Similar to `Stream.pipeline(source, destination)`, but does not destroy standard streams
export const pipeStreams = (source, destination) => {
	source.pipe(destination);
	onSourceFinish(source, destination);
	onDestinationFinish(source, destination);
};

// `source.pipe(destination)` makes `destination` end when `source` ends.
// But it does not propagate aborts or errors. This function does it.
const onSourceFinish = async (source, destination) => {
	if (isStandardStream(source) || isStandardStream(destination)) {
		return;
	}

	try {
		await finished(source, {cleanup: true, readable: true, writable: false});
	} catch {}

	endDestinationStream(destination);
};

export const endDestinationStream = destination => {
	if (destination.writable) {
		destination.end();
	}
};

// We do the same thing in the other direction as well.
const onDestinationFinish = async (source, destination) => {
	if (isStandardStream(source) || isStandardStream(destination)) {
		return;
	}

	try {
		await finished(destination, {cleanup: true, readable: false, writable: true});
	} catch {}

	abortSourceStream(source);
};

export const abortSourceStream = source => {
	if (source.readable) {
		source.destroy();
	}
};
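
The two `finished()` watchers exist because bare `source.pipe(destination)` ends the destination on a clean source end, but propagates neither errors nor aborts. A standalone sketch of the same technique with plain Node streams (not execa's exported API):

import {finished} from 'node:stream/promises';

// Propagate termination in both directions, mirroring pipeStreams() above.
const pipeWithCleanup = (source, destination) => {
	source.pipe(destination);
	finished(source, {cleanup: true, readable: true, writable: false})
		.catch(() => {})
		.then(() => {
			if (destination.writable) {
				destination.end();
			}
		});
	finished(destination, {cleanup: true, readable: false, writable: true})
		.catch(() => {})
		.then(() => {
			if (source.readable) {
				source.destroy();
			}
		});
};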

node_modules/execa/lib/io/strip-newline.js (12 lines deleted; generated, vendored)
@@ -1,12 +0,0 @@
import stripFinalNewlineFunction from 'strip-final-newline';

// Apply `stripFinalNewline` option, which applies to `result.stdout|stderr|all|stdio[*]`.
// If the `lines` option is used, it is applied on each line, but using a different function.
export const stripNewline = (value, {stripFinalNewline}, fdNumber) => getStripFinalNewline(stripFinalNewline, fdNumber) && value !== undefined && !Array.isArray(value)
	? stripFinalNewlineFunction(value)
	: value;

// Retrieve `stripFinalNewline` option value, including with `subprocess.all`
export const getStripFinalNewline = (stripFinalNewline, fdNumber) => fdNumber === 'all'
	? stripFinalNewline[1] || stripFinalNewline[2]
	: stripFinalNewline[fdNumber];
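
A short sketch of the option this module implements (assumes execa v9's public API):

import {execa} from 'execa';

const {stdout} = await execa('echo', ['hi']);
console.log(JSON.stringify(stdout)); // "hi" — final newline stripped by default

const raw = await execa('echo', ['hi'], {stripFinalNewline: false});
console.log(JSON.stringify(raw.stdout)); // "hi\n"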