Delete node_modules

2025-06-22 17:27:35 +08:00
parent ebcd00ed99
commit 912d2d4a12
3852 changed files with 0 additions and 1061355 deletions

View File: node_modules/get-stream/source/array-buffer.js

@@ -1,84 +0,0 @@
import {getStreamContents} from './contents.js';
import {noop, throwObjectStream, getLengthProperty} from './utils.js';

export async function getStreamAsArrayBuffer(stream, options) {
	return getStreamContents(stream, arrayBufferMethods, options);
}

const initArrayBuffer = () => ({contents: new ArrayBuffer(0)});

const useTextEncoder = chunk => textEncoder.encode(chunk);
const textEncoder = new TextEncoder();

const useUint8Array = chunk => new Uint8Array(chunk);

const useUint8ArrayWithOffset = chunk => new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);

const truncateArrayBufferChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);

// `contents` is an increasingly growing `Uint8Array`.
const addArrayBufferChunk = (convertedChunk, {contents, length: previousLength}, length) => {
	const newContents = hasArrayBufferResize() ? resizeArrayBuffer(contents, length) : resizeArrayBufferSlow(contents, length);
	new Uint8Array(newContents).set(convertedChunk, previousLength);
	return newContents;
};

// Without `ArrayBuffer.resize()`, `contents` size is always a power of 2.
// This means its last bytes are zeroes (not stream data), which need to be
// trimmed at the end with `ArrayBuffer.slice()`.
const resizeArrayBufferSlow = (contents, length) => {
	if (length <= contents.byteLength) {
		return contents;
	}

	const arrayBuffer = new ArrayBuffer(getNewContentsLength(length));
	new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
	return arrayBuffer;
};

// With `ArrayBuffer.resize()`, `contents` size matches exactly the size of
// the stream data. It does not include extraneous zeroes to trim at the end.
// The underlying `ArrayBuffer` does allocate a number of bytes that is a power
// of 2, but those bytes are only visible after calling `ArrayBuffer.resize()`.
const resizeArrayBuffer = (contents, length) => {
	if (length <= contents.maxByteLength) {
		contents.resize(length);
		return contents;
	}

	const arrayBuffer = new ArrayBuffer(length, {maxByteLength: getNewContentsLength(length)});
	new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
	return arrayBuffer;
};

// Retrieve the closest `length` that is both >= and a power of 2
const getNewContentsLength = length => SCALE_FACTOR ** Math.ceil(Math.log(length) / Math.log(SCALE_FACTOR));

const SCALE_FACTOR = 2;

const finalizeArrayBuffer = ({contents, length}) => hasArrayBufferResize() ? contents : contents.slice(0, length);

// `ArrayBuffer.slice()` is slow. When `ArrayBuffer.resize()` is available
// (Node >=20.0.0, Safari >=16.4 and Chrome), we can use it instead.
// eslint-disable-next-line no-warning-comments
// TODO: remove after dropping support for Node 20.
// eslint-disable-next-line no-warning-comments
// TODO: use `ArrayBuffer.transferToFixedLength()` instead once it is available
const hasArrayBufferResize = () => 'resize' in ArrayBuffer.prototype;

const arrayBufferMethods = {
	init: initArrayBuffer,
	convertChunk: {
		string: useTextEncoder,
		buffer: useUint8Array,
		arrayBuffer: useUint8Array,
		dataView: useUint8ArrayWithOffset,
		typedArray: useUint8ArrayWithOffset,
		others: throwObjectStream,
	},
	getSize: getLengthProperty,
	truncateChunk: truncateArrayBufferChunk,
	addChunk: addArrayBufferChunk,
	getFinalChunk: noop,
	finalize: finalizeArrayBuffer,
};

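A quick check of the growth policy above (hypothetical snippet, not part of the deleted file): rounding every requested size up to the next power of 2 means a stream of n bytes triggers only O(log n) reallocations, keeping appends amortized O(1).

// Equivalent to getNewContentsLength above, with SCALE_FACTOR = 2 inlined.
const nextPowerOf2 = length => 2 ** Math.ceil(Math.log2(length));

console.log(nextPowerOf2(100)); // 128
console.log(nextPowerOf2(1024)); // 1024 (already a power of 2)
console.log(nextPowerOf2(1025)); // 2048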
View File: node_modules/get-stream/source/array.js

@@ -1,32 +0,0 @@
import {getStreamContents} from './contents.js';
import {identity, noop, getContentsProperty} from './utils.js';

export async function getStreamAsArray(stream, options) {
	return getStreamContents(stream, arrayMethods, options);
}

const initArray = () => ({contents: []});

const increment = () => 1;

const addArrayChunk = (convertedChunk, {contents}) => {
	contents.push(convertedChunk);
	return contents;
};

const arrayMethods = {
	init: initArray,
	convertChunk: {
		string: identity,
		buffer: identity,
		arrayBuffer: identity,
		dataView: identity,
		typedArray: identity,
		others: identity,
	},
	getSize: increment,
	truncateChunk: noop,
	addChunk: addArrayChunk,
	getFinalChunk: noop,
	finalize: getContentsProperty,
};

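Since every chunk type maps to `identity` and `getSize` always returns 1, `maxBuffer` counts items rather than bytes here, and object-mode streams work. A minimal usage sketch (assumed, not part of the commit):

import {Readable} from 'node:stream';
import {getStreamAsArray} from 'get-stream';

// Readable.from() yields an object-mode stream over any iterable.
const stream = Readable.from([{id: 1}, {id: 2}]);

console.log(await getStreamAsArray(stream)); // [{id: 1}, {id: 2}]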
View File: node_modules/get-stream/source/buffer.js

@@ -1,19 +0,0 @@
import {getStreamAsArrayBuffer} from './array-buffer.js';

export async function getStreamAsBuffer(stream, options) {
	if (!('Buffer' in globalThis)) {
		throw new Error('getStreamAsBuffer() is only supported in Node.js');
	}

	try {
		return arrayBufferToNodeBuffer(await getStreamAsArrayBuffer(stream, options));
	} catch (error) {
		if (error.bufferedData !== undefined) {
			error.bufferedData = arrayBufferToNodeBuffer(error.bufferedData);
		}

		throw error;
	}
}

const arrayBufferToNodeBuffer = arrayBuffer => globalThis.Buffer.from(arrayBuffer);

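The `catch` block above re-wraps `error.bufferedData` so callers get a `Buffer` even on failure. A hedged sketch of consuming that (the file name and limit are illustrative):

import fs from 'node:fs';
import {getStreamAsBuffer, MaxBufferError} from 'get-stream';

const stream = fs.createReadStream('unicorn.png');

try {
	console.log(await getStreamAsBuffer(stream, {maxBuffer: 1024}));
} catch (error) {
	if (error instanceof MaxBufferError) {
		// The data read before the limit was hit, truncated to maxBuffer bytes.
		console.log(error.bufferedData.length); // 1024, if the file is larger
	}
}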
View File: node_modules/get-stream/source/contents.js

@@ -1,121 +0,0 @@
import {getAsyncIterable} from './stream.js';

export const getStreamContents = async (stream, {init, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, finalize}, {maxBuffer = Number.POSITIVE_INFINITY} = {}) => {
	const asyncIterable = getAsyncIterable(stream);

	const state = init();
	state.length = 0;

	try {
		for await (const chunk of asyncIterable) {
			const chunkType = getChunkType(chunk);
			const convertedChunk = convertChunk[chunkType](chunk, state);
			appendChunk({
				convertedChunk,
				state,
				getSize,
				truncateChunk,
				addChunk,
				maxBuffer,
			});
		}

		appendFinalChunk({
			state,
			convertChunk,
			getSize,
			truncateChunk,
			addChunk,
			getFinalChunk,
			maxBuffer,
		});
		return finalize(state);
	} catch (error) {
		const normalizedError = typeof error === 'object' && error !== null ? error : new Error(error);
		normalizedError.bufferedData = finalize(state);
		throw normalizedError;
	}
};

const appendFinalChunk = ({state, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer}) => {
	const convertedChunk = getFinalChunk(state);
	if (convertedChunk !== undefined) {
		appendChunk({
			convertedChunk,
			state,
			getSize,
			truncateChunk,
			addChunk,
			maxBuffer,
		});
	}
};

const appendChunk = ({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer}) => {
	const chunkSize = getSize(convertedChunk);
	const newLength = state.length + chunkSize;

	if (newLength <= maxBuffer) {
		addNewChunk(convertedChunk, state, addChunk, newLength);
		return;
	}

	const truncatedChunk = truncateChunk(convertedChunk, maxBuffer - state.length);

	if (truncatedChunk !== undefined) {
		addNewChunk(truncatedChunk, state, addChunk, maxBuffer);
	}

	throw new MaxBufferError();
};

const addNewChunk = (convertedChunk, state, addChunk, newLength) => {
	state.contents = addChunk(convertedChunk, state, newLength);
	state.length = newLength;
};

const getChunkType = chunk => {
	const typeOfChunk = typeof chunk;

	if (typeOfChunk === 'string') {
		return 'string';
	}

	if (typeOfChunk !== 'object' || chunk === null) {
		return 'others';
	}

	if (globalThis.Buffer?.isBuffer(chunk)) {
		return 'buffer';
	}

	const prototypeName = objectToString.call(chunk);

	if (prototypeName === '[object ArrayBuffer]') {
		return 'arrayBuffer';
	}

	if (prototypeName === '[object DataView]') {
		return 'dataView';
	}

	if (
		Number.isInteger(chunk.byteLength)
		&& Number.isInteger(chunk.byteOffset)
		&& objectToString.call(chunk.buffer) === '[object ArrayBuffer]'
	) {
		return 'typedArray';
	}

	return 'others';
};

const {toString: objectToString} = Object.prototype;

export class MaxBufferError extends Error {
	name = 'MaxBufferError';

	constructor() {
		super('maxBuffer exceeded');
	}
}

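`getStreamContents` is effectively a strategy pattern: each public method supplies its own methods object. As an illustration of that contract, here is a hypothetical consumer (not part of the package) that measures a stream's size without buffering its data:

import {getStreamContents} from './contents.js';

const passThrough = chunk => chunk;

// Hypothetical consumer: only `state.length` matters; chunk data is discarded.
const sizeMethods = {
	init: () => ({contents: undefined}),
	convertChunk: {
		string: passThrough,
		buffer: passThrough,
		arrayBuffer: passThrough,
		dataView: passThrough,
		typedArray: passThrough,
		others: passThrough,
	},
	getSize: chunk => chunk.byteLength ?? chunk.length ?? 1,
	truncateChunk: () => undefined,
	addChunk: (_convertedChunk, {contents}) => contents,
	getFinalChunk: () => undefined,
	finalize: ({length}) => length,
};

export const getStreamSize = (stream, options) => getStreamContents(stream, sizeMethods, options);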
View File: node_modules/get-stream/source/exports.js

@@ -1,5 +0,0 @@
export {getStreamAsArray} from './array.js';
export {getStreamAsArrayBuffer} from './array-buffer.js';
export {getStreamAsBuffer} from './buffer.js';
export {getStreamAsString as default} from './string.js';
export {MaxBufferError} from './contents.js';

View File: node_modules/get-stream/source/index.d.ts

@@ -1,121 +0,0 @@
import {type Readable} from 'node:stream';
import {type Buffer} from 'node:buffer';

export class MaxBufferError extends Error {
	readonly name: 'MaxBufferError';
	constructor();
}

// eslint-disable-next-line @typescript-eslint/ban-types
type TextStreamItem = string | Buffer | ArrayBuffer | ArrayBufferView;

export type AnyStream<StreamItem = TextStreamItem> = Readable | ReadableStream<StreamItem> | AsyncIterable<StreamItem>;

export type Options = {
	/**
	Maximum length of the stream. If exceeded, the promise will be rejected with a `MaxBufferError`.

	Depending on the [method](#api), the length is measured with [`string.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), [`buffer.length`](https://nodejs.org/api/buffer.html#buflength), [`arrayBuffer.byteLength`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/byteLength) or [`array.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/length).

	@default Infinity
	*/
	readonly maxBuffer?: number;
};
/**
Get the given `stream` as a string.

@returns The stream's contents as a promise.

@example
```
import fs from 'node:fs';
import getStream from 'get-stream';

const stream = fs.createReadStream('unicorn.txt');

console.log(await getStream(stream));
// ,,))))))));,
// __)))))))))))))),
// \|/ -\(((((''''((((((((.
// -*-==//////(('' . `)))))),
// /|\ ))| o ;-. '((((( ,(,
// ( `| / ) ;))))' ,_))^;(~
// | | | ,))((((_ _____------~~~-. %,;(;(>';'~
// o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~
// ; ''''```` `: `:::|\,__,%% );`'; ~
// | _ ) / `:|`----' `-'
// ______/\/~ | / /
// /~;;.____/;;' / ___--,-( `;;;/
// / // _;______;'------~~~~~ /;;/\ /
// // | | / ; \;;,\
// (<_ | ; /',/-----' _>
// \_| ||_ //~;~~~~~~~~~
// `\_| (,~~
// \~\
// ~~
```

@example
```
import getStream from 'get-stream';

const {body: readableStream} = await fetch('https://example.com');

console.log(await getStream(readableStream));
```

@example
```
import {opendir} from 'node:fs/promises';
import {getStreamAsArray} from 'get-stream';

const asyncIterable = await opendir(directory);

console.log(await getStreamAsArray(asyncIterable));
```
*/
export default function getStream(stream: AnyStream, options?: Options): Promise<string>;
/**
Get the given `stream` as a Node.js [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer).

@returns The stream's contents as a promise.

@example
```
import fs from 'node:fs';
import {getStreamAsBuffer} from 'get-stream';

const stream = fs.createReadStream('unicorn.png');

console.log(await getStreamAsBuffer(stream));
```
*/
// eslint-disable-next-line @typescript-eslint/ban-types
export function getStreamAsBuffer(stream: AnyStream, options?: Options): Promise<Buffer>;
/**
Get the given `stream` as an [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer).

@returns The stream's contents as a promise.

@example
```
import {getStreamAsArrayBuffer} from 'get-stream';

const {body: readableStream} = await fetch('https://example.com');

console.log(await getStreamAsArrayBuffer(readableStream));
```
*/
export function getStreamAsArrayBuffer(stream: AnyStream, options?: Options): Promise<ArrayBuffer>;
/**
Get the given `stream` as an array. Unlike [other methods](#api), this supports [streams of objects](https://nodejs.org/api/stream.html#object-mode).

@returns The stream's contents as a promise.

@example
```
import {getStreamAsArray} from 'get-stream';

const {body: readableStream} = await fetch('https://example.com');

console.log(await getStreamAsArray(readableStream));
```
*/
export function getStreamAsArray<Item>(stream: AnyStream<Item>, options?: Options): Promise<Item[]>;

View File: node_modules/get-stream/source/index.js

@@ -1,13 +0,0 @@
import {on} from 'node:events';
import {finished} from 'node:stream/promises';
import {nodeImports} from './stream.js';

Object.assign(nodeImports, {on, finished});

export {
	default,
	getStreamAsArray,
	getStreamAsArrayBuffer,
	getStreamAsBuffer,
	MaxBufferError,
} from './exports.js';

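The `Object.assign` above is a small dependency-injection trick: only this Node entrypoint imports `node:events` and `node:stream/promises`, while the shared `stream.js` reads them from `nodeImports`, so browser bundles never touch the Node built-ins. That pairing is typically wired up with conditional exports along these lines (illustrative sketch, not the package's actual manifest):

{
	"exports": {
		"node": "./source/index.js",
		"default": "./source/exports.js"
	}
}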
View File: node_modules/get-stream/source/stream.js

@@ -1,65 +0,0 @@
import {isReadableStream} from 'is-stream';
import {asyncIterator} from '@sec-ant/readable-stream/ponyfill';

export const getAsyncIterable = stream => {
	if (isReadableStream(stream, {checkOpen: false}) && nodeImports.on !== undefined) {
		return getStreamIterable(stream);
	}

	if (typeof stream?.[Symbol.asyncIterator] === 'function') {
		return stream;
	}

	// `ReadableStream[Symbol.asyncIterator]` support is missing in multiple browsers, so we ponyfill it
	if (toString.call(stream) === '[object ReadableStream]') {
		return asyncIterator.call(stream);
	}

	throw new TypeError('The first argument must be a Readable, a ReadableStream, or an async iterable.');
};

const {toString} = Object.prototype;

// The default iterable for Node.js streams does not allow for multiple readers at once, so we re-implement it
const getStreamIterable = async function * (stream) {
	const controller = new AbortController();
	const state = {};
	handleStreamEnd(stream, controller, state);

	try {
		for await (const [chunk] of nodeImports.on(stream, 'data', {signal: controller.signal})) {
			yield chunk;
		}
	} catch (error) {
		// Stream failure, for example due to `stream.destroy(error)`
		if (state.error !== undefined) {
			throw state.error;
		// `error` event directly emitted on stream
		} else if (!controller.signal.aborted) {
			throw error;
		// Otherwise, stream completed successfully
		}
	// The `finally` block also runs when the caller throws, for example due to the `maxBuffer` option
	} finally {
		stream.destroy();
	}
};

const handleStreamEnd = async (stream, controller, state) => {
	try {
		await nodeImports.finished(stream, {
			cleanup: true,
			readable: true,
			writable: false,
			error: false,
		});
	} catch (error) {
		state.error = error;
	} finally {
		controller.abort();
	}
};

// Loaded by the Node entrypoint, but not by the browser one.
// This prevents using dynamic imports.
export const nodeImports = {};

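A usage sketch of the ponyfill branch above (illustrative, with an example URL): in a browser where `ReadableStream` lacks `Symbol.asyncIterator`, a fetch body still iterates through `getAsyncIterable`.

import {getAsyncIterable} from './stream.js';

const response = await fetch('https://example.com');

let total = 0;
for await (const chunk of getAsyncIterable(response.body)) {
	total += chunk.byteLength; // Fetch bodies yield Uint8Array chunks.
}

console.log(total);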
View File: node_modules/get-stream/source/string.js

@@ -1,41 +0,0 @@
import {getStreamContents} from './contents.js';
import {
	identity,
	getContentsProperty,
	throwObjectStream,
	getLengthProperty,
} from './utils.js';

export async function getStreamAsString(stream, options) {
	return getStreamContents(stream, stringMethods, options);
}

const initString = () => ({contents: '', textDecoder: new TextDecoder()});

const useTextDecoder = (chunk, {textDecoder}) => textDecoder.decode(chunk, {stream: true});

const addStringChunk = (convertedChunk, {contents}) => contents + convertedChunk;

const truncateStringChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);

const getFinalStringChunk = ({textDecoder}) => {
	const finalChunk = textDecoder.decode();
	return finalChunk === '' ? undefined : finalChunk;
};

const stringMethods = {
	init: initString,
	convertChunk: {
		string: identity,
		buffer: useTextDecoder,
		arrayBuffer: useTextDecoder,
		dataView: useTextDecoder,
		typedArray: useTextDecoder,
		others: throwObjectStream,
	},
	getSize: getLengthProperty,
	truncateChunk: truncateStringChunk,
	addChunk: addStringChunk,
	getFinalChunk: getFinalStringChunk,
	finalize: getContentsProperty,
};

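Why decoding with `{stream: true}` plus the final flush in `getFinalStringChunk` matters (a sketch, not part of the deleted file): a multi-byte character split across chunks must be buffered by the decoder until the rest of its bytes arrive.

import {Readable} from 'node:stream';
import getStream from 'get-stream';

const bytes = new TextEncoder().encode('é'); // Two bytes in UTF-8: 0xC3 0xA9.
const stream = Readable.from([bytes.slice(0, 1), bytes.slice(1)]);

// Naive per-chunk decoding would produce two replacement characters;
// the streaming TextDecoder holds the first byte until the second arrives.
console.log(await getStream(stream)); // 'é'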
View File: node_modules/get-stream/source/utils.js

@@ -1,11 +0,0 @@
export const identity = value => value;

export const noop = () => undefined;

export const getContentsProperty = ({contents}) => contents;

export const throwObjectStream = chunk => {
	throw new Error(`Streams in object mode are not supported: ${String(chunk)}`);
};

export const getLengthProperty = convertedChunk => convertedChunk.length;