fix: 修复配额说明重复和undefined问题

- 在editStorageForm中初始化oss_storage_quota_value和oss_quota_unit
- 删除重复的旧配额说明块,保留新的当前配额设置显示

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-22 19:39:53 +08:00
commit 4350113979
7649 changed files with 897277 additions and 0 deletions

View File

@@ -0,0 +1,36 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ByteArrayCollector = void 0;
/**
 * Accumulates byte-array chunks and concatenates them on demand.
 * The output array type is determined by the injected allocator.
 */
class ByteArrayCollector {
    // Factory that allocates the aggregated output array, e.g. (n) => new Uint8Array(n).
    allocByteArray;
    // Running total of bytes across all collected chunks.
    byteLength = 0;
    // Collected chunks, in arrival order.
    byteArrays = [];
    /**
     * @param allocByteArray - allocator invoked with the required byte length
     *                         when the collected chunks must be merged.
     */
    constructor(allocByteArray) {
        this.allocByteArray = allocByteArray;
    }
    /**
     * Appends a chunk and updates the running byte total.
     */
    push(byteArray) {
        this.byteArrays.push(byteArray);
        this.byteLength += byteArray.byteLength;
    }
    /**
     * Returns the concatenation of everything collected so far and resets
     * the collector. A single collected chunk is returned as-is (no copy).
     */
    flush() {
        if (this.byteArrays.length === 1) {
            const only = this.byteArrays[0];
            this.reset();
            return only;
        }
        const merged = this.allocByteArray(this.byteLength);
        let offset = 0;
        for (const part of this.byteArrays) {
            merged.set(part, offset);
            offset += part.byteLength;
        }
        this.reset();
        return merged;
    }
    /**
     * Clears the collected chunks and the byte counter.
     */
    reset() {
        this.byteArrays = [];
        this.byteLength = 0;
    }
}
exports.ByteArrayCollector = ByteArrayCollector;

View File

@@ -0,0 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChecksumStream = void 0;
// Fall back to a dummy constructor in runtimes without the web streams API
// so the `extends` clause below does not throw at module load time.
const ReadableStreamRef = typeof ReadableStream === "function" ? ReadableStream : function () { };
// Branding subclass: streams produced by createChecksumStream.browser are
// re-prototyped to this class so `instanceof ChecksumStream` checks work.
class ChecksumStream extends ReadableStreamRef {
}
exports.ChecksumStream = ChecksumStream;

View File

@@ -0,0 +1,53 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChecksumStream = void 0;
const util_base64_1 = require("@smithy/util-base64");
const stream_1 = require("stream");
/**
 * Duplex stream that passes data through unchanged while feeding every chunk
 * into a checksum. When the source ends, the digest is base64-encoded and
 * compared against the expected checksum; a mismatch surfaces as a stream
 * error instead of a normal end-of-stream.
 */
class ChecksumStream extends stream_1.Duplex {
    expectedChecksum; // base64 checksum value to validate against
    checksumSourceLocation; // response header name, used in the error message
    checksum; // hash implementation exposing update()/digest()
    source; // upstream Node stream piped into this duplex
    base64Encoder; // digest encoder; defaults to toBase64
    constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) {
        super();
        // Only pipeable (Node) streams are supported here; web ReadableStream
        // sources are handled by the browser variant.
        if (typeof source.pipe === "function") {
            this.source = source;
        }
        else {
            throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`);
        }
        this.base64Encoder = base64Encoder ?? util_base64_1.toBase64;
        this.expectedChecksum = expectedChecksum;
        this.checksum = checksum;
        this.checksumSourceLocation = checksumSourceLocation;
        // Start flowing immediately: the source writes into this duplex.
        this.source.pipe(this);
    }
    _read(size) { } // output is driven by _write pushes; nothing to do here
    _write(chunk, encoding, callback) {
        try {
            // Hash the chunk, then forward it unchanged to the readable side.
            this.checksum.update(chunk);
            this.push(chunk);
        }
        catch (e) {
            return callback(e);
        }
        return callback();
    }
    async _final(callback) {
        try {
            const digest = await this.checksum.digest();
            const received = this.base64Encoder(digest);
            if (this.expectedChecksum !== received) {
                // Surface the mismatch as a stream error via the callback.
                return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` +
                    ` in response header "${this.checksumSourceLocation}".`));
            }
        }
        catch (e) {
            return callback(e);
        }
        // Signal EOF on the readable side only after a successful match.
        this.push(null);
        return callback();
    }
}
exports.ChecksumStream = ChecksumStream;

View File

@@ -0,0 +1,39 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createChecksumStream = void 0;
const util_base64_1 = require("@smithy/util-base64");
const stream_type_check_1 = require("../stream-type-check");
const ChecksumStream_browser_1 = require("./ChecksumStream.browser");
/**
 * Wraps a web ReadableStream so that its bytes are hashed as they flow; at
 * end-of-stream the digest is base64-encoded and compared to the expected
 * checksum, erroring the stream on mismatch. The returned readable is
 * re-prototyped to ChecksumStream for instanceof checks.
 */
const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => {
    // Only web ReadableStream sources are supported by this (browser) variant.
    if (!(0, stream_type_check_1.isReadableStream)(source)) {
        throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`);
    }
    const encode = base64Encoder ?? util_base64_1.toBase64;
    if (typeof TransformStream !== "function") {
        throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream.");
    }
    // Pass chunks through untouched while feeding them to the checksum; on
    // flush, compare the computed digest against the expected value.
    const digestTransform = new TransformStream({
        start() { },
        async transform(chunk, controller) {
            checksum.update(chunk);
            controller.enqueue(chunk);
        },
        async flush(controller) {
            const digest = await checksum.digest();
            const computed = encode(digest);
            if (expectedChecksum === computed) {
                controller.terminate();
            }
            else {
                controller.error(new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${computed}"` +
                    ` in response header "${checksumSourceLocation}".`));
            }
        },
    });
    source.pipeThrough(digestTransform);
    const readable = digestTransform.readable;
    // Brand the readable as a ChecksumStream for downstream instanceof checks.
    Object.setPrototypeOf(readable, ChecksumStream_browser_1.ChecksumStream.prototype);
    return readable;
};
exports.createChecksumStream = createChecksumStream;

View File

@@ -0,0 +1,12 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createChecksumStream = createChecksumStream;
const stream_type_check_1 = require("../stream-type-check");
const ChecksumStream_1 = require("./ChecksumStream");
const createChecksumStream_browser_1 = require("./createChecksumStream.browser");
/**
 * Creates a checksum-validating stream matching the type of `init.source`:
 * web ReadableStream sources get the TransformStream-based implementation,
 * anything else gets the Node Duplex-based ChecksumStream.
 */
function createChecksumStream(init) {
    const webStreamsAvailable = typeof ReadableStream === "function";
    if (webStreamsAvailable && (0, stream_type_check_1.isReadableStream)(init.source)) {
        return (0, createChecksumStream_browser_1.createChecksumStream)(init);
    }
    return new ChecksumStream_1.ChecksumStream(init);
}

View File

@@ -0,0 +1,60 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createBufferedReadable = createBufferedReadable;
const node_stream_1 = require("node:stream");
const ByteArrayCollector_1 = require("./ByteArrayCollector");
const createBufferedReadableStream_1 = require("./createBufferedReadableStream");
const stream_type_check_1 = require("./stream-type-check");
/**
 * Wraps a source stream so that small chunks are accumulated and re-emitted
 * in pieces of at least `size` bytes (except the final remainder).
 *
 * @param upstream - source Readable (Node) or ReadableStream (web).
 * @param size - minimum chunk size threshold in bytes.
 * @param logger - optional logger; warns once if buffering kicks in.
 * @returns a Readable (or ReadableStream for web sources) of merged chunks.
 */
function createBufferedReadable(upstream, size, logger) {
    // Web streams are delegated to the ReadableStream implementation.
    if ((0, stream_type_check_1.isReadableStream)(upstream)) {
        return (0, createBufferedReadableStream_1.createBufferedReadableStream)(upstream, size, logger);
    }
    const downstream = new node_stream_1.Readable({ read() { } });
    let streamBufferingLoggedWarning = false;
    let bytesSeen = 0;
    // One accumulator per chunk mode: 0 = string, 1 = Uint8Array, 2 = Buffer.
    // (The allocator's `size` parameter shadows the outer threshold.)
    const buffers = [
        "",
        new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size)),
        new ByteArrayCollector_1.ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))),
    ];
    let mode = -1; // -1 until the first recognized chunk type is seen
    upstream.on("data", (chunk) => {
        const chunkMode = (0, createBufferedReadableStream_1.modeOf)(chunk, true);
        if (mode !== chunkMode) {
            // Chunk type changed: flush whatever was buffered under the old
            // mode so output ordering is preserved.
            if (mode >= 0) {
                downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode));
            }
            mode = chunkMode;
        }
        if (mode === -1) {
            // Unrecognized chunk type: pass through without buffering.
            downstream.push(chunk);
            return;
        }
        const chunkSize = (0, createBufferedReadableStream_1.sizeOf)(chunk);
        bytesSeen += chunkSize;
        const bufferSize = (0, createBufferedReadableStream_1.sizeOf)(buffers[mode]);
        if (chunkSize >= size && bufferSize === 0) {
            // Chunk is already large enough and nothing is pending: forward as-is.
            downstream.push(chunk);
        }
        else {
            const newSize = (0, createBufferedReadableStream_1.merge)(buffers, mode, chunk);
            if (!streamBufferingLoggedWarning && bytesSeen > size * 2) {
                streamBufferingLoggedWarning = true;
                logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`);
            }
            if (newSize >= size) {
                downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode));
            }
        }
    });
    upstream.on("end", () => {
        // Emit any buffered remainder before ending the downstream.
        if (mode !== -1) {
            const remainder = (0, createBufferedReadableStream_1.flush)(buffers, mode);
            if ((0, createBufferedReadableStream_1.sizeOf)(remainder) > 0) {
                downstream.push(remainder);
            }
        }
        downstream.push(null);
    });
    return downstream;
}

View File

@@ -0,0 +1,103 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createBufferedReadable = void 0;
exports.createBufferedReadableStream = createBufferedReadableStream;
exports.merge = merge;
exports.flush = flush;
exports.sizeOf = sizeOf;
exports.modeOf = modeOf;
const ByteArrayCollector_1 = require("./ByteArrayCollector");
/**
 * Wraps a web ReadableStream so that chunks smaller than `size` are merged
 * before being re-emitted; see createBufferedReadable for the Node variant.
 *
 * @param upstream - source ReadableStream.
 * @param size - minimum chunk size threshold in bytes.
 * @param logger - optional logger; warns once if buffering kicks in.
 * @returns a ReadableStream of merged chunks.
 */
function createBufferedReadableStream(upstream, size, logger) {
    const reader = upstream.getReader();
    let streamBufferingLoggedWarning = false;
    let bytesSeen = 0;
    // Accumulators by chunk mode: 0 = string, 1 = Uint8Array. No Buffer
    // collector here (unlike the Node variant): modeOf is called with
    // allowBuffer=false below.
    const buffers = ["", new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size))];
    let mode = -1; // -1 until the first recognized chunk type is seen
    const pull = async (controller) => {
        const { value, done } = await reader.read();
        const chunk = value;
        if (done) {
            // Source exhausted: emit any buffered remainder, then close.
            if (mode !== -1) {
                const remainder = flush(buffers, mode);
                if (sizeOf(remainder) > 0) {
                    controller.enqueue(remainder);
                }
            }
            controller.close();
        }
        else {
            const chunkMode = modeOf(chunk, false);
            if (mode !== chunkMode) {
                // Chunk type changed: flush the old mode's buffer first so
                // output ordering is preserved.
                if (mode >= 0) {
                    controller.enqueue(flush(buffers, mode));
                }
                mode = chunkMode;
            }
            if (mode === -1) {
                // Unrecognized chunk type: pass through unbuffered.
                controller.enqueue(chunk);
                return;
            }
            const chunkSize = sizeOf(chunk);
            bytesSeen += chunkSize;
            const bufferSize = sizeOf(buffers[mode]);
            if (chunkSize >= size && bufferSize === 0) {
                // Chunk already large enough and nothing pending: forward as-is.
                controller.enqueue(chunk);
            }
            else {
                const newSize = merge(buffers, mode, chunk);
                if (!streamBufferingLoggedWarning && bytesSeen > size * 2) {
                    streamBufferingLoggedWarning = true;
                    logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`);
                }
                if (newSize >= size) {
                    controller.enqueue(flush(buffers, mode));
                }
                else {
                    // Not enough buffered yet: keep pulling so every pull()
                    // invocation eventually enqueues something or closes.
                    await pull(controller);
                }
            }
        }
    };
    return new ReadableStream({
        pull,
    });
}
// note: also exported under the createBufferedReadable name (alias).
exports.createBufferedReadable = createBufferedReadableStream;
/**
 * Appends a chunk to the accumulator for the given mode and returns the new
 * buffered size (bytes for binary modes, characters for strings).
 *
 * @param buffers - [string accumulator, Uint8Array collector, Buffer collector].
 * @param mode - 0 = string, 1 = Uint8Array, 2 = Buffer (see modeOf()).
 * @param chunk - chunk whose type matches the mode.
 * @throws on any other mode, mirroring flush(); previously an unknown mode
 *         silently returned undefined, which would corrupt the size math
 *         in the buffering loops.
 */
function merge(buffers, mode, chunk) {
    switch (mode) {
        case 0:
            buffers[0] += chunk;
            return sizeOf(buffers[0]);
        case 1:
        case 2:
            buffers[mode].push(chunk);
            return sizeOf(buffers[mode]);
        default:
            throw new Error(`@smithy/util-stream - invalid index ${mode} given to merge()`);
    }
}
/**
 * Drains the accumulator for the given mode and returns its contents.
 *
 * @param buffers - [string accumulator, Uint8Array collector, Buffer collector].
 * @param mode - 0 = string, 1 = Uint8Array, 2 = Buffer.
 * @throws on any other mode.
 */
function flush(buffers, mode) {
    // String buffer: hand back the accumulated string and reset it.
    if (mode === 0) {
        const collected = buffers[0];
        buffers[0] = "";
        return collected;
    }
    // Byte collectors: delegate to the collector's own flush().
    if (mode === 1 || mode === 2) {
        return buffers[mode].flush();
    }
    throw new Error(`@smithy/util-stream - invalid index ${mode} given to flush()`);
}
/**
 * Size of a chunk: byteLength for binary chunks, length for strings,
 * 0 for anything else (including null/undefined).
 */
function sizeOf(chunk) {
    if (chunk == null) {
        return 0;
    }
    return chunk.byteLength ?? chunk.length ?? 0;
}
/**
 * Classifies a chunk for the buffering code: 2 = Buffer (when allowed),
 * 1 = Uint8Array, 0 = string, -1 = unrecognized.
 * Buffers are Uint8Arrays, so with allowBuffer=false they classify as 1.
 */
function modeOf(chunk, allowBuffer = true) {
    const bufferAvailable = typeof Buffer !== "undefined";
    if (allowBuffer && bufferAvailable && chunk instanceof Buffer) {
        return 2;
    }
    if (chunk instanceof Uint8Array) {
        return 1;
    }
    return typeof chunk === "string" ? 0 : -1;
}

View File

@@ -0,0 +1,31 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getAwsChunkedEncodingStream = void 0;
/**
 * Applies aws-chunked content encoding to a web ReadableStream: each chunk
 * is framed as "<hex length>\r\n<data>\r\n", terminated by a zero-length
 * chunk and, when all checksum options are provided, a checksum trailer.
 */
const getAwsChunkedEncodingStream = (readableStream, options) => {
    const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options;
    // The checksum trailer is only emitted when every checksum-related option
    // was supplied; otherwise the stream is chunk-encoded without a trailer.
    const checksumRequired = base64Encoder !== undefined &&
        bodyLengthChecker !== undefined &&
        checksumAlgorithmFn !== undefined &&
        checksumLocationName !== undefined &&
        streamHasher !== undefined;
    // Kick off hashing of the source immediately so the digest is ready by
    // the time the stream completes.
    const digest = checksumRequired ? streamHasher(checksumAlgorithmFn, readableStream) : undefined;
    const reader = readableStream.getReader();
    return new ReadableStream({
        async pull(controller) {
            const { value, done } = await reader.read();
            if (!done) {
                // Emit "<hex length>\r\n<chunk>\r\n" per aws-chunked framing.
                const length = bodyLengthChecker(value) || 0;
                controller.enqueue(`${length.toString(16)}\r\n${value}\r\n`);
                return;
            }
            // Terminal zero-length chunk, then the optional checksum trailer.
            controller.enqueue(`0\r\n`);
            if (checksumRequired) {
                const checksum = base64Encoder(await digest);
                controller.enqueue(`${checksumLocationName}:${checksum}\r\n`);
                controller.enqueue(`\r\n`);
            }
            controller.close();
        },
    });
};
exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream;

View File

@@ -0,0 +1,41 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream;
const node_stream_1 = require("node:stream");
const getAwsChunkedEncodingStream_browser_1 = require("./getAwsChunkedEncodingStream.browser");
const stream_type_check_1 = require("./stream-type-check");
/**
 * Applies aws-chunked content encoding to a stream: each chunk is framed as
 * "<hex length>\r\n<data>\r\n", terminated by a zero-length chunk and an
 * optional checksum trailer.
 *
 * @param stream - Node Readable or web ReadableStream source.
 * @param options - encoders and checksum configuration.
 * @returns the chunk-encoded stream (same flavor as the input).
 */
function getAwsChunkedEncodingStream(stream, options) {
    const readable = stream;
    const readableStream = stream;
    // Web streams are delegated to the browser implementation.
    if ((0, stream_type_check_1.isReadableStream)(readableStream)) {
        return (0, getAwsChunkedEncodingStream_browser_1.getAwsChunkedEncodingStream)(readableStream, options);
    }
    const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options;
    // NOTE(review): unlike the browser variant, this check does not require
    // bodyLengthChecker, yet bodyLengthChecker is called unconditionally in
    // the data handler below — confirm callers always supply it.
    const checksumRequired = base64Encoder !== undefined &&
        checksumAlgorithmFn !== undefined &&
        checksumLocationName !== undefined &&
        streamHasher !== undefined;
    // Begin hashing the source immediately so the digest resolves by "end".
    const digest = checksumRequired ? streamHasher(checksumAlgorithmFn, readable) : undefined;
    const awsChunkedEncodingStream = new node_stream_1.Readable({
        read: () => { },
    });
    readable.on("data", (data) => {
        const length = bodyLengthChecker(data) || 0;
        if (length === 0) {
            // Skip empty chunks: a zero-length frame would terminate the body.
            return;
        }
        awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`);
        awsChunkedEncodingStream.push(data);
        awsChunkedEncodingStream.push("\r\n");
    });
    readable.on("end", async () => {
        // Terminal zero-length chunk, then the optional checksum trailer.
        awsChunkedEncodingStream.push(`0\r\n`);
        if (checksumRequired) {
            const checksum = base64Encoder(await digest);
            awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`);
            awsChunkedEncodingStream.push(`\r\n`);
        }
        awsChunkedEncodingStream.push(null);
    });
    return awsChunkedEncodingStream;
}

View File

@@ -0,0 +1,34 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.headStream = headStream;
/**
 * Reads at most `bytes` bytes from the head of a web ReadableStream.
 * The reader lock is released afterwards so the stream remains usable.
 *
 * @param stream - web ReadableStream of byte chunks.
 * @param bytes - maximum number of bytes to return.
 * @returns a Uint8Array containing up to `bytes` bytes.
 */
async function headStream(stream, bytes) {
    // Read chunks until at least `bytes` bytes are seen or the stream ends.
    const collected = [];
    let totalBytes = 0;
    const reader = stream.getReader();
    let reachedEnd = false;
    while (!reachedEnd) {
        const { done, value } = await reader.read();
        if (value) {
            collected.push(value);
            totalBytes += value?.byteLength ?? 0;
        }
        if (totalBytes >= bytes) {
            break;
        }
        reachedEnd = done;
    }
    reader.releaseLock();
    // Copy at most `bytes` bytes into a single array, truncating the final
    // chunk if it overshoots the limit.
    const head = new Uint8Array(Math.min(bytes, totalBytes));
    let offset = 0;
    for (const chunk of collected) {
        const remaining = head.byteLength - offset;
        if (chunk.byteLength > remaining) {
            head.set(chunk.subarray(0, remaining), offset);
            break;
        }
        head.set(chunk, offset);
        offset += chunk.length;
    }
    return head;
}

View File

@@ -0,0 +1,42 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.headStream = void 0;
const stream_1 = require("stream");
const headStream_browser_1 = require("./headStream.browser");
const stream_type_check_1 = require("./stream-type-check");
/**
 * Collects up to `bytes` bytes from the head of a stream.
 * Web ReadableStreams are delegated to the browser implementation; Node
 * streams are piped into a byte-limited Collector.
 *
 * @param stream - Node Readable or web ReadableStream.
 * @param bytes - maximum number of bytes to collect.
 * @returns a promise of a Uint8Array with at most `bytes` bytes.
 */
const headStream = (stream, bytes) => {
    if ((0, stream_type_check_1.isReadableStream)(stream)) {
        return (0, headStream_browser_1.headStream)(stream, bytes);
    }
    return new Promise((resolve, reject) => {
        const collector = new Collector();
        collector.limit = bytes; // cap must be set before piping starts
        stream.pipe(collector);
        stream.on("error", (err) => {
            // Stop collecting and propagate the source error.
            collector.end();
            reject(err);
        });
        collector.on("error", reject);
        collector.on("finish", function () {
            // note: this local `bytes` shadows the outer parameter.
            const bytes = new Uint8Array(Buffer.concat(this.buffers));
            resolve(bytes);
        });
    });
};
exports.headStream = headStream;
// Writable sink that buffers incoming chunks until `limit` bytes have been
// seen, then truncates the final chunk and emits "finish" early.
class Collector extends stream_1.Writable {
    buffers = []; // collected chunks in arrival order
    limit = Infinity; // byte cap; set by headStream before piping
    bytesBuffered = 0; // running total of buffered bytes
    _write(chunk, encoding, callback) {
        this.buffers.push(chunk);
        this.bytesBuffered += chunk.byteLength ?? 0;
        if (this.bytesBuffered >= this.limit) {
            // Trim the excess from the last chunk so exactly `limit` bytes
            // remain, then signal completion without waiting for stream end.
            const excess = this.bytesBuffered - this.limit;
            const tailBuffer = this.buffers[this.buffers.length - 1];
            this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess);
            this.emit("finish");
        }
        callback();
    }
}

View File

@@ -0,0 +1,86 @@
'use strict';
var utilBase64 = require('@smithy/util-base64');
var utilUtf8 = require('@smithy/util-utf8');
var ChecksumStream = require('./checksum/ChecksumStream');
var createChecksumStream = require('./checksum/createChecksumStream');
var createBufferedReadable = require('./createBufferedReadable');
var getAwsChunkedEncodingStream = require('./getAwsChunkedEncodingStream');
var headStream = require('./headStream');
var sdkStreamMixin = require('./sdk-stream-mixin');
var splitStream = require('./splitStream');
var streamTypeCheck = require('./stream-type-check');
/**
 * Uint8Array subclass used as a payload adapter that can round-trip between
 * strings ("utf-8" or "base64") and bytes.
 */
class Uint8ArrayBlobAdapter extends Uint8Array {
    /**
     * Builds an adapter from a string.
     * @param source - the input; only strings are accepted.
     * @param encoding - "utf-8" (default) or "base64".
     * @throws on non-string input.
     */
    static fromString(source, encoding = "utf-8") {
        if (typeof source !== "string") {
            throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`);
        }
        const bytes = encoding === "base64" ? utilBase64.fromBase64(source) : utilUtf8.fromUtf8(source);
        return Uint8ArrayBlobAdapter.mutate(bytes);
    }
    /**
     * Rebrands an existing Uint8Array as a Uint8ArrayBlobAdapter in place
     * (no copy) and returns it.
     */
    static mutate(source) {
        Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype);
        return source;
    }
    /**
     * Decodes this byte array to a string: "base64" or "utf-8" (default).
     */
    transformToString(encoding = "utf-8") {
        return encoding === "base64" ? utilBase64.toBase64(this) : utilUtf8.toUtf8(this);
    }
}
// Re-export the public surface of the sibling modules. Each key is exposed
// lazily through a getter so consumers observe live bindings; keys already
// defined on exports (and "default") are skipped.
Object.defineProperty(exports, "isBlob", {
    enumerable: true,
    get: function () { return streamTypeCheck.isBlob; }
});
Object.defineProperty(exports, "isReadableStream", {
    enumerable: true,
    get: function () { return streamTypeCheck.isReadableStream; }
});
exports.Uint8ArrayBlobAdapter = Uint8ArrayBlobAdapter;
Object.keys(ChecksumStream).forEach(function (k) {
    if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
        enumerable: true,
        get: function () { return ChecksumStream[k]; }
    });
});
Object.keys(createChecksumStream).forEach(function (k) {
    if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
        enumerable: true,
        get: function () { return createChecksumStream[k]; }
    });
});
Object.keys(createBufferedReadable).forEach(function (k) {
    if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
        enumerable: true,
        get: function () { return createBufferedReadable[k]; }
    });
});
Object.keys(getAwsChunkedEncodingStream).forEach(function (k) {
    if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
        enumerable: true,
        get: function () { return getAwsChunkedEncodingStream[k]; }
    });
});
Object.keys(headStream).forEach(function (k) {
    if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
        enumerable: true,
        get: function () { return headStream[k]; }
    });
});
Object.keys(sdkStreamMixin).forEach(function (k) {
    if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
        enumerable: true,
        get: function () { return sdkStreamMixin[k]; }
    });
});
Object.keys(splitStream).forEach(function (k) {
    if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
        enumerable: true,
        get: function () { return splitStream[k]; }
    });
});

View File

@@ -0,0 +1,68 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.sdkStreamMixin = void 0;
const fetch_http_handler_1 = require("@smithy/fetch-http-handler");
const util_base64_1 = require("@smithy/util-base64");
const util_hex_encoding_1 = require("@smithy/util-hex-encoding");
const util_utf8_1 = require("@smithy/util-utf8");
const stream_type_check_1 = require("./stream-type-check");
// Error used by the transform methods once the stream body is consumed.
const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed.";
/**
 * Adds transformToByteArray / transformToString / transformToWebStream
 * helpers onto a Blob or web ReadableStream payload. The payload may only
 * be consumed once; subsequent transform calls throw.
 *
 * @param stream - Blob or web ReadableStream payload.
 * @returns the same object, augmented with the transform methods.
 * @throws when given any other stream implementation.
 */
const sdkStreamMixin = (stream) => {
    if (!isBlobInstance(stream) && !(0, stream_type_check_1.isReadableStream)(stream)) {
        const name = stream?.__proto__?.constructor?.name || stream;
        throw new Error(`Unexpected stream implementation, expect Blob or ReadableStream, got ${name}`);
    }
    let transformed = false; // one-shot guard: the body can be read only once
    const transformToByteArray = async () => {
        if (transformed) {
            throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
        }
        transformed = true;
        return await (0, fetch_http_handler_1.streamCollector)(stream);
    };
    // Converts a Blob payload to a web ReadableStream via Blob.stream().
    const blobToWebStream = (blob) => {
        if (typeof blob.stream !== "function") {
            throw new Error("Cannot transform payload Blob to web stream. Please make sure the Blob.stream() is polyfilled.\n" +
                "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body");
        }
        return blob.stream();
    };
    return Object.assign(stream, {
        transformToByteArray: transformToByteArray,
        // Decodes the collected bytes: base64/hex handled explicitly,
        // utf-8 by default, anything else via TextDecoder when available.
        transformToString: async (encoding) => {
            const buf = await transformToByteArray();
            if (encoding === "base64") {
                return (0, util_base64_1.toBase64)(buf);
            }
            else if (encoding === "hex") {
                return (0, util_hex_encoding_1.toHex)(buf);
            }
            else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") {
                return (0, util_utf8_1.toUtf8)(buf);
            }
            else if (typeof TextDecoder === "function") {
                return new TextDecoder(encoding).decode(buf);
            }
            else {
                throw new Error("TextDecoder is not available, please make sure polyfill is provided.");
            }
        },
        // Exposes the payload as a web ReadableStream (converting Blobs) and
        // marks the body as consumed.
        transformToWebStream: () => {
            if (transformed) {
                throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
            }
            transformed = true;
            if (isBlobInstance(stream)) {
                return blobToWebStream(stream);
            }
            else if ((0, stream_type_check_1.isReadableStream)(stream)) {
                return stream;
            }
            else {
                throw new Error(`Cannot transform payload to web stream, got ${stream}`);
            }
        },
    });
};
exports.sdkStreamMixin = sdkStreamMixin;
// True when the runtime provides Blob and the payload is a Blob instance.
const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob;

View File

@@ -0,0 +1,54 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.sdkStreamMixin = void 0;
const node_http_handler_1 = require("@smithy/node-http-handler");
const util_buffer_from_1 = require("@smithy/util-buffer-from");
const stream_1 = require("stream");
const sdk_stream_mixin_browser_1 = require("./sdk-stream-mixin.browser");
// Error used by the transform methods once the stream body is consumed.
const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed.";
/**
 * Adds transformToByteArray / transformToString / transformToWebStream
 * helpers onto a Node Readable payload; non-Readable payloads fall back to
 * the browser mixin (Blob / web ReadableStream). The body may only be
 * consumed once.
 *
 * @param stream - Node Readable (or Blob/ReadableStream for the fallback).
 * @returns the same stream, augmented with the transform methods.
 * @throws when given an unsupported stream implementation.
 */
const sdkStreamMixin = (stream) => {
    if (!(stream instanceof stream_1.Readable)) {
        try {
            // Not a Node stream: let the browser mixin try Blob/ReadableStream.
            return (0, sdk_stream_mixin_browser_1.sdkStreamMixin)(stream);
        }
        catch (e) {
            const name = stream?.__proto__?.constructor?.name || stream;
            throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`);
        }
    }
    let transformed = false; // one-shot guard: the body can be read only once
    const transformToByteArray = async () => {
        if (transformed) {
            throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
        }
        transformed = true;
        return await (0, node_http_handler_1.streamCollector)(stream);
    };
    return Object.assign(stream, {
        transformToByteArray,
        // Decodes the collected bytes: Buffer-supported encodings go through
        // Buffer.toString, anything else through TextDecoder.
        transformToString: async (encoding) => {
            const buf = await transformToByteArray();
            if (encoding === undefined || Buffer.isEncoding(encoding)) {
                return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding);
            }
            else {
                const decoder = new TextDecoder(encoding);
                return decoder.decode(buf);
            }
        },
        transformToWebStream: () => {
            if (transformed) {
                throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
            }
            if (stream.readableFlowing !== null) {
                // A flowing stream already has data listeners attached elsewhere.
                throw new Error("The stream has been consumed by other callbacks.");
            }
            if (typeof stream_1.Readable.toWeb !== "function") {
                throw new Error("Readable.toWeb() is not supported. Please ensure a polyfill is available.");
            }
            transformed = true;
            return stream_1.Readable.toWeb(stream);
        },
    });
};
exports.sdkStreamMixin = sdkStreamMixin;

View File

@@ -0,0 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.splitStream = splitStream;
/**
 * Splits a web ReadableStream (or Blob) into two identical streams.
 *
 * @param stream - web ReadableStream, or a Blob with a .stream() method.
 * @returns a pair of ReadableStreams produced by tee().
 */
async function splitStream(stream) {
    // Blobs are converted to a web stream before teeing.
    const source = typeof stream.stream === "function" ? stream.stream() : stream;
    return source.tee();
}

View File

@@ -0,0 +1,16 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.splitStream = splitStream;
const stream_1 = require("stream");
const splitStream_browser_1 = require("./splitStream.browser");
const stream_type_check_1 = require("./stream-type-check");
/**
 * Splits a stream into two identical streams. Web streams and Blobs use the
 * platform tee(); Node streams are duplicated via two PassThroughs.
 *
 * @param stream - Node Readable, web ReadableStream, or Blob.
 * @returns a pair of streams carrying the same data.
 */
async function splitStream(stream) {
    // Web streams and Blobs are handled by the platform tee() implementation.
    if ((0, stream_type_check_1.isBlob)(stream) || (0, stream_type_check_1.isReadableStream)(stream)) {
        return (0, splitStream_browser_1.splitStream)(stream);
    }
    // Node Readable: duplicate by piping into two PassThrough streams.
    const left = new stream_1.PassThrough();
    const right = new stream_1.PassThrough();
    stream.pipe(left);
    stream.pipe(right);
    return [left, right];
}

View File

@@ -0,0 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isBlob = exports.isReadableStream = void 0;
// True when the value is a web ReadableStream; cross-realm instances are
// accepted by also comparing constructor names.
const isReadableStream = (stream) => {
    if (typeof ReadableStream !== "function") {
        return false;
    }
    return stream instanceof ReadableStream || stream?.constructor?.name === ReadableStream.name;
};
exports.isReadableStream = isReadableStream;
// True when the value is a Blob; cross-realm instances are accepted by also
// comparing constructor names.
const isBlob = (blob) => {
    if (typeof Blob !== "function") {
        return false;
    }
    return blob instanceof Blob || blob?.constructor?.name === Blob.name;
};
exports.isBlob = isBlob;