fix: remove duplicated quota description and fix undefined quota values

- Initialize oss_storage_quota_value and oss_quota_unit in editStorageForm
- Remove the duplicated old quota description block and keep the new current-quota display

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
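The frontend change the message describes is not visible in the diff shown below, which only adds a vendored dependency file. Purely as an illustration of the kind of initialization the message refers to, here is a minimal hypothetical sketch; only the names editStorageForm, oss_storage_quota_value, and oss_quota_unit come from the commit message, while the default values and the openEditStorageDialog helper are assumptions.

// Hypothetical sketch, not part of this commit: initialize the quota fields
// up front so the edit form never renders "undefined" before a storage record loads.
const editStorageForm = {
    oss_storage_quota_value: 0,   // assumed default
    oss_quota_unit: 'GB',         // assumed default
};

// Assumed helper that populates the form when the edit dialog opens.
function openEditStorageDialog(storage) {
    editStorageForm.oss_storage_quota_value = storage.oss_storage_quota_value ?? 0;
    editStorageForm.oss_quota_unit = storage.oss_quota_unit ?? 'GB';
}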
backend/node_modules/@smithy/util-retry/dist-cjs/index.js (generated, vendored, normal file, 278 additions)
@@ -0,0 +1,278 @@
'use strict';

var serviceErrorClassification = require('@smithy/service-error-classification');

exports.RETRY_MODES = void 0;
(function (RETRY_MODES) {
    RETRY_MODES["STANDARD"] = "standard";
    RETRY_MODES["ADAPTIVE"] = "adaptive";
})(exports.RETRY_MODES || (exports.RETRY_MODES = {}));
const DEFAULT_MAX_ATTEMPTS = 3;
const DEFAULT_RETRY_MODE = exports.RETRY_MODES.STANDARD;

class DefaultRateLimiter {
    static setTimeoutFn = setTimeout;
    beta;
    minCapacity;
    minFillRate;
    scaleConstant;
    smooth;
    currentCapacity = 0;
    enabled = false;
    lastMaxRate = 0;
    measuredTxRate = 0;
    requestCount = 0;
    fillRate;
    lastThrottleTime;
    lastTimestamp = 0;
    lastTxRateBucket;
    maxCapacity;
    timeWindow = 0;
    constructor(options) {
        this.beta = options?.beta ?? 0.7;
        this.minCapacity = options?.minCapacity ?? 1;
        this.minFillRate = options?.minFillRate ?? 0.5;
        this.scaleConstant = options?.scaleConstant ?? 0.4;
        this.smooth = options?.smooth ?? 0.8;
        const currentTimeInSeconds = this.getCurrentTimeInSeconds();
        this.lastThrottleTime = currentTimeInSeconds;
        this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds());
        this.fillRate = this.minFillRate;
        this.maxCapacity = this.minCapacity;
    }
    getCurrentTimeInSeconds() {
        return Date.now() / 1000;
    }
    async getSendToken() {
        return this.acquireTokenBucket(1);
    }
    async acquireTokenBucket(amount) {
        if (!this.enabled) {
            return;
        }
        this.refillTokenBucket();
        if (amount > this.currentCapacity) {
            const delay = ((amount - this.currentCapacity) / this.fillRate) * 1000;
            await new Promise((resolve) => DefaultRateLimiter.setTimeoutFn(resolve, delay));
        }
        this.currentCapacity = this.currentCapacity - amount;
    }
    refillTokenBucket() {
        const timestamp = this.getCurrentTimeInSeconds();
        if (!this.lastTimestamp) {
            this.lastTimestamp = timestamp;
            return;
        }
        const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate;
        this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount);
        this.lastTimestamp = timestamp;
    }
    updateClientSendingRate(response) {
        let calculatedRate;
        this.updateMeasuredRate();
        if (serviceErrorClassification.isThrottlingError(response)) {
            const rateToUse = !this.enabled ? this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate);
            this.lastMaxRate = rateToUse;
            this.calculateTimeWindow();
            this.lastThrottleTime = this.getCurrentTimeInSeconds();
            calculatedRate = this.cubicThrottle(rateToUse);
            this.enableTokenBucket();
        }
        else {
            this.calculateTimeWindow();
            calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds());
        }
        const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate);
        this.updateTokenBucketRate(newRate);
    }
    calculateTimeWindow() {
        this.timeWindow = this.getPrecise(Math.pow((this.lastMaxRate * (1 - this.beta)) / this.scaleConstant, 1 / 3));
    }
    cubicThrottle(rateToUse) {
        return this.getPrecise(rateToUse * this.beta);
    }
    cubicSuccess(timestamp) {
        return this.getPrecise(this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate);
    }
    enableTokenBucket() {
        this.enabled = true;
    }
    updateTokenBucketRate(newRate) {
        this.refillTokenBucket();
        this.fillRate = Math.max(newRate, this.minFillRate);
        this.maxCapacity = Math.max(newRate, this.minCapacity);
        this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity);
    }
    updateMeasuredRate() {
        const t = this.getCurrentTimeInSeconds();
        const timeBucket = Math.floor(t * 2) / 2;
        this.requestCount++;
        if (timeBucket > this.lastTxRateBucket) {
            const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket);
            this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth));
            this.requestCount = 0;
            this.lastTxRateBucket = timeBucket;
        }
    }
    getPrecise(num) {
        return parseFloat(num.toFixed(8));
    }
}

const DEFAULT_RETRY_DELAY_BASE = 100;
const MAXIMUM_RETRY_DELAY = 20 * 1000;
const THROTTLING_RETRY_DELAY_BASE = 500;
const INITIAL_RETRY_TOKENS = 500;
const RETRY_COST = 5;
const TIMEOUT_RETRY_COST = 10;
const NO_RETRY_INCREMENT = 1;
const INVOCATION_ID_HEADER = "amz-sdk-invocation-id";
const REQUEST_HEADER = "amz-sdk-request";

const getDefaultRetryBackoffStrategy = () => {
    let delayBase = DEFAULT_RETRY_DELAY_BASE;
    const computeNextBackoffDelay = (attempts) => {
        return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase));
    };
    const setDelayBase = (delay) => {
        delayBase = delay;
    };
    return {
        computeNextBackoffDelay,
        setDelayBase,
    };
};

const createDefaultRetryToken = ({ retryDelay, retryCount, retryCost, }) => {
    const getRetryCount = () => retryCount;
    const getRetryDelay = () => Math.min(MAXIMUM_RETRY_DELAY, retryDelay);
    const getRetryCost = () => retryCost;
    return {
        getRetryCount,
        getRetryDelay,
        getRetryCost,
    };
};

class StandardRetryStrategy {
    maxAttempts;
    mode = exports.RETRY_MODES.STANDARD;
    capacity = INITIAL_RETRY_TOKENS;
    retryBackoffStrategy = getDefaultRetryBackoffStrategy();
    maxAttemptsProvider;
    constructor(maxAttempts) {
        this.maxAttempts = maxAttempts;
        this.maxAttemptsProvider = typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts;
    }
    async acquireInitialRetryToken(retryTokenScope) {
        return createDefaultRetryToken({
            retryDelay: DEFAULT_RETRY_DELAY_BASE,
            retryCount: 0,
        });
    }
    async refreshRetryTokenForRetry(token, errorInfo) {
        const maxAttempts = await this.getMaxAttempts();
        if (this.shouldRetry(token, errorInfo, maxAttempts)) {
            const errorType = errorInfo.errorType;
            this.retryBackoffStrategy.setDelayBase(errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE);
            const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount());
            const retryDelay = errorInfo.retryAfterHint
                ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType)
                : delayFromErrorType;
            const capacityCost = this.getCapacityCost(errorType);
            this.capacity -= capacityCost;
            return createDefaultRetryToken({
                retryDelay,
                retryCount: token.getRetryCount() + 1,
                retryCost: capacityCost,
            });
        }
        throw new Error("No retry token available");
    }
    recordSuccess(token) {
        this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? NO_RETRY_INCREMENT));
    }
    getCapacity() {
        return this.capacity;
    }
    async getMaxAttempts() {
        try {
            return await this.maxAttemptsProvider();
        }
        catch (error) {
            console.warn(`Max attempts provider could not resolve. Using default of ${DEFAULT_MAX_ATTEMPTS}`);
            return DEFAULT_MAX_ATTEMPTS;
        }
    }
    shouldRetry(tokenToRenew, errorInfo, maxAttempts) {
        const attempts = tokenToRenew.getRetryCount() + 1;
        return (attempts < maxAttempts &&
            this.capacity >= this.getCapacityCost(errorInfo.errorType) &&
            this.isRetryableError(errorInfo.errorType));
    }
    getCapacityCost(errorType) {
        return errorType === "TRANSIENT" ? TIMEOUT_RETRY_COST : RETRY_COST;
    }
    isRetryableError(errorType) {
        return errorType === "THROTTLING" || errorType === "TRANSIENT";
    }
}

class AdaptiveRetryStrategy {
    maxAttemptsProvider;
    rateLimiter;
    standardRetryStrategy;
    mode = exports.RETRY_MODES.ADAPTIVE;
    constructor(maxAttemptsProvider, options) {
        this.maxAttemptsProvider = maxAttemptsProvider;
        const { rateLimiter } = options ?? {};
        this.rateLimiter = rateLimiter ?? new DefaultRateLimiter();
        this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider);
    }
    async acquireInitialRetryToken(retryTokenScope) {
        await this.rateLimiter.getSendToken();
        return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope);
    }
    async refreshRetryTokenForRetry(tokenToRenew, errorInfo) {
        this.rateLimiter.updateClientSendingRate(errorInfo);
        return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo);
    }
    recordSuccess(token) {
        this.rateLimiter.updateClientSendingRate({});
        this.standardRetryStrategy.recordSuccess(token);
    }
}

class ConfiguredRetryStrategy extends StandardRetryStrategy {
    computeNextBackoffDelay;
    constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) {
        super(typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts);
        if (typeof computeNextBackoffDelay === "number") {
            this.computeNextBackoffDelay = () => computeNextBackoffDelay;
        }
        else {
            this.computeNextBackoffDelay = computeNextBackoffDelay;
        }
    }
    async refreshRetryTokenForRetry(tokenToRenew, errorInfo) {
        const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo);
        token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount());
        return token;
    }
}

exports.AdaptiveRetryStrategy = AdaptiveRetryStrategy;
exports.ConfiguredRetryStrategy = ConfiguredRetryStrategy;
exports.DEFAULT_MAX_ATTEMPTS = DEFAULT_MAX_ATTEMPTS;
exports.DEFAULT_RETRY_DELAY_BASE = DEFAULT_RETRY_DELAY_BASE;
exports.DEFAULT_RETRY_MODE = DEFAULT_RETRY_MODE;
exports.DefaultRateLimiter = DefaultRateLimiter;
exports.INITIAL_RETRY_TOKENS = INITIAL_RETRY_TOKENS;
exports.INVOCATION_ID_HEADER = INVOCATION_ID_HEADER;
exports.MAXIMUM_RETRY_DELAY = MAXIMUM_RETRY_DELAY;
exports.NO_RETRY_INCREMENT = NO_RETRY_INCREMENT;
exports.REQUEST_HEADER = REQUEST_HEADER;
exports.RETRY_COST = RETRY_COST;
exports.StandardRetryStrategy = StandardRetryStrategy;
exports.THROTTLING_RETRY_DELAY_BASE = THROTTLING_RETRY_DELAY_BASE;
exports.TIMEOUT_RETRY_COST = TIMEOUT_RETRY_COST;
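For context, a minimal sketch of how the strategies exported above are typically exercised. It relies only on the constructors and methods defined in this vendored file; treating the module as importable under its package name, and the specific numbers used, are assumptions made for illustration.

// Illustrative only, not part of this commit.
const { ConfiguredRetryStrategy } = require('@smithy/util-retry');

// Allow up to 4 total attempts and replace the default jittered exponential
// backoff with a custom linear backoff based on the retry count.
const retryStrategy = new ConfiguredRetryStrategy(4, (attempt) => 100 + attempt * 1000);

async function demo() {
    // First attempt: acquire the initial token (retryCount starts at 0).
    let token = await retryStrategy.acquireInitialRetryToken('standard');
    // Suppose the attempt fails with a transient error: request a refreshed token.
    token = await retryStrategy.refreshRetryTokenForRetry(token, { errorType: 'TRANSIENT' });
    console.log(token.getRetryCount()); // 1
    console.log(token.getRetryDelay()); // 1100, from the custom backoff above
    // A later success returns capacity to the shared retry-token bucket.
    retryStrategy.recordSuccess(token);
}

demo();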