fix: 修复配额说明重复和undefined问题

- 在editStorageForm中初始化oss_storage_quota_value和oss_quota_unit
- 删除重复的旧配额说明块,保留新的当前配额设置显示

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-22 19:39:53 +08:00
commit 4350113979
7649 changed files with 897277 additions and 0 deletions

201
backend/node_modules/@aws-sdk/core/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

39
backend/node_modules/@aws-sdk/core/README.md generated vendored Normal file
View File

@@ -0,0 +1,39 @@
# `@aws-sdk/core`
This package provides common or core functionality to the AWS SDK for JavaScript (v3).
You do not need to explicitly install this package, since it will be transitively installed by AWS SDK clients.
## `@aws-sdk/core` submodules
Core submodules are organized for distribution via the `package.json` `exports` field.
`exports` is supported by default by the latest Node.js, webpack, and esbuild. For react-native, it can be
enabled via instructions found at [reactnative.dev/blog](https://reactnative.dev/blog/2023/06/21/package-exports-support).
Think of `@aws-sdk/core` as a mono-package within the monorepo.
It preserves the benefits of modularization, for example to optimize Node.js initialization speed,
while making it easier to have a consistent version of core dependencies, reducing package sprawl when
installing an SDK client.
### Guide for submodules
- Each `index.ts` file corresponding to the pattern `./src/submodules/<MODULE_NAME>/index.ts` will be
published as a separate `dist-cjs` bundled submodule index using the `Inliner.js` build script.
- Create a folder at `./src/submodules/<SUBMODULE>` containing an `index.ts` file and a `README.md` file.
- The linter will throw an error on missing submodule metadata in `package.json` and the various `tsconfig.json` files, but it will automatically fix them if possible.
- a submodule is equivalent to a standalone `@aws-sdk/<pkg>` package in that importing it in Node.js will resolve a separate bundle.
- Submodules must not import files from other submodules via relative paths. Instead, use the `@scope/pkg/submodule` specifier directly as the import.
- The linter will check for this and throw an error.
- To the extent possible, correctly declaring submodule metadata is validated by the linter in `@aws-sdk/core`.
The linter runs during `yarn build` and also as `yarn lint`.
### When should I create an `@aws-sdk/core/submodule` vs. `@aws-sdk/new-package`?
Keep in mind that the core package is installed by all AWS SDK clients.
If the component functionality is upstream of multiple clients, it is
a good candidate for a core submodule. For example, XML serialization.
If the component's functionality is downstream of a client, for example S3 pre-signing,
it should be a standalone package with potentially a peer or runtime dependency on an AWS SDK client.

View File

@@ -0,0 +1,7 @@
/**
 * Do not edit:
 * This is a compatibility redirect for contexts that do not understand package.json exports field.
 *
 * Type-level redirect: maps the bare "@aws-sdk/core/account-id-endpoint"
 * specifier onto the generated dist-types declarations for that submodule.
 */
declare module "@aws-sdk/core/account-id-endpoint" {
    export * from "@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d";
}

View File

@@ -0,0 +1,6 @@
/**
 * Do not edit:
 * This is a compatibility redirect for contexts that do not understand package.json exports field.
 *
 * Runtime redirect: forwards require("@aws-sdk/core/account-id-endpoint")
 * to the bundled CommonJS submodule entry point.
 */
module.exports = require("./dist-cjs/submodules/account-id-endpoint/index.js");

7
backend/node_modules/@aws-sdk/core/client.d.ts generated vendored Normal file
View File

@@ -0,0 +1,7 @@
/**
 * Do not edit:
 * This is a compatibility redirect for contexts that do not understand package.json exports field.
 *
 * Type-level redirect: maps the bare "@aws-sdk/core/client" specifier onto
 * the generated dist-types declarations for the client submodule.
 */
declare module "@aws-sdk/core/client" {
    export * from "@aws-sdk/core/dist-types/submodules/client/index.d";
}

5
backend/node_modules/@aws-sdk/core/client.js generated vendored Normal file
View File

@@ -0,0 +1,5 @@
/**
 * Do not edit:
 * This is a compatibility redirect for contexts that do not understand package.json exports field.
 *
 * Runtime redirect: forwards require("@aws-sdk/core/client") to the bundled
 * CommonJS submodule entry point.
 */
module.exports = require("./dist-cjs/submodules/client/index.js");

2236
backend/node_modules/@aws-sdk/core/dist-cjs/index.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,55 @@
// Generated CommonJS bundle: the @aws-sdk/core "account-id-endpoint" submodule.
// Resolves the accountIdEndpointMode setting ("disabled" | "preferred" | "required")
// from client config, the process environment, or the shared AWS config file.
'use strict';
var utilMiddleware = require('@smithy/util-middleware');
// Mode used when the caller configures nothing.
const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred";
const ACCOUNT_ID_ENDPOINT_MODE_VALUES = ["disabled", "preferred", "required"];
// True when `value` is one of the three supported mode strings.
function validateAccountIdEndpointMode(value) {
    return ACCOUNT_ID_ENDPOINT_MODE_VALUES.includes(value);
}
// Wraps input.accountIdEndpointMode in an async provider that validates the
// resolved value on each call, throwing for anything outside the supported set.
const resolveAccountIdEndpointModeConfig = (input) => {
    const { accountIdEndpointMode } = input;
    const accountIdEndpointModeProvider = utilMiddleware.normalizeProvider(accountIdEndpointMode ?? DEFAULT_ACCOUNT_ID_ENDPOINT_MODE);
    return Object.assign(input, {
        accountIdEndpointMode: async () => {
            const accIdMode = await accountIdEndpointModeProvider();
            if (!validateAccountIdEndpointMode(accIdMode)) {
                throw new Error(`Invalid value for accountIdEndpointMode: ${accIdMode}. Valid values are: "required", "preferred", "disabled".`);
            }
            return accIdMode;
        },
    });
};
const err = "Invalid AccountIdEndpointMode value";
// Helper that raises an Error with the given message.
const _throw = (message) => {
    throw new Error(message);
};
// Environment-variable and shared-config keys for this setting.
const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE";
const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode";
// Selector options for the node config loader: read the mode from the env or
// a profile, throwing on malformed values; defaults to "preferred".
const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = {
    environmentVariableSelector: (env) => {
        const value = env[ENV_ACCOUNT_ID_ENDPOINT_MODE];
        if (value && !validateAccountIdEndpointMode(value)) {
            _throw(err);
        }
        return value;
    },
    configFileSelector: (profile) => {
        const value = profile[CONFIG_ACCOUNT_ID_ENDPOINT_MODE];
        if (value && !validateAccountIdEndpointMode(value)) {
            _throw(err);
        }
        return value;
    },
    default: DEFAULT_ACCOUNT_ID_ENDPOINT_MODE,
};
exports.ACCOUNT_ID_ENDPOINT_MODE_VALUES = ACCOUNT_ID_ENDPOINT_MODE_VALUES;
exports.CONFIG_ACCOUNT_ID_ENDPOINT_MODE = CONFIG_ACCOUNT_ID_ENDPOINT_MODE;
exports.DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = DEFAULT_ACCOUNT_ID_ENDPOINT_MODE;
exports.ENV_ACCOUNT_ID_ENDPOINT_MODE = ENV_ACCOUNT_ID_ENDPOINT_MODE;
exports.NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS;
exports.resolveAccountIdEndpointModeConfig = resolveAccountIdEndpointModeConfig;
exports.validateAccountIdEndpointMode = validateAccountIdEndpointMode;

View File

@@ -0,0 +1,51 @@
// Generated CommonJS bundle: the @aws-sdk/core "client" submodule
// (Node version deprecation warning + SDK feature-tracking tag helpers).
'use strict';
// Process-wide flag so the deprecation warning is emitted at most once.
const state = {
    warningEmitted: false,
};
// Emits a one-time warning when `version` (shaped like "vXX.Y.Z") has a major
// version below 20. No-op for falsy input or after the first warning.
const emitWarningIfUnsupportedVersion = (version) => {
    if (version && !state.warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 20) {
        state.warningEmitted = true;
        process.emitWarning(`NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will
no longer support Node.js ${version} in January 2026.
To continue receiving updates to AWS services, bug fixes, and security
updates please upgrade to a supported Node.js LTS version.
More information can be found at: https://a.co/c895JFp`);
    }
};
// Records a feature-tracking id on a credentials object's `$source` map,
// creating the map on first use; returns the same (mutated) object.
function setCredentialFeature(credentials, feature, value) {
    if (!credentials.$source) {
        credentials.$source = {};
    }
    credentials.$source[feature] = value;
    return credentials;
}
// Records a feature-tracking id on a handler execution context, creating the
// `__aws_sdk_context.features` container as needed.
function setFeature(context, feature, value) {
    if (!context.__aws_sdk_context) {
        context.__aws_sdk_context = {
            features: {},
        };
    }
    else if (!context.__aws_sdk_context.features) {
        context.__aws_sdk_context.features = {};
    }
    context.__aws_sdk_context.features[feature] = value;
}
// Records a feature-tracking id on a token object's `$source` map; returns
// the same (mutated) token.
function setTokenFeature(token, feature, value) {
    if (!token.$source) {
        token.$source = {};
    }
    token.$source[feature] = value;
    return token;
}
exports.emitWarningIfUnsupportedVersion = emitWarningIfUnsupportedVersion;
exports.setCredentialFeature = setCredentialFeature;
exports.setFeature = setFeature;
exports.setTokenFeature = setTokenFeature;
exports.state = state;

View File

@@ -0,0 +1,307 @@
// Generated CommonJS bundle: the @aws-sdk/core "httpAuthSchemes" submodule
// (SigV4/SigV4a signers, clock-skew correction, and related config resolvers).
'use strict';
var protocolHttp = require('@smithy/protocol-http');
var core = require('@smithy/core');
var propertyProvider = require('@smithy/property-provider');
var client = require('@aws-sdk/core/client');
var signatureV4 = require('@smithy/signature-v4');
// Reads the (case-variant) Date header off an HttpResponse, else undefined.
const getDateHeader = (response) => protocolHttp.HttpResponse.isInstance(response) ? response.headers?.date ?? response.headers?.Date : undefined;
// Current time shifted by the tracked client/server clock offset (ms).
const getSkewCorrectedDate = (systemClockOffset) => new Date(Date.now() + systemClockOffset);
// "Skewed" means corrected local time and server time differ by >= 5 minutes.
const isClockSkewed = (clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 300000;
// Returns a new offset only when the server-reported time shows real skew;
// otherwise keeps the current offset unchanged.
const getUpdatedSystemClockOffset = (clockTime, currentSystemClockOffset) => {
    const clockTimeInMs = Date.parse(clockTime);
    if (isClockSkewed(clockTimeInMs, currentSystemClockOffset)) {
        return clockTimeInMs - Date.now();
    }
    return currentSystemClockOffset;
};
// Throws when a required signing property is missing; returns it otherwise.
const throwSigningPropertyError = (name, property) => {
    if (!property) {
        throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`);
    }
    return property;
};
// Checks that signing properties carry context/config/signer, resolves the
// signer against the endpoint's first auth scheme, and returns the pieces the
// signer classes below need.
const validateSigningProperties = async (signingProperties) => {
    const context = throwSigningPropertyError("context", signingProperties.context);
    const config = throwSigningPropertyError("config", signingProperties.config);
    const authScheme = context.endpointV2?.properties?.authSchemes?.[0];
    const signerFunction = throwSigningPropertyError("signer", config.signer);
    const signer = await signerFunction(authScheme);
    const signingRegion = signingProperties?.signingRegion;
    const signingRegionSet = signingProperties?.signingRegionSet;
    const signingName = signingProperties?.signingName;
    return {
        config,
        signer,
        signingRegion,
        signingRegionSet,
        signingName,
    };
};
/**
 * Signs requests with AWS Signature Version 4 and keeps the client's
 * `systemClockOffset` synchronized with the service clock via response
 * Date headers (success and error paths).
 */
class AwsSdkSigV4Signer {
    /**
     * Signs an HttpRequest with SigV4.
     * @param httpRequest - request to sign; must be an @smithy/protocol-http HttpRequest.
     * @param identity - resolved identity (unused here; the resolved signer holds credentials).
     * @param signingProperties - must carry `context` and `config` (see validateSigningProperties).
     * @returns the signed request.
     * @throws Error when the request is not an HttpRequest instance.
     */
    async sign(httpRequest, identity, signingProperties) {
        if (!protocolHttp.HttpRequest.isInstance(httpRequest)) {
            throw new Error("The request is not an instance of `HttpRequest` and cannot be signed");
        }
        const validatedProps = await validateSigningProperties(signingProperties);
        const { config, signer } = validatedProps;
        let { signingRegion, signingName } = validatedProps;
        const handlerExecutionContext = signingProperties.context;
        // FIX: the original condition read `length ?? 0 > 1`, which parses as
        // `length ?? (0 > 1)` because `>` binds tighter than `??`, so the branch
        // was entered whenever at least one auth scheme existed. Parenthesize so
        // the sigv4a->sigv4 fallback only engages with more than one scheme.
        if ((handlerExecutionContext?.authSchemes?.length ?? 0) > 1) {
            const [first, second] = handlerExecutionContext.authSchemes;
            // When sigv4a is preferred with a sigv4 fallback, adopt the
            // fallback's region/name overrides for plain SigV4 signing.
            if (first?.name === "sigv4a" && second?.name === "sigv4") {
                signingRegion = second?.signingRegion ?? signingRegion;
                signingName = second?.signingName ?? signingName;
            }
        }
        const signedRequest = await signer.sign(httpRequest, {
            // Sign with the skew-corrected clock so signatures stay valid when
            // the local clock drifts from the service's.
            signingDate: getSkewCorrectedDate(config.systemClockOffset),
            signingRegion: signingRegion,
            signingService: signingName,
        });
        return signedRequest;
    }
    /**
     * Returns an error handler that updates `config.systemClockOffset` from the
     * server-reported time on failure (marking `$metadata.clockSkewCorrected`
     * when the offset changed, so retry strategies can react), then rethrows.
     */
    errorHandler(signingProperties) {
        return (error) => {
            const serverTime = error.ServerTime ?? getDateHeader(error.$response);
            if (serverTime) {
                const config = throwSigningPropertyError("config", signingProperties.config);
                const initialSystemClockOffset = config.systemClockOffset;
                config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset);
                const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset;
                if (clockSkewCorrected && error.$metadata) {
                    error.$metadata.clockSkewCorrected = true;
                }
            }
            throw error;
        };
    }
    /**
     * Keeps `config.systemClockOffset` in sync with the service clock using the
     * Date header of successful responses.
     */
    successHandler(httpResponse, signingProperties) {
        const dateHeader = getDateHeader(httpResponse);
        if (dateHeader) {
            const config = throwSigningPropertyError("config", signingProperties.config);
            config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset);
        }
    }
}
// Backwards-compatible alias kept for older import sites.
const AWSSDKSigV4Signer = AwsSdkSigV4Signer;
// SigV4a (multi-region) variant: signs against a comma-joined region set.
// Region-set precedence: client config (`sigv4aSigningRegionSet`), then the
// endpoint auth scheme's region set, then the single resolved signing region.
class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer {
    async sign(httpRequest, identity, signingProperties) {
        if (!protocolHttp.HttpRequest.isInstance(httpRequest)) {
            throw new Error("The request is not an instance of `HttpRequest` and cannot be signed");
        }
        const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties(signingProperties);
        const configResolvedSigningRegionSet = await config.sigv4aSigningRegionSet?.();
        const multiRegionOverride = (configResolvedSigningRegionSet ??
            signingRegionSet ?? [signingRegion]).join(",");
        const signedRequest = await signer.sign(httpRequest, {
            signingDate: getSkewCorrectedDate(config.systemClockOffset),
            signingRegion: multiRegionOverride,
            signingService: signingName,
        });
        return signedRequest;
    }
}
// Splits a comma-separated string into trimmed entries; non-strings and the
// empty string yield an empty array.
const getArrayForCommaSeparatedString = (str) => {
    if (typeof str !== "string" || str.length === 0) {
        return [];
    }
    return str.split(",").map((entry) => entry.trim());
};
// Builds the per-service bearer-token env var name, e.g.
// "bedrock runtime" -> "AWS_BEARER_TOKEN_BEDROCK_RUNTIME".
const getBearerTokenEnvKey = (signingName) => {
    const normalized = signingName.replace(/[\s-]/g, "_").toUpperCase();
    return `AWS_BEARER_TOKEN_${normalized}`;
};
const NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY = "AWS_AUTH_SCHEME_PREFERENCE";
const NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY = "auth_scheme_preference";
// Config-loader options resolving the auth scheme preference list from the
// environment or the shared config file; absent keys yield undefined so the
// loader can fall through to its default ([]).
const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = {
    environmentVariableSelector: (env, options) => {
        // A service-specific bearer token in the environment forces httpBearerAuth.
        if (options?.signingName) {
            const bearerTokenKey = getBearerTokenEnvKey(options.signingName);
            if (bearerTokenKey in env) {
                return ["httpBearerAuth"];
            }
        }
        if (!(NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY in env)) {
            return undefined;
        }
        return getArrayForCommaSeparatedString(env[NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY]);
    },
    configFileSelector: (profile) => {
        if (!(NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY in profile)) {
            return undefined;
        }
        return getArrayForCommaSeparatedString(profile[NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY]);
    },
    default: [],
};
// Normalizes config.sigv4aSigningRegionSet into a provider function.
const resolveAwsSdkSigV4AConfig = (config) => {
    config.sigv4aSigningRegionSet = core.normalizeProvider(config.sigv4aSigningRegionSet);
    return config;
};
// Config-loader options for the sigv4a signing region set. Each selector
// throws a `tryNextLink` ProviderError when its source is unset so the loader
// falls through to the next source.
const NODE_SIGV4A_CONFIG_OPTIONS = {
    environmentVariableSelector(env) {
        if (env.AWS_SIGV4A_SIGNING_REGION_SET) {
            return env.AWS_SIGV4A_SIGNING_REGION_SET.split(",").map((_) => _.trim());
        }
        throw new propertyProvider.ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", {
            tryNextLink: true,
        });
    },
    configFileSelector(profile) {
        if (profile.sigv4a_signing_region_set) {
            return (profile.sigv4a_signing_region_set ?? "").split(",").map((_) => _.trim());
        }
        throw new propertyProvider.ProviderError("sigv4a_signing_region_set not set in profile.", {
            tryNextLink: true,
        });
    },
    default: undefined,
};
// Resolves the SigV4 signing configuration: installs an accessor for
// `config.credentials` that normalizes/memoizes/config-binds whatever is
// assigned (tagging user-supplied static credentials with the
// CREDENTIALS_CODE feature id), and constructs the default signer provider.
const resolveAwsSdkSigV4Config = (config) => {
    let inputCredentials = config.credentials;
    let isUserSupplied = !!config.credentials;
    let resolvedCredentials = undefined;
    // Every assignment to config.credentials re-runs normalization so later
    // overrides by the caller behave the same as the initial value.
    Object.defineProperty(config, "credentials", {
        set(credentials) {
            if (credentials && credentials !== inputCredentials && credentials !== resolvedCredentials) {
                isUserSupplied = true;
            }
            inputCredentials = credentials;
            const memoizedProvider = normalizeCredentialProvider(config, {
                credentials: inputCredentials,
                credentialDefaultProvider: config.credentialDefaultProvider,
            });
            const boundProvider = bindCallerConfig(config, memoizedProvider);
            if (isUserSupplied && !boundProvider.attributed) {
                const isCredentialObject = typeof inputCredentials === "object" && inputCredentials !== null;
                // Wrap once to attribute "code-supplied credentials" ("e") when
                // the resolved credentials carry no feature source yet.
                resolvedCredentials = async (options) => {
                    const creds = await boundProvider(options);
                    const attributedCreds = creds;
                    if (isCredentialObject && (!attributedCreds.$source || Object.keys(attributedCreds.$source).length === 0)) {
                        return client.setCredentialFeature(attributedCreds, "CREDENTIALS_CODE", "e");
                    }
                    return attributedCreds;
                };
                resolvedCredentials.memoized = boundProvider.memoized;
                resolvedCredentials.configBound = boundProvider.configBound;
                resolvedCredentials.attributed = true;
            }
            else {
                resolvedCredentials = boundProvider;
            }
        },
        get() {
            return resolvedCredentials;
        },
        enumerable: true,
        configurable: true,
    });
    // Trigger the setter once for the initial value.
    config.credentials = inputCredentials;
    const { signingEscapePath = true, systemClockOffset = config.systemClockOffset || 0, sha256, } = config;
    let signer;
    if (config.signer) {
        // Caller supplied a signer (or signer provider) directly.
        signer = core.normalizeProvider(config.signer);
    }
    else if (config.regionInfoProvider) {
        // Legacy path: derive signing region/name from a regionInfoProvider.
        signer = () => core.normalizeProvider(config.region)()
            .then(async (region) => [
            (await config.regionInfoProvider(region, {
                useFipsEndpoint: await config.useFipsEndpoint(),
                useDualstackEndpoint: await config.useDualstackEndpoint(),
            })) || {},
            region,
        ])
            .then(([regionInfo, region]) => {
            const { signingRegion, signingService } = regionInfo;
            config.signingRegion = config.signingRegion || signingRegion || region;
            config.signingName = config.signingName || signingService || config.serviceId;
            const params = {
                ...config,
                credentials: config.credentials,
                region: config.signingRegion,
                service: config.signingName,
                sha256,
                uriEscapePath: signingEscapePath,
            };
            const SignerCtor = config.signerConstructor || signatureV4.SignatureV4;
            return new SignerCtor(params);
        });
    }
    else {
        // Default path: build a SignatureV4 signer from the endpoint's auth
        // scheme, filling in defaults for name/region/properties.
        signer = async (authScheme) => {
            authScheme = Object.assign({}, {
                name: "sigv4",
                signingName: config.signingName || config.defaultSigningName,
                signingRegion: await core.normalizeProvider(config.region)(),
                properties: {},
            }, authScheme);
            const signingRegion = authScheme.signingRegion;
            const signingService = authScheme.signingName;
            config.signingRegion = config.signingRegion || signingRegion;
            config.signingName = config.signingName || signingService || config.serviceId;
            const params = {
                ...config,
                credentials: config.credentials,
                region: config.signingRegion,
                service: config.signingName,
                sha256,
                uriEscapePath: signingEscapePath,
            };
            const SignerCtor = config.signerConstructor || signatureV4.SignatureV4;
            return new SignerCtor(params);
        };
    }
    const resolvedConfig = Object.assign(config, {
        systemClockOffset,
        signingEscapePath,
        signer,
    });
    return resolvedConfig;
};
// Backwards-compatible alias kept for older import sites.
const resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config;
// Wraps raw `credentials` input in a memoized identity provider. Falls back
// to credentialDefaultProvider, or to a provider that always throws when
// neither is configured. Tags the result `memoized` to avoid double-wrapping.
function normalizeCredentialProvider(config, { credentials, credentialDefaultProvider, }) {
    let credentialsProvider;
    if (credentials) {
        if (!credentials?.memoized) {
            credentialsProvider = core.memoizeIdentityProvider(credentials, core.isIdentityExpired, core.doesIdentityRequireRefresh);
        }
        else {
            credentialsProvider = credentials;
        }
    }
    else {
        if (credentialDefaultProvider) {
            credentialsProvider = core.normalizeProvider(credentialDefaultProvider(Object.assign({}, config, {
                parentClientConfig: config,
            })));
        }
        else {
            // No source of credentials at all: fail loudly when invoked.
            credentialsProvider = async () => {
                throw new Error("@aws-sdk/core::resolveAwsSdkSigV4Config - `credentials` not provided and no credentialDefaultProvider was configured.");
            };
        }
    }
    credentialsProvider.memoized = true;
    return credentialsProvider;
}
// Binds the owning client config onto every provider invocation (passed as
// `callerClientConfig`); tagged `configBound` so the wrap happens only once.
function bindCallerConfig(config, credentialsProvider) {
    if (credentialsProvider.configBound) {
        return credentialsProvider;
    }
    const fn = async (options) => credentialsProvider({ ...options, callerClientConfig: config });
    fn.memoized = credentialsProvider.memoized;
    fn.configBound = true;
    return fn;
}
exports.AWSSDKSigV4Signer = AWSSDKSigV4Signer;
exports.AwsSdkSigV4ASigner = AwsSdkSigV4ASigner;
exports.AwsSdkSigV4Signer = AwsSdkSigV4Signer;
exports.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = NODE_AUTH_SCHEME_PREFERENCE_OPTIONS;
exports.NODE_SIGV4A_CONFIG_OPTIONS = NODE_SIGV4A_CONFIG_OPTIONS;
exports.getBearerTokenEnvKey = getBearerTokenEnvKey;
exports.resolveAWSSDKSigV4Config = resolveAWSSDKSigV4Config;
exports.resolveAwsSdkSigV4AConfig = resolveAwsSdkSigV4AConfig;
exports.resolveAwsSdkSigV4Config = resolveAwsSdkSigV4Config;
exports.validateSigningProperties = validateSigningProperties;

File diff suppressed because it is too large Load Diff

3
backend/node_modules/@aws-sdk/core/dist-es/index.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
// Root ESM index for @aws-sdk/core: re-exports the bundled submodules.
export * from "./submodules/client/index";
export * from "./submodules/httpAuthSchemes/index";
export * from "./submodules/protocols/index";

View File

@@ -0,0 +1,15 @@
// ESM build of the accountIdEndpointMode config resolver.
import { normalizeProvider } from "@smithy/util-middleware";
import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, validateAccountIdEndpointMode, } from "./AccountIdEndpointModeConstants";
// Wraps input.accountIdEndpointMode in an async provider that validates the
// resolved value against the supported set before returning it.
export const resolveAccountIdEndpointModeConfig = (input) => {
    const { accountIdEndpointMode } = input;
    const accountIdEndpointModeProvider = normalizeProvider(accountIdEndpointMode ?? DEFAULT_ACCOUNT_ID_ENDPOINT_MODE);
    return Object.assign(input, {
        accountIdEndpointMode: async () => {
            const accIdMode = await accountIdEndpointModeProvider();
            if (!validateAccountIdEndpointMode(accIdMode)) {
                throw new Error(`Invalid value for accountIdEndpointMode: ${accIdMode}. Valid values are: "required", "preferred", "disabled".`);
            }
            return accIdMode;
        },
    });
};

View File

@@ -0,0 +1,5 @@
// Mode applied when the caller does not configure accountIdEndpointMode.
export const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred";
// Exhaustive list of supported accountIdEndpointMode values.
export const ACCOUNT_ID_ENDPOINT_MODE_VALUES = ["disabled", "preferred", "required"];
// Reports whether `value` names one of the supported endpoint modes.
export function validateAccountIdEndpointMode(value) {
    for (const mode of ACCOUNT_ID_ENDPOINT_MODE_VALUES) {
        if (mode === value) {
            return true;
        }
    }
    return false;
}

View File

@@ -0,0 +1,24 @@
// ESM build of the node config-loader options for accountIdEndpointMode.
import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, validateAccountIdEndpointMode, } from "./AccountIdEndpointModeConstants";
const err = "Invalid AccountIdEndpointMode value";
// Helper that raises an Error with the given message.
const _throw = (message) => {
    throw new Error(message);
};
export const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE";
export const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode";
// Reads the mode from the process environment or a config-file profile,
// throwing on values outside the supported set; defaults to "preferred".
export const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = {
    environmentVariableSelector: (env) => {
        const value = env[ENV_ACCOUNT_ID_ENDPOINT_MODE];
        if (value && !validateAccountIdEndpointMode(value)) {
            _throw(err);
        }
        return value;
    },
    configFileSelector: (profile) => {
        const value = profile[CONFIG_ACCOUNT_ID_ENDPOINT_MODE];
        if (value && !validateAccountIdEndpointMode(value)) {
            _throw(err);
        }
        return value;
    },
    default: DEFAULT_ACCOUNT_ID_ENDPOINT_MODE,
};

View File

@@ -0,0 +1,3 @@
// Barrel file for the account-id-endpoint submodule (ESM build).
export * from "./AccountIdEndpointModeConfigResolver";
export * from "./AccountIdEndpointModeConstants";
export * from "./NodeAccountIdEndpointModeConfigOptions";

View File

@@ -0,0 +1,15 @@
// Process-wide flag ensuring the deprecation warning fires at most once.
export const state = {
    warningEmitted: false,
};
// Warns (once) when the running Node.js version string (e.g. "v18.17.0") has a
// major version below 20. No-op for falsy input or after the first warning.
export const emitWarningIfUnsupportedVersion = (version) => {
    if (!version || state.warningEmitted) {
        return;
    }
    const major = parseInt(version.substring(1, version.indexOf(".")));
    if (major < 20) {
        state.warningEmitted = true;
        process.emitWarning(`NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will
no longer support Node.js ${version} in January 2026.
To continue receiving updates to AWS services, bug fixes, and security
updates please upgrade to a supported Node.js LTS version.
More information can be found at: https://a.co/c895JFp`);
    }
};

View File

@@ -0,0 +1,4 @@
export * from "./emitWarningIfUnsupportedVersion";
export * from "./setCredentialFeature";
export * from "./setFeature";
export * from "./setTokenFeature";

View File

@@ -0,0 +1,7 @@
export function setCredentialFeature(credentials, feature, value) {
if (!credentials.$source) {
credentials.$source = {};
}
credentials.$source[feature] = value;
return credentials;
}

View File

@@ -0,0 +1,11 @@
export function setFeature(context, feature, value) {
if (!context.__aws_sdk_context) {
context.__aws_sdk_context = {
features: {},
};
}
else if (!context.__aws_sdk_context.features) {
context.__aws_sdk_context.features = {};
}
context.__aws_sdk_context.features[feature] = value;
}

View File

@@ -0,0 +1,7 @@
export function setTokenFeature(token, feature, value) {
if (!token.$source) {
token.$source = {};
}
token.$source[feature] = value;
return token;
}

View File

@@ -0,0 +1,20 @@
import { HttpRequest } from "@smithy/protocol-http";
import { getSkewCorrectedDate } from "../utils";
import { AwsSdkSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer";
/**
 * SigV4a (multi-region) variant of the SigV4 signer. The signing region is
 * a comma-joined region set resolved from config, signing properties, or
 * the single signing region, in that order of precedence.
 */
export class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer {
    async sign(httpRequest, identity, signingProperties) {
        if (!HttpRequest.isInstance(httpRequest)) {
            throw new Error("The request is not an instance of `HttpRequest` and cannot be signed");
        }
        const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties(signingProperties);
        // Config-level region set wins over the endpoint-resolved one.
        const regionSet = (await config.sigv4aSigningRegionSet?.()) ?? signingRegionSet ?? [signingRegion];
        return signer.sign(httpRequest, {
            signingDate: getSkewCorrectedDate(config.systemClockOffset),
            signingRegion: regionSet.join(","),
            signingService: signingName,
        });
    }
}

View File

@@ -0,0 +1,72 @@
import { HttpRequest } from "@smithy/protocol-http";
import { getDateHeader, getSkewCorrectedDate, getUpdatedSystemClockOffset } from "../utils";
const throwSigningPropertyError = (name, property) => {
if (!property) {
throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`);
}
return property;
};
export const validateSigningProperties = async (signingProperties) => {
const context = throwSigningPropertyError("context", signingProperties.context);
const config = throwSigningPropertyError("config", signingProperties.config);
const authScheme = context.endpointV2?.properties?.authSchemes?.[0];
const signerFunction = throwSigningPropertyError("signer", config.signer);
const signer = await signerFunction(authScheme);
const signingRegion = signingProperties?.signingRegion;
const signingRegionSet = signingProperties?.signingRegionSet;
const signingName = signingProperties?.signingName;
return {
config,
signer,
signingRegion,
signingRegionSet,
signingName,
};
};
/**
 * Signs requests with AWS Signature Version 4 and keeps the client's
 * systemClockOffset synchronized with server-reported time.
 */
export class AwsSdkSigV4Signer {
    async sign(httpRequest, identity, signingProperties) {
        if (!HttpRequest.isInstance(httpRequest)) {
            throw new Error("The request is not an instance of `HttpRequest` and cannot be signed");
        }
        const validatedProps = await validateSigningProperties(signingProperties);
        const { config, signer } = validatedProps;
        let { signingRegion, signingName } = validatedProps;
        const handlerExecutionContext = signingProperties.context;
        // BUGFIX: the original condition `a?.b?.length ?? 0 > 1` parsed as
        // `length ?? (0 > 1)` because relational `>` binds tighter than `??`,
        // so it was entered whenever any auth scheme was present. Parenthesize
        // so we really test for more than one scheme.
        if ((handlerExecutionContext?.authSchemes?.length ?? 0) > 1) {
            // When sigv4a is listed first with a sigv4 fallback, prefer the
            // fallback's region/name for plain SigV4 signing.
            const [first, second] = handlerExecutionContext.authSchemes;
            if (first?.name === "sigv4a" && second?.name === "sigv4") {
                signingRegion = second?.signingRegion ?? signingRegion;
                signingName = second?.signingName ?? signingName;
            }
        }
        const signedRequest = await signer.sign(httpRequest, {
            // Correct the signing date by the tracked clock skew.
            signingDate: getSkewCorrectedDate(config.systemClockOffset),
            signingRegion: signingRegion,
            signingService: signingName,
        });
        return signedRequest;
    }
    /**
     * Returns an error handler that records server-reported time to correct
     * clock skew, then always rethrows the original error.
     */
    errorHandler(signingProperties) {
        return (error) => {
            const serverTime = error.ServerTime ?? getDateHeader(error.$response);
            if (serverTime) {
                const config = throwSigningPropertyError("config", signingProperties.config);
                const initialSystemClockOffset = config.systemClockOffset;
                config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset);
                const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset;
                if (clockSkewCorrected && error.$metadata) {
                    // Signal retry strategies that a corrected retry may succeed.
                    error.$metadata.clockSkewCorrected = true;
                }
            }
            throw error;
        };
    }
    /**
     * On success, folds the response Date header into the clock-skew offset.
     */
    successHandler(httpResponse, signingProperties) {
        const dateHeader = getDateHeader(httpResponse);
        if (dateHeader) {
            const config = throwSigningPropertyError("config", signingProperties.config);
            config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset);
        }
    }
}
// Legacy alias retained for backward compatibility.
export const AWSSDKSigV4Signer = AwsSdkSigV4Signer;

View File

@@ -0,0 +1,22 @@
import { getArrayForCommaSeparatedString } from "../utils/getArrayForCommaSeparatedString";
import { getBearerTokenEnvKey } from "../utils/getBearerTokenEnvKey";
const NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY = "AWS_AUTH_SCHEME_PREFERENCE";
const NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY = "auth_scheme_preference";
/**
 * Loader options for the auth scheme preference list. A service-specific
 * bearer-token environment variable forces httpBearerAuth; otherwise the
 * preference is read as a comma-separated list from the environment or the
 * shared config file, defaulting to an empty list.
 */
export const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = {
    environmentVariableSelector: (env, options) => {
        const signingName = options?.signingName;
        if (signingName && getBearerTokenEnvKey(signingName) in env) {
            return ["httpBearerAuth"];
        }
        if (NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY in env) {
            return getArrayForCommaSeparatedString(env[NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY]);
        }
        return undefined;
    },
    configFileSelector: (profile) => {
        if (NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY in profile) {
            return getArrayForCommaSeparatedString(profile[NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY]);
        }
        return undefined;
    },
    default: [],
};

View File

@@ -0,0 +1,5 @@
export { AwsSdkSigV4Signer, AWSSDKSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer";
export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner";
export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS";
export * from "./resolveAwsSdkSigV4AConfig";
export * from "./resolveAwsSdkSigV4Config";

View File

@@ -0,0 +1,25 @@
import { normalizeProvider } from "@smithy/core";
import { ProviderError } from "@smithy/property-provider";
/**
 * Normalizes sigv4aSigningRegionSet (a value or a provider) into provider
 * form on the config object, and returns the same config.
 */
export const resolveAwsSdkSigV4AConfig = (config) => {
    const regionSetProvider = normalizeProvider(config.sigv4aSigningRegionSet);
    config.sigv4aSigningRegionSet = regionSetProvider;
    return config;
};
/**
 * Loader options for the SigV4a signing region set: a comma-separated list
 * from the environment or the shared config file. Missing values throw a
 * tryNextLink ProviderError so the chain continues.
 */
export const NODE_SIGV4A_CONFIG_OPTIONS = {
    environmentVariableSelector(env) {
        const raw = env.AWS_SIGV4A_SIGNING_REGION_SET;
        if (!raw) {
            throw new ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", {
                tryNextLink: true,
            });
        }
        return raw.split(",").map((region) => region.trim());
    },
    configFileSelector(profile) {
        const raw = profile.sigv4a_signing_region_set;
        if (!raw) {
            throw new ProviderError("sigv4a_signing_region_set not set in profile.", {
                tryNextLink: true,
            });
        }
        return raw.split(",").map((region) => region.trim());
    },
    default: undefined,
};

View File

@@ -0,0 +1,139 @@
import { setCredentialFeature } from "@aws-sdk/core/client";
import { doesIdentityRequireRefresh, isIdentityExpired, memoizeIdentityProvider, normalizeProvider, } from "@smithy/core";
import { SignatureV4 } from "@smithy/signature-v4";
/**
 * Resolves SigV4-related client configuration: installs an attributing
 * accessor over `credentials`, and resolves `systemClockOffset`,
 * `signingEscapePath`, and a lazy `signer` provider.
 *
 * NOTE(review): `credentials` is replaced with a property accessor so that
 * any later assignment re-runs normalization/memoization and feature
 * attribution; statement order below is load-bearing.
 */
export const resolveAwsSdkSigV4Config = (config) => {
    let inputCredentials = config.credentials;
    let isUserSupplied = !!config.credentials;
    let resolvedCredentials = undefined;
    Object.defineProperty(config, "credentials", {
        set(credentials) {
            // A new identity (neither the original input nor our own resolved
            // provider) marks the credentials as user-supplied.
            if (credentials && credentials !== inputCredentials && credentials !== resolvedCredentials) {
                isUserSupplied = true;
            }
            inputCredentials = credentials;
            const memoizedProvider = normalizeCredentialProvider(config, {
                credentials: inputCredentials,
                credentialDefaultProvider: config.credentialDefaultProvider,
            });
            const boundProvider = bindCallerConfig(config, memoizedProvider);
            if (isUserSupplied && !boundProvider.attributed) {
                // Wrap the provider so statically supplied credential objects
                // are tagged with CREDENTIALS_CODE when nothing else attributed
                // a source for them.
                const isCredentialObject = typeof inputCredentials === "object" && inputCredentials !== null;
                resolvedCredentials = async (options) => {
                    const creds = await boundProvider(options);
                    const attributedCreds = creds;
                    if (isCredentialObject && (!attributedCreds.$source || Object.keys(attributedCreds.$source).length === 0)) {
                        return setCredentialFeature(attributedCreds, "CREDENTIALS_CODE", "e");
                    }
                    return attributedCreds;
                };
                // Preserve the marker flags so re-entry does not double-wrap.
                resolvedCredentials.memoized = boundProvider.memoized;
                resolvedCredentials.configBound = boundProvider.configBound;
                resolvedCredentials.attributed = true;
            }
            else {
                resolvedCredentials = boundProvider;
            }
        },
        get() {
            return resolvedCredentials;
        },
        enumerable: true,
        configurable: true,
    });
    // Run the setter once with the original input to initialize the provider.
    config.credentials = inputCredentials;
    const { signingEscapePath = true, systemClockOffset = config.systemClockOffset || 0, sha256, } = config;
    let signer;
    if (config.signer) {
        // Caller supplied an explicit signer (value or provider).
        signer = normalizeProvider(config.signer);
    }
    else if (config.regionInfoProvider) {
        // Legacy path: derive signing region/service from regionInfoProvider.
        signer = () => normalizeProvider(config.region)()
            .then(async (region) => [
            (await config.regionInfoProvider(region, {
                useFipsEndpoint: await config.useFipsEndpoint(),
                useDualstackEndpoint: await config.useDualstackEndpoint(),
            })) || {},
            region,
        ])
            .then(([regionInfo, region]) => {
            const { signingRegion, signingService } = regionInfo;
            // Existing config values take precedence over derived ones.
            config.signingRegion = config.signingRegion || signingRegion || region;
            config.signingName = config.signingName || signingService || config.serviceId;
            const params = {
                ...config,
                credentials: config.credentials,
                region: config.signingRegion,
                service: config.signingName,
                sha256,
                uriEscapePath: signingEscapePath,
            };
            const SignerCtor = config.signerConstructor || SignatureV4;
            return new SignerCtor(params);
        });
    }
    else {
        // Endpoints-2.0 path: the resolved endpoint's auth scheme supplies the
        // signing region/name, with config values taking precedence.
        signer = async (authScheme) => {
            authScheme = Object.assign({}, {
                name: "sigv4",
                signingName: config.signingName || config.defaultSigningName,
                signingRegion: await normalizeProvider(config.region)(),
                properties: {},
            }, authScheme);
            const signingRegion = authScheme.signingRegion;
            const signingService = authScheme.signingName;
            config.signingRegion = config.signingRegion || signingRegion;
            config.signingName = config.signingName || signingService || config.serviceId;
            const params = {
                ...config,
                credentials: config.credentials,
                region: config.signingRegion,
                service: config.signingName,
                sha256,
                uriEscapePath: signingEscapePath,
            };
            const SignerCtor = config.signerConstructor || SignatureV4;
            return new SignerCtor(params);
        };
    }
    const resolvedConfig = Object.assign(config, {
        systemClockOffset,
        signingEscapePath,
        signer,
    });
    return resolvedConfig;
};
// Legacy alias retained for backward compatibility.
export const resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config;
/**
 * Turns explicit credentials, a default provider factory, or nothing into a
 * single memoized credential provider. With neither source configured, the
 * returned provider always throws.
 */
function normalizeCredentialProvider(config, { credentials, credentialDefaultProvider, }) {
    let provider;
    if (credentials) {
        // Memoize unless the provider is already marked memoized.
        provider = credentials?.memoized
            ? credentials
            : memoizeIdentityProvider(credentials, isIdentityExpired, doesIdentityRequireRefresh);
    }
    else if (credentialDefaultProvider) {
        provider = normalizeProvider(credentialDefaultProvider(Object.assign({}, config, {
            parentClientConfig: config,
        })));
    }
    else {
        provider = async () => {
            throw new Error("@aws-sdk/core::resolveAwsSdkSigV4Config - `credentials` not provided and no credentialDefaultProvider was configured.");
        };
    }
    provider.memoized = true;
    return provider;
}
function bindCallerConfig(config, credentialsProvider) {
if (credentialsProvider.configBound) {
return credentialsProvider;
}
const fn = async (options) => credentialsProvider({ ...options, callerClientConfig: config });
fn.memoized = credentialsProvider.memoized;
fn.configBound = true;
return fn;
}

View File

@@ -0,0 +1,2 @@
export * from "./aws_sdk";
export * from "./utils/getBearerTokenEnvKey";

View File

@@ -0,0 +1 @@
export const getArrayForCommaSeparatedString = (str) => typeof str === "string" && str.length > 0 ? str.split(",").map((item) => item.trim()) : [];

View File

@@ -0,0 +1 @@
export const getBearerTokenEnvKey = (signingName) => `AWS_BEARER_TOKEN_${signingName.replace(/[\s-]/g, "_").toUpperCase()}`;

View File

@@ -0,0 +1,2 @@
import { HttpResponse } from "@smithy/protocol-http";
export const getDateHeader = (response) => HttpResponse.isInstance(response) ? response.headers?.date ?? response.headers?.Date : undefined;

View File

@@ -0,0 +1 @@
export const getSkewCorrectedDate = (systemClockOffset) => new Date(Date.now() + systemClockOffset);

View File

@@ -0,0 +1,8 @@
import { isClockSkewed } from "./isClockSkewed";
/**
 * Recomputes the clock offset from a server-reported time string; keeps the
 * current offset when the skew is within tolerance.
 */
export const getUpdatedSystemClockOffset = (clockTime, currentSystemClockOffset) => {
    const serverTimeMs = Date.parse(clockTime);
    return isClockSkewed(serverTimeMs, currentSystemClockOffset)
        ? serverTimeMs - Date.now()
        : currentSystemClockOffset;
};

View File

@@ -0,0 +1,3 @@
export * from "./getDateHeader";
export * from "./getSkewCorrectedDate";
export * from "./getUpdatedSystemClockOffset";

View File

@@ -0,0 +1,2 @@
import { getSkewCorrectedDate } from "./getSkewCorrectedDate";
export const isClockSkewed = (clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 300000;

View File

@@ -0,0 +1,6 @@
export class SerdeContextConfig {
serdeContext;
setSerdeContext(serdeContext) {
this.serdeContext = serdeContext;
}
}

View File

@@ -0,0 +1,122 @@
import { NormalizedSchema, TypeRegistry } from "@smithy/core/schema";
import { decorateServiceException } from "@smithy/smithy-client";
/**
 * Shared helper routines for AWS protocol implementations (JSON, query,
 * XML, CBOR). When constructed with queryCompat=true, error shaping also
 * follows the awsQueryCompatible trait conventions.
 */
export class ProtocolLib {
    queryCompat;
    constructor(queryCompat = false) {
        this.queryCompat = queryCompat;
    }
    /**
     * Chooses the content-type for a REST request body from the input
     * schema's httpPayload member (mediaType, then string -> text/plain,
     * blob -> application/octet-stream, else the default). Without an
     * httpPayload member, returns the default type only when some member
     * has no HTTP binding traits (and therefore serializes into the body);
     * otherwise returns undefined (no body).
     */
    resolveRestContentType(defaultContentType, inputSchema) {
        const members = inputSchema.getMemberSchemas();
        const httpPayloadMember = Object.values(members).find((m) => {
            return !!m.getMergedTraits().httpPayload;
        });
        if (httpPayloadMember) {
            const mediaType = httpPayloadMember.getMergedTraits().mediaType;
            if (mediaType) {
                return mediaType;
            }
            else if (httpPayloadMember.isStringSchema()) {
                return "text/plain";
            }
            else if (httpPayloadMember.isBlobSchema()) {
                return "application/octet-stream";
            }
            else {
                return defaultContentType;
            }
        }
        else if (!inputSchema.isUnitSchema()) {
            // A member with no query/header/label/prefix-header binding is
            // serialized into the request body.
            const hasBody = Object.values(members).find((m) => {
                const { httpQuery, httpQueryParams, httpHeader, httpLabel, httpPrefixHeaders } = m.getMergedTraits();
                const noPrefixHeaders = httpPrefixHeaders === void 0;
                return !httpQuery && !httpQueryParams && !httpHeader && !httpLabel && noPrefixHeaders;
            });
            if (hasBody) {
                return defaultContentType;
            }
        }
    }
    /**
     * Looks up the modeled error schema for `errorIdentifier` (optionally
     * "namespace#Name"). When no schema is registered, this method does not
     * return: it throws the service's synthetic base exception (or a plain
     * Error) decorated with the response data.
     */
    async getErrorSchemaOrThrowBaseException(errorIdentifier, defaultNamespace, response, dataObject, metadata, getErrorSchema) {
        let namespace = defaultNamespace;
        let errorName = errorIdentifier;
        if (errorIdentifier.includes("#")) {
            [namespace, errorName] = errorIdentifier.split("#");
        }
        const errorMetadata = {
            $metadata: metadata,
            // 4xx responses are client faults; 5xx are server faults.
            $fault: response.statusCode < 500 ? "client" : "server",
        };
        const registry = TypeRegistry.for(namespace);
        try {
            // A caller-supplied lookup (e.g. query-compat) takes precedence.
            const errorSchema = getErrorSchema?.(registry, errorName) ?? registry.getSchema(errorIdentifier);
            return { errorSchema, errorMetadata };
        }
        catch (e) {
            // Unregistered error: fall back to the synthetic base exception.
            dataObject.message = dataObject.message ?? dataObject.Message ?? "UnknownError";
            const synthetic = TypeRegistry.for("smithy.ts.sdk.synthetic." + namespace);
            const baseExceptionSchema = synthetic.getBaseException();
            if (baseExceptionSchema) {
                const ErrorCtor = synthetic.getErrorCtor(baseExceptionSchema) ?? Error;
                throw this.decorateServiceException(Object.assign(new ErrorCtor({ name: errorName }), errorMetadata), dataObject);
            }
            throw this.decorateServiceException(Object.assign(new Error(errorName), errorMetadata), dataObject);
        }
    }
    /**
     * Decorates a service exception with response data; in query-compat mode
     * additionally normalizes message casing, the nested Error object, and
     * copies the request id to `RequestId`.
     */
    decorateServiceException(exception, additions = {}) {
        if (this.queryCompat) {
            const msg = exception.Message ?? additions.Message;
            const error = decorateServiceException(exception, additions);
            if (msg) {
                error.message = msg;
            }
            error.Error = {
                ...error.Error,
                Type: error.Error.Type,
                Code: error.Error.Code,
                Message: error.Error.message ?? error.Error.Message ?? msg,
            };
            const reqId = error.$metadata.requestId;
            if (reqId) {
                error.RequestId = reqId;
            }
            return error;
        }
        return decorateServiceException(exception, additions);
    }
    /**
     * Populates query-compatible error fields on `output` from the
     * x-amzn-query-error header ("Code;Type") when present.
     */
    setQueryCompatError(output, response) {
        const queryErrorHeader = response.headers?.["x-amzn-query-error"];
        if (output !== undefined && queryErrorHeader != null) {
            const [Code, Type] = queryErrorHeader.split(";");
            const entries = Object.entries(output);
            const Error = {
                Code,
                Type,
            };
            Object.assign(output, Error);
            // Mirror all payload fields into Error, capitalizing `message`.
            for (const [k, v] of entries) {
                Error[k === "message" ? "Message" : k] = v;
            }
            delete Error.__type;
            output.Error = Error;
        }
    }
    /**
     * Copies the query-compat Error/Type/Code fields from the raw error data
     * onto the deserialized error output, when present.
     */
    queryCompatOutput(queryCompatErrorData, errorData) {
        if (queryCompatErrorData.Error) {
            errorData.Error = queryCompatErrorData.Error;
        }
        if (queryCompatErrorData.Type) {
            errorData.Type = queryCompatErrorData.Type;
        }
        if (queryCompatErrorData.Code) {
            errorData.Code = queryCompatErrorData.Code;
        }
    }
    /**
     * Resolves an error schema by name, falling back to a scan for a schema
     * whose awsQueryError trait code matches `errorName`.
     */
    findQueryCompatibleError(registry, errorName) {
        try {
            return registry.getSchema(errorName);
        }
        catch (e) {
            return registry.find((schema) => NormalizedSchema.of(schema).getMergedTraits().awsQueryError?.[0] === errorName);
        }
    }
}

View File

@@ -0,0 +1,23 @@
export class UnionSerde {
from;
to;
keys;
constructor(from, to) {
this.from = from;
this.to = to;
this.keys = new Set(Object.keys(this.from).filter((k) => k !== "__type"));
}
mark(key) {
this.keys.delete(key);
}
hasUnknown() {
return this.keys.size === 1 && Object.keys(this.to).length === 0;
}
writeUnknown() {
if (this.hasUnknown()) {
const k = this.keys.values().next().value;
const v = this.from[k];
this.to.$unknown = [k, v];
}
}
}

View File

@@ -0,0 +1,49 @@
import { loadSmithyRpcV2CborErrorCode, SmithyRpcV2CborProtocol } from "@smithy/core/cbor";
import { NormalizedSchema, TypeRegistry } from "@smithy/core/schema";
import { ProtocolLib } from "../ProtocolLib";
/**
 * RPCv2 CBOR protocol with AWS-specific error handling and optional
 * awsQueryCompatible behavior (legacy Query error codes and headers).
 */
export class AwsSmithyRpcV2CborProtocol extends SmithyRpcV2CborProtocol {
    awsQueryCompatible;
    mixin;
    constructor({ defaultNamespace, awsQueryCompatible, }) {
        super({ defaultNamespace });
        this.awsQueryCompatible = !!awsQueryCompatible;
        this.mixin = new ProtocolLib(this.awsQueryCompatible);
    }
    async serializeRequest(operationSchema, input, context) {
        const request = await super.serializeRequest(operationSchema, input, context);
        if (this.awsQueryCompatible) {
            // Asks the service to respond with Query-compatible error metadata.
            request.headers["x-amzn-query-mode"] = "true";
        }
        return request;
    }
    async handleError(operationSchema, context, response, dataObject, metadata) {
        if (this.awsQueryCompatible) {
            this.mixin.setQueryCompatError(dataObject, response);
        }
        // Prefer the legacy Query error header in compat mode; otherwise read
        // the RPCv2 CBOR error code from the response payload.
        const errorName = (() => {
            const compatHeader = response.headers["x-amzn-query-error"];
            if (compatHeader && this.awsQueryCompatible) {
                return compatHeader.split(";")[0];
            }
            return loadSmithyRpcV2CborErrorCode(response, dataObject) ?? "Unknown";
        })();
        const { errorSchema, errorMetadata } = await this.mixin.getErrorSchemaOrThrowBaseException(errorName, this.options.defaultNamespace, response, dataObject, metadata, this.awsQueryCompatible ? this.mixin.findQueryCompatibleError : undefined);
        const ns = NormalizedSchema.of(errorSchema);
        const message = dataObject.message ?? dataObject.Message ?? "Unknown";
        const ErrorCtor = TypeRegistry.for(errorSchema[1]).getErrorCtor(errorSchema) ?? Error;
        const exception = new ErrorCtor(message);
        const output = {};
        // Deserialize each modeled member present in the error payload.
        for (const [name, member] of ns.structIterator()) {
            if (dataObject[name] != null) {
                output[name] = this.deserializer.readValue(member, dataObject[name]);
            }
        }
        if (this.awsQueryCompatible) {
            this.mixin.queryCompatOutput(dataObject, output);
        }
        throw this.mixin.decorateServiceException(Object.assign(exception, errorMetadata, {
            $fault: ns.getMergedTraits().error,
            message,
        }, output), dataObject);
    }
}

View File

@@ -0,0 +1,53 @@
/**
 * Coerces numbers, bigints, and booleans to strings (with a console
 * warning); null/undefined and all other values pass through unchanged.
 */
export const _toStr = (val) => {
    if (val == null) {
        return val;
    }
    const kind = typeof val;
    if (kind === "number" || kind === "bigint" || kind === "boolean") {
        // Preserve the original wording: bigints are reported as "number".
        const label = kind === "boolean" ? "boolean" : "number";
        const warning = new Error(`Received ${label} ${val} where a string was expected.`);
        warning.name = "Warning";
        console.warn(warning);
        return String(val);
    }
    return val;
};
/**
 * Coerces strings to booleans: "" and case-insensitive "false" become
 * false, everything else becomes true (with a console warning for strings
 * that are not "", "true", or "false"). null/undefined and non-string
 * values pass through unchanged.
 */
export const _toBool = (val) => {
    if (val == null) {
        return val;
    }
    // Removed a dead empty `if (typeof val === "number") {}` branch that had
    // no effect; numbers fall through and are returned unchanged below.
    if (typeof val === "string") {
        const lowercase = val.toLowerCase();
        if (val !== "" && lowercase !== "false" && lowercase !== "true") {
            const warning = new Error(`Received string "${val}" where a boolean was expected.`);
            warning.name = "Warning";
            console.warn(warning);
        }
        return val !== "" && lowercase !== "false";
    }
    return val;
};
/**
 * Coerces canonical numeric strings to numbers; non-canonical strings
 * (e.g. "abc", "1e3") are returned unchanged with a console warning.
 * null/undefined and non-string values pass through unchanged.
 */
export const _toNum = (val) => {
    if (val == null) {
        return val;
    }
    // Removed a dead empty `if (typeof val === "boolean") {}` branch that had
    // no effect; booleans fall through and are returned unchanged below.
    if (typeof val === "string") {
        const num = Number(val);
        // Round-trip check: only convert when Number() preserves the exact
        // textual representation, so no information is silently lost.
        if (num.toString() !== val) {
            const warning = new Error(`Received string "${val}" where a number was expected.`);
            warning.name = "Warning";
            console.warn(warning);
            return val;
        }
        return num;
    }
    return val;
};

View File

@@ -0,0 +1,3 @@
import { collectBody } from "@smithy/smithy-client";
import { toUtf8 } from "@smithy/util-utf8";
export const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => (context?.utf8Encoder ?? toUtf8)(body));

View File

@@ -0,0 +1,18 @@
export * from "./cbor/AwsSmithyRpcV2CborProtocol";
export * from "./coercing-serializers";
export * from "./json/AwsJson1_0Protocol";
export * from "./json/AwsJson1_1Protocol";
export * from "./json/AwsJsonRpcProtocol";
export * from "./json/AwsRestJsonProtocol";
export * from "./json/JsonCodec";
export * from "./json/JsonShapeDeserializer";
export * from "./json/JsonShapeSerializer";
export * from "./json/awsExpectUnion";
export * from "./json/parseJsonBody";
export * from "./query/AwsEc2QueryProtocol";
export * from "./query/AwsQueryProtocol";
export * from "./xml/AwsRestXmlProtocol";
export * from "./xml/XmlCodec";
export * from "./xml/XmlShapeDeserializer";
export * from "./xml/XmlShapeSerializer";
export * from "./xml/parseXmlBody";

View File

@@ -0,0 +1,20 @@
import { AwsJsonRpcProtocol } from "./AwsJsonRpcProtocol";
/**
 * awsJson1_0 protocol implementation; all behavior lives in the JSON-RPC
 * base class, parameterized by the version-specific values below.
 */
export class AwsJson1_0Protocol extends AwsJsonRpcProtocol {
    constructor(options) {
        // Forward only the options the base class consumes.
        const { defaultNamespace, serviceTarget, awsQueryCompatible, jsonCodec } = options;
        super({ defaultNamespace, serviceTarget, awsQueryCompatible, jsonCodec });
    }
    getShapeId() {
        return "aws.protocols#awsJson1_0";
    }
    getJsonRpcVersion() {
        return "1.0";
    }
    getDefaultContentType() {
        return "application/x-amz-json-1.0";
    }
}

View File

@@ -0,0 +1,20 @@
import { AwsJsonRpcProtocol } from "./AwsJsonRpcProtocol";
/**
 * awsJson1_1 protocol implementation; all behavior lives in the JSON-RPC
 * base class, parameterized by the version-specific values below.
 */
export class AwsJson1_1Protocol extends AwsJsonRpcProtocol {
    constructor(options) {
        // Forward only the options the base class consumes.
        const { defaultNamespace, serviceTarget, awsQueryCompatible, jsonCodec } = options;
        super({ defaultNamespace, serviceTarget, awsQueryCompatible, jsonCodec });
    }
    getShapeId() {
        return "aws.protocols#awsJson1_1";
    }
    getJsonRpcVersion() {
        return "1.1";
    }
    getDefaultContentType() {
        return "application/x-amz-json-1.1";
    }
}

View File

@@ -0,0 +1,76 @@
import { RpcProtocol } from "@smithy/core/protocols";
import { deref, NormalizedSchema, TypeRegistry } from "@smithy/core/schema";
import { ProtocolLib } from "../ProtocolLib";
import { JsonCodec } from "./JsonCodec";
import { loadRestJsonErrorCode } from "./parseJsonBody";
/**
 * Base class for the awsJson1_0 / awsJson1_1 RPC protocols. Subclasses
 * supply the JSON-RPC version, shape id, and default content type.
 */
export class AwsJsonRpcProtocol extends RpcProtocol {
    serializer;
    deserializer;
    serviceTarget;
    codec;
    mixin;
    awsQueryCompatible;
    constructor({ defaultNamespace, serviceTarget, awsQueryCompatible, jsonCodec, }) {
        super({
            defaultNamespace,
        });
        this.serviceTarget = serviceTarget;
        // Default codec: trait-aware timestamps with format 7 (epoch seconds)
        // as the default, and no jsonName remapping for RPC protocols.
        this.codec =
            jsonCodec ??
                new JsonCodec({
                    timestampFormat: {
                        useTrait: true,
                        default: 7,
                    },
                    jsonName: false,
                });
        this.serializer = this.codec.createSerializer();
        this.deserializer = this.codec.createDeserializer();
        this.awsQueryCompatible = !!awsQueryCompatible;
        this.mixin = new ProtocolLib(this.awsQueryCompatible);
    }
    async serializeRequest(operationSchema, input, context) {
        const request = await super.serializeRequest(operationSchema, input, context);
        if (!request.path.endsWith("/")) {
            request.path += "/";
        }
        // RPC routing: the operation is identified by the X-Amz-Target header.
        Object.assign(request.headers, {
            "content-type": `application/x-amz-json-${this.getJsonRpcVersion()}`,
            "x-amz-target": `${this.serviceTarget}.${operationSchema.name}`,
        });
        if (this.awsQueryCompatible) {
            // Asks the service to respond with Query-compatible error metadata.
            request.headers["x-amzn-query-mode"] = "true";
        }
        if (deref(operationSchema.input) === "unit" || !request.body) {
            // JSON-RPC requests always carry a JSON object body.
            request.body = "{}";
        }
        return request;
    }
    getPayloadCodec() {
        return this.codec;
    }
    async handleError(operationSchema, context, response, dataObject, metadata) {
        if (this.awsQueryCompatible) {
            this.mixin.setQueryCompatError(dataObject, response);
        }
        const errorIdentifier = loadRestJsonErrorCode(response, dataObject) ?? "Unknown";
        // Resolves the modeled error schema, or throws the base exception.
        const { errorSchema, errorMetadata } = await this.mixin.getErrorSchemaOrThrowBaseException(errorIdentifier, this.options.defaultNamespace, response, dataObject, metadata, this.awsQueryCompatible ? this.mixin.findQueryCompatibleError : undefined);
        const ns = NormalizedSchema.of(errorSchema);
        const message = dataObject.message ?? dataObject.Message ?? "Unknown";
        const ErrorCtor = TypeRegistry.for(errorSchema[1]).getErrorCtor(errorSchema) ?? Error;
        const exception = new ErrorCtor(message);
        const output = {};
        // Deserialize each modeled member present in the error payload.
        for (const [name, member] of ns.structIterator()) {
            if (dataObject[name] != null) {
                output[name] = this.codec.createDeserializer().readObject(member, dataObject[name]);
            }
        }
        if (this.awsQueryCompatible) {
            this.mixin.queryCompatOutput(dataObject, output);
        }
        throw this.mixin.decorateServiceException(Object.assign(exception, errorMetadata, {
            $fault: ns.getMergedTraits().error,
            message,
        }, output), dataObject);
    }
}

View File

@@ -0,0 +1,82 @@
import { HttpBindingProtocol, HttpInterceptingShapeDeserializer, HttpInterceptingShapeSerializer, } from "@smithy/core/protocols";
import { NormalizedSchema, TypeRegistry } from "@smithy/core/schema";
import { ProtocolLib } from "../ProtocolLib";
import { JsonCodec } from "./JsonCodec";
import { loadRestJsonErrorCode } from "./parseJsonBody";
/**
 * restJson1 protocol: HTTP-binding serialization with a JSON payload codec.
 */
export class AwsRestJsonProtocol extends HttpBindingProtocol {
    serializer;
    deserializer;
    codec;
    mixin = new ProtocolLib();
    constructor({ defaultNamespace }) {
        super({
            defaultNamespace,
        });
        // Trait-aware timestamps (format 7, epoch seconds, as default), HTTP
        // bindings enabled, and jsonName remapping active for restJson1.
        const settings = {
            timestampFormat: {
                useTrait: true,
                default: 7,
            },
            httpBindings: true,
            jsonName: true,
        };
        this.codec = new JsonCodec(settings);
        this.serializer = new HttpInterceptingShapeSerializer(this.codec.createSerializer(), settings);
        this.deserializer = new HttpInterceptingShapeDeserializer(this.codec.createDeserializer(), settings);
    }
    getShapeId() {
        return "aws.protocols#restJson1";
    }
    getPayloadCodec() {
        return this.codec;
    }
    setSerdeContext(serdeContext) {
        // Propagate the serde context to both the codec and the base protocol.
        this.codec.setSerdeContext(serdeContext);
        super.setSerdeContext(serdeContext);
    }
    async serializeRequest(operationSchema, input, context) {
        const request = await super.serializeRequest(operationSchema, input, context);
        const inputSchema = NormalizedSchema.of(operationSchema.input);
        if (!request.headers["content-type"]) {
            // Infer content-type from the input schema's payload bindings.
            const contentType = this.mixin.resolveRestContentType(this.getDefaultContentType(), inputSchema);
            if (contentType) {
                request.headers["content-type"] = contentType;
            }
        }
        if (request.body == null && request.headers["content-type"] === this.getDefaultContentType()) {
            // JSON requests send an empty object rather than an empty body.
            request.body = "{}";
        }
        return request;
    }
    async deserializeResponse(operationSchema, context, response) {
        const output = await super.deserializeResponse(operationSchema, context, response);
        const outputSchema = NormalizedSchema.of(operationSchema.output);
        // Ensure an httpPayload member key exists (as null) even when absent.
        for (const [name, member] of outputSchema.structIterator()) {
            if (member.getMemberTraits().httpPayload && !(name in output)) {
                output[name] = null;
            }
        }
        return output;
    }
    async handleError(operationSchema, context, response, dataObject, metadata) {
        const errorIdentifier = loadRestJsonErrorCode(response, dataObject) ?? "Unknown";
        // Resolves the modeled error schema, or throws the base exception.
        const { errorSchema, errorMetadata } = await this.mixin.getErrorSchemaOrThrowBaseException(errorIdentifier, this.options.defaultNamespace, response, dataObject, metadata);
        const ns = NormalizedSchema.of(errorSchema);
        const message = dataObject.message ?? dataObject.Message ?? "Unknown";
        const ErrorCtor = TypeRegistry.for(errorSchema[1]).getErrorCtor(errorSchema) ?? Error;
        const exception = new ErrorCtor(message);
        await this.deserializeHttpMessage(errorSchema, context, response, dataObject);
        const output = {};
        for (const [name, member] of ns.structIterator()) {
            // Members may be renamed on the wire via the jsonName trait.
            const target = member.getMergedTraits().jsonName ?? name;
            output[name] = this.codec.createDeserializer().readObject(member, dataObject[target]);
        }
        throw this.mixin.decorateServiceException(Object.assign(exception, errorMetadata, {
            $fault: ns.getMergedTraits().error,
            message,
        }, output), dataObject);
    }
    getDefaultContentType() {
        return "application/json";
    }
}

View File

@@ -0,0 +1,20 @@
import { SerdeContextConfig } from "../ConfigurableSerdeContext";
import { JsonShapeDeserializer } from "./JsonShapeDeserializer";
import { JsonShapeSerializer } from "./JsonShapeSerializer";
/**
 * Factory for JSON shape serializers/deserializers that share one settings
 * object and the codec's serde context.
 */
export class JsonCodec extends SerdeContextConfig {
    settings;
    constructor(settings) {
        super();
        this.settings = settings;
    }
    createSerializer() {
        const jsonSerializer = new JsonShapeSerializer(this.settings);
        jsonSerializer.setSerdeContext(this.serdeContext);
        return jsonSerializer;
    }
    createDeserializer() {
        const jsonDeserializer = new JsonShapeDeserializer(this.settings);
        jsonDeserializer.setSerdeContext(this.serdeContext);
        return jsonDeserializer;
    }
}

View File

@@ -0,0 +1,156 @@
import { determineTimestampFormat } from "@smithy/core/protocols";
import { NormalizedSchema } from "@smithy/core/schema";
import { LazyJsonString, NumericValue, parseEpochTimestamp, parseRfc3339DateTimeWithOffset, parseRfc7231DateTime, } from "@smithy/core/serde";
import { fromBase64 } from "@smithy/util-base64";
import { SerdeContextConfig } from "../ConfigurableSerdeContext";
import { deserializingStructIterator } from "../structIterator";
import { UnionSerde } from "../UnionSerde";
import { jsonReviver } from "./jsonReviver";
import { parseJsonBody } from "./parseJsonBody";
/**
 * Deserializes JSON payloads into JavaScript values, guided by a Smithy
 * schema (NormalizedSchema). Handles structures/unions, lists, maps,
 * base64 blobs, timestamps, big integers/decimals, non-finite numeric
 * sentinels, and untyped document values.
 */
export class JsonShapeDeserializer extends SerdeContextConfig {
    // Codec settings; `jsonName` controls whether the jsonName trait renames wire keys,
    // `timestampFormat` feeds determineTimestampFormat below.
    settings;
    constructor(settings) {
        super();
        this.settings = settings;
    }
    /**
     * Deserializes raw data per the schema.
     * @param schema - schema (or schema ref) describing the value.
     * @param data - a JSON string, or a byte/stream body parsed via parseJsonBody.
     *   String input is parsed with jsonReviver so numbers outside the safe
     *   integer range survive as BigInt / NumericValue.
     */
    async read(schema, data) {
        return this._read(schema, typeof data === "string" ? JSON.parse(data, jsonReviver) : await parseJsonBody(data, this.serdeContext));
    }
    // Deserializes an already-parsed JSON value (skips the body-parsing step).
    readObject(schema, data) {
        return this._read(schema, data);
    }
    // Recursive worker: converts one parsed-JSON value according to its schema.
    _read(schema, value) {
        const isObject = value !== null && typeof value === "object";
        const ns = NormalizedSchema.of(schema);
        if (isObject) {
            if (ns.isStructSchema()) {
                const record = value;
                const union = ns.isUnionSchema();
                const out = {};
                // wire-key -> member-name map; only populated when jsonName renaming is on.
                let nameMap = void 0;
                const { jsonName } = this.settings;
                if (jsonName) {
                    nameMap = {};
                }
                let unionSerde;
                if (union) {
                    // Tracks which wire keys were recognized so an unrecognized
                    // union variant can be surfaced as $unknown afterwards.
                    unionSerde = new UnionSerde(record, out);
                }
                for (const [memberName, memberSchema] of deserializingStructIterator(ns, record, jsonName ? "jsonName" : false)) {
                    let fromKey = memberName;
                    if (jsonName) {
                        // The wire key may differ from the member name via the jsonName trait.
                        fromKey = memberSchema.getMergedTraits().jsonName ?? fromKey;
                        nameMap[fromKey] = memberName;
                    }
                    if (union) {
                        unionSerde.mark(fromKey);
                    }
                    if (record[fromKey] != null) {
                        out[memberName] = this._read(memberSchema, record[fromKey]);
                    }
                }
                if (union) {
                    unionSerde.writeUnknown();
                }
                else if (typeof record.__type === "string") {
                    // Discriminated payload (__type present): copy through any
                    // wire keys not already covered by modeled members.
                    for (const [k, v] of Object.entries(record)) {
                        const t = jsonName ? nameMap[k] ?? k : k;
                        if (!(t in out)) {
                            out[t] = v;
                        }
                    }
                }
                return out;
            }
            if (Array.isArray(value) && ns.isListSchema()) {
                const listMember = ns.getValueSchema();
                const out = [];
                // Sparse lists keep null entries; dense lists drop them.
                const sparse = !!ns.getMergedTraits().sparse;
                for (const item of value) {
                    if (sparse || item != null) {
                        out.push(this._read(listMember, item));
                    }
                }
                return out;
            }
            if (ns.isMapSchema()) {
                const mapMember = ns.getValueSchema();
                const out = {};
                // Same sparse semantics as lists, applied to map values.
                const sparse = !!ns.getMergedTraits().sparse;
                for (const [_k, _v] of Object.entries(value)) {
                    if (sparse || _v != null) {
                        out[_k] = this._read(mapMember, _v);
                    }
                }
                return out;
            }
        }
        // Blobs arrive base64-encoded in JSON.
        if (ns.isBlobSchema() && typeof value === "string") {
            return fromBase64(value);
        }
        const mediaType = ns.getMergedTraits().mediaType;
        if (ns.isStringSchema() && typeof value === "string" && mediaType) {
            // JSON-media-typed strings are wrapped so re-serialization can defer parsing.
            const isJson = mediaType === "application/json" || mediaType.endsWith("+json");
            if (isJson) {
                return LazyJsonString.from(value);
            }
            return value;
        }
        if (ns.isTimestampSchema() && value != null) {
            const format = determineTimestampFormat(ns, this.settings);
            // Format codes map to the parser used: 5 = RFC 3339 date-time,
            // 6 = RFC 7231 http-date, 7 = epoch seconds.
            switch (format) {
                case 5:
                    return parseRfc3339DateTimeWithOffset(value);
                case 6:
                    return parseRfc7231DateTime(value);
                case 7:
                    return parseEpochTimestamp(value);
                default:
                    console.warn("Missing timestamp format, parsing value with Date constructor:", value);
                    return new Date(value);
            }
        }
        if (ns.isBigIntegerSchema() && (typeof value === "number" || typeof value === "string")) {
            return BigInt(value);
        }
        if (ns.isBigDecimalSchema() && value != undefined) {
            // Already revived by jsonReviver.
            if (value instanceof NumericValue) {
                return value;
            }
            // Plain-object form { type: "bigDecimal", string: "..." } from prior serde.
            const untyped = value;
            if (untyped.type === "bigDecimal" && "string" in untyped) {
                return new NumericValue(untyped.string, untyped.type);
            }
            return new NumericValue(String(value), "bigDecimal");
        }
        if (ns.isNumericSchema() && typeof value === "string") {
            // Non-finite numbers are transmitted as string sentinels.
            switch (value) {
                case "Infinity":
                    return Infinity;
                case "-Infinity":
                    return -Infinity;
                case "NaN":
                    return NaN;
            }
            return value;
        }
        if (ns.isDocumentSchema()) {
            if (isObject) {
                const out = Array.isArray(value) ? [] : {};
                for (const [k, v] of Object.entries(value)) {
                    if (v instanceof NumericValue) {
                        // Preserve arbitrary-precision values as-is.
                        out[k] = v;
                    }
                    else {
                        // Documents recurse with the same (document) schema.
                        out[k] = this._read(ns, v);
                    }
                }
                return out;
            }
            else {
                return structuredClone(value);
            }
        }
        return value;
    }
}

View File

@@ -0,0 +1,177 @@
import { determineTimestampFormat } from "@smithy/core/protocols";
import { NormalizedSchema } from "@smithy/core/schema";
import { dateToUtcString, generateIdempotencyToken, LazyJsonString, NumericValue } from "@smithy/core/serde";
import { toBase64 } from "@smithy/util-base64";
import { SerdeContextConfig } from "../ConfigurableSerdeContext";
import { serializingStructIterator } from "../structIterator";
import { JsonReplacer } from "./jsonReplacer";
/**
 * Serializes JavaScript values into a JSON-compatible tree, guided by a
 * Smithy schema. flush() turns the buffered tree into a JSON string; a
 * custom replacer is used only when BigInt / NumericValue values were seen,
 * since JSON.stringify cannot emit them natively.
 */
export class JsonShapeSerializer extends SerdeContextConfig {
    // Codec settings; `jsonName` controls wire-key renaming, `timestampFormat`
    // feeds determineTimestampFormat below.
    settings;
    // Serialized tree held between write() and flush().
    buffer;
    // Set when a BigInt or NumericValue was written, requiring JsonReplacer at flush.
    useReplacer = false;
    // Schema of the top-level write; lets blob/string payload roots pass through raw.
    rootSchema;
    constructor(settings) {
        super();
        this.settings = settings;
    }
    // Serializes value per schema into the internal buffer.
    write(schema, value) {
        this.rootSchema = NormalizedSchema.of(schema);
        this.buffer = this._write(this.rootSchema, value);
    }
    // As write(), but stamps the payload with a __type discriminator.
    writeDiscriminatedDocument(schema, value) {
        this.write(schema, value);
        if (typeof this.buffer === "object") {
            this.buffer.__type = NormalizedSchema.of(schema).getName(true);
        }
    }
    /**
     * Returns the serialized output and resets per-write state.
     * Struct/document roots are JSON.stringify'd; other roots (e.g. raw
     * blob/string payloads) are returned as written.
     */
    flush() {
        const { rootSchema, useReplacer } = this;
        this.rootSchema = undefined;
        this.useReplacer = false;
        if (rootSchema?.isStructSchema() || rootSchema?.isDocumentSchema()) {
            if (!useReplacer) {
                return JSON.stringify(this.buffer);
            }
            // Two-stage replacement: placeholders during stringify, then
            // textual substitution to restore exact numeric literals.
            const replacer = new JsonReplacer();
            return replacer.replaceInJson(JSON.stringify(this.buffer, replacer.createReplacer(), 0));
        }
        return this.buffer;
    }
    // Recursive worker. `container` is the parent schema, used to decide
    // whether an explicit null should be dropped (struct members).
    _write(schema, value, container) {
        const isObject = value !== null && typeof value === "object";
        const ns = NormalizedSchema.of(schema);
        if (isObject) {
            if (ns.isStructSchema()) {
                const record = value;
                const out = {};
                const { jsonName } = this.settings;
                // member-name -> wire-key map; only populated when jsonName renaming is on.
                let nameMap = void 0;
                if (jsonName) {
                    nameMap = {};
                }
                for (const [memberName, memberSchema] of serializingStructIterator(ns, record)) {
                    const serializableValue = this._write(memberSchema, record[memberName], ns);
                    if (serializableValue !== undefined) {
                        let targetKey = memberName;
                        if (jsonName) {
                            targetKey = memberSchema.getMergedTraits().jsonName ?? memberName;
                            nameMap[memberName] = targetKey;
                        }
                        out[targetKey] = serializableValue;
                    }
                }
                if (ns.isUnionSchema() && Object.keys(out).length === 0) {
                    // No modeled member written: fall back to the $unknown
                    // [key, value] pair, serialized as an untyped document (15).
                    const { $unknown } = record;
                    if (Array.isArray($unknown)) {
                        const [k, v] = $unknown;
                        out[k] = this._write(15, v);
                    }
                }
                else if (typeof record.__type === "string") {
                    // Discriminated payload: copy through unmodeled keys as documents.
                    for (const [k, v] of Object.entries(record)) {
                        const targetKey = jsonName ? nameMap[k] ?? k : k;
                        if (!(targetKey in out)) {
                            out[targetKey] = this._write(15, v);
                        }
                    }
                }
                return out;
            }
            if (Array.isArray(value) && ns.isListSchema()) {
                const listMember = ns.getValueSchema();
                const out = [];
                // Sparse lists keep null entries; dense lists drop them.
                const sparse = !!ns.getMergedTraits().sparse;
                for (const item of value) {
                    if (sparse || item != null) {
                        out.push(this._write(listMember, item));
                    }
                }
                return out;
            }
            if (ns.isMapSchema()) {
                const mapMember = ns.getValueSchema();
                const out = {};
                const sparse = !!ns.getMergedTraits().sparse;
                for (const [_k, _v] of Object.entries(value)) {
                    if (sparse || _v != null) {
                        out[_k] = this._write(mapMember, _v);
                    }
                }
                return out;
            }
            if (value instanceof Uint8Array && (ns.isBlobSchema() || ns.isDocumentSchema())) {
                // A blob payload at the root is passed through raw; nested blobs are base64'd.
                if (ns === this.rootSchema) {
                    return value;
                }
                return (this.serdeContext?.base64Encoder ?? toBase64)(value);
            }
            if (value instanceof Date && (ns.isTimestampSchema() || ns.isDocumentSchema())) {
                const format = determineTimestampFormat(ns, this.settings);
                // 5 = RFC 3339 date-time, 6 = http-date, 7 = epoch seconds.
                switch (format) {
                    case 5:
                        return value.toISOString().replace(".000Z", "Z");
                    case 6:
                        return dateToUtcString(value);
                    case 7:
                        return value.getTime() / 1000;
                    default:
                        console.warn("Missing timestamp format, using epoch seconds", value);
                        return value.getTime() / 1000;
                }
            }
            if (value instanceof NumericValue) {
                // Arbitrary-precision decimal: defer to JsonReplacer at flush().
                this.useReplacer = true;
            }
        }
        // Explicit null inside a struct member is omitted from the output.
        if (value === null && container?.isStructSchema()) {
            return void 0;
        }
        if (ns.isStringSchema()) {
            // Idempotency tokens are auto-generated when the caller omits them.
            if (typeof value === "undefined" && ns.isIdempotencyToken()) {
                return generateIdempotencyToken();
            }
            const mediaType = ns.getMergedTraits().mediaType;
            if (value != null && mediaType) {
                const isJson = mediaType === "application/json" || mediaType.endsWith("+json");
                if (isJson) {
                    return LazyJsonString.from(value);
                }
            }
            return value;
        }
        if (typeof value === "number" && ns.isNumericSchema()) {
            // Non-finite numbers are transmitted as string sentinels.
            if (Math.abs(value) === Infinity || isNaN(value)) {
                return String(value);
            }
            return value;
        }
        if (typeof value === "string" && ns.isBlobSchema()) {
            if (ns === this.rootSchema) {
                return value;
            }
            return (this.serdeContext?.base64Encoder ?? toBase64)(value);
        }
        if (typeof value === "bigint") {
            // BigInt cannot be JSON.stringify'd directly: defer to JsonReplacer.
            this.useReplacer = true;
        }
        if (ns.isDocumentSchema()) {
            if (isObject) {
                const out = Array.isArray(value) ? [] : {};
                for (const [k, v] of Object.entries(value)) {
                    if (v instanceof NumericValue) {
                        this.useReplacer = true;
                        out[k] = v;
                    }
                    else {
                        out[k] = this._write(ns, v);
                    }
                }
                return out;
            }
            else {
                return structuredClone(value);
            }
        }
        return value;
    }
}

View File

@@ -0,0 +1,10 @@
import { expectUnion } from "@smithy/smithy-client";
/**
 * AWS-flavored union validation: strips the JSON discriminator key
 * (`__type`) that some AWS services include, then delegates to the
 * generic expectUnion check (exactly one set member).
 *
 * @param value - candidate union value; null/undefined pass through as undefined.
 */
export const awsExpectUnion = (value) => {
    if (value === null || value === undefined) {
        return undefined;
    }
    const hasDiscriminator = typeof value === "object" && "__type" in value;
    if (hasDiscriminator) {
        delete value.__type;
    }
    return expectUnion(value);
};

View File

@@ -0,0 +1,136 @@
import { determineTimestampFormat } from "@smithy/core/protocols";
import { NormalizedSchema } from "@smithy/core/schema";
import { dateToUtcString, generateIdempotencyToken, LazyJsonString, NumericValue } from "@smithy/core/serde";
import { toBase64 } from "@smithy/util-base64";
import { SerdeContextConfig } from "../../ConfigurableSerdeContext";
import { serializingStructIterator } from "../../structIterator";
/**
 * Serializes values directly to a JSON string in a single pass (no
 * intermediate tree + JSON.stringify), appending fragments to a string
 * buffer. This allows BigInt / NumericValue literals to be emitted inline
 * without the placeholder/replacer mechanism of JsonShapeSerializer.
 */
export class SinglePassJsonShapeSerializer extends SerdeContextConfig {
    // Codec settings; `jsonName` controls wire-key renaming, `timestampFormat`
    // feeds determineTimestampFormat below.
    settings;
    // Serialized JSON string held between write() and flush().
    buffer;
    // Schema of the top-level write.
    rootSchema;
    constructor(settings) {
        super();
        this.settings = settings;
    }
    // Serializes value per schema into the internal string buffer.
    write(schema, value) {
        this.rootSchema = NormalizedSchema.of(schema);
        this.buffer = this.writeObject(this.rootSchema, value);
    }
    // As write(), but stamps the payload with a __type discriminator.
    // NOTE(review): this assigns __type onto the buffered *string* when the
    // buffer is non-primitive-typed; presumably only used with object buffers
    // elsewhere — confirm against callers.
    writeDiscriminatedDocument(schema, value) {
        this.write(schema, value);
        if (typeof this.buffer === "object") {
            this.buffer.__type = NormalizedSchema.of(schema).getName(true);
        }
    }
    // Returns the accumulated JSON string and clears per-write state.
    flush() {
        this.rootSchema = undefined;
        return this.buffer;
    }
    /**
     * Serializes an aggregate (list/struct/map/document) to a JSON fragment.
     * Members are appended with trailing commas; the dangling comma is trimmed
     * and the opening bracket/brace is matched at the end.
     */
    writeObject(schema, value) {
        if (value == undefined) {
            return "";
        }
        let b = "";
        const ns = NormalizedSchema.of(schema);
        const sparse = !!ns.getMergedTraits().sparse;
        if (Array.isArray(value) && (ns.isDocumentSchema() || ns.isListSchema())) {
            b += "[";
            for (let i = 0; i < value.length; ++i) {
                const item = value[i];
                // Sparse lists keep null entries; dense lists drop them.
                if (item != null || sparse) {
                    b += this.writeValue(ns.getValueSchema(), item);
                    b += ",";
                }
            }
        }
        else if (ns.isStructSchema()) {
            b += "{";
            let didWriteMember = false;
            for (const [name, member] of serializingStructIterator(ns, value)) {
                const item = value[name];
                // The jsonName trait may rename the wire key.
                const targetKey = this.settings.jsonName ? member.getMergedTraits().jsonName ?? name : name;
                const serializableValue = this.writeValue(member, item);
                // Idempotency tokens are written even when absent (auto-generated).
                if (item != null || member.isIdempotencyToken()) {
                    didWriteMember = true;
                    b += `"${targetKey}":${serializableValue}`;
                    b += ",";
                }
            }
            if (!didWriteMember && ns.isUnionSchema()) {
                // No modeled member set: fall back to the $unknown [key, value]
                // pair, serialized as an untyped document (15).
                const { $unknown } = value;
                if (Array.isArray($unknown)) {
                    const [k, v] = $unknown;
                    b += `"${k}":${this.writeValue(15, v)}`;
                }
            }
        }
        else if (ns.isMapSchema() || ns.isDocumentSchema()) {
            b += "{";
            for (const [k, v] of Object.entries(value)) {
                if (v != null || sparse) {
                    b += `"${k}":${this.writeValue(ns, v)}`;
                    b += ",";
                }
            }
        }
        // Trim the dangling separator, then close whichever bracket was opened.
        if (b[b.length - 1] === ",") {
            b = b.slice(0, -1);
        }
        if (b[0] === "[") {
            b += "]";
        }
        if (b[0] === "{") {
            b += "}";
        }
        return b;
    }
    /**
     * Serializes a single value to a JSON fragment (quoted when needed).
     * NOTE(review): string values/keys are not JSON-escaped here — presumably
     * guaranteed safe by callers; confirm for strings containing quotes.
     */
    writeValue(schema, value) {
        const isObject = value !== null && typeof value === "object";
        const ns = NormalizedSchema.of(schema);
        const quote = (_) => `"${_}"`;
        if ((ns.isBlobSchema() && (value instanceof Uint8Array || typeof value === "string")) ||
            (ns.isDocumentSchema() && value instanceof Uint8Array)) {
            // Blobs are base64-encoded on the wire.
            return quote((this.serdeContext?.base64Encoder ?? toBase64)(value));
        }
        if ((ns.isTimestampSchema() || ns.isDocumentSchema()) && value instanceof Date) {
            const format = determineTimestampFormat(ns, this.settings);
            // 5 = RFC 3339 date-time, 6 = http-date, 7 = epoch seconds.
            switch (format) {
                case 5:
                    return quote(value.toISOString().replace(".000Z", "Z"));
                case 6:
                    return quote(dateToUtcString(value));
                case 7:
                    return String(value.getTime() / 1000);
                default:
                    console.warn("Missing timestamp format, using epoch seconds", value);
                    return String(value.getTime() / 1000);
            }
        }
        if (ns.isNumericSchema() && typeof value === "number") {
            // Non-finite numbers are transmitted as quoted string sentinels.
            if (Math.abs(value) === Infinity || isNaN(value)) {
                return quote(String(value));
            }
        }
        if (ns.isStringSchema()) {
            if (typeof value === "undefined" && ns.isIdempotencyToken()) {
                return quote(generateIdempotencyToken());
            }
            if (typeof value === "string") {
                const mediaType = ns.getMergedTraits().mediaType;
                if (mediaType) {
                    const isJson = mediaType === "application/json" || mediaType.endsWith("+json");
                    if (isJson) {
                        return quote(LazyJsonString.from(value).toString());
                    }
                }
            }
        }
        if (value instanceof NumericValue) {
            // Emit the exact decimal literal, unquoted.
            return value.string;
        }
        if (isObject) {
            return this.writeObject(ns, value);
        }
        // Primitives: strings quoted, everything else (number/boolean/bigint) via String().
        return typeof value === "string" ? quote(value) : String(value);
    }
}

View File

@@ -0,0 +1,46 @@
import { NumericValue } from "@smithy/core/serde";
// Greek capital Nu (U+039D): an unlikely-in-payload marker prefixing placeholders.
const NUMERIC_CONTROL_CHAR = String.fromCharCode(925);
/**
 * Two-stage helper that lets JSON.stringify emit BigInt and NumericValue
 * as exact unquoted numeric literals. Stage 1 (createReplacer) swaps such
 * values for unique tagged string placeholders; stage 2 (replaceInJson)
 * substitutes each quoted placeholder in the output text with its literal.
 * Each instance is single-use and enforces the stage order.
 */
export class JsonReplacer {
    // quoted placeholder -> exact numeric literal text.
    values = new Map();
    // Uniquifies placeholders within one stringify pass.
    counter = 0;
    // 0 = fresh, 1 = replacer created, 2 = replacement done.
    stage = 0;
    /**
     * Returns a JSON.stringify replacer. May be called exactly once,
     * before replaceInJson.
     */
    createReplacer() {
        if (this.stage === 1) {
            throw new Error("@aws-sdk/core/protocols - JsonReplacer already created.");
        }
        if (this.stage === 2) {
            throw new Error("@aws-sdk/core/protocols - JsonReplacer exhausted.");
        }
        this.stage = 1;
        return (key, value) => {
            if (value instanceof NumericValue) {
                const placeholder = `${NUMERIC_CONTROL_CHAR}nv${this.counter++}_${value.string}`;
                this.values.set(`"${placeholder}"`, value.string);
                return placeholder;
            }
            if (typeof value === "bigint") {
                const literal = value.toString();
                const placeholder = `${NUMERIC_CONTROL_CHAR}b${this.counter++}_${literal}`;
                this.values.set(`"${placeholder}"`, literal);
                return placeholder;
            }
            return value;
        };
    }
    /**
     * Rewrites the stringified JSON, replacing each quoted placeholder with
     * its recorded numeric literal. May be called exactly once, after
     * createReplacer.
     */
    replaceInJson(json) {
        if (this.stage === 0) {
            throw new Error("@aws-sdk/core/protocols - JsonReplacer not created yet.");
        }
        if (this.stage === 2) {
            throw new Error("@aws-sdk/core/protocols - JsonReplacer exhausted.");
        }
        this.stage = 2;
        // Fast path: nothing was intercepted during stringify.
        if (this.counter === 0) {
            return json;
        }
        for (const [placeholder, literal] of this.values) {
            json = json.replace(placeholder, literal);
        }
        return json;
    }
}

View File

@@ -0,0 +1,18 @@
import { NumericValue } from "@smithy/core/serde";
export function jsonReviver(key, value, context) {
if (context?.source) {
const numericString = context.source;
if (typeof value === "number") {
if (value > Number.MAX_SAFE_INTEGER || value < Number.MIN_SAFE_INTEGER || numericString !== String(value)) {
const isFractional = numericString.includes(".");
if (isFractional) {
return new NumericValue(numericString, "bigDecimal");
}
else {
return BigInt(numericString);
}
}
}
}
return value;
}

View File

@@ -0,0 +1,54 @@
import { collectBodyString } from "../common";
/**
 * Collects a response body to a string and parses it as JSON.
 * An empty body resolves to {}. On a syntax error, the raw body text is
 * attached to the error as $responseBodyText for diagnostics before rethrow.
 */
export const parseJsonBody = async (streamBody, context) => {
    const encoded = await collectBodyString(streamBody, context);
    if (!encoded.length) {
        return {};
    }
    try {
        return JSON.parse(encoded);
    }
    catch (e) {
        if (e?.name === "SyntaxError") {
            Object.defineProperty(e, "$responseBodyText", {
                value: encoded,
            });
        }
        throw e;
    }
};
/**
 * Parses an error response body as JSON and normalizes the message field:
 * when `message` is absent, the alternate-cased `Message` is promoted.
 */
export const parseJsonErrorBody = async (errorBody, context) => {
    const parsed = await parseJsonBody(errorBody, context);
    if (parsed.message == null) {
        parsed.message = parsed.Message;
    }
    return parsed;
};
/**
 * Resolves the error code for a restJson response. Sources, in priority
 * order: the x-amzn-errortype header (case-insensitive), a `code` field in
 * the body (case-insensitive), then `__type`. Returns undefined when none
 * is present. Codes are sanitized: anything after a comma or colon is
 * dropped, and a shape-id namespace prefix ("ns#Name") is stripped.
 */
export const loadRestJsonErrorCode = (output, data) => {
    const findKey = (object, key) => {
        const target = key.toLowerCase();
        return Object.keys(object).find((k) => k.toLowerCase() === target);
    };
    const sanitizeErrorCode = (rawValue) => {
        let code = typeof rawValue === "number" ? rawValue.toString() : rawValue;
        // Keep only the segment before the first "," and ":".
        for (const delimiter of [",", ":"]) {
            const idx = code.indexOf(delimiter);
            if (idx >= 0) {
                code = code.slice(0, idx);
            }
        }
        // Strip a namespace prefix: "com.example#Name" -> "Name".
        if (code.indexOf("#") >= 0) {
            code = code.split("#")[1];
        }
        return code;
    };
    const headerKey = findKey(output.headers, "x-amzn-errortype");
    if (headerKey !== undefined) {
        return sanitizeErrorCode(output.headers[headerKey]);
    }
    if (data && typeof data === "object") {
        const codeKey = findKey(data, "code");
        if (codeKey && data[codeKey] !== undefined) {
            return sanitizeErrorCode(data[codeKey]);
        }
        if (data["__type"] !== undefined) {
            return sanitizeErrorCode(data["__type"]);
        }
    }
};

View File

@@ -0,0 +1,17 @@
import { AwsQueryProtocol } from "./AwsQueryProtocol";
/**
 * EC2 Query protocol: a variant of the AWS Query protocol whose serializer
 * capitalizes member keys, always flattens lists, and omits empty lists.
 * Unlike awsQuery, response payloads are not nested under a "<op>Result"
 * wrapper element.
 */
export class AwsEc2QueryProtocol extends AwsQueryProtocol {
    options;
    constructor(options) {
        super(options);
        this.options = options;
        // Override the base awsQuery serializer settings with EC2 conventions.
        Object.assign(this.serializer.settings, {
            capitalizeKeys: true,
            flattenLists: true,
            serializeEmptyLists: false,
        });
    }
    // EC2 responses carry data at the top level, not under "<op>Result".
    useNestedResult() {
        return false;
    }
}

View File

@@ -0,0 +1,136 @@
import { collectBody, RpcProtocol } from "@smithy/core/protocols";
import { deref, NormalizedSchema, TypeRegistry } from "@smithy/core/schema";
import { ProtocolLib } from "../ProtocolLib";
import { XmlShapeDeserializer } from "../xml/XmlShapeDeserializer";
import { QueryShapeSerializer } from "./QueryShapeSerializer";
/**
 * AWS Query protocol (aws.protocols#awsQuery): requests are
 * application/x-www-form-urlencoded bodies carrying Action/Version plus
 * query-serialized input; responses are XML.
 */
export class AwsQueryProtocol extends RpcProtocol {
    options;
    serializer;
    deserializer;
    // Shared helpers for error resolution / content-type negotiation.
    mixin = new ProtocolLib();
    constructor(options) {
        super({
            defaultNamespace: options.defaultNamespace,
        });
        this.options = options;
        const settings = {
            // Timestamp default 5 = RFC 3339 date-time unless a trait overrides it.
            timestampFormat: {
                useTrait: true,
                default: 5,
            },
            httpBindings: false,
            xmlNamespace: options.xmlNamespace,
            serviceNamespace: options.defaultNamespace,
            serializeEmptyLists: true,
        };
        this.serializer = new QueryShapeSerializer(settings);
        this.deserializer = new XmlShapeDeserializer(settings);
    }
    getShapeId() {
        return "aws.protocols#awsQuery";
    }
    // Propagates the serde context to both codecs.
    setSerdeContext(serdeContext) {
        this.serializer.setSerdeContext(serdeContext);
        this.deserializer.setSerdeContext(serdeContext);
    }
    getPayloadCodec() {
        throw new Error("AWSQuery protocol has no payload codec.");
    }
    /**
     * Builds the form-encoded request: ensures a trailing "/" path, sets the
     * content-type, and prepends "Action=<op>&Version=<ver>" to the
     * query-serialized body.
     */
    async serializeRequest(operationSchema, input, context) {
        const request = await super.serializeRequest(operationSchema, input, context);
        if (!request.path.endsWith("/")) {
            request.path += "/";
        }
        Object.assign(request.headers, {
            "content-type": `application/x-www-form-urlencoded`,
        });
        // Unit input or empty serialization: body is just Action/Version.
        if (deref(operationSchema.input) === "unit" || !request.body) {
            request.body = "";
        }
        const action = operationSchema.name.split("#")[1] ?? operationSchema.name;
        request.body = `Action=${action}&Version=${this.options.version}` + request.body;
        if (request.body.endsWith("&")) {
            // Trim the trailing separator. (Bug fix: previously `slice(-1)`,
            // which replaced the whole body with just "&".)
            request.body = request.body.slice(0, -1);
        }
        return request;
    }
    /**
     * Parses the XML response. Error statuses (>= 300) are routed through
     * handleError, which throws. Success payloads are optionally unwrapped
     * from the "<op>Result" element (see useNestedResult).
     */
    async deserializeResponse(operationSchema, context, response) {
        const deserializer = this.deserializer;
        const ns = NormalizedSchema.of(operationSchema.output);
        const dataObject = {};
        if (response.statusCode >= 300) {
            const bytes = await collectBody(response.body, context);
            if (bytes.byteLength > 0) {
                // 15 = untyped document schema sentinel: parse without a model.
                Object.assign(dataObject, await deserializer.read(15, bytes));
            }
            await this.handleError(operationSchema, context, response, dataObject, this.deserializeMetadata(response));
        }
        // Normalize header names to lowercase for consistent lookups.
        for (const header in response.headers) {
            const value = response.headers[header];
            delete response.headers[header];
            response.headers[header.toLowerCase()] = value;
        }
        const shortName = operationSchema.name.split("#")[1] ?? operationSchema.name;
        const awsQueryResultKey = ns.isStructSchema() && this.useNestedResult() ? shortName + "Result" : undefined;
        const bytes = await collectBody(response.body, context);
        if (bytes.byteLength > 0) {
            Object.assign(dataObject, await deserializer.read(ns, bytes, awsQueryResultKey));
        }
        const output = {
            $metadata: this.deserializeMetadata(response),
            ...dataObject,
        };
        return output;
    }
    // awsQuery wraps response data in "<op>Result"; AwsEc2QueryProtocol overrides this.
    useNestedResult() {
        return true;
    }
    /**
     * Resolves the modeled error type from the parsed error body and throws
     * a decorated service exception. Never returns normally.
     */
    async handleError(operationSchema, context, response, dataObject, metadata) {
        const errorIdentifier = this.loadQueryErrorCode(response, dataObject) ?? "Unknown";
        const errorData = this.loadQueryError(dataObject);
        const message = this.loadQueryErrorMessage(dataObject);
        errorData.message = message;
        // Legacy awsQuery error envelope kept for backwards compatibility.
        errorData.Error = {
            Type: errorData.Type,
            Code: errorData.Code,
            Message: message,
        };
        const { errorSchema, errorMetadata } = await this.mixin.getErrorSchemaOrThrowBaseException(errorIdentifier, this.options.defaultNamespace, response, errorData, metadata, this.mixin.findQueryCompatibleError);
        const ns = NormalizedSchema.of(errorSchema);
        const ErrorCtor = TypeRegistry.for(errorSchema[1]).getErrorCtor(errorSchema) ?? Error;
        const exception = new ErrorCtor(message);
        const output = {
            Type: errorData.Error.Type,
            Code: errorData.Error.Code,
            Error: errorData.Error,
        };
        // Deserialize modeled members from the error envelope (or the raw body).
        for (const [name, member] of ns.structIterator()) {
            const target = member.getMergedTraits().xmlName ?? name;
            const value = errorData[target] ?? dataObject[target];
            output[name] = this.deserializer.readSchema(member, value);
        }
        throw this.mixin.decorateServiceException(Object.assign(exception, errorMetadata, {
            $fault: ns.getMergedTraits().error,
            message,
        }, output), dataObject);
    }
    /**
     * Extracts the error code from the possible awsQuery envelopes:
     * Errors[0].Error, Errors.Error, or Error. Falls back to "NotFound"
     * for an HTTP 404 without a coded body.
     */
    loadQueryErrorCode(output, data) {
        const code = (data.Errors?.[0]?.Error ?? data.Errors?.Error ?? data.Error)?.Code;
        if (code !== undefined) {
            return code;
        }
        if (output.statusCode == 404) {
            return "NotFound";
        }
    }
    // Unwraps the error object from the same envelope variants as above.
    loadQueryError(data) {
        return data.Errors?.[0]?.Error ?? data.Errors?.Error ?? data.Error;
    }
    // Picks the first available message field (either casing, either level).
    loadQueryErrorMessage(data) {
        const errorData = this.loadQueryError(data);
        return errorData?.message ?? errorData?.Message ?? data.message ?? data.Message ?? "Unknown";
    }
    getDefaultContentType() {
        return "application/x-www-form-urlencoded";
    }
}

View File

@@ -0,0 +1 @@
// Intentionally empty: marks this file as an ES module with no runtime exports.
export {};

View File

@@ -0,0 +1,182 @@
import { determineTimestampFormat, extendedEncodeURIComponent } from "@smithy/core/protocols";
import { NormalizedSchema } from "@smithy/core/schema";
import { generateIdempotencyToken, NumericValue } from "@smithy/core/serde";
import { dateToUtcString } from "@smithy/smithy-client";
import { toBase64 } from "@smithy/util-base64";
import { SerdeContextConfig } from "../ConfigurableSerdeContext";
import { serializingStructIterator } from "../structIterator";
/**
 * Serializes values into AWS Query / EC2 Query form-urlencoded key=value
 * pairs, appending "&key=value" fragments to a string buffer. Nested
 * aggregates are flattened into dotted key prefixes (e.g. "Parent.Child.1").
 */
export class QueryShapeSerializer extends SerdeContextConfig {
    // Codec settings: capitalizeKeys / flattenLists / serializeEmptyLists
    // (EC2 vs. plain Query conventions) and timestampFormat.
    settings;
    // Accumulated "&key=value" string; undefined until the first write().
    buffer;
    constructor(settings) {
        super();
        this.settings = settings;
    }
    /**
     * Serializes `value` per `schema` under the dotted key `prefix`,
     * appending to the internal buffer. Recursion passes extended prefixes.
     */
    write(schema, value, prefix = "") {
        if (this.buffer === undefined) {
            this.buffer = "";
        }
        const ns = NormalizedSchema.of(schema);
        // Normalize the prefix so nested keys can be appended directly.
        if (prefix && !prefix.endsWith(".")) {
            prefix += ".";
        }
        if (ns.isBlobSchema()) {
            // Blobs are base64-encoded on the wire.
            if (typeof value === "string" || value instanceof Uint8Array) {
                this.writeKey(prefix);
                this.writeValue((this.serdeContext?.base64Encoder ?? toBase64)(value));
            }
        }
        else if (ns.isBooleanSchema() || ns.isNumericSchema() || ns.isStringSchema()) {
            if (value != null) {
                this.writeKey(prefix);
                this.writeValue(String(value));
            }
            else if (ns.isIdempotencyToken()) {
                // Absent idempotency tokens are auto-generated.
                this.writeKey(prefix);
                this.writeValue(generateIdempotencyToken());
            }
        }
        else if (ns.isBigIntegerSchema()) {
            if (value != null) {
                this.writeKey(prefix);
                this.writeValue(String(value));
            }
        }
        else if (ns.isBigDecimalSchema()) {
            if (value != null) {
                // NumericValue carries the exact decimal literal.
                this.writeKey(prefix);
                this.writeValue(value instanceof NumericValue ? value.string : String(value));
            }
        }
        else if (ns.isTimestampSchema()) {
            if (value instanceof Date) {
                this.writeKey(prefix);
                const format = determineTimestampFormat(ns, this.settings);
                // 5 = RFC 3339 date-time, 6 = http-date, 7 = epoch seconds.
                switch (format) {
                    case 5:
                        this.writeValue(value.toISOString().replace(".000Z", "Z"));
                        break;
                    case 6:
                        this.writeValue(dateToUtcString(value));
                        break;
                    case 7:
                        this.writeValue(String(value.getTime() / 1000));
                        break;
                }
            }
        }
        else if (ns.isDocumentSchema()) {
            // Untyped documents: dispatch on the runtime type using numeric
            // schema sentinels (64|15 list-of-document, 4 timestamp, 21 blob,
            // 128|15 map-of-document).
            if (Array.isArray(value)) {
                this.write(64 | 15, value, prefix);
            }
            else if (value instanceof Date) {
                this.write(4, value, prefix);
            }
            else if (value instanceof Uint8Array) {
                this.write(21, value, prefix);
            }
            else if (value && typeof value === "object") {
                this.write(128 | 15, value, prefix);
            }
            else {
                this.writeKey(prefix);
                this.writeValue(String(value));
            }
        }
        else if (ns.isListSchema()) {
            if (Array.isArray(value)) {
                if (value.length === 0) {
                    // Plain Query serializes empty lists as an empty value;
                    // EC2 Query omits them (serializeEmptyLists=false).
                    if (this.settings.serializeEmptyLists) {
                        this.writeKey(prefix);
                        this.writeValue("");
                    }
                }
                else {
                    const member = ns.getValueSchema();
                    // Flattened lists use "prefix.N"; nested use "prefix.member.N".
                    const flat = this.settings.flattenLists || ns.getMergedTraits().xmlFlattened;
                    let i = 1;
                    for (const item of value) {
                        if (item == null) {
                            continue;
                        }
                        const suffix = this.getKey("member", member.getMergedTraits().xmlName);
                        const key = flat ? `${prefix}${i}` : `${prefix}${suffix}.${i}`;
                        this.write(member, item, key);
                        ++i;
                    }
                }
            }
        }
        else if (ns.isMapSchema()) {
            if (value && typeof value === "object") {
                const keySchema = ns.getKeySchema();
                const memberSchema = ns.getValueSchema();
                const flat = ns.getMergedTraits().xmlFlattened;
                let i = 1;
                // Maps serialize as numbered entry pairs: ...entry.N.key / ...entry.N.value
                // (without "entry." when flattened).
                for (const [k, v] of Object.entries(value)) {
                    if (v == null) {
                        continue;
                    }
                    const keySuffix = this.getKey("key", keySchema.getMergedTraits().xmlName);
                    const key = flat ? `${prefix}${i}.${keySuffix}` : `${prefix}entry.${i}.${keySuffix}`;
                    const valueSuffix = this.getKey("value", memberSchema.getMergedTraits().xmlName);
                    const valueKey = flat ? `${prefix}${i}.${valueSuffix}` : `${prefix}entry.${i}.${valueSuffix}`;
                    this.write(keySchema, k, key);
                    this.write(memberSchema, v, valueKey);
                    ++i;
                }
            }
        }
        else if (ns.isStructSchema()) {
            if (value && typeof value === "object") {
                let didWriteMember = false;
                for (const [memberName, member] of serializingStructIterator(ns, value)) {
                    // Absent members are skipped unless auto-generated tokens.
                    if (value[memberName] == null && !member.isIdempotencyToken()) {
                        continue;
                    }
                    const suffix = this.getKey(memberName, member.getMergedTraits().xmlName);
                    const key = `${prefix}${suffix}`;
                    this.write(member, value[memberName], key);
                    didWriteMember = true;
                }
                if (!didWriteMember && ns.isUnionSchema()) {
                    // No modeled member set: fall back to the $unknown pair,
                    // serialized as an untyped document (15).
                    const { $unknown } = value;
                    if (Array.isArray($unknown)) {
                        const [k, v] = $unknown;
                        const key = `${prefix}${k}`;
                        this.write(15, v, key);
                    }
                }
            }
        }
        else if (ns.isUnitSchema()) {
            // Unit type carries no data: nothing to serialize.
        }
        else {
            throw new Error(`@aws-sdk/core/protocols - QuerySerializer unrecognized schema type ${ns.getName(true)}`);
        }
    }
    /**
     * Returns the accumulated query string and resets the buffer.
     * @throws if write() was never called.
     */
    flush() {
        if (this.buffer === undefined) {
            throw new Error("@aws-sdk/core/protocols - QuerySerializer cannot flush with nothing written to buffer.");
        }
        const str = this.buffer;
        delete this.buffer;
        return str;
    }
    // Resolves a key segment: xmlName trait wins; EC2 capitalizes the first letter.
    getKey(memberName, xmlName) {
        const key = xmlName ?? memberName;
        if (this.settings.capitalizeKeys) {
            return key[0].toUpperCase() + key.slice(1);
        }
        return key;
    }
    // Appends "&<urlencoded key>=" (trimming the trailing dot of the prefix).
    writeKey(key) {
        if (key.endsWith(".")) {
            key = key.slice(0, key.length - 1);
        }
        this.buffer += `&${extendedEncodeURIComponent(key)}=`;
    }
    // Appends the url-encoded value.
    writeValue(value) {
        this.buffer += extendedEncodeURIComponent(value);
    }
}

View File

@@ -0,0 +1,40 @@
import { NormalizedSchema } from "@smithy/core/schema";
/**
 * Yields [memberName, memberSchema] pairs of a structure schema for
 * serialization, restricted to members present in sourceObject (plus
 * idempotency-token members, which are emitted even when absent so they
 * can be auto-generated). Unit schemas yield nothing.
 *
 * Indices 4/5 of the raw schema tuple hold the parallel member-name and
 * member-schema arrays.
 */
export function* serializingStructIterator(ns, sourceObject) {
    if (ns.isUnitSchema()) {
        return;
    }
    const struct = ns.getSchema();
    const memberNames = struct[4];
    const memberSchemas = struct[5];
    for (let i = 0; i < memberNames.length; ++i) {
        const name = memberNames[i];
        const memberNs = new NormalizedSchema([memberSchemas[i], 0], name);
        if (name in sourceObject || memberNs.isIdempotencyToken()) {
            yield [name, memberNs];
        }
    }
}
/**
 * Yields [memberName, memberSchema] pairs of a structure schema for
 * deserialization, restricted to members whose wire key appears in
 * sourceObject. When nameTrait is set (e.g. "jsonName"), the wire key is
 * the trait value rather than the member name. Stops early once every
 * source key (excluding the "__type" discriminator) has been matched.
 */
export function* deserializingStructIterator(ns, sourceObject, nameTrait) {
    if (ns.isUnitSchema()) {
        return;
    }
    const struct = ns.getSchema();
    const memberNames = struct[4];
    const memberSchemas = struct[5];
    let remaining = Object.keys(sourceObject).filter((k) => k !== "__type").length;
    for (let i = 0; i < memberNames.length && remaining > 0; ++i) {
        const name = memberNames[i];
        const memberNs = new NormalizedSchema([memberSchemas[i], 0], name);
        const sourceKey = nameTrait ? memberNs.getMergedTraits()[nameTrait] ?? name : name;
        if (sourceKey in sourceObject) {
            yield [name, memberNs];
            remaining -= 1;
        }
    }
}

View File

@@ -0,0 +1,82 @@
import { HttpBindingProtocol, HttpInterceptingShapeDeserializer, HttpInterceptingShapeSerializer, } from "@smithy/core/protocols";
import { NormalizedSchema, TypeRegistry } from "@smithy/core/schema";
import { ProtocolLib } from "../ProtocolLib";
import { loadRestXmlErrorCode } from "./parseXmlBody";
import { XmlCodec } from "./XmlCodec";
/**
 * REST XML protocol (aws.protocols#restXml): HTTP-binding protocol whose
 * structured payloads are XML. Prepends the XML declaration to structured
 * request bodies and resolves errors via the restXml error envelope.
 */
export class AwsRestXmlProtocol extends HttpBindingProtocol {
    // XML payload codec shared by serializer and deserializer.
    codec;
    serializer;
    deserializer;
    // Shared helpers for error resolution / content-type negotiation.
    mixin = new ProtocolLib();
    constructor(options) {
        super(options);
        const settings = {
            // Timestamp default 5 = RFC 3339 date-time unless a trait overrides it.
            timestampFormat: {
                useTrait: true,
                default: 5,
            },
            httpBindings: true,
            xmlNamespace: options.xmlNamespace,
            serviceNamespace: options.defaultNamespace,
        };
        this.codec = new XmlCodec(settings);
        this.serializer = new HttpInterceptingShapeSerializer(this.codec.createSerializer(), settings);
        this.deserializer = new HttpInterceptingShapeDeserializer(this.codec.createDeserializer(), settings);
    }
    getPayloadCodec() {
        return this.codec;
    }
    getShapeId() {
        return "aws.protocols#restXml";
    }
    /**
     * Serializes the request, fills in a default content-type when unset,
     * and prefixes structured XML bodies with the XML declaration
     * (skipped for unstructured payload bindings, e.g. raw blob payloads).
     */
    async serializeRequest(operationSchema, input, context) {
        const request = await super.serializeRequest(operationSchema, input, context);
        const inputSchema = NormalizedSchema.of(operationSchema.input);
        if (!request.headers["content-type"]) {
            const contentType = this.mixin.resolveRestContentType(this.getDefaultContentType(), inputSchema);
            if (contentType) {
                request.headers["content-type"] = contentType;
            }
        }
        if (typeof request.body === "string" &&
            request.headers["content-type"] === this.getDefaultContentType() &&
            !request.body.startsWith("<?xml ") &&
            !this.hasUnstructuredPayloadBinding(inputSchema)) {
            request.body = '<?xml version="1.0" encoding="UTF-8"?>' + request.body;
        }
        return request;
    }
    async deserializeResponse(operationSchema, context, response) {
        return super.deserializeResponse(operationSchema, context, response);
    }
    /**
     * Resolves the modeled error type from the restXml error body and throws
     * a decorated service exception. Never returns normally.
     */
    async handleError(operationSchema, context, response, dataObject, metadata) {
        const errorIdentifier = loadRestXmlErrorCode(response, dataObject) ?? "Unknown";
        const { errorSchema, errorMetadata } = await this.mixin.getErrorSchemaOrThrowBaseException(errorIdentifier, this.options.defaultNamespace, response, dataObject, metadata);
        const ns = NormalizedSchema.of(errorSchema);
        // Message may live in the <Error> envelope or at the top level, either casing.
        const message = dataObject.Error?.message ?? dataObject.Error?.Message ?? dataObject.message ?? dataObject.Message ?? "Unknown";
        const ErrorCtor = TypeRegistry.for(errorSchema[1]).getErrorCtor(errorSchema) ?? Error;
        const exception = new ErrorCtor(message);
        // Apply HTTP-bound members (headers, status) to the data object first.
        await this.deserializeHttpMessage(errorSchema, context, response, dataObject);
        const output = {};
        for (const [name, member] of ns.structIterator()) {
            const target = member.getMergedTraits().xmlName ?? name;
            const value = dataObject.Error?.[target] ?? dataObject[target];
            output[name] = this.codec.createDeserializer().readSchema(member, value);
        }
        throw this.mixin.decorateServiceException(Object.assign(exception, errorMetadata, {
            $fault: ns.getMergedTraits().error,
            message,
        }, output), dataObject);
    }
    getDefaultContentType() {
        return "application/xml";
    }
    // True when the input's httpPayload member is a scalar (e.g. blob/string),
    // i.e. the body is not an XML document and must not get an XML declaration.
    hasUnstructuredPayloadBinding(ns) {
        for (const [, member] of ns.structIterator()) {
            if (member.getMergedTraits().httpPayload) {
                return !(member.isStructSchema() || member.isMapSchema() || member.isListSchema());
            }
        }
        return false;
    }
}

View File

@@ -0,0 +1,20 @@
import { SerdeContextConfig } from "../ConfigurableSerdeContext";
import { XmlShapeDeserializer } from "./XmlShapeDeserializer";
import { XmlShapeSerializer } from "./XmlShapeSerializer";
/**
 * Factory for XML shape serializers/deserializers that share this codec's
 * settings and serde context. Each call produces a fresh instance with the
 * current serde context applied.
 */
export class XmlCodec extends SerdeContextConfig {
    settings;
    constructor(settings) {
        super();
        this.settings = settings;
    }
    // Creates an XML serializer bound to the codec's settings and context.
    createSerializer() {
        const instance = new XmlShapeSerializer(this.settings);
        instance.setSerdeContext(this.serdeContext);
        return instance;
    }
    // Creates an XML deserializer bound to the codec's settings and context.
    createDeserializer() {
        const instance = new XmlShapeDeserializer(this.settings);
        instance.setSerdeContext(this.serdeContext);
        return instance;
    }
}

View File

@@ -0,0 +1,155 @@
import { parseXML } from "@aws-sdk/xml-builder";
import { FromStringShapeDeserializer } from "@smithy/core/protocols";
import { NormalizedSchema } from "@smithy/core/schema";
import { getValueFromTextNode } from "@smithy/smithy-client";
import { toUtf8 } from "@smithy/util-utf8";
import { SerdeContextConfig } from "../ConfigurableSerdeContext";
import { UnionSerde } from "../UnionSerde";
/**
 * Deserializes XML payloads into JavaScript values, guided by a Smithy
 * schema. Parses the XML text with parseXML, then walks the parsed object
 * recursively, unwrapping list <member>/map <entry> containers and mapping
 * xmlName traits back to member names. Scalar leaves are delegated to
 * FromStringShapeDeserializer.
 */
export class XmlShapeDeserializer extends SerdeContextConfig {
    settings;
    // Converts XML text leaves (strings) into typed scalars per schema.
    stringDeserializer;
    constructor(settings) {
        super();
        this.settings = settings;
        this.stringDeserializer = new FromStringShapeDeserializer(settings);
    }
    // Propagates the serde context to the scalar deserializer as well.
    setSerdeContext(serdeContext) {
        this.serdeContext = serdeContext;
        this.stringDeserializer.setSerdeContext(serdeContext);
    }
    /**
     * Deserializes raw bytes per the schema.
     * @param key - optional top-level element to unwrap (e.g. the awsQuery
     *   "<op>Result" wrapper).
     */
    read(schema, bytes, key) {
        const ns = NormalizedSchema.of(schema);
        const memberSchemas = ns.getMemberSchemas();
        // Event-stream event with an eventPayload member: the bytes ARE the
        // payload member, not an XML document for the whole structure.
        const isEventPayload = ns.isStructSchema() &&
            ns.isMemberSchema() &&
            !!Object.values(memberSchemas).find((memberNs) => {
                return !!memberNs.getMemberTraits().eventPayload;
            });
        if (isEventPayload) {
            const output = {};
            const memberName = Object.keys(memberSchemas)[0];
            const eventMemberSchema = memberSchemas[memberName];
            if (eventMemberSchema.isBlobSchema()) {
                // Blob payloads are passed through as raw bytes.
                output[memberName] = bytes;
            }
            else {
                output[memberName] = this.read(memberSchemas[memberName], bytes);
            }
            return output;
        }
        const xmlString = (this.serdeContext?.utf8Encoder ?? toUtf8)(bytes);
        const parsedObject = this.parseXml(xmlString);
        return this.readSchema(schema, key ? parsedObject[key] : parsedObject);
    }
    // Recursive worker: converts one parsed-XML value according to its schema.
    readSchema(_schema, value) {
        const ns = NormalizedSchema.of(_schema);
        if (ns.isUnitSchema()) {
            return;
        }
        const traits = ns.getMergedTraits();
        // XML parsing yields a single object for one-element lists; normalize
        // to an array before the object branch below.
        if (ns.isListSchema() && !Array.isArray(value)) {
            return this.readSchema(ns, [value]);
        }
        if (value == null) {
            return value;
        }
        if (typeof value === "object") {
            const sparse = !!traits.sparse;
            const flat = !!traits.xmlFlattened;
            if (ns.isListSchema()) {
                const listValue = ns.getValueSchema();
                const buffer = [];
                // Non-flattened lists wrap items in <member> (or the xmlName override).
                const sourceKey = listValue.getMergedTraits().xmlName ?? "member";
                const source = flat ? value : (value[0] ?? value)[sourceKey];
                const sourceArray = Array.isArray(source) ? source : [source];
                for (const v of sourceArray) {
                    if (v != null || sparse) {
                        buffer.push(this.readSchema(listValue, v));
                    }
                }
                return buffer;
            }
            const buffer = {};
            if (ns.isMapSchema()) {
                const keyNs = ns.getKeySchema();
                const memberNs = ns.getValueSchema();
                let entries;
                // Non-flattened maps wrap pairs in <entry>; both forms may parse
                // to a single object when there is exactly one pair.
                if (flat) {
                    entries = Array.isArray(value) ? value : [value];
                }
                else {
                    entries = Array.isArray(value.entry) ? value.entry : [value.entry];
                }
                const keyProperty = keyNs.getMergedTraits().xmlName ?? "key";
                const valueProperty = memberNs.getMergedTraits().xmlName ?? "value";
                for (const entry of entries) {
                    const key = entry[keyProperty];
                    const value = entry[valueProperty];
                    if (value != null || sparse) {
                        buffer[key] = this.readSchema(memberNs, value);
                    }
                }
                return buffer;
            }
            if (ns.isStructSchema()) {
                const union = ns.isUnionSchema();
                let unionSerde;
                if (union) {
                    // Tracks recognized keys so an unrecognized union variant can
                    // be surfaced as $unknown afterwards.
                    unionSerde = new UnionSerde(value, buffer);
                }
                for (const [memberName, memberSchema] of ns.structIterator()) {
                    const memberTraits = memberSchema.getMergedTraits();
                    // httpPayload members are keyed by the target shape's name
                    // rather than the member's name.
                    const xmlObjectKey = !memberTraits.httpPayload
                        ? memberSchema.getMemberTraits().xmlName ?? memberName
                        : memberTraits.xmlName ?? memberSchema.getName();
                    if (union) {
                        unionSerde.mark(xmlObjectKey);
                    }
                    if (value[xmlObjectKey] != null) {
                        buffer[memberName] = this.readSchema(memberSchema, value[xmlObjectKey]);
                    }
                }
                if (union) {
                    unionSerde.writeUnknown();
                }
                return buffer;
            }
            if (ns.isDocumentSchema()) {
                return value;
            }
            throw new Error(`@aws-sdk/core/protocols - xml deserializer unhandled schema type for ${ns.getName(true)}`);
        }
        // Scalar source for an aggregate schema: empty aggregate
        // (e.g. a self-closing element parsed to an empty string).
        if (ns.isListSchema()) {
            return [];
        }
        if (ns.isMapSchema() || ns.isStructSchema()) {
            return {};
        }
        // Scalar leaf: delegate string-to-type conversion.
        return this.stringDeserializer.read(ns, value);
    }
    /**
     * Parses XML text into a plain object, unwrapping the root element and
     * normalizing "#text" nodes. On a parse error, the raw XML is attached
     * to the error as $responseBodyText before rethrow. Empty input -> {}.
     */
    parseXml(xml) {
        if (xml.length) {
            let parsedObj;
            try {
                parsedObj = parseXML(xml);
            }
            catch (e) {
                if (e && typeof e === "object") {
                    Object.defineProperty(e, "$responseBodyText", {
                        value: xml,
                    });
                }
                throw e;
            }
            const textNodeName = "#text";
            // Unwrap the single root element; promote its "#text" content.
            const key = Object.keys(parsedObj)[0];
            const parsedObjToReturn = parsedObj[key];
            if (parsedObjToReturn[textNodeName]) {
                parsedObjToReturn[key] = parsedObjToReturn[textNodeName];
                delete parsedObjToReturn[textNodeName];
            }
            return getValueFromTextNode(parsedObjToReturn);
        }
        return {};
    }
}

View File

@@ -0,0 +1,296 @@
import { XmlNode, XmlText } from "@aws-sdk/xml-builder";
import { determineTimestampFormat } from "@smithy/core/protocols";
import { NormalizedSchema } from "@smithy/core/schema";
import { generateIdempotencyToken, NumericValue } from "@smithy/core/serde";
import { dateToUtcString } from "@smithy/smithy-client";
import { fromBase64, toBase64 } from "@smithy/util-base64";
import { SerdeContextConfig } from "../ConfigurableSerdeContext";
import { serializingStructIterator } from "../structIterator";
/**
 * Serializes modeled values to XML following the Smithy XML binding traits
 * (xmlName, xmlFlattened, xmlAttribute, xmlNamespace) carried on the schema.
 *
 * write() buffers the serialized form; flush() returns and clears it.
 *
 * @internal
 */
export class XmlShapeSerializer extends SerdeContextConfig {
    settings;
    // Exactly one of the three buffers below is populated per write() call:
    stringBuffer; // raw string payload
    byteBuffer; // raw blob payload
    buffer; // XmlNode tree for structured payloads
    constructor(settings) {
        super();
        this.settings = settings;
    }
    /**
     * Serializes `value` per `schema` into an internal buffer.
     * Strings and blobs are buffered as-is (blobs base64-decoded when given
     * as strings); anything else is serialized to an XmlNode tree.
     */
    write(schema, value) {
        const ns = NormalizedSchema.of(schema);
        if (ns.isStringSchema() && typeof value === "string") {
            this.stringBuffer = value;
        }
        else if (ns.isBlobSchema()) {
            this.byteBuffer =
                "byteLength" in value
                    ? value
                    : (this.serdeContext?.base64Decoder ?? fromBase64)(value);
        }
        else {
            this.buffer = this.writeStruct(ns, value, undefined);
            const traits = ns.getMergedTraits();
            if (traits.httpPayload && !traits.xmlName) {
                this.buffer.withName(ns.getName());
            }
        }
    }
    /**
     * Returns the buffered payload (bytes, string, or XML string) and clears
     * the corresponding buffer. A configured default xmlNamespace is applied
     * to the root node if none was set during serialization.
     *
     * NOTE(review): calling flush() before write() dereferences an undefined
     * buffer — callers are expected to pair the two calls.
     */
    flush() {
        if (this.byteBuffer !== undefined) {
            const bytes = this.byteBuffer;
            delete this.byteBuffer;
            return bytes;
        }
        if (this.stringBuffer !== undefined) {
            const str = this.stringBuffer;
            delete this.stringBuffer;
            return str;
        }
        const buffer = this.buffer;
        if (this.settings.xmlNamespace) {
            if (!buffer?.attributes?.["xmlns"]) {
                buffer.addAttribute("xmlns", this.settings.xmlNamespace);
            }
        }
        delete this.buffer;
        return buffer.toString();
    }
    /**
     * Serializes a structure (or union) to an XmlNode, dispatching each member
     * by its schema type and honoring xmlAttribute/xmlName/xmlNamespace traits.
     */
    writeStruct(ns, value, parentXmlns) {
        const traits = ns.getMergedTraits();
        const name = ns.isMemberSchema() && !traits.httpPayload
            ? ns.getMemberTraits().xmlName ?? ns.getMemberName()
            : traits.xmlName ?? ns.getName();
        if (!name || !ns.isStructSchema()) {
            throw new Error(`@aws-sdk/core/protocols - xml serializer, cannot write struct with empty name or non-struct, schema=${ns.getName(true)}.`);
        }
        const structXmlNode = XmlNode.of(name);
        const [xmlnsAttr, xmlns] = this.getXmlnsAttribute(ns, parentXmlns);
        for (const [memberName, memberSchema] of serializingStructIterator(ns, value)) {
            const val = value[memberName];
            // Idempotency tokens are written even when absent (auto-generated later).
            if (val != null || memberSchema.isIdempotencyToken()) {
                if (memberSchema.getMergedTraits().xmlAttribute) {
                    structXmlNode.addAttribute(memberSchema.getMergedTraits().xmlName ?? memberName, this.writeSimple(memberSchema, val));
                    continue;
                }
                if (memberSchema.isListSchema()) {
                    this.writeList(memberSchema, val, structXmlNode, xmlns);
                }
                else if (memberSchema.isMapSchema()) {
                    this.writeMap(memberSchema, val, structXmlNode, xmlns);
                }
                else if (memberSchema.isStructSchema()) {
                    structXmlNode.addChildNode(this.writeStruct(memberSchema, val, xmlns));
                }
                else {
                    const memberNode = XmlNode.of(memberSchema.getMergedTraits().xmlName ?? memberSchema.getMemberName());
                    this.writeSimpleInto(memberSchema, val, memberNode, xmlns);
                    structXmlNode.addChildNode(memberNode);
                }
            }
        }
        const { $unknown } = value;
        if ($unknown && ns.isUnionSchema() && Array.isArray($unknown) && Object.keys(value).length === 1) {
            const [k, v] = $unknown;
            if (typeof v !== "string") {
                // Bug fix: inspect the union member value (v), not the containing
                // union object (value). The previous check `value instanceof XmlNode`
                // could never be true for the union wrapper, so non-string $unknown
                // members always threw; the subsequent unconditional
                // writeSimpleInto(0, v, ...) would also have stringified a
                // non-string value a second time.
                if (v instanceof XmlNode || v instanceof XmlText) {
                    structXmlNode.addChildNode(v);
                }
                else {
                    throw new Error(`@aws-sdk - $unknown union member in XML requires ` +
                        `value of type string, @aws-sdk/xml-builder::XmlNode or XmlText.`);
                }
            }
            else {
                // 0 is the string schema sentinel.
                const node = XmlNode.of(k);
                this.writeSimpleInto(0, v, node, xmlns);
                structXmlNode.addChildNode(node);
            }
        }
        if (xmlns) {
            structXmlNode.addAttribute(xmlnsAttr, xmlns);
        }
        return structXmlNode;
    }
    /**
     * Serializes a list member into `container`. Flattened lists write items
     * directly into the container; otherwise items are nested under a list
     * element and wrapped in "member" (or the value's xmlName).
     */
    writeList(listMember, array, container, parentXmlns) {
        if (!listMember.isMemberSchema()) {
            throw new Error(`@aws-sdk/core/protocols - xml serializer, cannot write non-member list: ${listMember.getName(true)}`);
        }
        const listTraits = listMember.getMergedTraits();
        const listValueSchema = listMember.getValueSchema();
        const listValueTraits = listValueSchema.getMergedTraits();
        const sparse = !!listValueTraits.sparse;
        const flat = !!listTraits.xmlFlattened;
        const [xmlnsAttr, xmlns] = this.getXmlnsAttribute(listMember, parentXmlns);
        const writeItem = (container, value) => {
            if (listValueSchema.isListSchema()) {
                this.writeList(listValueSchema, Array.isArray(value) ? value : [value], container, xmlns);
            }
            else if (listValueSchema.isMapSchema()) {
                this.writeMap(listValueSchema, value, container, xmlns);
            }
            else if (listValueSchema.isStructSchema()) {
                const struct = this.writeStruct(listValueSchema, value, xmlns);
                container.addChildNode(struct.withName(flat ? listTraits.xmlName ?? listMember.getMemberName() : listValueTraits.xmlName ?? "member"));
            }
            else {
                const listItemNode = XmlNode.of(flat ? listTraits.xmlName ?? listMember.getMemberName() : listValueTraits.xmlName ?? "member");
                this.writeSimpleInto(listValueSchema, value, listItemNode, xmlns);
                container.addChildNode(listItemNode);
            }
        };
        if (flat) {
            for (const value of array) {
                if (sparse || value != null) {
                    writeItem(container, value);
                }
            }
        }
        else {
            const listNode = XmlNode.of(listTraits.xmlName ?? listMember.getMemberName());
            if (xmlns) {
                listNode.addAttribute(xmlnsAttr, xmlns);
            }
            for (const value of array) {
                if (sparse || value != null) {
                    writeItem(listNode, value);
                }
            }
            container.addChildNode(listNode);
        }
    }
    /**
     * Serializes a map member into `container`. Flattened maps emit one
     * element per pair; otherwise pairs are nested under <entry> elements
     * inside a map element (skipped when the container is itself a map).
     */
    writeMap(mapMember, map, container, parentXmlns, containerIsMap = false) {
        if (!mapMember.isMemberSchema()) {
            throw new Error(`@aws-sdk/core/protocols - xml serializer, cannot write non-member map: ${mapMember.getName(true)}`);
        }
        const mapTraits = mapMember.getMergedTraits();
        const mapKeySchema = mapMember.getKeySchema();
        const mapKeyTraits = mapKeySchema.getMergedTraits();
        const keyTag = mapKeyTraits.xmlName ?? "key";
        const mapValueSchema = mapMember.getValueSchema();
        const mapValueTraits = mapValueSchema.getMergedTraits();
        const valueTag = mapValueTraits.xmlName ?? "value";
        const sparse = !!mapValueTraits.sparse;
        const flat = !!mapTraits.xmlFlattened;
        const [xmlnsAttr, xmlns] = this.getXmlnsAttribute(mapMember, parentXmlns);
        const addKeyValue = (entry, key, val) => {
            const keyNode = XmlNode.of(keyTag, key);
            const [keyXmlnsAttr, keyXmlns] = this.getXmlnsAttribute(mapKeySchema, xmlns);
            if (keyXmlns) {
                keyNode.addAttribute(keyXmlnsAttr, keyXmlns);
            }
            entry.addChildNode(keyNode);
            let valueNode = XmlNode.of(valueTag);
            if (mapValueSchema.isListSchema()) {
                this.writeList(mapValueSchema, val, valueNode, xmlns);
            }
            else if (mapValueSchema.isMapSchema()) {
                this.writeMap(mapValueSchema, val, valueNode, xmlns, true);
            }
            else if (mapValueSchema.isStructSchema()) {
                valueNode = this.writeStruct(mapValueSchema, val, xmlns);
            }
            else {
                this.writeSimpleInto(mapValueSchema, val, valueNode, xmlns);
            }
            entry.addChildNode(valueNode);
        };
        if (flat) {
            for (const [key, val] of Object.entries(map)) {
                if (sparse || val != null) {
                    const entry = XmlNode.of(mapTraits.xmlName ?? mapMember.getMemberName());
                    addKeyValue(entry, key, val);
                    container.addChildNode(entry);
                }
            }
        }
        else {
            let mapNode;
            if (!containerIsMap) {
                mapNode = XmlNode.of(mapTraits.xmlName ?? mapMember.getMemberName());
                if (xmlns) {
                    mapNode.addAttribute(xmlnsAttr, xmlns);
                }
                container.addChildNode(mapNode);
            }
            for (const [key, val] of Object.entries(map)) {
                if (sparse || val != null) {
                    const entry = XmlNode.of("entry");
                    addKeyValue(entry, key, val);
                    (containerIsMap ? container : mapNode).addChildNode(entry);
                }
            }
        }
    }
    /**
     * Serializes a simple (scalar) value to its XML text content.
     *
     * @throws when the value is null, or the schema is an aggregate type,
     * or no schema/value pairing matches.
     */
    writeSimple(_schema, value) {
        if (null === value) {
            throw new Error("@aws-sdk/core/protocols - (XML serializer) cannot write null value.");
        }
        const ns = NormalizedSchema.of(_schema);
        let nodeContents = null;
        if (value && typeof value === "object") {
            if (ns.isBlobSchema()) {
                nodeContents = (this.serdeContext?.base64Encoder ?? toBase64)(value);
            }
            else if (ns.isTimestampSchema() && value instanceof Date) {
                const format = determineTimestampFormat(ns, this.settings);
                switch (format) {
                    case 5: // date-time (ISO-8601, millisecond suffix stripped when zero)
                        nodeContents = value.toISOString().replace(".000Z", "Z");
                        break;
                    case 6: // http-date
                        nodeContents = dateToUtcString(value);
                        break;
                    case 7: // epoch-seconds
                        nodeContents = String(value.getTime() / 1000);
                        break;
                    default:
                        console.warn("Missing timestamp format, using http date", value);
                        nodeContents = dateToUtcString(value);
                        break;
                }
            }
            else if (ns.isBigDecimalSchema() && value) {
                if (value instanceof NumericValue) {
                    return value.string;
                }
                return String(value);
            }
            else if (ns.isMapSchema() || ns.isListSchema()) {
                throw new Error("@aws-sdk/core/protocols - xml serializer, cannot call _write() on List/Map schema, call writeList or writeMap() instead.");
            }
            else {
                throw new Error(`@aws-sdk/core/protocols - xml serializer, unhandled schema type for object value and schema: ${ns.getName(true)}`);
            }
        }
        if (ns.isBooleanSchema() || ns.isNumericSchema() || ns.isBigIntegerSchema() || ns.isBigDecimalSchema()) {
            nodeContents = String(value);
        }
        if (ns.isStringSchema()) {
            // Absent idempotency tokens are auto-filled.
            if (value === undefined && ns.isIdempotencyToken()) {
                nodeContents = generateIdempotencyToken();
            }
            else {
                nodeContents = String(value);
            }
        }
        if (nodeContents === null) {
            throw new Error(`Unhandled schema-value pair ${ns.getName(true)}=${value}`);
        }
        return nodeContents;
    }
    /**
     * Writes a scalar value as a text child of `into`, applying the schema's
     * xmlns attribute when it differs from the parent's.
     */
    writeSimpleInto(_schema, value, into, parentXmlns) {
        const nodeContents = this.writeSimple(_schema, value);
        const ns = NormalizedSchema.of(_schema);
        const content = new XmlText(nodeContents);
        const [xmlnsAttr, xmlns] = this.getXmlnsAttribute(ns, parentXmlns);
        if (xmlns) {
            into.addAttribute(xmlnsAttr, xmlns);
        }
        into.addChildNode(content);
    }
    /**
     * Resolves the xmlNamespace trait to an [attributeName, uri] pair, or
     * [undefined, undefined] when absent or identical to the parent's xmlns
     * (which is inherited and need not be repeated).
     */
    getXmlnsAttribute(ns, parentXmlns) {
        const traits = ns.getMergedTraits();
        const [prefix, xmlns] = traits.xmlNamespace ?? [];
        if (xmlns && xmlns !== parentXmlns) {
            return [prefix ? `xmlns:${prefix}` : "xmlns", xmlns];
        }
        return [void 0, void 0];
    }
}

View File

@@ -0,0 +1,46 @@
import { parseXML } from "@aws-sdk/xml-builder";
import { getValueFromTextNode } from "@smithy/smithy-client";
import { collectBodyString } from "../common";
/**
 * Collects a response body stream to a string, parses it as XML, unwraps the
 * document root, and promotes a root-level "#text" node when present.
 * An empty body resolves to an empty object; a parse failure rethrows with
 * the raw body attached as $responseBodyText.
 */
export const parseXmlBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => {
    if (!encoded.length) {
        return {};
    }
    let parsedObj;
    try {
        parsedObj = parseXML(encoded);
    }
    catch (e) {
        if (e && typeof e === "object") {
            // Keep the unparseable body available for diagnostics.
            Object.defineProperty(e, "$responseBodyText", {
                value: encoded,
            });
        }
        throw e;
    }
    const textNodeName = "#text";
    const [rootKey] = Object.keys(parsedObj);
    const root = parsedObj[rootKey];
    if (root[textNodeName]) {
        root[rootKey] = root[textNodeName];
        delete root[textNodeName];
    }
    return getValueFromTextNode(root);
});
/**
 * Parses an XML error body and normalizes the error message casing:
 * `Error.message` is backfilled from `Error.Message` when absent.
 */
export const parseXmlErrorBody = async (errorBody, context) => {
    const parsed = await parseXmlBody(errorBody, context);
    const err = parsed.Error;
    if (err) {
        err.message = err.message ?? err.Message;
    }
    return parsed;
};
/**
 * Determines the error code for a restXml error response.
 * Precedence: nested `Error.Code`, then top-level `Code`, then "NotFound"
 * for HTTP 404 responses; otherwise undefined.
 */
export const loadRestXmlErrorCode = (output, data) => {
    const nestedCode = data?.Error?.Code;
    if (nestedCode !== undefined) {
        return nestedCode;
    }
    const topLevelCode = data?.Code;
    if (topLevelCode !== undefined) {
        return topLevelCode;
    }
    // Loose equality intentionally matches both 404 and "404".
    if (output.statusCode == 404) {
        return "NotFound";
    }
};

View File

@@ -0,0 +1,27 @@
/**
 * Naive XML pretty-printer: indents two spaces per nesting level by scanning
 * for "<", "</" and ">" characters, then drops whitespace-only lines.
 * Assumes well-formed, paired open/close tags; self-closing tags and
 * unbalanced close tags are not specially handled.
 */
export function simpleFormatXml(xml) {
    const parts = [];
    let depth = 0;
    let i = 0;
    while (i < xml.length) {
        const ch = xml[i];
        switch (ch) {
            case "<":
                if (xml[i + 1] === "/") {
                    // Closing tag: break the line and dedent.
                    parts.push("\n", " ".repeat(depth - 2), ch);
                    depth -= 4;
                }
                else {
                    parts.push(ch);
                }
                break;
            case ">":
                // Tag end: indent the following content one level deeper.
                depth += 2;
                parts.push(ch, "\n", " ".repeat(depth));
                break;
            default:
                parts.push(ch);
        }
        ++i;
    }
    return parts
        .join("")
        .split("\n")
        .filter((line) => line.trim() !== "")
        .join("\n");
}

View File

@@ -0,0 +1,5 @@
export * from "./index";
export * from "./submodules/account-id-endpoint/index";
export * from "./submodules/client/index";
export * from "./submodules/httpAuthSchemes/index";
export * from "./submodules/protocols/index";

View File

@@ -0,0 +1,22 @@
/**
* Submodules annotated with "Legacy" are from prior to the submodule system.
* They are exported from the package's root index to preserve backwards compatibility.
*
* New development should go in a proper submodule and not be exported from the root index.
*/
/**
* Legacy submodule.
*/
export * from "./submodules/client/index";
/**
* Legacy submodule.
*/
export * from "./submodules/httpAuthSchemes/index";
/**
* Legacy submodule.
*/
export * from "./submodules/protocols/index";
/**
* Warning: do not export any additional submodules from the root of this package. See readme.md for
* guide on developing submodules.
*/

View File

@@ -0,0 +1,27 @@
import type { Provider } from "@smithy/types";
import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants";
/**
* @public
*/
export interface AccountIdEndpointModeInputConfig {
/**
* The account ID endpoint mode to use.
*/
accountIdEndpointMode?: AccountIdEndpointMode | Provider<AccountIdEndpointMode>;
}
/**
* @internal
*/
interface PreviouslyResolved {
}
/**
* @internal
*/
export interface AccountIdEndpointModeResolvedConfig {
accountIdEndpointMode: Provider<AccountIdEndpointMode>;
}
/**
* @internal
*/
export declare const resolveAccountIdEndpointModeConfig: <T>(input: T & AccountIdEndpointModeInputConfig & PreviouslyResolved) => T & AccountIdEndpointModeResolvedConfig;
export {};

View File

@@ -0,0 +1,16 @@
/**
* @public
*/
export type AccountIdEndpointMode = "disabled" | "preferred" | "required";
/**
* @internal
*/
export declare const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred";
/**
* @internal
*/
export declare const ACCOUNT_ID_ENDPOINT_MODE_VALUES: AccountIdEndpointMode[];
/**
* @internal
*/
export declare function validateAccountIdEndpointMode(value: any): value is AccountIdEndpointMode;

View File

@@ -0,0 +1,14 @@
import { LoadedConfigSelectors } from "@smithy/node-config-provider";
import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants";
/**
* @internal
*/
export declare const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE";
/**
* @internal
*/
export declare const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode";
/**
* @internal
*/
export declare const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors<AccountIdEndpointMode>;

View File

@@ -0,0 +1,3 @@
export * from "./AccountIdEndpointModeConfigResolver";
export * from "./AccountIdEndpointModeConstants";
export * from "./NodeAccountIdEndpointModeConfigOptions";

View File

@@ -0,0 +1,12 @@
/**
 * @internal
 *
 * Records whether the unsupported-version warning has already been emitted —
 * presumably so it is issued at most once per process; confirm in the
 * implementation.
 */
export declare const state: {
    warningEmitted: boolean;
};
/**
 * @internal
 *
 * Emits warning if the provided Node.js version string is
 * pending deprecation by AWS SDK JSv3.
 *
 * @param version - The Node.js version string.
 */
export declare const emitWarningIfUnsupportedVersion: (version: string) => void;

View File

@@ -0,0 +1,4 @@
export * from "./emitWarningIfUnsupportedVersion";
export * from "./setCredentialFeature";
export * from "./setFeature";
export * from "./setTokenFeature";

View File

@@ -0,0 +1,7 @@
import type { AttributedAwsCredentialIdentity, AwsSdkCredentialsFeatures } from "@aws-sdk/types";
/**
* @internal
*
* @returns the credentials with source feature attribution.
*/
export declare function setCredentialFeature<F extends keyof AwsSdkCredentialsFeatures>(credentials: AttributedAwsCredentialIdentity, feature: F, value: AwsSdkCredentialsFeatures[F]): AttributedAwsCredentialIdentity;

View File

@@ -0,0 +1,12 @@
import type { AwsHandlerExecutionContext, AwsSdkFeatures } from "@aws-sdk/types";
/**
* @internal
* Indicates to the request context that a given feature is active.
*
* @param context - handler execution context.
* @param feature - readable name of feature.
* @param value - encoding value of feature. This is required because the
* specification asks the SDK not to include a runtime lookup of all
* the feature identifiers.
*/
export declare function setFeature<F extends keyof AwsSdkFeatures>(context: AwsHandlerExecutionContext, feature: F, value: AwsSdkFeatures[F]): void;

View File

@@ -0,0 +1,7 @@
import type { AttributedTokenIdentity, AwsSdkTokenFeatures } from "@aws-sdk/types";
/**
* @internal
*
* @returns the token with source feature attribution.
*/
export declare function setTokenFeature<F extends keyof AwsSdkTokenFeatures>(token: AttributedTokenIdentity, feature: F, value: AwsSdkTokenFeatures[F]): AttributedTokenIdentity;

View File

@@ -0,0 +1,10 @@
import type { AwsCredentialIdentity, HttpRequest as IHttpRequest } from "@smithy/types";
import { AwsSdkSigV4Signer } from "./AwsSdkSigV4Signer";
/**
* @internal
* Note: this is not a signing algorithm implementation. The sign method
* accepts the real signer as an input parameter.
*/
export declare class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer {
sign(httpRequest: IHttpRequest, identity: AwsCredentialIdentity, signingProperties: Record<string, unknown>): Promise<IHttpRequest>;
}

View File

@@ -0,0 +1,43 @@
import type { AuthScheme, AwsCredentialIdentity, HttpRequest as IHttpRequest, HttpResponse, HttpSigner, RequestSigner } from "@smithy/types";
import { AwsSdkSigV4AAuthResolvedConfig } from "./resolveAwsSdkSigV4AConfig";
/**
* @internal
*/
interface AwsSdkSigV4Config extends AwsSdkSigV4AAuthResolvedConfig {
systemClockOffset: number;
signer: (authScheme?: AuthScheme) => Promise<RequestSigner>;
}
/**
* @internal
*/
interface AwsSdkSigV4AuthSigningProperties {
config: AwsSdkSigV4Config;
signer: RequestSigner;
signingRegion?: string;
signingRegionSet?: string[];
signingName?: string;
}
/**
* @internal
*/
export declare const validateSigningProperties: (signingProperties: Record<string, unknown>) => Promise<AwsSdkSigV4AuthSigningProperties>;
/**
* Note: this is not a signing algorithm implementation. The sign method
* accepts the real signer as an input parameter.
* @internal
*/
export declare class AwsSdkSigV4Signer implements HttpSigner {
sign(httpRequest: IHttpRequest,
/**
* `identity` is bound in {@link resolveAWSSDKSigV4Config}
*/
identity: AwsCredentialIdentity, signingProperties: Record<string, unknown>): Promise<IHttpRequest>;
errorHandler(signingProperties: Record<string, unknown>): (error: Error) => never;
successHandler(httpResponse: HttpResponse | unknown, signingProperties: Record<string, unknown>): void;
}
/**
* @internal
* @deprecated renamed to {@link AwsSdkSigV4Signer}
*/
export declare const AWSSDKSigV4Signer: typeof AwsSdkSigV4Signer;
export {};

View File

@@ -0,0 +1,5 @@
import { LoadedConfigSelectors } from "@smithy/node-config-provider";
/**
* @public
*/
export declare const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: LoadedConfigSelectors<string[]>;

View File

@@ -0,0 +1,5 @@
export { AwsSdkSigV4Signer, AWSSDKSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer";
export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner";
export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS";
export * from "./resolveAwsSdkSigV4AConfig";
export * from "./resolveAwsSdkSigV4Config";

View File

@@ -0,0 +1,38 @@
import { LoadedConfigSelectors } from "@smithy/node-config-provider";
import type { Provider } from "@smithy/types";
/**
* @public
*/
export interface AwsSdkSigV4AAuthInputConfig {
/**
* This option will override the AWS sigv4a
* signing regionSet from any other source.
*
* The lookup order is:
* 1. this value
* 2. configuration file value of sigv4a_signing_region_set.
* 3. environment value of AWS_SIGV4A_SIGNING_REGION_SET.
* 4. signingRegionSet given by endpoint resolution.
* 5. the singular region of the SDK client.
*/
sigv4aSigningRegionSet?: string[] | undefined | Provider<string[] | undefined>;
}
/**
* @internal
*/
export interface AwsSdkSigV4APreviouslyResolved {
}
/**
* @internal
*/
export interface AwsSdkSigV4AAuthResolvedConfig {
sigv4aSigningRegionSet: Provider<string[] | undefined>;
}
/**
* @internal
*/
export declare const resolveAwsSdkSigV4AConfig: <T>(config: T & AwsSdkSigV4AAuthInputConfig & AwsSdkSigV4APreviouslyResolved) => T & AwsSdkSigV4AAuthResolvedConfig;
/**
* @internal
*/
export declare const NODE_SIGV4A_CONFIG_OPTIONS: LoadedConfigSelectors<string[] | undefined>;

View File

@@ -0,0 +1,117 @@
import type { MergeFunctions } from "@aws-sdk/types";
import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4";
import type { AuthScheme, AwsCredentialIdentity, AwsCredentialIdentityProvider, ChecksumConstructor, HashConstructor, MemoizedProvider, Provider, RegionInfoProvider, RequestSigner } from "@smithy/types";
/**
* @public
*/
export interface AwsSdkSigV4AuthInputConfig {
/**
* The credentials used to sign requests.
*/
credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider;
/**
* The signer to use when signing requests.
*/
signer?: RequestSigner | ((authScheme?: AuthScheme) => Promise<RequestSigner>);
/**
* Whether to escape request path when signing the request.
*/
signingEscapePath?: boolean;
/**
* An offset value in milliseconds to apply to all signing times.
*/
systemClockOffset?: number;
/**
* The region where you want to sign your request against. This
* can be different to the region in the endpoint.
*/
signingRegion?: string;
/**
* The injectable SigV4-compatible signer class constructor. If not supplied,
* regular SignatureV4 constructor will be used.
*
* @internal
*/
signerConstructor?: new (options: SignatureV4Init & SignatureV4CryptoInit) => RequestSigner;
}
/**
* Used to indicate whether a credential provider function was memoized by this resolver.
* @public
*/
export type AwsSdkSigV4Memoized = {
/**
* The credential provider has been memoized by the AWS SDK SigV4 config resolver.
*/
memoized?: boolean;
/**
* The credential provider has the caller client config object bound to its arguments.
*/
configBound?: boolean;
/**
* Function is wrapped with attribution transform.
*/
attributed?: boolean;
};
/**
* @internal
*/
export interface AwsSdkSigV4PreviouslyResolved {
credentialDefaultProvider?: (input: any) => MemoizedProvider<AwsCredentialIdentity>;
region: string | Provider<string>;
sha256: ChecksumConstructor | HashConstructor;
signingName?: string;
regionInfoProvider?: RegionInfoProvider;
defaultSigningName?: string;
serviceId: string;
useFipsEndpoint: Provider<boolean>;
useDualstackEndpoint: Provider<boolean>;
}
/**
* @internal
*/
export interface AwsSdkSigV4AuthResolvedConfig {
/**
* Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.credentials}
* This provider MAY memoize the loaded credentials for certain period.
*/
credentials: MergeFunctions<AwsCredentialIdentityProvider, MemoizedProvider<AwsCredentialIdentity>> & AwsSdkSigV4Memoized;
/**
* Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.signer}
*/
signer: (authScheme?: AuthScheme) => Promise<RequestSigner>;
/**
* Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.signingEscapePath}
*/
signingEscapePath: boolean;
/**
* Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.systemClockOffset}
*/
systemClockOffset: number;
}
/**
* @internal
*/
export declare const resolveAwsSdkSigV4Config: <T>(config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved) => T & AwsSdkSigV4AuthResolvedConfig;
/**
* @internal
* @deprecated renamed to {@link AwsSdkSigV4AuthInputConfig}
*/
export interface AWSSDKSigV4AuthInputConfig extends AwsSdkSigV4AuthInputConfig {
}
/**
* @internal
* @deprecated renamed to {@link AwsSdkSigV4PreviouslyResolved}
*/
export interface AWSSDKSigV4PreviouslyResolved extends AwsSdkSigV4PreviouslyResolved {
}
/**
* @internal
* @deprecated renamed to {@link AwsSdkSigV4AuthResolvedConfig}
*/
export interface AWSSDKSigV4AuthResolvedConfig extends AwsSdkSigV4AuthResolvedConfig {
}
/**
* @internal
* @deprecated renamed to {@link resolveAwsSdkSigV4Config}
*/
export declare const resolveAWSSDKSigV4Config: <T>(config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved) => T & AwsSdkSigV4AuthResolvedConfig;

View File

@@ -0,0 +1,2 @@
export * from "./aws_sdk";
export * from "./utils/getBearerTokenEnvKey";

View File

@@ -0,0 +1,8 @@
/**
* Converts a comma-separated string into an array of trimmed strings
* @param str The comma-separated input string to split
* @returns Array of trimmed strings split from the input
*
* @internal
*/
export declare const getArrayForCommaSeparatedString: (str: string) => string[];

View File

@@ -0,0 +1,6 @@
/**
* Returns an environment variable key base on signing name.
* @param signingName - The signing name to use in the key
* @returns The environment variable key in format AWS_BEARER_TOKEN_<SIGNING_NAME>
*/
export declare const getBearerTokenEnvKey: (signingName: string) => string;

View File

@@ -0,0 +1,4 @@
/**
* @internal
*/
export declare const getDateHeader: (response: unknown) => string | undefined;

View File

@@ -0,0 +1,8 @@
/**
* @internal
*
* Returns a date that is corrected for clock skew.
*
* @param systemClockOffset The offset of the system clock in milliseconds.
*/
export declare const getSkewCorrectedDate: (systemClockOffset: number) => Date;

View File

@@ -0,0 +1,10 @@
/**
* @internal
*
* If clock is skewed, it returns the difference between serverTime and current time.
* If clock is not skewed, it returns currentSystemClockOffset.
*
* @param clockTime The string value of the server time.
* @param currentSystemClockOffset The current system clock offset.
*/
export declare const getUpdatedSystemClockOffset: (clockTime: string, currentSystemClockOffset: number) => number;

View File

@@ -0,0 +1,3 @@
export * from "./getDateHeader";
export * from "./getSkewCorrectedDate";
export * from "./getUpdatedSystemClockOffset";

View File

@@ -0,0 +1,9 @@
/**
* @internal
*
* Checks if the provided date is within the skew window of 300000ms.
*
* @param clockTime - The time to check for skew in milliseconds.
* @param systemClockOffset - The offset of the system clock in milliseconds.
*/
export declare const isClockSkewed: (clockTime: number, systemClockOffset: number) => boolean;

View File

@@ -0,0 +1,8 @@
import type { ConfigurableSerdeContext, SerdeFunctions } from "@smithy/types";
/**
* @internal
*/
export declare class SerdeContextConfig implements ConfigurableSerdeContext {
protected serdeContext?: SerdeFunctions;
setSerdeContext(serdeContext: SerdeFunctions): void;
}

View File

@@ -0,0 +1,61 @@
import { NormalizedSchema, TypeRegistry } from "@smithy/core/schema";
import { ServiceException as SDKBaseServiceException } from "@smithy/smithy-client";
import type { HttpResponse as IHttpResponse, MetadataBearer, ResponseMetadata, StaticErrorSchema } from "@smithy/types";
/**
* @internal
*/
type ErrorMetadataBearer = MetadataBearer & {
$fault: "client" | "server";
};
/**
* Shared code for Protocols.
*
* @internal
*/
export declare class ProtocolLib {
private queryCompat;
constructor(queryCompat?: boolean);
/**
* This is only for REST protocols.
*
* @param defaultContentType - of the protocol.
* @param inputSchema - schema for which to determine content type.
*
* @returns content-type header value or undefined when not applicable.
*/
resolveRestContentType(defaultContentType: string, inputSchema: NormalizedSchema): string | undefined;
/**
* Shared code for finding error schema or throwing an unmodeled base error.
* @returns error schema and error metadata.
*
* @throws ServiceBaseException or generic Error if no error schema could be found.
*/
getErrorSchemaOrThrowBaseException(errorIdentifier: string, defaultNamespace: string, response: IHttpResponse, dataObject: any, metadata: ResponseMetadata, getErrorSchema?: (registry: TypeRegistry, errorName: string) => StaticErrorSchema): Promise<{
errorSchema: StaticErrorSchema;
errorMetadata: ErrorMetadataBearer;
}>;
/**
* Assigns additions onto exception if not already present.
*/
decorateServiceException<E extends SDKBaseServiceException>(exception: E, additions?: Record<string, any>): E;
/**
* Reads the x-amzn-query-error header for awsQuery compatibility.
*
* @param output - values that will be assigned to an error object.
* @param response - from which to read awsQueryError headers.
*/
setQueryCompatError(output: Record<string, any>, response: IHttpResponse): void;
/**
* Assigns Error, Type, Code from the awsQuery error object to the output error object.
* @param queryCompatErrorData - query compat error object.
* @param errorData - canonical error object returned to the caller.
*/
queryCompatOutput(queryCompatErrorData: any, errorData: any): void;
/**
* Finds the canonical modeled error using the awsQueryError alias.
* @param registry - service error registry.
* @param errorName - awsQueryError name or regular qualified shapeId.
*/
findQueryCompatibleError(registry: TypeRegistry, errorName: string): StaticErrorSchema;
}
export {};

View File

@@ -0,0 +1,24 @@
/**
 * Helper for identifying unknown union members during deserialization.
 */
export declare class UnionSerde {
    private from;
    private to;
    private keys;
    /**
     * @param from - raw source object whose keys are being deserialized.
     * @param to - target object that deserialized members are written into.
     */
    constructor(from: any, to: any);
    /**
     * Marks the key as being a known member.
     * @param key - to mark.
     */
    mark(key: string): void;
    /**
     * @returns whether only one key remains unmarked and nothing has been written,
     * implying the object is a union.
     */
    hasUnknown(): boolean;
    /**
     * Writes the unknown key-value pair, if present, into the $unknown property
     * of the union object.
     */
    writeUnknown(): void;
}

View File

@@ -0,0 +1,23 @@
import { SmithyRpcV2CborProtocol } from "@smithy/core/cbor";
import type { EndpointBearer, HandlerExecutionContext, HttpRequest, HttpResponse, OperationSchema, ResponseMetadata, SerdeFunctions } from "@smithy/types";
/**
* Extends the Smithy implementation to add AwsQueryCompatibility support.
*
* @public
*/
export declare class AwsSmithyRpcV2CborProtocol extends SmithyRpcV2CborProtocol {
private readonly awsQueryCompatible;
private readonly mixin;
constructor({ defaultNamespace, awsQueryCompatible, }: {
defaultNamespace: string;
awsQueryCompatible?: boolean;
});
/**
* @override
*/
serializeRequest<Input extends object>(operationSchema: OperationSchema, input: Input, context: HandlerExecutionContext & SerdeFunctions & EndpointBearer): Promise<HttpRequest>;
/**
* @override
*/
protected handleError(operationSchema: OperationSchema, context: HandlerExecutionContext & SerdeFunctions, response: HttpResponse, dataObject: any, metadata: ResponseMetadata): Promise<never>;
}

View File

@@ -0,0 +1,18 @@
/**
 * @internal
 *
 * Used for awsQueryCompatibility trait.
 * Coerces a value to its string form, or returns `undefined` for absent
 * values. NOTE(review): exact coercion rules (number/boolean formatting)
 * live in the implementation — confirm there.
 */
export declare const _toStr: (val: unknown) => string | undefined;
/**
 * @internal
 *
 * Used for awsQueryCompatibility trait.
 * Coerces a value to a boolean, or returns `undefined` for absent values.
 */
export declare const _toBool: (val: unknown) => boolean | undefined;
/**
 * @internal
 *
 * Used for awsQueryCompatibility trait.
 * Coerces a value to a number, or returns `undefined` for absent values.
 */
export declare const _toNum: (val: unknown) => number | undefined;

View File

@@ -0,0 +1,2 @@
import type { SerdeFunctions } from "@smithy/types";
/**
 * Collects an HTTP response stream body into a UTF-8 string using the
 * context's stream-collection serde utilities.
 *
 * @param streamBody - the (possibly streaming) response body to drain.
 * @param context - serde functions providing the stream collector.
 * @returns the fully-collected body as a string.
 */
export declare const collectBodyString: (streamBody: any, context: SerdeFunctions) => Promise<string>;

View File

@@ -0,0 +1,18 @@
/**
 * Barrel module for the AWS protocol implementations:
 * CBOR (Smithy RPC v2), JSON (AWS JSON 1.0/1.1, REST-JSON), Query
 * (AWS Query, EC2 Query), and XML (REST-XML), along with their shared
 * codecs, shape (de)serializers, and body-parsing helpers.
 */
export * from "./cbor/AwsSmithyRpcV2CborProtocol";
export * from "./coercing-serializers";
export * from "./json/AwsJson1_0Protocol";
export * from "./json/AwsJson1_1Protocol";
export * from "./json/AwsJsonRpcProtocol";
export * from "./json/AwsRestJsonProtocol";
export * from "./json/JsonCodec";
export * from "./json/JsonShapeDeserializer";
export * from "./json/JsonShapeSerializer";
export * from "./json/awsExpectUnion";
export * from "./json/parseJsonBody";
export * from "./query/AwsEc2QueryProtocol";
export * from "./query/AwsQueryProtocol";
export * from "./xml/AwsRestXmlProtocol";
export * from "./xml/XmlCodec";
export * from "./xml/XmlShapeDeserializer";
export * from "./xml/XmlShapeSerializer";
export * from "./xml/parseXmlBody";
View File

@@ -0,0 +1,20 @@
import { AwsJsonRpcProtocol } from "./AwsJsonRpcProtocol";
import type { JsonCodec } from "./JsonCodec";
/**
 * AWS JSON 1.0 protocol (JSON-RPC version "1.0").
 *
 * @public
 * @see https://smithy.io/2.0/aws/protocols/aws-json-1_1-protocol.html#differences-between-awsjson1-0-and-awsjson1-1
 */
export declare class AwsJson1_0Protocol extends AwsJsonRpcProtocol {
    /**
     * @param defaultNamespace - default Smithy namespace for schema resolution.
     * @param serviceTarget - service name used in the `X-Amz-Target` request header.
     * @param awsQueryCompatible - enables AwsQueryCompatibility adjustments when true.
     * @param jsonCodec - optional codec override for JSON (de)serialization.
     */
    constructor({ defaultNamespace, serviceTarget, awsQueryCompatible, jsonCodec, }: {
        defaultNamespace: string;
        serviceTarget: string;
        awsQueryCompatible?: boolean;
        jsonCodec?: JsonCodec;
    });
    /** @returns the Smithy shape id identifying this protocol. */
    getShapeId(): string;
    /** @returns the JSON-RPC version literal for this protocol ("1.0"). */
    protected getJsonRpcVersion(): "1.0";
    /**
     * @override
     * @returns the default `Content-Type` header value for this protocol.
     */
    protected getDefaultContentType(): string;
}

View File

@@ -0,0 +1,20 @@
import { AwsJsonRpcProtocol } from "./AwsJsonRpcProtocol";
import type { JsonCodec } from "./JsonCodec";
/**
 * AWS JSON 1.1 protocol (JSON-RPC version "1.1").
 *
 * @public
 * @see https://smithy.io/2.0/aws/protocols/aws-json-1_1-protocol.html#differences-between-awsjson1-0-and-awsjson1-1
 */
export declare class AwsJson1_1Protocol extends AwsJsonRpcProtocol {
    /**
     * @param defaultNamespace - default Smithy namespace for schema resolution.
     * @param serviceTarget - service name used in the `X-Amz-Target` request header.
     * @param awsQueryCompatible - enables AwsQueryCompatibility adjustments when true.
     * @param jsonCodec - optional codec override for JSON (de)serialization.
     */
    constructor({ defaultNamespace, serviceTarget, awsQueryCompatible, jsonCodec, }: {
        defaultNamespace: string;
        serviceTarget: string;
        awsQueryCompatible?: boolean;
        jsonCodec?: JsonCodec;
    });
    /** @returns the Smithy shape id identifying this protocol. */
    getShapeId(): string;
    /** @returns the JSON-RPC version literal for this protocol ("1.1"). */
    protected getJsonRpcVersion(): "1.1";
    /**
     * @override
     * @returns the default `Content-Type` header value for this protocol.
     */
    protected getDefaultContentType(): string;
}

View File

@@ -0,0 +1,27 @@
import { RpcProtocol } from "@smithy/core/protocols";
import type { EndpointBearer, HandlerExecutionContext, HttpRequest, HttpResponse, OperationSchema, ResponseMetadata, SerdeFunctions, ShapeDeserializer, ShapeSerializer } from "@smithy/types";
import { JsonCodec } from "./JsonCodec";
/**
 * Base class for the AWS JSON RPC protocols; subclasses supply the
 * JSON-RPC version ("1.0" or "1.1") and the corresponding content type.
 *
 * @public
 */
export declare abstract class AwsJsonRpcProtocol extends RpcProtocol {
    // Serializer for converting modeled shapes to the request payload.
    protected serializer: ShapeSerializer<string | Uint8Array>;
    // Deserializer for converting response payloads back to modeled shapes.
    protected deserializer: ShapeDeserializer<string | Uint8Array>;
    // Service name used in the `X-Amz-Target` request header.
    protected serviceTarget: string;
    // JSON codec backing the serializer/deserializer pair.
    private readonly codec;
    // Shared helper implementing query-compatible behavior
    // (NOTE(review): presumably ProtocolLib or similar — confirm in implementation).
    private readonly mixin;
    // Whether AwsQueryCompatibility behavior is active for this client.
    private readonly awsQueryCompatible;
    /**
     * @param defaultNamespace - default Smithy namespace for schema resolution.
     * @param serviceTarget - service name used in the `X-Amz-Target` request header.
     * @param awsQueryCompatible - enables AwsQueryCompatibility adjustments when true.
     * @param jsonCodec - optional codec override for JSON (de)serialization.
     */
    protected constructor({ defaultNamespace, serviceTarget, awsQueryCompatible, jsonCodec, }: {
        defaultNamespace: string;
        serviceTarget: string;
        awsQueryCompatible?: boolean;
        jsonCodec?: JsonCodec;
    });
    /**
     * Serializes the operation input into an HTTP request for the target
     * service/operation.
     */
    serializeRequest<Input extends object>(operationSchema: OperationSchema, input: Input, context: HandlerExecutionContext & SerdeFunctions & EndpointBearer): Promise<HttpRequest>;
    /** @returns the JSON codec used for payload (de)serialization. */
    getPayloadCodec(): JsonCodec;
    /** @returns the JSON-RPC version literal; implemented by subclasses. */
    protected abstract getJsonRpcVersion(): "1.1" | "1.0";
    /**
     * Maps an error response to a modeled exception and throws it
     * (the `Promise<never>` return type indicates this always rejects).
     *
     * @override
     */
    protected handleError(operationSchema: OperationSchema, context: HandlerExecutionContext & SerdeFunctions, response: HttpResponse, dataObject: any, metadata: ResponseMetadata): Promise<never>;
}

Some files were not shown because too many files have changed in this diff Show More