chore: checkpoint before Python removal

This commit is contained in:
2026-03-26 22:33:59 +00:00
parent 683cec9307
commit e568ddf82a
29972 changed files with 11269302 additions and 2 deletions

1
vendor/aws-lc-rs/.cargo-checksum.json vendored Normal file

File diff suppressed because one or more lines are too long

6
vendor/aws-lc-rs/.cargo_vcs_info.json vendored Normal file
View File

@@ -0,0 +1,6 @@
{
"git": {
"sha1": "d61726b69d589460645e1b22ebd9a847b7f2f63a"
},
"path_in_vcs": "aws-lc-rs"
}

510
vendor/aws-lc-rs/Cargo.lock generated vendored Normal file
View File

@@ -0,0 +1,510 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
dependencies = [
"memchr",
]
[[package]]
name = "anstream"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "824a212faf96e9acacdbd09febd34438f8f711fb84e09a8916013cd7815ca28d"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is_terminal_polyfill",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "940b3a0ca603d1eade50a4846a2afffd5ef57a9feac2c0e2ec2e14f9ead76000"
[[package]]
name = "anstyle-parse"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52ce7f38b242319f7cabaa6813055467063ecdc9d355bbb4ce0c68908cd8130e"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc"
dependencies = [
"windows-sys",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d"
dependencies = [
"anstyle",
"once_cell_polyfill",
"windows-sys",
]
[[package]]
name = "aws-lc-fips-sys"
version = "0.13.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8bce4948d2520386c6d92a6ea2d472300257702242e5a1d01d6add52bd2e7c1"
dependencies = [
"bindgen",
"cc",
"cmake",
"dunce",
"fs_extra",
"regex",
]
[[package]]
name = "aws-lc-rs"
version = "1.16.2"
dependencies = [
"aws-lc-fips-sys",
"aws-lc-sys",
"clap",
"hex",
"lazy_static",
"paste",
"regex",
"untrusted",
"zeroize",
]
[[package]]
name = "aws-lc-sys"
version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fa7e52a4c5c547c741610a2c6f123f3881e409b714cd27e6798ef020c514f0a"
dependencies = [
"bindgen",
"cc",
"cmake",
"dunce",
"fs_extra",
]
[[package]]
name = "bindgen"
version = "0.72.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895"
dependencies = [
"bitflags",
"cexpr",
"clang-sys",
"itertools",
"log",
"prettyplease",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
"syn",
]
[[package]]
name = "bitflags"
version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af"
[[package]]
name = "cc"
version = "1.2.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a0dd1ca384932ff3641c8718a02769f1698e7563dc6974ffd03346116310423"
dependencies = [
"find-msvc-tools",
"jobserver",
"libc",
"shlex",
]
[[package]]
name = "cexpr"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
"nom",
]
[[package]]
name = "cfg-if"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]]
name = "clang-sys"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
dependencies = [
"glob",
"libc",
"libloading",
]
[[package]]
name = "clap"
version = "4.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b193af5b67834b676abd72466a96c1024e6a6ad978a1f484bd90b85c94041351"
dependencies = [
"clap_builder",
"clap_derive",
]
[[package]]
name = "clap_builder"
version = "4.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "714a53001bf66416adb0e2ef5ac857140e7dc3a0c48fb28b2f10762fc4b5069f"
dependencies = [
"anstream",
"anstyle",
"clap_lex",
"strsim",
]
[[package]]
name = "clap_derive"
version = "4.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1110bd8a634a1ab8cb04345d8d878267d57c3cf1b38d91b71af6686408bbca6a"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "clap_lex"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8d4a3bb8b1e0c1050499d1815f5ab16d04f0959b233085fb31653fbfc9d98f9"
[[package]]
name = "cmake"
version = "0.1.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75443c44cd6b379beb8c5b45d85d0773baf31cce901fe7bb252f4eff3008ef7d"
dependencies = [
"cc",
]
[[package]]
name = "colorchoice"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d07550c9036bf2ae0c684c4297d503f838287c83c53686d05370d0e139ae570"
[[package]]
name = "dunce"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
[[package]]
name = "either"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "find-msvc-tools"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"
[[package]]
name = "fs_extra"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"
[[package]]
name = "getrandom"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
dependencies = [
"cfg-if",
"libc",
"r-efi",
"wasip2",
]
[[package]]
name = "glob"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "is_terminal_polyfill"
version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]]
name = "jobserver"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
dependencies = [
"getrandom",
"libc",
]
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.183"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d"
[[package]]
name = "libloading"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55"
dependencies = [
"cfg-if",
"windows-link",
]
[[package]]
name = "log"
version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
[[package]]
name = "memchr"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]]
name = "once_cell_polyfill"
version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
[[package]]
name = "paste"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "prettyplease"
version = "0.2.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
dependencies = [
"proc-macro2",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924"
dependencies = [
"proc-macro2",
]
[[package]]
name = "r-efi"
version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "regex"
version = "1.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a"
[[package]]
name = "rustc-hash"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "syn"
version = "2.0.117"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
[[package]]
name = "untrusted"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "utf8parse"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "wasip2"
version = "1.0.2+wasi-0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5"
dependencies = [
"wit-bindgen",
]
[[package]]
name = "windows-link"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-sys"
version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
dependencies = [
"windows-link",
]
[[package]]
name = "wit-bindgen"
version = "0.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5"
[[package]]
name = "zeroize"
version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"

132
vendor/aws-lc-rs/Cargo.toml vendored Normal file
View File

@@ -0,0 +1,132 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
rust-version = "1.71.0"
name = "aws-lc-rs"
version = "1.16.2"
authors = ["AWS-LibCrypto"]
build = "build.rs"
links = "aws_lc_rs_1_16_2_sys"
exclude = [
"third_party/NIST/*",
"tests/**/*",
"*.txt",
"*.p8",
"*.der",
"*.bin",
]
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "aws-lc-rs is a cryptographic library using AWS-LC for its cryptographic operations. This library strives to be API-compatible with the popular Rust library named ring."
homepage = "https://github.com/aws/aws-lc-rs"
documentation = "https://docs.rs/crate/aws-lc-rs"
readme = "README.md"
keywords = [
"crypto",
"cryptography",
"security",
]
license = "ISC AND (Apache-2.0 OR ISC)"
repository = "https://github.com/aws/aws-lc-rs"
[package.metadata.docs.rs]
rustdoc-args = [
"--cfg",
"aws_lc_rs_docsrs",
]
features = ["unstable"]
[package.metadata.cargo-udeps.ignore]
development = [
"which",
"home",
"regex",
"regex-automata",
"regex-syntax",
"proc-macro2",
"jobserver",
"cc",
"once_cell",
]
[features]
alloc = []
asan = [
"aws-lc-sys?/asan",
"aws-lc-fips-sys?/asan",
]
bindgen = [
"aws-lc-sys?/bindgen",
"aws-lc-fips-sys?/bindgen",
]
default = [
"aws-lc-sys",
"alloc",
"ring-io",
"ring-sig-verify",
]
dev-tests-only = []
fips = ["dep:aws-lc-fips-sys"]
non-fips = ["aws-lc-sys"]
prebuilt-nasm = ["aws-lc-sys?/prebuilt-nasm"]
ring-io = ["dep:untrusted"]
ring-sig-verify = ["dep:untrusted"]
test_logging = []
unstable = []
[lib]
name = "aws_lc_rs"
path = "src/lib.rs"
[[example]]
name = "cipher"
path = "examples/cipher.rs"
[[example]]
name = "digest"
path = "examples/digest.rs"
[dependencies.aws-lc-fips-sys]
version = "0.13.1"
optional = true
[dependencies.aws-lc-sys]
version = "0.39.0"
optional = true
default-features = false
[dependencies.untrusted]
version = "0.7.1"
optional = true
[dependencies.zeroize]
version = "1.8.1"
[dev-dependencies.clap]
version = "4.4"
features = ["derive"]
[dev-dependencies.hex]
version = "0.4.3"
[dev-dependencies.lazy_static]
version = "1.5.0"
[dev-dependencies.paste]
version = "1.0.15"
[dev-dependencies.regex]
version = "1.11.1"

10
vendor/aws-lc-rs/Cross.toml vendored Normal file
View File

@@ -0,0 +1,10 @@
[build]
dockerfile = "../docker/linux-cross/Dockerfile"
[build.env]
passthrough = [
"CROSS_CMAKE_SYSTEM_PROCESSOR",
"RUST_BACKTRACE",
"RUST_LOG",
"AWS_LC_RS_DISABLE_SLOW_TESTS",
]

202
vendor/aws-lc-rs/LICENSE vendored Normal file
View File

@@ -0,0 +1,202 @@
SPDX-License-Identifier: ISC AND (Apache-2.0 OR ISC)
Apache 2.0 license
-------------------------------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
ISC license
-------------------------------------
Copyright Amazon.com, Inc. or its affiliates.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

65
vendor/aws-lc-rs/Makefile vendored Normal file
View File

@@ -0,0 +1,65 @@
include ../Makefile
UNAME_S := $(shell uname -s)
AWS_LC_RS_COV_EXTRA_FEATURES := unstable
export AWS_LC_RS_DISABLE_SLOW_TESTS := 1
asan:
# TODO: This build target produces linker error on Mac.
# Run specific tests:
# RUST_BACKTRACE=1 ASAN_OPTIONS=detect_leaks=1 RUSTFLAGS=-Zsanitizer=address RUSTDOCFLAGS=-Zsanitizer=address cargo +nightly test --test ecdsa_tests --target `rustc -vV | sed -n 's|host: ||p'` --features asan
RUST_BACKTRACE=1 ASAN_OPTIONS=detect_leaks=1 RUSTFLAGS=-Zsanitizer=address RUSTDOCFLAGS=-Zsanitizer=address cargo +nightly test --lib --bins --tests --examples --target `rustc -vV | sed -n 's|host: ||p'` --features asan
asan-release:
# TODO: This build target produces linker error on Mac.
# Run specific tests:
# RUST_BACKTRACE=1 ASAN_OPTIONS=detect_leaks=1 RUSTFLAGS=-Zsanitizer=address RUSTDOCFLAGS=-Zsanitizer=address cargo +nightly test --release --test basic_rsa_test --target `rustc -vV | sed -n 's|host: ||p'` --features asan
RUST_BACKTRACE=1 ASAN_OPTIONS=detect_leaks=1 RUSTFLAGS=-Zsanitizer=address RUSTDOCFLAGS=-Zsanitizer=address cargo +nightly test --release --lib --bins --tests --examples --target `rustc -vV | sed -n 's|host: ||p'` --features asan
asan-fips:
# TODO: This build target produces linker error on Mac.
# Run specific tests:
# RUST_BACKTRACE=1 ASAN_OPTIONS=detect_leaks=1 RUSTFLAGS=-Zsanitizer=address RUSTDOCFLAGS=-Zsanitizer=address cargo +nightly test --test ecdsa_tests --target `rustc -vV | sed -n 's|host: ||p'` --no-default-features --features fips,asan
RUST_BACKTRACE=1 ASAN_OPTIONS=detect_leaks=1 RUSTFLAGS=-Zsanitizer=address RUSTDOCFLAGS=-Zsanitizer=address cargo +nightly test --lib --bins --tests --examples --target `rustc -vV | sed -n 's|host: ||p'` --no-default-features --features fips,asan
coverage:
cargo llvm-cov --features "${AWS_LC_RS_COV_EXTRA_FEATURES}" --no-fail-fast --fail-under-lines 95 --ignore-filename-regex "aws-lc(-fips|)-sys/*" --lcov --output-path lcov.info
coverage-fips:
cargo llvm-cov --features "${AWS_LC_RS_COV_EXTRA_FEATURES},fips" --no-fail-fast --fail-under-lines 95 --ignore-filename-regex "aws-lc(-fips|)-sys/*" --lcov --output-path lcov.info
test: export AWS_LC_RS_DISABLE_SLOW_TESTS = 1
test:
cargo test --all-targets
cargo test --all-targets --features unstable
cargo test --release --all-targets --features bindgen,unstable
cargo test --release --all-targets --features fips,bindgen,unstable
cargo test --no-default-features --all-targets --features aws-lc-sys
cargo test --no-default-features --all-targets --features aws-lc-sys,unstable
cargo test --no-default-features --all-targets --features fips
cargo test --no-default-features --all-targets --features fips,unstable
cargo test --no-default-features --all-targets --features aws-lc-sys,ring-sig-verify
cargo test --no-default-features --all-targets --features aws-lc-sys,ring-io
cargo test --no-default-features --all-targets --features aws-lc-sys,alloc
msrv:
cargo msrv verify
clippy:
cargo +nightly clippy --all-targets --features bindgen,fips,unstable -- -W clippy::all -W clippy::pedantic
cargo +nightly clippy --all-targets --features unstable,dev-tests-only -- -W clippy::all -W clippy::pedantic
clippy-fips-fix:
cargo +nightly clippy --all-targets --features bindgen,fips,unstable --fix --allow-dirty -- -W clippy::all -W clippy::pedantic
clippy-fix:
cargo +nightly clippy --all-targets --features bindgen,unstable,dev-tests-only --fix --allow-dirty -- -W clippy::all -W clippy::pedantic
ci: format clippy msrv test coverage api-diff-pub
readme:
cargo readme | tee README.md
.PHONY: asan asan-fips asan-release ci clippy coverage coverage-fips test msrv clippy clippy-fix

233
vendor/aws-lc-rs/README.md vendored Normal file
View File

@@ -0,0 +1,233 @@
# AWS Libcrypto for Rust (aws-lc-rs)
[![Crates.io](https://img.shields.io/crates/v/aws-lc-rs.svg)](https://crates.io/crates/aws-lc-rs)
[![GitHub](https://img.shields.io/badge/GitHub-aws%2Faws--lc--rs-blue)](https://github.com/aws/aws-lc-rs)
A [*ring*](https://github.com/briansmith/ring)-compatible crypto library using the cryptographic
operations provided by [*AWS-LC*](https://github.com/aws/aws-lc). It uses either the
auto-generated [*aws-lc-sys*](https://crates.io/crates/aws-lc-sys) or
[*aws-lc-fips-sys*](https://crates.io/crates/aws-lc-fips-sys)
Foreign Function Interface (FFI) crates found in this repository for invoking *AWS-LC*.
## Build
`aws-lc-rs` is available through [crates.io](https://crates.io/crates/aws-lc-rs). It can
be added to your project in the [standard way](https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html)
using `Cargo.toml`:
```toml
[dependencies]
aws-lc-rs = "1"
```
Consuming projects will need a C/C++ compiler to build.
**Non-FIPS builds (default):**
* CMake is **never** required
* Bindgen is **never** required (pre-generated bindings are provided)
* Go is **never** required
**FIPS builds:** Require **CMake**, **Go**, and potentially **bindgen** depending on the target platform.
See our [User Guide](https://aws.github.io/aws-lc-rs/) for guidance on installing build requirements.
## Feature Flags
##### alloc (default)
Allows implementation to allocate values of arbitrary size. (The meaning of this feature differs
from the "alloc" feature of *ring*.) Currently, this is required by the `io::writer` module.
##### ring-io (default)
Enable feature to access the `io` module.
##### ring-sig-verify (default)
Enable feature to preserve compatibility with ring's `signature::VerificationAlgorithm::verify`
function. This adds a requirement on `untrusted = "0.7.1"`.
##### fips
Enable this feature to have aws-lc-rs use the [*aws-lc-fips-sys*](https://crates.io/crates/aws-lc-fips-sys)
crate for the cryptographic implementations. The aws-lc-fips-sys crate provides bindings to the
latest version of the AWS-LC-FIPS module that has completed FIPS validation testing by an
accredited lab and has been submitted to NIST for certification. This will continue to be the
case as we periodically submit new versions of the AWS-LC-FIPS module to NIST for certification.
Currently, aws-lc-fips-sys binds to
[AWS-LC-FIPS 3.0.x](https://github.com/aws/aws-lc/tree/fips-2024-09-27).
Consult with your local FIPS compliance team to determine the version of AWS-LC-FIPS module that you require. Consumers
needing to remain on a previous version of the AWS-LC-FIPS module should pin to specific versions of aws-lc-rs to avoid
automatically being upgraded to a newer module version.
(See [cargo's documentation](https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html)
on how to specify dependency versions.)
| AWS-LC-FIPS module | aws-lc-rs |
|--------------------|-----------|
| 2.0.x | \<1.12.0 |
| 3.0.x | *latest* |
Refer to the
[NIST Cryptographic Module Validation Program's Modules In Progress List](https://csrc.nist.gov/Projects/cryptographic-module-validation-program/modules-in-process/Modules-In-Process-List)
for the latest status of the static or dynamic AWS-LC Cryptographic Module. Please see the
[FIPS.md in the aws-lc repository](https://github.com/aws/aws-lc/blob/main/crypto/fipsmodule/FIPS.md)
for relevant security policies and information on supported operating environments.
We will also update our release notes and documentation to reflect any changes in FIPS certification status.
##### non-fips
Enable this feature to guarantee that the non-FIPS [*aws-lc-sys*](https://crates.io/crates/aws-lc-sys)
crate is used for cryptographic implementations. This feature is mutually exclusive with the `fips`
feature - enabling both will result in a compile-time error. Use this feature when you need a
compile-time guarantee that your build is using the non-FIPS cryptographic module.
##### asan
Performs an "address sanitizer" build. This can be used to help detect memory leaks. See the
["Address Sanitizer" section](https://doc.rust-lang.org/beta/unstable-book/compiler-flags/sanitizer.html#addresssanitizer)
of the [Rust Unstable Book](https://doc.rust-lang.org/beta/unstable-book/).
##### bindgen
Causes `aws-lc-sys` or `aws-lc-fips-sys` to generate fresh bindings for AWS-LC instead of using
the pre-generated bindings. This feature requires `libclang` to be installed. See the
[requirements](https://rust-lang.github.io/rust-bindgen/requirements.html)
for [rust-bindgen](https://github.com/rust-lang/rust-bindgen)
##### prebuilt-nasm
Enables the use of crate provided prebuilt NASM objects under certain conditions. This only affects builds for
Windows x86-64 platforms. This feature is ignored if the "fips" feature is also enabled.
Use of prebuilt NASM objects is prevented if either of the following conditions is true:
* The NASM assembler is detected in the build environment
* `AWS_LC_SYS_PREBUILT_NASM` environment variable is set with a value of `0`
Be aware that [features are additive](https://doc.rust-lang.org/cargo/reference/features.html#feature-unification);
by enabling this feature, it is enabled for all crates within the same build.
##### dev-tests-only
Enables the `rand::unsealed` module, which re-exports the normally sealed `SecureRandom` trait.
This allows consumers to provide their own implementations of `SecureRandom` (e.g., a
deterministic RNG) for testing purposes. When enabled, a `mut_fill` method is also available on
`SecureRandom`.
This feature is restricted to **dev/debug profile builds only** — attempting to use it in a
release build will result in a compile-time error.
It can be enabled in two ways:
* **Feature flag:** `cargo test --features dev-tests-only`
* **Environment variable:** `AWS_LC_RS_DEV_TESTS_ONLY=1 cargo test`
**⚠️ Warning:** This feature is intended **only** for development and testing. It must not be
used in production builds. The `rand::unsealed` module and `mut_fill` method are not part of the
stable public API and may change without notice.
## Use of prebuilt NASM objects
Prebuilt NASM objects are **only** applicable to Windows x86-64 platforms. They are **never** used on any other platform (Linux, macOS, etc.).
For Windows x86 and x86-64, NASM is required for assembly code compilation. On these platforms,
we recommend that you install [the NASM assembler](https://www.nasm.us/). **If NASM is
detected in the build environment, it is always used** to compile the assembly files. Prebuilt NASM objects are only used as a fallback.
If a NASM assembler is not available, and the "fips" feature is not enabled, then the build fails unless one of the following conditions is true:
* You are building for `x86-64` and either:
* The `AWS_LC_SYS_PREBUILT_NASM` environment variable is found and has a value of "1"; OR
* `AWS_LC_SYS_PREBUILT_NASM` is *not found* in the environment AND the "prebuilt-nasm" feature has been enabled.
If the above cases apply, then the crate provided prebuilt NASM objects will be used for the build. To prevent usage of prebuilt NASM
objects, install NASM in the build environment and/or set the variable `AWS_LC_SYS_PREBUILT_NASM` to `0` in the build environment to prevent their use.
### About prebuilt NASM objects
Prebuilt NASM objects are generated using automation similar to the crate provided pregenerated bindings. See the repository's
[GitHub workflow configuration](https://github.com/aws/aws-lc-rs/blob/main/.github/workflows/sys-bindings-generator.yml) for more information.
The prebuilt NASM objects are checked into the repository
and are [available for inspection](https://github.com/aws/aws-lc-rs/tree/main/aws-lc-sys/builder/prebuilt-nasm).
For each PR submitted,
[CI verifies](https://github.com/aws/aws-lc-rs/blob/main/.github/workflows/tests.yml)
that the NASM objects newly built from source match the NASM objects currently in the repository.
## *ring*-compatibility
Although this library attempts to be fully compatible with *ring* (v0.16.x), there are a few places where our
behavior is observably different.
* Our implementation requires the `std` library. We currently do not support a
[`#![no_std]`](https://docs.rust-embedded.org/book/intro/no-std.html) build.
* `aws-lc-rs` supports the platforms supported by `aws-lc-sys` and AWS-LC. See the
[Platform Support](https://aws.github.io/aws-lc-rs/platform_support.html) page in our User Guide.
* `Ed25519KeyPair::from_pkcs8` and `Ed25519KeyPair::from_pkcs8_maybe_unchecked` both support
parsing of v1 or v2 PKCS#8 documents. If a v2 encoded key is provided to either function,
the public key component, if present, will be verified to match the one derived from the encoded
private key.
## Post-Quantum Cryptography
Details on the post-quantum algorithms supported by aws-lc-rs can be found at
[PQREADME](https://github.com/aws/aws-lc/tree/main/crypto/fipsmodule/PQREADME.md).
## Motivation
Rust developers increasingly need to deploy applications that meet US and Canadian government
cryptographic requirements. We evaluated how to deliver FIPS validated cryptography in idiomatic
and performant Rust, built around our AWS-LC offering. We found that the popular ring (v0.16)
library fulfilled much of the cryptographic needs in the Rust community, but it did not meet the
needs of developers with FIPS requirements. Our intention is to contribute a drop-in replacement
for ring that provides FIPS support and is compatible with the ring API. Rust developers with
prescribed cryptographic requirements can seamlessly integrate aws-lc-rs into their applications
and deploy them into AWS Regions.
### Contributor Quickstart for Amazon Linux 2023
For those who would like to contribute to our project or build it directly from our repository,
a few more packages may be needed. The listing below shows the steps needed for you to begin
building and testing our project locally.
```shell
# Install dependencies needed for build and testing
sudo yum install -y cmake3 clang git clang-libs golang openssl-devel perl-FindBin
# Install Rust
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
source "$HOME/.cargo/env"
# Clone and initialize a local repository
git clone https://github.com/aws/aws-lc-rs.git
cd aws-lc-rs
git submodule update --init --recursive
# Build and test the project
cargo test
```
## Questions, Feedback and Contributing
* [Submit a non-security Bug/Issue/Request](https://github.com/aws/aws-lc-rs/issues/new/choose)
* [API documentation](https://docs.rs/aws-lc-rs/)
* [Fork our repo](https://github.com/aws/aws-lc-rs/fork)
We use [GitHub Issues](https://github.com/aws/aws-lc-rs/issues/new/choose) for managing feature requests, bug
reports, or questions about aws-lc-rs API usage.
Otherwise, if you think you might have found a security impacting issue, please instead
follow our *Security Notification Process* below.
## Security Notification Process
If you discover a potential security issue in *AWS-LC* or *aws-lc-rs*, we ask that you notify AWS
Security via our
[vulnerability reporting page](https://aws.amazon.com/security/vulnerability-reporting/).
Please do **not** create a public GitHub issue.
If you package or distribute *aws-lc-rs*, or use *aws-lc-rs* as part of a large multi-user service,
you may be eligible for pre-notification of future *aws-lc-rs* releases.
Please contact aws-lc-pre-notifications@amazon.com.
## License
This library is licensed under the Apache-2.0 or the ISC License.

57
vendor/aws-lc-rs/README.tpl vendored Normal file
View File

@@ -0,0 +1,57 @@
# AWS Libcrypto for Rust ({{crate}})
[![Crates.io](https://img.shields.io/crates/v/aws-lc-rs.svg)](https://crates.io/crates/aws-lc-rs)
[![GitHub](https://img.shields.io/badge/GitHub-aws%2Faws--lc--rs-blue)](https://github.com/aws/aws-lc-rs)
{{readme}}
### Contributor Quickstart for Amazon Linux 2023
For those who would like to contribute to our project or build it directly from our repository,
a few more packages may be needed. The listing below shows the steps needed for you to begin
building and testing our project locally.
```shell
# Install dependencies needed for build and testing
sudo yum install -y cmake3 clang git clang-libs golang openssl-devel perl-FindBin
# Install Rust
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
source "$HOME/.cargo/env"
# Clone and initialize a local repository
git clone https://github.com/aws/aws-lc-rs.git
cd aws-lc-rs
git submodule update --init --recursive
# Build and test the project
cargo test
```
## Questions, Feedback and Contributing
* [Submit a non-security Bug/Issue/Request](https://github.com/aws/aws-lc-rs/issues/new/choose)
* [API documentation](https://docs.rs/aws-lc-rs/)
* [Fork our repo](https://github.com/aws/aws-lc-rs/fork)
We use [GitHub Issues](https://github.com/aws/aws-lc-rs/issues/new/choose) for managing feature requests, bug
reports, or questions about aws-lc-rs API usage.
Otherwise, if you think you might have found a security impacting issue, please instead
follow our *Security Notification Process* below.
## Security Notification Process
If you discover a potential security issue in *AWS-LC* or *aws-lc-rs*, we ask that you notify AWS
Security via our
[vulnerability reporting page](https://aws.amazon.com/security/vulnerability-reporting/).
Please do **not** create a public GitHub issue.
If you package or distribute *aws-lc-rs*, or use *aws-lc-rs* as part of a large multi-user service,
you may be eligible for pre-notification of future *aws-lc-rs* releases.
Please contact aws-lc-pre-notifications@amazon.com.
## License
This library is licensed under the Apache-2.0 or the ISC License.

126
vendor/aws-lc-rs/build.rs vendored Normal file
View File

@@ -0,0 +1,126 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use std::env;
/// Build script for aws-lc-rs.
///
/// Responsibilities (all visible below):
/// * reject the mutually-exclusive `fips` + `non-fips` feature combination,
/// * declare the custom `cfg` flags this crate uses,
/// * translate the `AWS_LC_RS_DISABLE_SLOW_TESTS` / `AWS_LC_RS_DEV_TESTS_ONLY`
///   environment variables into `cfg` flags,
/// * add an MSVC linker flag for static-CRT Windows targets, and
/// * re-export the selected sys crate's `DEP_` variables via `export_sys_vars`.
fn main() {
    // `fips` selects aws-lc-fips-sys while `non-fips` guarantees aws-lc-sys;
    // enabling both is contradictory, so fail the build early.
    let has_mutually_exclusive_features = cfg!(feature = "non-fips") && cfg!(feature = "fips");
    assert!(
        !has_mutually_exclusive_features,
        "`fips` and `non-fips` are mutually exclusive crate features."
    );

    // Register custom cfg flags so rustc's `--check-cfg` does not warn on them.
    println!("cargo:rustc-check-cfg=cfg(aws_lc_rs_docsrs)");
    println!("cargo:rustc-check-cfg=cfg(disable_slow_tests)");
    println!("cargo:rustc-check-cfg=cfg(dev_tests_only)");

    // AWS_LC_RS_DISABLE_SLOW_TESTS=1 enables the `disable_slow_tests` cfg;
    // any other value only produces a warning.
    if let Ok(disable) = env::var("AWS_LC_RS_DISABLE_SLOW_TESTS") {
        if disable == "1" {
            println!("cargo:warning=### Slow tests will be disabled! ###");
            println!("cargo:rustc-cfg=disable_slow_tests");
        } else {
            println!("cargo:warning=### Slow tests are enabled: {disable}! ###");
        }
    }
    println!("cargo:rerun-if-env-changed=AWS_LC_RS_DISABLE_SLOW_TESTS");

    // The `dev-tests-only` feature requests the flag; the environment
    // variable, when present, overrides the feature in either direction.
    let mut enable_dev_test_only = None;
    if cfg!(feature = "dev-tests-only") {
        enable_dev_test_only = Some(true);
    }
    // Environment variable can override
    if let Ok(dev_tests) = env::var("AWS_LC_RS_DEV_TESTS_ONLY") {
        println!("cargo:warning=### AWS_LC_RS_DEV_TESTS_ONLY: '{dev_tests}' ###");
        enable_dev_test_only = Some(dev_tests == "1");
    }
    println!("cargo:rerun-if-env-changed=AWS_LC_RS_DEV_TESTS_ONLY");
    if let Some(dev_test_only) = enable_dev_test_only {
        if dev_test_only {
            // Refuse to expose testing hooks in release builds: only profiles
            // whose name mentions dev/debug/test are accepted.
            let profile = env::var("PROFILE").unwrap();
            if !profile.contains("dev") && !profile.contains("debug") && !profile.contains("test") {
                println!("cargo:warning=### PROFILE: '{profile}' ###");
                panic!("dev-tests-only feature only allowed for dev profile builds");
            }
            println!("cargo:warning=### Enabling public testing functions! ###");
            println!("cargo:rustc-cfg=dev_tests_only");
        } else {
            println!("cargo:warning=### AWS_LC_RS_DEV_TESTS_ONLY: Public testing functions not enabled! ###");
        }
    }

    // This appears asymmetric, but it reflects the `cfg` statements in lib.rs that
    // require `aws-lc-sys` to be present when "fips" is not enabled.
    // if `fips` is enabled, then use that
    let sys_crate = if cfg!(feature = "fips") {
        "aws-lc-fips-sys"
    } else if cfg!(feature = "aws-lc-sys") {
        "aws-lc-sys"
    } else {
        panic!(
            "one of the following features must be specified: `aws-lc-sys`, `non-fips`, or `fips`."
        );
    };

    // When using static CRT on Windows MSVC, ignore missing PDB file warnings.
    // The static CRT libraries reference PDB files from Microsoft's build servers
    // which are not available during linking.
    if env::var("CARGO_CFG_TARGET_OS").as_deref() == Ok("windows")
        && env::var("CARGO_CFG_TARGET_ENV").as_deref() == Ok("msvc")
        && env::var("CARGO_CFG_TARGET_FEATURE")
            .is_ok_and(|features| features.contains("crt-static"))
    {
        println!("cargo:rustc-link-arg=/ignore:4099");
    }

    export_sys_vars(sys_crate);
}
/// Re-export the selected sys crate's `DEP_*` metadata variables as
/// `cargo:<key>=<value>` lines.
///
/// Cargo exposes the sys crate's exported values as environment variables of
/// the form `<prefix><version>_<NAME>`. The active version prefix is learned
/// by finding the variable ending in `_INCLUDE`; variables observed before
/// that discovery are buffered and flushed afterwards. Panics if no
/// `_INCLUDE` marker with the expected prefix is found.
fn export_sys_vars(sys_crate: &str) {
    let prefix = if sys_crate == "aws-lc-fips-sys" {
        "DEP_AWS_LC_FIPS_"
    } else {
        "DEP_AWS_LC_"
    };

    let mut selected = String::default();
    let mut pending = Vec::new();

    for (name, value) in std::env::vars() {
        if !selected.is_empty() {
            // Prefix already determined: forward matching vars immediately.
            try_export_var(&selected, &name, &value);
        } else if let Some(remainder) = name.strip_prefix(prefix) {
            if let Some(version) = remainder.strip_suffix("_INCLUDE") {
                // Found the INCLUDE marker: this fixes the selected prefix.
                selected = format!("{prefix}{version}_");
                try_export_var(&selected, &name, &value);
            } else {
                // Right prefix, but the version is still unknown; buffer it.
                pending.push((name, value));
            }
        }
    }

    assert!(!selected.is_empty(), "missing {prefix} include");

    // Flush everything that was seen before the prefix was determined.
    for (name, value) in pending {
        try_export_var(&selected, &name, &value);
    }
}
/// If `name` carries the `selected` prefix, emit it as a lower-cased
/// `cargo:<key>=<value>` instruction (and a rerun-if-env-changed note on
/// stderr). Panics if `selected` is empty, since that indicates a caller bug.
fn try_export_var(selected: &str, name: &str, value: &str) {
    assert!(!selected.is_empty(), "missing selected prefix");
    if let Some(suffix) = name.strip_prefix(selected) {
        eprintln!("cargo:rerun-if-env-changed={name}");
        println!("cargo:{}={value}", suffix.to_lowercase());
    }
}

271
vendor/aws-lc-rs/examples/cipher.rs vendored Normal file
View File

@@ -0,0 +1,271 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! cipher - Perform symmetric cipher encryption/decryption on utf8 plaintext.
//!
//! *cipher* is an example program demonstrating the `aws_lc_rs::cipher` API for *aws-lc-rs*.
//! It demonstrates CTR & CBC mode encryption using AES 128 or 256 bit keys.
//!
//! The program can be run from the command line using cargo:
//! ```sh
//! $ cargo run --example cipher -- encrypt --mode ctr "Hello World"
//! key: b331133eb742497c67ced9520c9a7de3
//! iv: 4e967c7b799e0670431888e2e959e154
//! ciphertext: 88bcbd8d1656d60de739c5
//!
//! $ cargo run --example cipher -- decrypt --mode ctr --key b331133eb742497c67ced9520c9a7de3 --iv 4e967c7b799e0670431888e2e959e154 88bcbd8d1656d60de739c5
//! Hello World
//!
//! $ cargo run --example cipher -- encrypt --mode cbc "Hello World"
//! key: 6489d8ce0c4facf18b872705a05d5ee4
//! iv: 5cd56fb752830ec2459889226c5431bd
//! ciphertext: 6311c14e8104730be124ce1e57e51fe3
//!
//! $ cargo run --example cipher -- decrypt --mode cbc --key 6489d8ce0c4facf18b872705a05d5ee4 --iv 5cd56fb752830ec2459889226c5431bd 6311c14e8104730be124ce1e57e51fe3
//! Hello World
//! ```
use aws_lc_rs::cipher::{
DecryptingKey, DecryptionContext, EncryptingKey, EncryptionContext, PaddedBlockDecryptingKey,
PaddedBlockEncryptingKey, UnboundCipherKey, AES_128, AES_128_KEY_LEN, AES_192, AES_192_KEY_LEN,
AES_256, AES_256_KEY_LEN, AES_CBC_IV_LEN, AES_CTR_IV_LEN,
};
use aws_lc_rs::iv::FixedLength;
use clap::{Parser, Subcommand, ValueEnum};
/// Command-line interface for the cipher example.
#[derive(Parser)]
#[command(author, version, name = "cipher")]
struct Cli {
    /// Selected subcommand (encrypt or decrypt).
    #[command(subcommand)]
    command: Commands,
}
/// AES mode of operation supported by this example.
#[derive(ValueEnum, Clone, Copy)]
enum Mode {
    /// Counter mode.
    Ctr,
    /// Cipher Block Chaining (used with PKCS#7 padding below).
    Cbc,
}
/// Subcommands of the cipher example.
#[derive(Subcommand)]
enum Commands {
    /// Encrypt a UTF-8 plaintext; key/IV are generated when not supplied.
    Encrypt {
        // Typo fix in help text: "Initalization" -> "Initialization".
        #[arg(short, long, help = "Initialization Vector (IV) in hex")]
        iv: Option<String>,
        #[arg(
            short,
            long,
            help = "AES 128 or 256 bit key in hex, if not provided defaults to 128"
        )]
        key: Option<String>,
        #[arg(short, long, value_enum, help = "AES cipher mode")]
        mode: Mode,
        plaintext: String,
    },
    /// Decrypt a hex-encoded ciphertext with the given key and IV.
    Decrypt {
        // Typo fix in help text: "Initalization" -> "Initialization".
        #[arg(short, long, help = "Initialization Vector (IV) in hex")]
        iv: String,
        #[arg(short, long, help = "AES 128 or 256 bit key in hex")]
        key: String,
        #[arg(short, long, value_enum, help = "AES cipher mode")]
        mode: Mode,
        ciphertext: String,
    },
}
/// Entry point: parse the CLI and dispatch to the matching cipher routine.
fn main() -> Result<(), &'static str> {
    match Cli::parse().command {
        Commands::Encrypt {
            iv,
            key,
            mode,
            plaintext,
        } => match mode {
            Mode::Ctr => aes_ctr_encrypt(key, iv, plaintext),
            Mode::Cbc => aes_cbc_encrypt(key, iv, plaintext),
        },
        Commands::Decrypt {
            iv,
            key,
            mode,
            ciphertext,
        } => match mode {
            Mode::Ctr => aes_ctr_decrypt(key, iv, ciphertext),
            Mode::Cbc => aes_cbc_decrypt(key, iv, ciphertext),
        },
    }
}
/// Decode the hex key when one is supplied; otherwise generate a random
/// AES-128 key.
fn construct_key_bytes(key: Option<String>) -> Result<Vec<u8>, &'static str> {
    match key {
        Some(hex_key) => hex::decode(hex_key).map_err(|_| "invalid key"),
        None => {
            let mut generated = vec![0u8; AES_128_KEY_LEN];
            aws_lc_rs::rand::fill(generated.as_mut_slice())
                .map_err(|_| "failed to generate key")?;
            Ok(generated)
        }
    }
}
/// Encrypt `plaintext` with AES-CTR and print the key, IV, and ciphertext in
/// hex. A random key (AES-128) and IV are generated when not provided.
fn aes_ctr_encrypt(
    key: Option<String>,
    iv: Option<String>,
    plaintext: String,
) -> Result<(), &'static str> {
    let key_bytes = construct_key_bytes(key)?;
    let hex_key = hex::encode(key_bytes.as_slice());
    let key = new_unbound_key(key_bytes.as_slice())?;
    // Typo fix: error message previously read "failed to initalized".
    let key = EncryptingKey::ctr(key).map_err(|_| "failed to initialize aes encryption")?;
    let mut ciphertext = Vec::from(plaintext);
    let context = match iv {
        Some(iv) => {
            // Caller-supplied IV: decode it and use the explicit-IV API.
            let context = {
                let v = hex::decode(iv).map_err(|_| "invalid iv")?;
                let v: FixedLength<AES_CTR_IV_LEN> =
                    v.as_slice().try_into().map_err(|_| "invalid iv")?;
                EncryptionContext::Iv128(v)
            };
            key.less_safe_encrypt(ciphertext.as_mut(), context)
        }
        None => key.encrypt(ciphertext.as_mut()),
    }
    .map_err(|_| "failed to encrypt plaintext")?;
    // Recover the IV actually used so it can be displayed.
    let iv: &[u8] = (&context)
        .try_into()
        .map_err(|_| "unexpected encryption context")?;
    let ciphertext = hex::encode(ciphertext.as_slice());
    println!("key: {hex_key}");
    println!("iv: {}", hex::encode(iv));
    println!("ciphertext: {ciphertext}");
    Ok(())
}
/// Decrypt an AES-CTR ciphertext and print the recovered UTF-8 plaintext.
fn aes_ctr_decrypt(key: String, iv: String, ciphertext: String) -> Result<(), &'static str> {
    let key_bytes = construct_key_bytes(Some(key))?;
    let key = new_unbound_key(key_bytes.as_slice())?;
    let iv = {
        let v = hex::decode(iv).map_err(|_| "invalid iv")?;
        let v: FixedLength<AES_CTR_IV_LEN> = v.as_slice().try_into().map_err(|_| "invalid iv")?;
        v
    };
    // Typo fix: error message previously read "failed to initalized".
    let key = DecryptingKey::ctr(key).map_err(|_| "failed to initialize aes decryption")?;
    let mut ciphertext =
        hex::decode(ciphertext).map_err(|_| "ciphertext is not valid hex encoding")?;
    let plaintext = key
        .decrypt(ciphertext.as_mut(), DecryptionContext::Iv128(iv))
        .map_err(|_| "failed to decrypt ciphertext")?;
    let plaintext =
        String::from_utf8(plaintext.into()).map_err(|_| "decrypted text was not a utf8 string")?;
    println!("{plaintext}");
    Ok(())
}
/// Encrypt `plaintext` with AES-CBC (PKCS#7 padding) and print the key, IV,
/// and ciphertext in hex. A random key (AES-128) and IV are generated when
/// not provided.
fn aes_cbc_encrypt(
    key: Option<String>,
    iv: Option<String>,
    plaintext: String,
) -> Result<(), &'static str> {
    let key_bytes = construct_key_bytes(key)?;
    let hex_key = hex::encode(key_bytes.as_slice());
    let key = new_unbound_key(key_bytes.as_slice())?;
    // Typo fix: error message previously read "failed to initalized".
    let key = PaddedBlockEncryptingKey::cbc_pkcs7(key)
        .map_err(|_| "failed to initialize aes encryption")?;
    let mut ciphertext = Vec::from(plaintext);
    let context = match iv {
        Some(iv) => {
            // Caller-supplied IV: decode it and use the explicit-IV API.
            let context = {
                let v = hex::decode(iv).map_err(|_| "invalid iv")?;
                let v: FixedLength<AES_CBC_IV_LEN> =
                    v.as_slice().try_into().map_err(|_| "invalid iv")?;
                EncryptionContext::Iv128(v)
            };
            key.less_safe_encrypt(&mut ciphertext, context)
        }
        None => key.encrypt(&mut ciphertext),
    }
    // Bug fix: this is the encryption step, but the error previously claimed
    // initialization failed; now consistent with the CTR path.
    .map_err(|_| "failed to encrypt plaintext")?;
    // Recover the IV actually used so it can be displayed.
    let iv: &[u8] = (&context)
        .try_into()
        .map_err(|_| "unexpected encryption context")?;
    let ciphertext = hex::encode(ciphertext.as_slice());
    println!("key: {hex_key}");
    println!("iv: {}", hex::encode(iv));
    println!("ciphertext: {ciphertext}");
    Ok(())
}
/// Decrypt an AES-CBC (PKCS#7) ciphertext and print the recovered UTF-8
/// plaintext.
fn aes_cbc_decrypt(key: String, iv: String, ciphertext: String) -> Result<(), &'static str> {
    let key_bytes = construct_key_bytes(Some(key))?;
    let key = new_unbound_key(key_bytes.as_slice())?;
    let iv = {
        let v = hex::decode(iv).map_err(|_| "invalid iv")?;
        let v: FixedLength<AES_CBC_IV_LEN> = v.as_slice().try_into().map_err(|_| "invalid iv")?;
        v
    };
    // Typo fix: error message previously read "failed to initalized".
    let key = PaddedBlockDecryptingKey::cbc_pkcs7(key)
        .map_err(|_| "failed to initialize aes decryption")?;
    let mut ciphertext =
        hex::decode(ciphertext).map_err(|_| "ciphertext is not valid hex encoding")?;
    let plaintext = key
        .decrypt(ciphertext.as_mut(), DecryptionContext::Iv128(iv))
        .map_err(|_| "failed to decrypt ciphertext")?;
    let plaintext =
        String::from_utf8(plaintext.into()).map_err(|_| "decrypted text was not a utf8 string")?;
    println!("{plaintext}");
    Ok(())
}
/// Select the AES algorithm matching the key length (16/24/32 bytes) and
/// construct an unbound cipher key from it.
fn new_unbound_key(key: &[u8]) -> Result<UnboundCipherKey, &'static str> {
    let algorithm = match key.len() {
        AES_128_KEY_LEN => Ok(&AES_128),
        AES_192_KEY_LEN => Ok(&AES_192),
        AES_256_KEY_LEN => Ok(&AES_256),
        _ => Err("invalid aes key length"),
    }?;
    UnboundCipherKey::new(algorithm, key).map_err(|_| "failed to construct aes key")
}

114
vendor/aws-lc-rs/examples/digest.rs vendored Normal file
View File

@@ -0,0 +1,114 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! digest - display the checksum for files.
//!
//! *digest* is an example program using *aws-lc-rs*. It can compute the digest (i.e., "checksum")
//! of files using any of the digest algorithms supported by *aws-lc-rs*.
//!
//! The program can be run from the command line using cargo:
//! ```
//! > cargo run --example digest -- -d sha256 LICENSE
//! ```
use aws_lc_rs::{digest, test};
use clap::{Parser, ValueEnum};
use std::fs::File;
use std::io::{Read, Result};
/// Digest algorithms selectable from the command line.
///
/// Variant names are exposed as CLI values via clap's `ValueEnum` derive.
#[derive(ValueEnum, Clone, Copy, Debug)]
enum DigestType {
    SHA1,
    SHA256,
    SHA384,
    SHA512,
    SHA512_256,
}
impl DigestType {
    /// Map the CLI selection to the corresponding aws-lc-rs digest algorithm.
    fn digest(self) -> &'static digest::Algorithm {
        match self {
            // SHA-1 is exposed through the legacy-only constant.
            DigestType::SHA1 => &digest::SHA1_FOR_LEGACY_USE_ONLY,
            DigestType::SHA256 => &digest::SHA256,
            DigestType::SHA384 => &digest::SHA384,
            DigestType::SHA512 => &digest::SHA512,
            DigestType::SHA512_256 => &digest::SHA512_256,
        }
    }
}
/// Command-line interface for the digest example.
#[derive(Parser, Debug)]
#[command(author, version, name = "digest")]
struct Cli {
    /// Digest algorithm; `main` falls back to SHA-1 when omitted.
    #[arg(short, long, value_enum)]
    digest: Option<DigestType>,
    /// Files to checksum; stdin is read when this list is empty.
    files: Vec<String>,
}
/// Size of the read buffer used when streaming input into the digest context.
const BUFFER_SIZE: usize = 4096;

/// Stream `file` through a digest context for `digest_alg`, print the hex
/// checksum alongside `name`, and return the computed digest. I/O errors are
/// propagated to the caller.
fn process(
    digest_alg: &'static digest::Algorithm,
    file: &mut dyn Read,
    name: &str,
) -> Result<digest::Digest> {
    let mut digest_context = digest::Context::new(digest_alg);
    let mut chunk = [0u8; BUFFER_SIZE];
    loop {
        // Pull the next chunk of bytes from the input.
        let bytes_read = file.read(&mut chunk)?;
        if bytes_read == 0 {
            // EOF reached: finalize the digest and report the checksum.
            let digest = digest_context.finish();
            println!("{} {}", test::to_hex(digest.as_ref()), name);
            return Ok(digest);
        }
        // Fold only the bytes actually read into the running digest.
        digest_context.update(&chunk[..bytes_read]);
    }
}
/// Entry point: checksum each named file (or stdin when none are given),
/// printing a diagnostic per failure and returning the last error, if any.
fn main() -> Result<()> {
    let cli = Cli::parse();
    let digest_alg = cli.digest.unwrap_or(DigestType::SHA1).digest();
    let mut last_error = None;

    if cli.files.is_empty() {
        // No files given: read from standard input, reported as "-".
        if let Err(e) = process(digest_alg, &mut std::io::stdin(), "-") {
            println!("digest: -: {e}");
            last_error = Some(e);
        }
    } else {
        for file_name in cli.files {
            let outcome = File::open(&file_name)
                .and_then(|mut file| process(digest_alg, &mut file, &file_name));
            if let Err(e) = outcome {
                // Report the failure but continue with the remaining files.
                println!("digest: {}: {}", &file_name, e);
                last_error = Some(e);
            }
        }
    }

    match last_error {
        Some(e) => Err(e),
        None => Ok(()),
    }
}

View File

@@ -0,0 +1,254 @@
#!/bin/bash -exu
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0 OR ISC

# This script tests aws-lc-rs integration with the rustls ecosystem (rcgen, webpki, rustls).
# It uses Cargo's [patch.crates-io] feature to override dependencies, which is more robust
# than modifying individual dependency declarations.

# Print usage/help text and exit successfully.
function usage() {
    cat << EOF
Usage: $(basename "$0") [OPTIONS]
Tests aws-lc-rs integration with the rustls ecosystem.
Options:
--latest-release Test against latest stable releases (instead of main branch)
--cleanup Automatically delete cloned repositories on exit
--help Show this help message
Dependencies: jq, cargo-show, cargo-download
EOF
    exit 0
}

# Short-circuit to help before doing any work if --help appears in the args.
[[ " $* " =~ " --help " ]] && usage

# Workspace root: GitHub Actions checkout when set, else the enclosing git repo.
ROOT="${GITHUB_WORKSPACE:-$(git rev-parse --show-toplevel)}"

# Flag parsing (see usage above for meanings).
latest_release=0
cleanup=0
for arg in "$@"; do
    if [ "$arg" = "--latest-release" ]; then
        latest_release=1
    fi
    if [ "$arg" = "--cleanup" ]; then
        cleanup=1
    fi
done
# Verify that the required external tools are installed; report everything
# missing in one pass and exit non-zero if anything is absent.
# Fixed: the cargo-show probe carried a duplicated ">/dev/null 2>&1" redirection.
function check_dependencies() {
    local missing=()
    command -v jq >/dev/null 2>&1 || missing+=("jq")
    command -v cargo-show >/dev/null 2>&1 || missing+=("cargo-show (cargo install cargo-show)")
    command -v cargo-download >/dev/null 2>&1 || missing+=("cargo-download (cargo install cargo-download)")
    if [ ${#missing[@]} -gt 0 ]; then
        echo "Missing dependencies: ${missing[*]}" >&2
        exit 1
    fi
}
check_dependencies
# Cloned repository directories to remove (or report) when the script exits.
CLEANUP_ON_EXIT=()

# EXIT trap: delete the cloned repositories when --cleanup was given,
# otherwise just list them so the user can remove them manually.
function cleanup() {
    if [ ${#CLEANUP_ON_EXIT[@]} -eq 0 ]; then
        return
    fi
    if [ "$cleanup" -eq 0 ]; then
        echo "You can delete the following directories:"
        echo "${CLEANUP_ON_EXIT[@]}"
    else
        for x in "${CLEANUP_ON_EXIT[@]}"; do
            echo "Deleting: ${x}"
            rm -rf "${x}"
        done
    fi
}
trap cleanup EXIT
# Get the latest stable (non-prerelease) version of a crate from crates.io
# Usage: get_latest_stable_version <crate>   (prints the version number)
function get_latest_stable_version() {
    local crate="$1"
    # jq: keep non-yanked versions whose number carries no prerelease tag
    # (alpha/beta/rc), then take the first (newest) entry.
    cargo show --json "$crate" | jq -r '
[.versions[] |
select(.yanked == false and (.num | test("alpha|beta|rc") | not))
][0].num
'
}
# Get the git commit SHA for a specific crate version from crates.io
# Usage: get_crate_commit <crate> <version>   (prints the sha1)
function get_crate_commit() {
    local crate="$1"
    local version="$2"
    local tmp_dir
    tmp_dir="$(mktemp -d)"
    # Published crates embed .cargo_vcs_info.json recording the source commit.
    cargo download -o "$tmp_dir/crate.tar.gz" "${crate}=${version}"
    tar xzf "$tmp_dir/crate.tar.gz" -C "$tmp_dir" --strip-components=1
    local sha
    sha="$(jq -r '.git.sha1' "$tmp_dir/.cargo_vcs_info.json")"
    rm -rf "$tmp_dir"
    echo "$sha"
}
# Add [patch.crates-io] section to a Cargo.toml to override aws-lc-rs and aws-lc-sys
# Usage: add_aws_lc_patch <cargo_toml_path> <aws_lc_rs_workspace_root>
function add_aws_lc_patch() {
    local cargo_toml="$1"
    local aws_lc_workspace="$2"
    # Skip if already patched
    if grep -q "aws-lc-rs = { path = \"${aws_lc_workspace}" "$cargo_toml"; then
        echo "Patch already present in $cargo_toml"
        return
    fi
    local aws_lc_rs_patch="aws-lc-rs = { path = \"${aws_lc_workspace}/aws-lc-rs\" }"
    local aws_lc_sys_patch="aws-lc-sys = { path = \"${aws_lc_workspace}/aws-lc-sys\" }"
    if grep -q '^\[patch\.crates-io\]' "$cargo_toml"; then
        # [patch.crates-io] section exists - insert our patches after the header
        local tmp_file
        tmp_file="$(mktemp)"
        # Remove the temp file when this function returns, success or failure.
        trap "rm -f '$tmp_file'" RETURN
        # Copy line-by-line, injecting the patch entries right after the header.
        # `|| [[ -n "$line" ]]` also processes a final line lacking a newline.
        while IFS= read -r line || [[ -n "$line" ]]; do
            echo "$line"
            if [[ "$line" == "[patch.crates-io]" ]]; then
                echo "$aws_lc_rs_patch"
                echo "$aws_lc_sys_patch"
            fi
        done < "$cargo_toml" > "$tmp_file"
        mv "$tmp_file" "$cargo_toml"
    else
        # No existing [patch.crates-io] section - append to end of file
        cat >> "$cargo_toml" << EOF
[patch.crates-io]
${aws_lc_rs_patch}
${aws_lc_sys_patch}
EOF
    fi
}
# Clone a repository and optionally checkout a specific commit
# Usage: clone_repo <url> <destination> [commit]
function clone_repo() {
    local url="$1"
    local dest="$2"
    # Commit is optional; default to empty (stay on the default branch tip).
    local commit="${3:-}"
    git clone --recurse-submodules "$url" "$dest"
    if [ -n "$commit" ]; then
        pushd "$dest" > /dev/null
        git checkout "$commit"
        popd > /dev/null
    fi
}
echo "=== Testing rcgen with aws-lc-rs ==="
RCGEN_DIR="$(mktemp -d)"
CLEANUP_ON_EXIT+=("$RCGEN_DIR")
# With --latest-release, pin rcgen to the commit of its newest stable
# crates.io release; otherwise test against the main branch tip.
if [[ $latest_release == "1" ]]; then
    RCGEN_VERSION="$(get_latest_stable_version rcgen)"
    RCGEN_COMMIT="$(get_crate_commit rcgen "$RCGEN_VERSION")"
    echo "Using rcgen version ${RCGEN_VERSION} (commit: ${RCGEN_COMMIT})"
    clone_repo "https://github.com/rustls/rcgen" "$RCGEN_DIR" "$RCGEN_COMMIT"
else
    clone_repo "https://github.com/rustls/rcgen" "$RCGEN_DIR"
fi
pushd "$RCGEN_DIR"
add_aws_lc_patch "Cargo.toml" "$ROOT"
# main branch: refresh the whole lockfile; pinned release: only swap in the
# locally patched aws-lc-rs/aws-lc-sys.
if [[ $latest_release != "1" ]]; then
    rm -f Cargo.lock
    cargo update
else
    cargo update -p aws-lc-rs -p aws-lc-sys
fi
# Confirm the dependency graph actually resolves to our aws-lc-rs, then test.
cargo tree -i aws-lc-rs --features aws_lc_rs
cargo test --features aws_lc_rs
popd > /dev/null
echo "=== Testing rustls-webpki with aws-lc-rs ==="
WEBPKI_DIR="$(mktemp -d)"
CLEANUP_ON_EXIT+=("$WEBPKI_DIR")
# With --latest-release, pin rustls-webpki to its newest stable release commit;
# otherwise test against the main branch tip.
if [[ $latest_release == "1" ]]; then
    WEBPKI_VERSION="$(get_latest_stable_version rustls-webpki)"
    WEBPKI_COMMIT="$(get_crate_commit rustls-webpki "$WEBPKI_VERSION")"
    echo "Using rustls-webpki version ${WEBPKI_VERSION} (commit: ${WEBPKI_COMMIT})"
    clone_repo "https://github.com/rustls/webpki.git" "$WEBPKI_DIR" "$WEBPKI_COMMIT"
else
    clone_repo "https://github.com/rustls/webpki.git" "$WEBPKI_DIR"
fi
pushd "$WEBPKI_DIR"
add_aws_lc_patch "Cargo.toml" "$ROOT"
if [[ $latest_release != "1" ]]; then
    rm -f Cargo.lock
    cargo update
else
    cargo update -p aws-lc-rs -p aws-lc-sys
fi
# Extract just the [features] section and check for aws-lc-rs feature there.
FEATURES_SECTION=$(sed -n '/^\[features\]/,/^\[/p' Cargo.toml)
# The feature may be spelled with hyphens or underscores depending on release.
if echo "$FEATURES_SECTION" | grep -qE '^aws(-|_)lc(-|_)rs\s*='; then
    WEBPKI_FEATURE="aws-lc-rs"
    cargo tree -i aws-lc-rs --features "$WEBPKI_FEATURE"
    cargo test --features "$WEBPKI_FEATURE"
else
    # No aws-lc-rs feature - newer structure uses rustls-aws-lc-rs dev-dependency
    echo "No aws-lc-rs feature found, running tests with default configuration"
    cargo tree -i aws-lc-rs
    cargo test
fi
popd > /dev/null
echo "=== Testing rustls with aws-lc-rs ==="
RUSTLS_DIR="$(mktemp -d)"
CLEANUP_ON_EXIT+=("$RUSTLS_DIR")
# With --latest-release, pin rustls to its newest stable release commit;
# otherwise test against the main branch tip.
if [[ $latest_release == "1" ]]; then
    RUSTLS_VERSION="$(get_latest_stable_version rustls)"
    RUSTLS_COMMIT="$(get_crate_commit rustls "$RUSTLS_VERSION")"
    echo "Using rustls version ${RUSTLS_VERSION} (commit: ${RUSTLS_COMMIT})"
    clone_repo "https://github.com/rustls/rustls.git" "$RUSTLS_DIR" "$RUSTLS_COMMIT"
else
    clone_repo "https://github.com/rustls/rustls.git" "$RUSTLS_DIR"
fi
pushd "$RUSTLS_DIR"
add_aws_lc_patch "Cargo.toml" "$ROOT"
if [[ $latest_release != "1" ]]; then
    rm -f Cargo.lock
    cargo update
else
    cargo update -p aws-lc-rs -p aws-lc-sys
fi
# Detect which package has the aws-lc-rs feature by checking [features] section.
# Old structure (<=0.23.x): aws-lc-rs feature is in rustls/Cargo.toml
# New structure (>=0.24.x): aws-lc-rs feature is in rustls-test/Cargo.toml
if grep -q '^aws-lc-rs\s*=' ./rustls/Cargo.toml; then
    # Old structure: aws-lc-rs feature is in the main rustls crate
    pushd ./rustls
    cargo tree -i aws-lc-rs --features aws-lc-rs
    cargo test --features aws-lc-rs
    popd > /dev/null # ./rustls
else
    # New structure: aws-lc-rs feature is in rustls-test
    pushd ./rustls-test
    cargo tree -i aws-lc-rs --features aws-lc-rs
    cargo test --features aws-lc-rs
    popd > /dev/null # ./rustls-test
fi
popd > /dev/null # "$RUSTLS_DIR"
echo "=== All rustls integration tests passed ==="

View File

@@ -0,0 +1,19 @@
#!/bin/bash -exu
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0 OR ISC

# Integration test: clone s2n-quic and build/test it against the local
# aws-lc-rs checkout (this script is run from the aws-lc-rs directory, so
# ${PWD} points at the local crate).
S2N_QUIC_TEMP="$(mktemp -d)/s2n-quic"
QUIC_AWS_LC_RS_STRING="^aws-lc-rs = .*"
QUIC_PATH_STRING="aws-lc-rs = { path = \"${PWD}\" }"
# Quote the temp path — mktemp output should never be word-split.
git clone https://github.com/aws/s2n-quic.git "$S2N_QUIC_TEMP"
cd "$S2N_QUIC_TEMP"
# Point every aws-lc-rs dependency in the workspace at the local checkout.
# BSD sed (macOS) requires an explicit empty suffix argument for -i.
if [[ "$(uname)" == "Darwin" ]]; then
    find ./ -type f -name "Cargo.toml" | xargs sed -i '' -e "s|${QUIC_AWS_LC_RS_STRING}|${QUIC_PATH_STRING}|"
else
    find ./ -type f -name "Cargo.toml" | xargs sed -i -e "s|${QUIC_AWS_LC_RS_STRING}|${QUIC_PATH_STRING}|"
fi
cargo test

227
vendor/aws-lc-rs/scripts/run-valgrind.sh vendored Executable file
View File

@@ -0,0 +1,227 @@
#!/usr/bin/env bash
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0 OR ISC
# # Helper script for running aws-lc-rs tests under Valgrind
#
# Usage:
#   ./scripts/run-valgrind.sh [OPTIONS] [TEST_NAME]
#
# Examples:
#   ./scripts/run-valgrind.sh                 # Run all tests
#   ./scripts/run-valgrind.sh pqdsa_test      # Run specific test
#   ./scripts/run-valgrind.sh --no-suppress   # Run without suppressions
#   ./scripts/run-valgrind.sh --release       # Run release build
#   ./scripts/run-valgrind.sh --strict-leaks  # Only check for real leaks (definite/indirect)
set -e
# ANSI colors for terminal output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Default configuration (each value overridable by a flag parsed below)
USE_SUPPRESSIONS=1
BUILD_MODE="debug"
LEAK_CHECK="full"
SHOW_LEAK_KINDS="all"
ERROR_EXITCODE=1
TEST_THREADS=1
FEATURES="unstable"
PACKAGE="aws-lc-rs"
VALGRIND_EXTRA_ARGS=""
GEN_SUPPRESSIONS=0
STRICT_LEAKS=0
# Valgrind multiplies runtime dramatically, so skip tests marked slow.
export AWS_LC_RS_DISABLE_SLOW_TESTS=1
# Parse command line arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        --no-suppress)
            USE_SUPPRESSIONS=0
            shift
            ;;
        --gen-suppressions)
            GEN_SUPPRESSIONS=1
            shift
            ;;
        --strict-leaks)
            STRICT_LEAKS=1
            shift
            ;;
        --release)
            BUILD_MODE="release"
            shift
            ;;
        --debug)
            BUILD_MODE="debug"
            shift
            ;;
        --threads)
            TEST_THREADS="$2"
            shift 2
            ;;
        --features)
            FEATURES="$2"
            shift 2
            ;;
        --package|-p)
            PACKAGE="$2"
            shift 2
            ;;
        --help|-h)
            echo "Usage: $0 [OPTIONS] [TEST_NAME]"
            echo ""
            echo "Options:"
            echo " --no-suppress Disable Valgrind suppressions (show all warnings)"
            echo " --gen-suppressions Generate suppression rules for errors found"
            echo " --strict-leaks Only report real leaks (definite/indirect), ignores"
            echo " possibly lost and still reachable. Use this to verify"
            echo " suppressions aren't masking actual memory leaks."
            echo " --release Use release build (faster but less debug info)"
            echo " --debug Use debug build (default)"
            echo " --threads N Number of test threads (default: 1)"
            echo " --features FEATS Cargo features to enable (default: unstable)"
            echo " --package PKG Package to test (default: aws-lc-rs)"
            echo " --help, -h Show this help message"
            echo ""
            echo "Examples:"
            echo " $0 # Run all tests"
            echo " $0 pqdsa_test # Run specific test"
            echo " $0 --no-suppress # Run without suppressions"
            echo " $0 --gen-suppressions # Generate suppression rules"
            echo " $0 --strict-leaks # Verify no real leaks (ignores false positives)"
            echo " $0 --release pqdsa_test # Run specific test in release mode"
            exit 0
            ;;
        --*)
            echo -e "${RED}Error: Unknown option $1${NC}"
            exit 1
            ;;
        *)
            # Assume it's a test name
            TEST_NAME="$1"
            shift
            ;;
    esac
done
# Get the repository root directory (two levels above this script)
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT/aws-lc-rs"
# Check if Valgrind is installed
if ! command -v valgrind &> /dev/null; then
    echo -e "${RED}Error: Valgrind is not installed${NC}"
    echo "Install it with:"
    echo " Ubuntu/Debian: sudo apt-get install valgrind"
    echo " macOS: brew install valgrind"
    exit 1
fi
# Handle strict-leaks mode - only show definite and indirect leaks (real leaks)
if [ $STRICT_LEAKS -eq 1 ]; then
    SHOW_LEAK_KINDS="definite,indirect"
    USE_SUPPRESSIONS=0 # No need for suppressions in strict mode
    echo -e "${YELLOW}=== STRICT LEAKS MODE ===${NC}"
    echo -e "${YELLOW}Only checking for real memory leaks (definite/indirect).${NC}"
    echo -e "${YELLOW}Possibly lost and still reachable are IGNORED.${NC}"
    echo -e "${YELLOW}If this passes, your suppressions are NOT masking real leaks.${NC}"
    echo ""
fi
# When generating suppressions we want Valgrind to report everything without
# failing the run, so disable the non-zero error exit code BEFORE the command
# string is assembled. (Previously ERROR_EXITCODE was reset only after
# VALGRIND_CMD had already interpolated it, so the override had no effect.)
if [ $GEN_SUPPRESSIONS -eq 1 ]; then
    ERROR_EXITCODE=0
fi
# Build Valgrind command
VALGRIND_CMD="valgrind --error-exitcode=${ERROR_EXITCODE} --leak-check=${LEAK_CHECK} --show-leak-kinds=${SHOW_LEAK_KINDS}"
# Add gen-suppressions if enabled
if [ $GEN_SUPPRESSIONS -eq 1 ]; then
    VALGRIND_CMD="${VALGRIND_CMD} --gen-suppressions=all"
    echo -e "${BLUE}Generating suppression rules for all errors${NC}"
fi
# Add suppression file if enabled
if [ $USE_SUPPRESSIONS -eq 1 ]; then
    SUPPRESSION_FILE="${REPO_ROOT}/.valgrind/rust-test.supp"
    if [ -f "$SUPPRESSION_FILE" ]; then
        VALGRIND_CMD="${VALGRIND_CMD} --suppressions=${SUPPRESSION_FILE}"
        echo -e "${BLUE}Using suppressions from: ${SUPPRESSION_FILE}${NC}"
    else
        # A missing suppression file is non-fatal; the run just gets noisier.
        echo -e "${YELLOW}Warning: Suppression file not found: ${SUPPRESSION_FILE}${NC}"
    fi
else
    echo -e "${YELLOW}Running WITHOUT suppressions - expect false positives${NC}"
fi
# Add any extra Valgrind arguments
if [ -n "$VALGRIND_EXTRA_ARGS" ]; then
    VALGRIND_CMD="${VALGRIND_CMD} ${VALGRIND_EXTRA_ARGS}"
fi
# Build cargo command
CARGO_CMD="cargo test -p ${PACKAGE} --features ${FEATURES}"
if [ "$BUILD_MODE" = "release" ]; then
    CARGO_CMD="${CARGO_CMD} --release"
    echo -e "${BLUE}Using release build${NC}"
else
    echo -e "${BLUE}Using debug build${NC}"
fi
# Add test name if provided
if [ -n "$TEST_NAME" ]; then
    CARGO_CMD="${CARGO_CMD} --test ${TEST_NAME}"
    echo -e "${BLUE}Running test: ${TEST_NAME}${NC}"
else
    echo -e "${BLUE}Running all tests${NC}"
fi
# Add test arguments (single-threaded by default for cleaner Valgrind output)
CARGO_CMD="${CARGO_CMD} -- --test-threads=${TEST_THREADS}"
# Print configuration
echo -e "${GREEN}=== Valgrind Test Configuration ===${NC}"
echo "Package: ${PACKAGE}"
echo "Features: ${FEATURES}"
echo "Build: ${BUILD_MODE}"
echo "Test threads: ${TEST_THREADS}"
echo "Suppressions: $([ $USE_SUPPRESSIONS -eq 1 ] && echo 'enabled' || echo 'disabled')"
echo "Generate suppressions: $([ $GEN_SUPPRESSIONS -eq 1 ] && echo 'enabled' || echo 'disabled')"
echo "Strict leaks mode: $([ $STRICT_LEAKS -eq 1 ] && echo 'enabled (only definite/indirect)' || echo 'disabled')"
echo ""
# Export environment variables
# Cargo's target runner wraps every test-binary invocation in the Valgrind command.
export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${VALGRIND_CMD}"
# Redundant with the export near the top of the script; kept defensively.
export AWS_LC_RS_DISABLE_SLOW_TESTS=1
echo -e "${GREEN}=== Starting Valgrind Test Run ===${NC}"
echo "Command: ${CARGO_CMD}"
echo ""
# Run the tests
if eval ${CARGO_CMD}; then
    echo ""
    echo -e "${GREEN}=== Valgrind tests PASSED ===${NC}"
    exit 0
else
    EXIT_CODE=$?
    echo ""
    echo -e "${RED}=== Valgrind tests FAILED ===${NC}"
    echo ""
    echo "Possible causes:"
    echo " 1. Memory leak detected (check output above)"
    echo " 2. Uninitialized memory usage"
    echo " 3. Invalid memory access"
    echo ""
    echo "Next steps:"
    echo " - Review the Valgrind output above"
    echo " - Check .valgrind/KNOWN_ISSUES.md for known issues"
    echo " - Run with --no-suppress to see all warnings"
    echo " - Run with --gen-suppressions to generate suppression rules"
    echo " - For false positives in stdlib, add to .valgrind/rust-test.supp"
    exit $EXIT_CODE
fi

View File

@@ -0,0 +1,60 @@
#!/bin/bash -exu
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0 OR ISC

# Creates a scratch crate depending on aws-lc-rs (optionally with the `fips`
# feature) plus rustls, patched to build against the local checkout, and
# verifies a release build with a statically linked CRT on Windows-like
# environments (Cygwin/MinGW/MSYS).

# Parse command line arguments
FIPS_MODE=false
for arg in "$@"; do
    case $arg in
        --fips)
            FIPS_MODE=true
            shift
            ;;
        *)
            ;;
    esac
done
# Prefer the CI workspace when available; fall back to the git toplevel.
SRC_ROOT="${GITHUB_WORKSPACE:-$(git rev-parse --show-toplevel)}/aws-lc-rs"
# Only Windows-style shells are supported; exit on anything else.
case `uname -s` in
    CYGWIN*) echo Cygwin;;
    MINGW*) echo MinGw;;
    MSYS_NT*) echo MSys;;
    *) echo Unknown OS: `uname -s`; exit 1;;
esac
TMP_DIR=`mktemp -d`
pushd "${TMP_DIR}"
cargo new --bin aws-lc-rs-test
pushd aws-lc-rs-test
# Add aws-lc-rs with or without fips feature
if [ "$FIPS_MODE" = true ]; then
    cargo add aws-lc-rs --features fips
else
    cargo add aws-lc-rs
fi
cargo add rustls rustls-platform-verifier
# Patch crates.io so the generated crate builds against the local checkout;
# ${SRC_ROOT//\\/\/} rewrites backslashes to forward slashes for valid TOML.
cat << EOF >> Cargo.toml
[profile.release]
debug = "limited"
[patch.crates-io]
"aws-lc-rs" = { path = "${SRC_ROOT//\\/\/}" }
EOF
mkdir -p .cargo
# Statically link the C runtime for Windows targets.
cat << EOF > .cargo/config.toml
[target.'cfg(target_os = "windows")']
rustflags = ["-C", "target-feature=+crt-static"]
EOF
cargo update
cargo build --release
popd
popd
1095
vendor/aws-lc-rs/src/aead.rs vendored Normal file

File diff suppressed because it is too large Load Diff

296
vendor/aws-lc-rs/src/aead/aead_ctx.rs vendored Normal file
View File

@@ -0,0 +1,296 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use core::mem::size_of;
use core::ptr::null_mut;
use crate::cipher::chacha;
use crate::aws_lc::{
evp_aead_direction_t, evp_aead_direction_t_evp_aead_open, evp_aead_direction_t_evp_aead_seal,
EVP_AEAD_CTX_init, EVP_AEAD_CTX_init_with_direction, EVP_AEAD_CTX_zero, EVP_aead_aes_128_gcm,
EVP_aead_aes_128_gcm_randnonce, EVP_aead_aes_128_gcm_siv, EVP_aead_aes_128_gcm_tls12,
EVP_aead_aes_128_gcm_tls13, EVP_aead_aes_192_gcm, EVP_aead_aes_256_gcm,
EVP_aead_aes_256_gcm_randnonce, EVP_aead_aes_256_gcm_siv, EVP_aead_aes_256_gcm_tls12,
EVP_aead_aes_256_gcm_tls13, EVP_aead_chacha20_poly1305, OPENSSL_malloc, EVP_AEAD_CTX,
};
use crate::cipher::aes::{AES_128_KEY_LEN, AES_192_KEY_LEN, AES_256_KEY_LEN};
use crate::error::Unspecified;
use crate::ptr::LcPtr;
/// Direction for AEAD contexts that are initialized for exactly one of
/// opening (decrypt/verify) or sealing (encrypt/sign); only the TLS
/// record contexts below pass a direction.
pub(crate) enum AeadDirection {
    Open,
    Seal,
}
impl From<AeadDirection> for evp_aead_direction_t {
    /// Maps the crate-internal direction onto AWS-LC's C enum value.
    fn from(direction: AeadDirection) -> Self {
        match direction {
            AeadDirection::Seal => evp_aead_direction_t_evp_aead_seal,
            AeadDirection::Open => evp_aead_direction_t_evp_aead_open,
        }
    }
}
/// An initialized AWS-LC `EVP_AEAD_CTX`, tagged with the algorithm it was
/// initialized for. Every variant carries the same owned pointer type; the
/// variant only records which AEAD the context was built with.
#[allow(
    clippy::large_enum_variant,
    variant_size_differences,
    non_camel_case_types
)]
pub(crate) enum AeadCtx {
    AES_128_GCM(LcPtr<EVP_AEAD_CTX>),
    AES_192_GCM(LcPtr<EVP_AEAD_CTX>),
    AES_256_GCM(LcPtr<EVP_AEAD_CTX>),
    AES_128_GCM_SIV(LcPtr<EVP_AEAD_CTX>),
    AES_256_GCM_SIV(LcPtr<EVP_AEAD_CTX>),
    AES_128_GCM_RANDNONCE(LcPtr<EVP_AEAD_CTX>),
    AES_256_GCM_RANDNONCE(LcPtr<EVP_AEAD_CTX>),
    AES_128_GCM_TLS12(LcPtr<EVP_AEAD_CTX>),
    AES_256_GCM_TLS12(LcPtr<EVP_AEAD_CTX>),
    AES_128_GCM_TLS13(LcPtr<EVP_AEAD_CTX>),
    AES_256_GCM_TLS13(LcPtr<EVP_AEAD_CTX>),
    CHACHA20_POLY1305(LcPtr<EVP_AEAD_CTX>),
}
// SAFETY: NOTE(review) — these impls assume the wrapped `EVP_AEAD_CTX` is not
// mutated through shared references after initialization; confirm against
// AWS-LC's thread-safety guarantees for EVP_AEAD seal/open operations.
unsafe impl Send for AeadCtx {}
unsafe impl Sync for AeadCtx {}
impl AeadCtx {
    /// Creates an AES-128-GCM context; the key must be `AES_128_KEY_LEN` bytes.
    pub(crate) fn aes_128_gcm(key_bytes: &[u8], tag_len: usize) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_128_GCM(AeadCtx::aes_128_context(
            EVP_aead_aes_128_gcm,
            key_bytes,
            tag_len,
            None,
        )?))
    }

    /// Creates an AES-128-GCM-SIV context; the key must be `AES_128_KEY_LEN` bytes.
    pub(crate) fn aes_128_gcm_siv(key_bytes: &[u8], tag_len: usize) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_128_GCM_SIV(AeadCtx::aes_128_context(
            EVP_aead_aes_128_gcm_siv,
            key_bytes,
            tag_len,
            None,
        )?))
    }

    /// Creates an AES-192-GCM context; the key must be `AES_192_KEY_LEN` bytes.
    pub(crate) fn aes_192_gcm(key_bytes: &[u8], tag_len: usize) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_192_GCM(AeadCtx::aes_192_context(
            EVP_aead_aes_192_gcm,
            key_bytes,
            tag_len,
            None,
        )?))
    }

    /// Creates an AES-256-GCM context; the key must be `AES_256_KEY_LEN` bytes.
    pub(crate) fn aes_256_gcm(key_bytes: &[u8], tag_len: usize) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_256_GCM(AeadCtx::aes_256_context(
            EVP_aead_aes_256_gcm,
            key_bytes,
            tag_len,
            None,
        )?))
    }

    /// Creates an AES-256-GCM-SIV context; the key must be `AES_256_KEY_LEN` bytes.
    pub(crate) fn aes_256_gcm_siv(key_bytes: &[u8], tag_len: usize) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_256_GCM_SIV(AeadCtx::aes_256_context(
            EVP_aead_aes_256_gcm_siv,
            key_bytes,
            tag_len,
            None,
        )?))
    }

    /// Creates an AES-128-GCM context that generates its own random nonces.
    /// The context is initialized with `tag_len + nonce_len` — NOTE(review):
    /// presumably the randnonce AEAD carries the nonce in the tag space;
    /// confirm against AWS-LC's `EVP_aead_aes_128_gcm_randnonce` docs.
    pub(crate) fn aes_128_gcm_randnonce(
        key_bytes: &[u8],
        tag_len: usize,
        nonce_len: usize,
    ) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_128_GCM_RANDNONCE(AeadCtx::aes_128_context(
            EVP_aead_aes_128_gcm_randnonce,
            key_bytes,
            tag_len + nonce_len,
            None,
        )?))
    }

    /// Creates an AES-256-GCM context that generates its own random nonces.
    /// See the note on [`AeadCtx::aes_128_gcm_randnonce`] for the
    /// `tag_len + nonce_len` initialization.
    pub(crate) fn aes_256_gcm_randnonce(
        key_bytes: &[u8],
        tag_len: usize,
        nonce_len: usize,
    ) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_256_GCM_RANDNONCE(AeadCtx::aes_256_context(
            EVP_aead_aes_256_gcm_randnonce,
            key_bytes,
            tag_len + nonce_len,
            None,
        )?))
    }

    /// Creates an AES-128-GCM TLS 1.2 record context; requires an explicit
    /// open/seal direction.
    pub(crate) fn aes_128_gcm_tls12(
        key_bytes: &[u8],
        tag_len: usize,
        direction: AeadDirection,
    ) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_128_GCM_TLS12(AeadCtx::aes_128_context(
            EVP_aead_aes_128_gcm_tls12,
            key_bytes,
            tag_len,
            Some(direction),
        )?))
    }

    /// Creates an AES-256-GCM TLS 1.2 record context; requires an explicit
    /// open/seal direction.
    pub(crate) fn aes_256_gcm_tls12(
        key_bytes: &[u8],
        tag_len: usize,
        direction: AeadDirection,
    ) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_256_GCM_TLS12(AeadCtx::aes_256_context(
            EVP_aead_aes_256_gcm_tls12,
            key_bytes,
            tag_len,
            Some(direction),
        )?))
    }

    /// Creates an AES-128-GCM TLS 1.3 record context; requires an explicit
    /// open/seal direction.
    pub(crate) fn aes_128_gcm_tls13(
        key_bytes: &[u8],
        tag_len: usize,
        direction: AeadDirection,
    ) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_128_GCM_TLS13(AeadCtx::aes_128_context(
            EVP_aead_aes_128_gcm_tls13,
            key_bytes,
            tag_len,
            Some(direction),
        )?))
    }

    /// Creates an AES-256-GCM TLS 1.3 record context; requires an explicit
    /// open/seal direction.
    pub(crate) fn aes_256_gcm_tls13(
        key_bytes: &[u8],
        tag_len: usize,
        direction: AeadDirection,
    ) -> Result<Self, Unspecified> {
        Ok(AeadCtx::AES_256_GCM_TLS13(AeadCtx::aes_256_context(
            EVP_aead_aes_256_gcm_tls13,
            key_bytes,
            tag_len,
            Some(direction),
        )?))
    }

    /// Creates a ChaCha20-Poly1305 context; the key must be `chacha::KEY_LEN` bytes.
    pub(crate) fn chacha20(key_bytes: &[u8], tag_len: usize) -> Result<Self, Unspecified> {
        if chacha::KEY_LEN != key_bytes.len() {
            return Err(Unspecified);
        }
        Ok(AeadCtx::CHACHA20_POLY1305(AeadCtx::build_context(
            EVP_aead_chacha20_poly1305,
            key_bytes,
            tag_len,
            None,
        )?))
    }

    /// Validates the key length against `AES_128_KEY_LEN`, then builds the context.
    fn aes_128_context(
        aead: unsafe extern "C" fn() -> *const aws_lc::evp_aead_st,
        key_bytes: &[u8],
        tag_len: usize,
        direction: Option<AeadDirection>,
    ) -> Result<LcPtr<EVP_AEAD_CTX>, Unspecified> {
        if AES_128_KEY_LEN != key_bytes.len() {
            return Err(Unspecified);
        }
        AeadCtx::build_context(aead, key_bytes, tag_len, direction)
    }

    /// Validates the key length against `AES_192_KEY_LEN`, then builds the context.
    fn aes_192_context(
        aead: unsafe extern "C" fn() -> *const aws_lc::evp_aead_st,
        key_bytes: &[u8],
        tag_len: usize,
        direction: Option<AeadDirection>,
    ) -> Result<LcPtr<EVP_AEAD_CTX>, Unspecified> {
        if AES_192_KEY_LEN != key_bytes.len() {
            return Err(Unspecified);
        }
        AeadCtx::build_context(aead, key_bytes, tag_len, direction)
    }

    /// Validates the key length against `AES_256_KEY_LEN`, then builds the context.
    fn aes_256_context(
        aead: unsafe extern "C" fn() -> *const aws_lc::evp_aead_st,
        key_bytes: &[u8],
        tag_len: usize,
        direction: Option<AeadDirection>,
    ) -> Result<LcPtr<EVP_AEAD_CTX>, Unspecified> {
        if AES_256_KEY_LEN != key_bytes.len() {
            return Err(Unspecified);
        }
        AeadCtx::build_context(aead, key_bytes, tag_len, direction)
    }

    /// Allocates and initializes an `EVP_AEAD_CTX` for the AEAD returned by
    /// `aead_fn`, optionally with an explicit open/seal direction.
    fn build_context(
        aead_fn: unsafe extern "C" fn() -> *const aws_lc::evp_aead_st,
        key_bytes: &[u8],
        tag_len: usize,
        direction: Option<AeadDirection>,
    ) -> Result<LcPtr<EVP_AEAD_CTX>, Unspecified> {
        let aead = unsafe { aead_fn() };

        // We are performing the allocation ourselves as EVP_AEAD_CTX_new will call EVP_AEAD_CTX_init by default
        // and this avoids having to zero and reinitialize again if we need to set an explicit direction.
        let mut aead_ctx: LcPtr<EVP_AEAD_CTX> =
            LcPtr::new(unsafe { OPENSSL_malloc(size_of::<EVP_AEAD_CTX>()) }.cast())?;
        // Zero the freshly malloc'd struct before handing it to an init function.
        unsafe { EVP_AEAD_CTX_zero(aead_ctx.as_mut_ptr()) };
        // Both init functions return 1 on success.
        if 1 != match direction {
            Some(direction) => unsafe {
                EVP_AEAD_CTX_init_with_direction(
                    aead_ctx.as_mut_ptr(),
                    aead,
                    key_bytes.as_ptr(),
                    key_bytes.len(),
                    tag_len,
                    direction.into(),
                )
            },
            None => unsafe {
                EVP_AEAD_CTX_init(
                    aead_ctx.as_mut_ptr(),
                    aead,
                    key_bytes.as_ptr(),
                    key_bytes.len(),
                    tag_len,
                    null_mut(),
                )
            },
        } {
            return Err(Unspecified);
        }
        Ok(aead_ctx)
    }
}
impl AsRef<LcPtr<EVP_AEAD_CTX>> for AeadCtx {
    /// Returns the underlying context pointer. Every variant carries the same
    /// payload type, so a single or-pattern arm covers all of them.
    #[inline]
    fn as_ref(&self) -> &LcPtr<EVP_AEAD_CTX> {
        match self {
            AeadCtx::AES_128_GCM(ctx)
            | AeadCtx::AES_192_GCM(ctx)
            | AeadCtx::AES_256_GCM(ctx)
            | AeadCtx::AES_128_GCM_SIV(ctx)
            | AeadCtx::AES_256_GCM_SIV(ctx)
            | AeadCtx::AES_128_GCM_RANDNONCE(ctx)
            | AeadCtx::AES_256_GCM_RANDNONCE(ctx)
            | AeadCtx::AES_128_GCM_TLS12(ctx)
            | AeadCtx::AES_256_GCM_TLS12(ctx)
            | AeadCtx::AES_128_GCM_TLS13(ctx)
            | AeadCtx::AES_256_GCM_TLS13(ctx)
            | AeadCtx::CHACHA20_POLY1305(ctx) => ctx,
        }
    }
}

73
vendor/aws-lc-rs/src/aead/aes_gcm.rs vendored Normal file
View File

@@ -0,0 +1,73 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aead::{Algorithm, AlgorithmID};
use crate::aead::aead_ctx::AeadCtx;
use crate::cipher::aes::{AES_128_KEY_LEN, AES_192_KEY_LEN, AES_256_KEY_LEN};
use crate::error::Unspecified;
/// AES-128 in GCM mode with 128-bit tags and 96 bit nonces.
pub const AES_128_GCM: Algorithm = Algorithm {
    init: init_128_aead,
    key_len: AES_128_KEY_LEN,
    id: AlgorithmID::AES_128_GCM,
    // Input length is not separately restricted by this wrapper.
    max_input_len: u64::MAX,
};

/// AES-192 in GCM mode with 128-bit tags and 96 bit nonces.
pub const AES_192_GCM: Algorithm = Algorithm {
    init: init_192_aead,
    key_len: AES_192_KEY_LEN,
    id: AlgorithmID::AES_192_GCM,
    max_input_len: u64::MAX,
};

/// AES-256 in GCM mode with 128-bit tags and 96 bit nonces.
pub const AES_256_GCM: Algorithm = Algorithm {
    init: init_256_aead,
    key_len: AES_256_KEY_LEN,
    id: AlgorithmID::AES_256_GCM,
    max_input_len: u64::MAX,
};

/// AES-256 in GCM mode with nonce reuse resistance, 128-bit tags and 96 bit nonces.
pub const AES_256_GCM_SIV: Algorithm = Algorithm {
    init: init_256_aead_siv,
    key_len: AES_256_KEY_LEN,
    id: AlgorithmID::AES_256_GCM_SIV,
    max_input_len: u64::MAX,
};

/// AES-128 in GCM mode with nonce reuse resistance, 128-bit tags and 96 bit nonces.
pub const AES_128_GCM_SIV: Algorithm = Algorithm {
    init: init_128_aead_siv,
    key_len: AES_128_KEY_LEN,
    id: AlgorithmID::AES_128_GCM_SIV,
    max_input_len: u64::MAX,
};
/// Builds an `AeadCtx` for AES-128-GCM from `key` with the given tag length.
#[inline]
fn init_128_aead(key: &[u8], tag_len: usize) -> Result<AeadCtx, Unspecified> {
    AeadCtx::aes_128_gcm(key, tag_len)
}

/// Builds an `AeadCtx` for AES-192-GCM from `key` with the given tag length.
#[inline]
fn init_192_aead(key: &[u8], tag_len: usize) -> Result<AeadCtx, Unspecified> {
    AeadCtx::aes_192_gcm(key, tag_len)
}

/// Builds an `AeadCtx` for AES-256-GCM from `key` with the given tag length.
#[inline]
fn init_256_aead(key: &[u8], tag_len: usize) -> Result<AeadCtx, Unspecified> {
    AeadCtx::aes_256_gcm(key, tag_len)
}

/// Builds an `AeadCtx` for AES-256-GCM-SIV from `key` with the given tag length.
#[inline]
fn init_256_aead_siv(key: &[u8], tag_len: usize) -> Result<AeadCtx, Unspecified> {
    AeadCtx::aes_256_gcm_siv(key, tag_len)
}

/// Builds an `AeadCtx` for AES-128-GCM-SIV from `key` with the given tag length.
#[inline]
fn init_128_aead_siv(key: &[u8], tag_len: usize) -> Result<AeadCtx, Unspecified> {
    AeadCtx::aes_128_gcm_siv(key, tag_len)
}

27
vendor/aws-lc-rs/src/aead/chacha.rs vendored Normal file
View File

@@ -0,0 +1,27 @@
// Copyright 2016 Brian Smith.
// Portions Copyright (c) 2016, Google Inc.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aead::aead_ctx::AeadCtx;
use crate::aead::{Algorithm, AlgorithmID};
use crate::cipher::chacha::KEY_LEN;
use crate::error;
/// ChaCha20-Poly1305 as described in [RFC 7539].
///
/// The keys are 256 bits long and the nonces are 96 bits long.
///
/// [RFC 7539]: https://tools.ietf.org/html/rfc7539
/// ChaCha20-Poly1305 as described in [RFC 7539].
///
/// The keys are 256 bits long and the nonces are 96 bits long.
///
/// [RFC 7539]: https://tools.ietf.org/html/rfc7539
pub const CHACHA20_POLY1305: Algorithm = Algorithm {
    init: init_chacha_aead,
    key_len: KEY_LEN,
    id: AlgorithmID::CHACHA20_POLY1305,
    // Input length is not separately restricted by this wrapper.
    max_input_len: u64::MAX,
};
/// Builds an `AeadCtx` for ChaCha20-Poly1305 from `key` with the given tag length.
#[inline]
fn init_chacha_aead(key: &[u8], tag_len: usize) -> Result<AeadCtx, error::Unspecified> {
    AeadCtx::chacha20(key, tag_len)
}

View File

@@ -0,0 +1,279 @@
// Copyright 2016 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! The [chacha20-poly1305@openssh.com] AEAD-ish construct.
//!
//! This should only be used by SSH implementations. It has a similar, but
//! different API from `aws_lc_rs::aead` because the construct cannot use the same
//! API as `aws_lc_rs::aead` due to the way the construct handles the encrypted
//! packet length.
//!
//! The concatenation of a and b is denoted `a||b`. `K_1` and `K_2` are defined
//! in the [chacha20-poly1305@openssh.com] specification. `packet_length`,
//! `padding_length`, `payload`, and `random padding` are defined in
//! [RFC 4253]. The term `plaintext` is used as a shorthand for
//! `padding_length||payload||random padding`.
//!
//! [chacha20-poly1305@openssh.com]:
//! http://cvsweb.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL.chacha20poly1305?annotate=HEAD
//! [RFC 4253]: https://tools.ietf.org/html/rfc4253
//!
//! # FIPS
//! The APIs offered in this module must not be used.
use super::{poly1305, Nonce, Tag};
use crate::cipher::block::BLOCK_LEN;
use crate::cipher::chacha::{self, ChaCha20Key};
use crate::endian::BigEndian;
use crate::iv::FixedLength;
use crate::{constant_time, error};
/// A key for sealing packets.
pub struct SealingKey {
    // The derived K_1/K_2 pair; see `Key::new` for the split.
    key: Key,
}
impl SealingKey {
    /// Constructs a new `SealingKey` from `KEY_LEN` bytes of key material.
    #[must_use]
    pub fn new(key_material: &[u8; KEY_LEN]) -> SealingKey {
        SealingKey {
            key: Key::new(key_material),
        }
    }

    /// Seals (encrypts and signs) a packet.
    ///
    /// On input, `plaintext_in_ciphertext_out` must contain the unencrypted
    /// `packet_length||plaintext` where `plaintext` is the
    /// `padding_length||payload||random padding`. It will be overwritten by
    /// `encrypted_packet_length||ciphertext`, where `encrypted_packet_length`
    /// is encrypted with `K_1` and `ciphertext` is encrypted by `K_2`.
    //
    // # FIPS
    // This method must not be used.
    #[inline]
    pub fn seal_in_place(
        &self,
        sequence_number: u32,
        plaintext_in_ciphertext_out: &mut [u8],
        tag_out: &mut [u8; TAG_LEN],
    ) {
        let nonce = make_nonce(sequence_number);
        // Derive the one-time Poly1305 key from K_2 before encrypting.
        let poly_key = derive_poly1305_key(&self.key.k_2, Nonce(FixedLength::from(nonce.as_ref())));
        {
            // The 4-byte length prefix is encrypted with K_1 and the rest with
            // K_2, using distinct starting offsets (0 and 1) per the
            // chacha20-poly1305@openssh.com construction.
            let (len_in_out, data_and_padding_in_out) =
                plaintext_in_ciphertext_out.split_at_mut(PACKET_LENGTH_LEN);
            self.key.k_1.encrypt_in_place(nonce.as_ref(), len_in_out, 0);
            self.key
                .k_2
                .encrypt_in_place(nonce.as_ref(), data_and_padding_in_out, 1);
        }
        // The tag authenticates the ciphertext (encrypt-then-MAC).
        let Tag(tag, tag_len) = poly1305::sign(poly_key, plaintext_in_ciphertext_out);
        debug_assert_eq!(TAG_LEN, tag_len);
        tag_out.copy_from_slice(tag.as_ref());
    }
}
/// A key for opening packets.
pub struct OpeningKey {
    // The derived K_1/K_2 pair; see `Key::new` for the split.
    key: Key,
}
impl OpeningKey {
    /// Constructs a new `OpeningKey` from `KEY_LEN` bytes of key material.
    #[must_use]
    pub fn new(key_material: &[u8; KEY_LEN]) -> OpeningKey {
        OpeningKey {
            key: Key::new(key_material),
        }
    }

    /// Returns the decrypted, but unauthenticated, packet length.
    ///
    /// Importantly, the result won't be authenticated until `open_in_place` is
    /// called.
    //
    // # FIPS
    // This method must not be used.
    #[inline]
    #[must_use]
    pub fn decrypt_packet_length(
        &self,
        sequence_number: u32,
        encrypted_packet_length: [u8; PACKET_LENGTH_LEN],
    ) -> [u8; PACKET_LENGTH_LEN] {
        let mut packet_length = encrypted_packet_length;
        let nonce = make_nonce(sequence_number);
        // Applying K_1's keystream again (same offset 0) inverts the length
        // encryption performed by `seal_in_place`.
        self.key
            .k_1
            .encrypt_in_place(nonce.as_ref(), &mut packet_length, 0);
        packet_length
    }

    /// Opens (authenticates and decrypts) a packet.
    ///
    /// `ciphertext_in_plaintext_out` must be of the form
    /// `encrypted_packet_length||ciphertext` where `ciphertext` is the
    /// encrypted `plaintext`. When the function succeeds the ciphertext is
    /// replaced by the plaintext and the result is `Ok(plaintext)`, where
    /// `plaintext` is `&ciphertext_in_plaintext_out[PACKET_LENGTH_LEN..]`;
    /// otherwise the contents of `ciphertext_in_plaintext_out` are unspecified
    /// and must not be used.
    ///
    /// # Errors
    /// `error::Unspecified` when ciphertext is invalid
    //
    // # FIPS
    // This method must not be used.
    #[inline]
    pub fn open_in_place<'a>(
        &self,
        sequence_number: u32,
        ciphertext_in_plaintext_out: &'a mut [u8],
        tag: &[u8; TAG_LEN],
    ) -> Result<&'a [u8], error::Unspecified> {
        let nonce = make_nonce(sequence_number);
        // We must verify the tag before decrypting so that
        // `ciphertext_in_plaintext_out` is unmodified if verification fails.
        // This is beyond what we guarantee.
        let poly_key = derive_poly1305_key(&self.key.k_2, Nonce(FixedLength::from(nonce.as_ref())));
        verify(poly_key, ciphertext_in_plaintext_out, tag)?;
        // Decrypt only the payload; the length prefix stays encrypted here.
        let plaintext_in_ciphertext_out = &mut ciphertext_in_plaintext_out[PACKET_LENGTH_LEN..];
        self.key
            .k_2
            .encrypt_in_place(nonce.as_ref(), plaintext_in_ciphertext_out, 1);
        Ok(plaintext_in_ciphertext_out)
    }
}
// The pair of ChaCha20 keys defined by the chacha20-poly1305@openssh.com
// construction: K_1 encrypts the packet length, K_2 the payload.
struct Key {
    k_1: ChaCha20Key,
    k_2: ChaCha20Key,
}
impl Key {
    /// Splits `KEY_LEN` bytes of key material into the two ChaCha20 keys.
    fn new(key_material: &[u8; KEY_LEN]) -> Key {
        // The first half becomes K_2 and the second half becomes K_1.
        let k_2: [u8; chacha::KEY_LEN] = key_material[..chacha::KEY_LEN].try_into().unwrap();
        let k_1: [u8; chacha::KEY_LEN] = key_material[chacha::KEY_LEN..].try_into().unwrap();
        Key {
            k_1: ChaCha20Key::from(k_1),
            k_2: ChaCha20Key::from(k_2),
        }
    }
}
/// Builds the per-packet nonce: the big-endian sequence number occupies the
/// final word of an otherwise-zero 96-bit nonce (see `From<BigEndian<u32>>`
/// for `Nonce`).
#[inline]
fn make_nonce(sequence_number: u32) -> Nonce {
    Nonce::from(BigEndian::from(sequence_number))
}
/// The length of key.
pub const KEY_LEN: usize = chacha::KEY_LEN * 2; // holds both K_1 and K_2
/// The length in bytes of the `packet_length` field in a SSH packet.
pub const PACKET_LENGTH_LEN: usize = 4; // 32 bits
/// The length in bytes of an authentication tag.
pub const TAG_LEN: usize = BLOCK_LEN;
/// Computes the Poly1305 tag over `msg` and compares it with `tag` in
/// constant time.
#[inline]
fn verify(key: poly1305::Key, msg: &[u8], tag: &[u8; TAG_LEN]) -> Result<(), error::Unspecified> {
    let Tag(calculated_tag, _) = poly1305::sign(key, msg);
    constant_time::verify_slices_are_equal(calculated_tag.as_ref(), tag)
}
/// Derives the one-time Poly1305 key from `chacha_key` and `nonce` by
/// encrypting two blocks of zeros (i.e. taking the raw keystream) at offset 0.
#[inline]
#[allow(clippy::needless_pass_by_value)]
pub(super) fn derive_poly1305_key(chacha_key: &ChaCha20Key, nonce: Nonce) -> poly1305::Key {
    let mut key_bytes = [0u8; 2 * BLOCK_LEN];
    chacha_key.encrypt_in_place(nonce.as_ref(), &mut key_bytes, 0);
    poly1305::Key::new(key_bytes)
}
#[cfg(test)]
mod tests {
    use crate::aead::chacha20_poly1305_openssh::{
        derive_poly1305_key, OpeningKey, SealingKey, KEY_LEN, TAG_LEN,
    };
    use crate::aead::Nonce;
    use crate::cipher::chacha::ChaCha20Key;
    use crate::endian::{BigEndian, FromArray, LittleEndian};
    use crate::test;

    // Checks Poly1305 key derivation against a fixed vector and verifies that
    // the derived key depends on the nonce word endianness.
    #[test]
    fn derive_poly1305_test() {
        let chacha_key =
            test::from_hex("98bef1469be7269837a45bfbc92a5a6ac762507cf96443bf33b96b1bd4c6f8f6")
                .unwrap();
        let expected_poly1305_key =
            test::from_hex("759de17d6d6258a436e36ecf75e3f00e4d9133ec05c4c855a9ec1a4e4e873b9d")
                .unwrap();
        let chacha_key = chacha_key.as_slice();
        let chacha_key_bytes: [u8; 32] = <[u8; 32]>::try_from(chacha_key).unwrap();
        let chacha_key = ChaCha20Key::from(chacha_key_bytes);
        {
            // Plain u32 array uses the (deprecated) little-endian interpretation.
            let iv = Nonce::from(&[45u32, 897, 4567]);
            let poly1305_key = derive_poly1305_key(&chacha_key, iv);
            assert_eq!(&expected_poly1305_key, &poly1305_key.key_and_nonce);
        }
        {
            let iv = Nonce::from(&LittleEndian::<u32>::from_array(&[45u32, 897, 4567]));
            let poly1305_key = derive_poly1305_key(&chacha_key, iv);
            assert_eq!(&expected_poly1305_key, &poly1305_key.key_and_nonce);
        }
        {
            // Big-endian words produce different nonce bytes, so the key differs.
            let iv = Nonce::from(&BigEndian::<u32>::from_array(&[45u32, 897, 4567]));
            let poly1305_key = derive_poly1305_key(&chacha_key, iv);
            assert_ne!(&expected_poly1305_key, &poly1305_key.key_and_nonce);
        }
    }

    // Round-trips a packet length through seal_in_place + decrypt_packet_length.
    #[test]
    #[allow(clippy::cast_possible_truncation)]
    fn test_decrypt_packet_length() {
        let key_bytes: [u8; KEY_LEN] = test::from_dirty_hex("98bef1469be7269837a45bfbc92a5a6ac762\
        507cf96443bf33b96b1bd4c6f8f6759de17d6d6258a436e36ecf75e3f00e4d9133ec05c4c855a9ec1a4e4e873b9d")
            .try_into().unwrap();
        let sealing_key = SealingKey::new(&key_bytes);
        let opening_key = OpeningKey::new(&key_bytes);
        let plaintext = b"Hello World!";
        let packet_length = plaintext.len() as u32;
        let packet_length = packet_length.to_be_bytes();
        let mut in_out = Vec::new();
        in_out.extend_from_slice(&packet_length);
        in_out.extend_from_slice(plaintext);
        let mut tag = [0u8; TAG_LEN];
        sealing_key.seal_in_place(0, &mut in_out, &mut tag);
        let encrypted_length: [u8; 4] = in_out[0..4].to_owned().try_into().unwrap();
        let decrypted_length = opening_key.decrypt_packet_length(0, encrypted_length);
        let decrypted_length = u32::from_be_bytes(decrypted_length);
        assert_eq!(plaintext.len() as u32, decrypted_length);
    }

    // Compile-time checks that both key types are Send + Sync.
    #[test]
    fn test_types() {
        test::compile_time_assert_send::<OpeningKey>();
        test::compile_time_assert_sync::<OpeningKey>();
        test::compile_time_assert_send::<SealingKey>();
        test::compile_time_assert_sync::<SealingKey>();
    }
}

108
vendor/aws-lc-rs/src/aead/nonce.rs vendored Normal file
View File

@@ -0,0 +1,108 @@
// Copyright 2018 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::endian::{ArrayEncoding, BigEndian, Encoding, FromArray, LittleEndian};
use crate::error;
use crate::iv::FixedLength;
/// A nonce for a single AEAD opening or sealing operation.
///
/// The user must ensure, for a particular key, that each nonce is unique.
///
/// `Nonce` intentionally doesn't implement `Clone` to ensure that each one is
/// consumed at most once.
pub struct Nonce(pub(crate) FixedLength<NONCE_LEN>); // 96-bit fixed-length value
impl Nonce {
    /// Constructs a `Nonce` with the given value, assuming that the value is
    /// unique for the lifetime of the key it is being used with.
    ///
    /// Fails if `value` isn't `NONCE_LEN` bytes long.
    /// # Errors
    /// `error::Unspecified` when byte slice length is not `NONCE_LEN`
    #[inline]
    pub fn try_assume_unique_for_key(value: &[u8]) -> Result<Self, error::Unspecified> {
        Ok(Self(FixedLength::<NONCE_LEN>::try_from(value)?))
    }

    /// Constructs a `Nonce` with the given value, assuming that the value is
    /// unique for the lifetime of the key it is being used with.
    ///
    /// Unlike [`Nonce::try_assume_unique_for_key`], this cannot fail because
    /// the input length is fixed by the array type.
    #[inline]
    #[must_use]
    pub fn assume_unique_for_key(value: [u8; NONCE_LEN]) -> Self {
        Self(FixedLength::<NONCE_LEN>::from(value))
    }
}
impl AsRef<[u8; NONCE_LEN]> for Nonce {
    /// Borrows the raw nonce bytes.
    #[inline]
    fn as_ref(&self) -> &[u8; NONCE_LEN] {
        self.0.as_ref()
    }
}

impl From<&[u8; NONCE_LEN]> for Nonce {
    #[inline]
    fn from(bytes: &[u8; NONCE_LEN]) -> Self {
        Self(FixedLength::from(bytes))
    }
}

// NOTE(review): presumably deprecated because the endianness of the plain u32
// words is implicit here; prefer the explicit BigEndian/LittleEndian impls.
#[allow(useless_deprecated)] // https://github.com/rust-lang/rust/issues/39935
#[deprecated]
impl From<&[u32; NONCE_LEN / 4]> for Nonce {
    #[inline]
    fn from(values: &[u32; NONCE_LEN / 4]) -> Self {
        // Historical behavior: interpret the words as little-endian.
        Nonce::from(&LittleEndian::<u32>::from_array(values))
    }
}

impl From<&[BigEndian<u32>; NONCE_LEN / 4]> for Nonce {
    #[inline]
    fn from(values: &[BigEndian<u32>; NONCE_LEN / 4]) -> Self {
        Nonce(FixedLength::from(values.as_byte_array()))
    }
}

impl From<&[LittleEndian<u32>; NONCE_LEN / 4]> for Nonce {
    #[inline]
    fn from(nonce: &[LittleEndian<u32>; NONCE_LEN / 4]) -> Self {
        Nonce(FixedLength::from(nonce.as_byte_array()))
    }
}

impl From<BigEndian<u32>> for Nonce {
    /// Places the big-endian value in the final word of an otherwise-zero nonce.
    #[inline]
    fn from(number: BigEndian<u32>) -> Self {
        Nonce::from([BigEndian::ZERO, BigEndian::ZERO, number].as_byte_array())
    }
}

/// The length in bytes of an IV from which a `Nonce` may be constructed; only
/// the first `NONCE_LEN` bytes are used (see the `From<&[u8; IV_LEN]>` impl).
pub const IV_LEN: usize = 16;

impl From<&[u8; IV_LEN]> for Nonce {
    #[inline]
    fn from(bytes: &[u8; IV_LEN]) -> Self {
        // Truncates the 16-byte IV to its first NONCE_LEN (12) bytes.
        let mut nonce_bytes = [0u8; NONCE_LEN];
        nonce_bytes.copy_from_slice(&bytes[0..NONCE_LEN]);
        Nonce(FixedLength::from(nonce_bytes))
    }
}

/// All the AEADs we support use 96-bit nonces.
pub const NONCE_LEN: usize = 96 / 8;
#[cfg(test)]
mod tests {
    // Verifies that a 16-byte IV is truncated to its first 12 bytes.
    #[test]
    fn test_nonce_from_byte_array() {
        use crate::aead::nonce::IV_LEN;
        use crate::aead::Nonce;
        let iv: [u8; IV_LEN] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
        let nonce = Nonce::from(&iv);
        assert_eq!(&[1u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], nonce.as_ref());
    }
}

View File

@@ -0,0 +1,10 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Implementations of `NonceSequence` for use with `BoundKey`s.
mod counter32;
mod counter64;
pub use counter32::{Counter32, Counter32Builder};
pub use counter64::{Counter64, Counter64Builder};

View File

@@ -0,0 +1,199 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aead::{Nonce, NonceSequence, NONCE_LEN};
use crate::error::Unspecified;
use crate::iv::FixedLength;
/// `Counter32` is an implementation of the `NonceSequence` trait.
///
/// The internal state of a `Counter32` is a 32-bit unsigned counter that
/// increments on each call to `advance` and an optional 8-byte identifier. Counter and identifier
/// values are used to construct each nonce.
/// A limit can be set on the number of nonces allowed to be generated, by default this limit is
/// `u32::MAX`.
///
/// See [Section 3.2 of RFC 5116](https://www.rfc-editor.org/rfc/rfc5116#section-3.2).
#[allow(clippy::module_name_repetitions)]
pub struct Counter32 {
    limit: u32,          // maximum number of nonces this sequence may produce
    generated: u32,      // how many nonces have been produced so far
    identifier: [u8; 8], // fixed field filling the first 8 nonce bytes
    counter: u32,        // big-endian counter filling the last 4 nonce bytes
}
/// `NonceSequenceBuilder` facilitates the building of a `Counter32`.
#[allow(clippy::module_name_repetitions)]
pub struct Counter32Builder {
    limit: u32,          // nonce-count limit for the built sequence
    identifier: [u8; 8], // identifier for the built sequence
    counter: u32,        // starting counter value for the built sequence
}
impl Default for Counter32Builder {
    /// Equivalent to [`Counter32Builder::new`].
    fn default() -> Self {
        Counter32Builder::new()
    }
}
impl Counter32Builder {
    /// Constructs a `Counter32Builder` with all default values:
    /// zero identifier, counter starting at 0, and a limit of `u32::MAX`.
    #[must_use]
    pub fn new() -> Counter32Builder {
        Counter32Builder {
            limit: u32::MAX,
            identifier: [0u8; 8],
            counter: 0,
        }
    }

    /// The identifier for the `Counter32` - this value helps differentiate nonce
    /// sequences.
    #[must_use]
    pub fn identifier<T: Into<[u8; 8]>>(mut self, identifier: T) -> Counter32Builder {
        self.identifier = identifier.into();
        self
    }

    /// The starting counter value for the `Counter32`.
    #[must_use]
    pub fn counter(mut self, counter: u32) -> Counter32Builder {
        self.counter = counter;
        self
    }

    /// The limit for the number of nonces the `Counter32` can produce.
    #[must_use]
    pub fn limit(mut self, limit: u32) -> Counter32Builder {
        self.limit = limit;
        self
    }

    /// Constructs a new `Counter32` with internal identifier and counter set to the
    /// values provided by this struct; the generated-nonce count starts at zero.
    #[must_use]
    pub fn build(self) -> Counter32 {
        Counter32 {
            limit: self.limit,
            generated: 0,
            identifier: self.identifier,
            counter: self.counter,
        }
    }
}
impl Counter32 {
    /// Provides the internal identifier.
    #[must_use]
    pub fn identifier(&self) -> [u8; 8] {
        self.identifier
    }
    /// Provides the current internal counter value.
    ///
    /// This is the value that will be encoded into the next nonce.
    #[must_use]
    pub fn counter(&self) -> u32 {
        self.counter
    }
    /// Provides the current counter indicating how many nonces have been generated.
    #[must_use]
    pub fn generated(&self) -> u32 {
        self.generated
    }
    /// Provides the limit on the number of nonces allowed to be generated.
    #[must_use]
    pub fn limit(&self) -> u32 {
        self.limit
    }
}
impl NonceSequence for Counter32 {
    /// Produces the next nonce: the 8-byte identifier followed by the current
    /// counter encoded big-endian in the final 4 bytes. Fails once the
    /// configured limit on generated nonces is exceeded.
    fn advance(&mut self) -> Result<Nonce, Unspecified> {
        // Count this generation first; the limit check observes the new total.
        self.generated = self.generated.checked_add(1).ok_or(Unspecified)?;
        if self.generated > self.limit {
            return Err(Unspecified);
        }
        let mut nonce = [0u8; NONCE_LEN];
        let (prefix, suffix) = nonce.split_at_mut(8);
        prefix.copy_from_slice(&self.identifier);
        suffix.copy_from_slice(&self.counter.to_be_bytes());
        self.counter = self.counter.wrapping_add(1);
        Ok(Nonce(FixedLength::from(nonce)))
    }
}
#[cfg(test)]
mod tests {
    use crate::aead::nonce_sequence::Counter32Builder;
    use crate::aead::NonceSequence;
    // Nonce layout under test: 8 identifier bytes then 4-byte big-endian counter.
    #[test]
    fn test_counter32_identifier() {
        let mut cns = Counter32Builder::default()
            .identifier([0xA1, 0xB2, 0xC3, 0xD4, 0xA2, 0xB3, 0xC4, 0xD5])
            .counter(7)
            .build();
        assert_eq!(0, cns.generated());
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        // The emitted nonce carries 7; the counter has already moved to 8.
        assert_eq!(8, cns.counter());
        assert_eq!(
            [0xA1, 0xB2, 0xC3, 0xD4, 0xA2, 0xB3, 0xC4, 0xD5],
            cns.identifier()
        );
        assert_eq!(u32::MAX, cns.limit());
        assert_eq!(1, cns.generated());
        assert_eq!(
            nonce,
            &[0xA1, 0xB2, 0xC3, 0xD4, 0xA2, 0xB3, 0xC4, 0xD5, 0, 0, 0, 7]
        );
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        assert_eq!(2, cns.generated());
        assert_eq!(9, cns.counter());
        assert_eq!(
            [0xA1, 0xB2, 0xC3, 0xD4, 0xA2, 0xB3, 0xC4, 0xD5],
            cns.identifier()
        );
        assert_eq!(
            nonce,
            &[0xA1, 0xB2, 0xC3, 0xD4, 0xA2, 0xB3, 0xC4, 0xD5, 0, 0, 0, 8]
        );
    }
    #[test]
    fn test_counter32() {
        let mut cns = Counter32Builder::new().counter(0x_4CB0_16EA_u32).build();
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        assert_eq!(nonce, &[0, 0, 0, 0, 0, 0, 0, 0, 0x4C, 0xB0, 0x16, 0xEA]);
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        assert_eq!(nonce, &[0, 0, 0, 0, 0, 0, 0, 0, 0x4C, 0xB0, 0x16, 0xEB]);
    }
    #[test]
    fn test_counter32_int_id() {
        // `identifier` accepts anything `Into<[u8; 8]>`, e.g. u64 big-endian bytes.
        let mut cns = Counter32Builder::new()
            .counter(0x_6A_u32)
            .identifier(0x_7B_u64.to_be_bytes())
            .build();
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        assert_eq!(nonce, &[0, 0, 0, 0, 0, 0, 0, 0x7B, 0, 0, 0, 0x6A]);
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        assert_eq!(nonce, &[0, 0, 0, 0, 0, 0, 0, 0x7B, 0, 0, 0, 0x6B]);
    }
    #[test]
    fn test_counter32_limit() {
        // After `limit` nonces, advance must fail.
        let mut cns = Counter32Builder::new().limit(1).build();
        assert_eq!(1, cns.limit());
        assert_eq!(0, cns.generated());
        let _nonce = cns.advance().unwrap();
        assert_eq!(1, cns.generated());
        assert!(cns.advance().is_err());
    }
}

View File

@@ -0,0 +1,188 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aead::{Nonce, NonceSequence, NONCE_LEN};
use crate::error::Unspecified;
use crate::iv::FixedLength;
/// `Counter64` is an implementation of the `NonceSequence` trait.
///
/// The internal state of a `Counter64` is a 64-bit unsigned counter that
/// increments on each call to `advance` and an optional 4-byte identifier. Counter and identifier
/// values are used to construct each nonce.
/// A limit can be set on the number of nonces allowed to be generated, by default this limit is
/// `u64::MAX`.
/// See [Section 3.2 of RFC 5116](https://www.rfc-editor.org/rfc/rfc5116#section-3.2).
#[allow(clippy::module_name_repetitions)]
pub struct Counter64 {
    // Maximum number of nonces `advance` may produce.
    limit: u64,
    // Number of nonces produced so far.
    generated: u64,
    // Fixed prefix filling the first 4 bytes of every nonce.
    identifier: [u8; 4],
    // Current counter; encoded big-endian into the last 8 bytes of every nonce.
    counter: u64,
}
/// `NonceSequenceBuilder` facilitates the building of a `Counter64`.
#[allow(clippy::module_name_repetitions)]
pub struct Counter64Builder {
    // Nonce-count limit to install on the built `Counter64`.
    limit: u64,
    // 4-byte identifier to install on the built `Counter64`.
    identifier: [u8; 4],
    // Starting counter value to install on the built `Counter64`.
    counter: u64,
}
impl Default for Counter64Builder {
fn default() -> Self {
Counter64Builder::new()
}
}
impl Counter64Builder {
    /// Constructs a `Counter64Builder` with all default values.
    #[must_use]
    pub fn new() -> Counter64Builder {
        Counter64Builder {
            counter: 0,
            identifier: [0u8; 4],
            limit: u64::MAX,
        }
    }
    /// The identifier for the `Counter64` - this value helps differentiate nonce
    /// sequences.
    #[must_use]
    pub fn identifier<T: Into<[u8; 4]>>(self, identifier: T) -> Counter64Builder {
        Counter64Builder {
            identifier: identifier.into(),
            ..self
        }
    }
    /// The starting counter value for the `Counter64`.
    #[must_use]
    pub fn counter(self, counter: u64) -> Counter64Builder {
        Counter64Builder { counter, ..self }
    }
    /// The limit for the number of nonces the `Counter64` can produce.
    #[must_use]
    pub fn limit(self, limit: u64) -> Counter64Builder {
        Counter64Builder { limit, ..self }
    }
    /// Constructs a new `Counter64` with internal identifier and counter set to the
    /// values provided by this struct.
    #[must_use]
    pub fn build(self) -> Counter64 {
        Counter64 {
            counter: self.counter,
            generated: 0,
            identifier: self.identifier,
            limit: self.limit,
        }
    }
}
impl Counter64 {
    /// Provides the internal identifier.
    #[must_use]
    pub fn identifier(&self) -> [u8; 4] {
        self.identifier
    }
    /// Provides the current internal counter value.
    ///
    /// This is the value that will be encoded into the next nonce.
    #[must_use]
    pub fn counter(&self) -> u64 {
        self.counter
    }
    /// Provides the current counter indicating how many nonces have been generated.
    #[must_use]
    pub fn generated(&self) -> u64 {
        self.generated
    }
    /// Provides the limit on the number of nonces allowed to be generated.
    #[must_use]
    pub fn limit(&self) -> u64 {
        self.limit
    }
}
impl NonceSequence for Counter64 {
    /// Produces the next nonce: the 4-byte identifier followed by the current
    /// counter encoded big-endian in the final 8 bytes. Fails once the
    /// configured limit on generated nonces is exceeded.
    fn advance(&mut self) -> Result<Nonce, Unspecified> {
        // Count this generation first; the limit check observes the new total.
        self.generated = self.generated.checked_add(1).ok_or(Unspecified)?;
        if self.generated > self.limit {
            return Err(Unspecified);
        }
        let mut nonce = [0u8; NONCE_LEN];
        let (prefix, suffix) = nonce.split_at_mut(4);
        prefix.copy_from_slice(&self.identifier);
        suffix.copy_from_slice(&self.counter.to_be_bytes());
        self.counter = self.counter.wrapping_add(1);
        Ok(Nonce(FixedLength::from(nonce)))
    }
}
#[cfg(test)]
mod tests {
    use crate::aead::nonce_sequence::Counter64Builder;
    use crate::aead::NonceSequence;
    // Nonce layout under test: 4 identifier bytes then 8-byte big-endian counter.
    #[test]
    fn test_counter64_identifier() {
        let mut cns = Counter64Builder::default()
            .identifier([0xA1, 0xB2, 0xC3, 0xD4])
            .counter(7)
            .build();
        assert_eq!(0, cns.generated());
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        // The emitted nonce carries 7; the counter has already moved to 8.
        assert_eq!(8, cns.counter());
        assert_eq!([0xA1, 0xB2, 0xC3, 0xD4], cns.identifier());
        assert_eq!(u64::MAX, cns.limit());
        assert_eq!(1, cns.generated());
        assert_eq!(nonce, &[0xA1, 0xB2, 0xC3, 0xD4, 0, 0, 0, 0, 0, 0, 0, 7]);
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        assert_eq!(2, cns.generated());
        assert_eq!(9, cns.counter());
        assert_eq!([0xA1, 0xB2, 0xC3, 0xD4], cns.identifier());
        assert_eq!(nonce, &[0xA1, 0xB2, 0xC3, 0xD4, 0, 0, 0, 0, 0, 0, 0, 8]);
    }
    #[test]
    fn test_counter64() {
        let mut cns = Counter64Builder::new()
            .counter(0x0002_4CB0_16EA_u64)
            .build();
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        assert_eq!(nonce, &[0, 0, 0, 0, 0, 0, 0, 0x02, 0x4C, 0xB0, 0x16, 0xEA]);
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        assert_eq!(nonce, &[0, 0, 0, 0, 0, 0, 0, 0x02, 0x4C, 0xB0, 0x16, 0xEB]);
    }
    #[test]
    fn test_counter64_id() {
        // `identifier` accepts anything `Into<[u8; 4]>`, e.g. u32 big-endian bytes.
        let mut cns = Counter64Builder::new()
            .counter(0x_6A_u64)
            .identifier(0x_7B_u32.to_be_bytes())
            .build();
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        assert_eq!(nonce, &[0, 0, 0, 0x7B, 0, 0, 0, 0, 0, 0, 0, 0x6A]);
        let nonce = cns.advance().unwrap();
        let nonce = nonce.as_ref();
        assert_eq!(nonce, &[0, 0, 0, 0x7B, 0, 0, 0, 0, 0, 0, 0, 0x6B]);
    }
    #[test]
    fn test_counter64_limit() {
        // After `limit` nonces, advance must fail.
        let mut cns = Counter64Builder::new().limit(1).build();
        assert_eq!(1, cns.limit());
        assert_eq!(0, cns.generated());
        let _nonce = cns.advance().unwrap();
        assert_eq!(1, cns.generated());
        assert!(cns.advance().is_err());
    }
}

109
vendor/aws-lc-rs/src/aead/poly1305.rs vendored Normal file
View File

@@ -0,0 +1,109 @@
// Copyright 2015-2016 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
// TODO: enforce maximum input length.
use super::{Tag, TAG_LEN};
use crate::aws_lc::{CRYPTO_poly1305_finish, CRYPTO_poly1305_init, CRYPTO_poly1305_update};
use crate::cipher::block::BLOCK_LEN;
use core::mem::MaybeUninit;
/// A Poly1305 key.
pub(super) struct Key {
    // Raw key material handed directly to `CRYPTO_poly1305_init`.
    pub(super) key_and_nonce: [u8; KEY_LEN],
}
/// Poly1305 key material is two cipher blocks long.
const KEY_LEN: usize = 2 * BLOCK_LEN;
impl Key {
    /// Wraps raw key material in a `Key`.
    #[inline]
    #[allow(dead_code)]
    pub(super) fn new(key_and_nonce: [u8; KEY_LEN]) -> Self {
        Self { key_and_nonce }
    }
}
/// An in-progress Poly1305 computation backed by AWS-LC state.
pub struct Context {
    state: poly1305_state,
}
// Keep in sync with `poly1305_state` in GFp/poly1305.h.
//
// The C code, in particular the way the `poly1305_aligned_state` functions
// are used, is only correct when the state buffer is 64-byte aligned.
#[repr(C, align(64))]
#[allow(non_camel_case_types)]
struct poly1305_state(aws_lc::poly1305_state);
impl Context {
    /// Initializes a Poly1305 context from `key`, consuming the key.
    #[inline]
    pub(super) fn from_key(Key { key_and_nonce }: Key) -> Self {
        unsafe {
            let mut state = MaybeUninit::<poly1305_state>::uninit();
            // SAFETY: per the AWS-LC contract, `CRYPTO_poly1305_init` fully
            // initializes the state, making `assume_init` sound below.
            CRYPTO_poly1305_init(state.as_mut_ptr().cast(), key_and_nonce.as_ptr());
            Self {
                state: state.assume_init(),
            }
        }
    }
    /// Absorbs `input` into the MAC computation; may be called repeatedly.
    #[inline]
    pub fn update(&mut self, input: &[u8]) {
        unsafe {
            // SAFETY: the pointer/length pair exactly describes `input`.
            CRYPTO_poly1305_update(
                self.state.0.as_mut_ptr().cast(),
                input.as_ptr(),
                input.len(),
            );
        }
    }
    /// Completes the computation and returns the authentication tag.
    #[inline]
    pub(super) fn finish(mut self) -> Tag {
        unsafe {
            let mut tag = MaybeUninit::<[u8; TAG_LEN]>::uninit();
            // SAFETY: per the AWS-LC contract, `CRYPTO_poly1305_finish` writes
            // the full tag into the output buffer before returning.
            CRYPTO_poly1305_finish(self.state.0.as_mut_ptr().cast(), tag.as_mut_ptr().cast());
            // Standalone Poly1305 is not a FIPS-approved service.
            crate::fips::set_fips_service_status_unapproved();
            Tag(tag.assume_init(), TAG_LEN)
        }
    }
}
/// Implements the original, non-IETF padding semantics.
///
/// This is used by `chacha20_poly1305_openssh` and the standalone
/// poly1305 test vectors.
///
/// Computes the Poly1305 tag of `input` under `key` in one shot.
#[inline]
pub(super) fn sign(key: Key, input: &[u8]) -> Tag {
    let mut mac = Context::from_key(key);
    mac.update(input);
    mac.finish()
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{test, test_file};
    // Adapted from BoringSSL's crypto/poly1305/poly1305_test.cc.
    #[test]
    pub fn test_poly1305() {
        test::run(
            test_file!("data/poly1305_test.txt"),
            |section, test_case| {
                assert_eq!(section, "");
                let key = test_case.consume_bytes("Key");
                // Keys in the vector file must be exactly 32 bytes.
                let key: &[u8; BLOCK_LEN * 2] = key.as_slice().try_into().unwrap();
                let input = test_case.consume_bytes("Input");
                let expected_mac = test_case.consume_bytes("MAC");
                let key = Key::new(*key);
                let Tag(actual_mac, _) = sign(key, &input);
                assert_eq!(expected_mac, actual_mac.as_ref());
                Ok(())
            },
        );
    }
}

186
vendor/aws-lc-rs/src/aead/quic.rs vendored Normal file
View File

@@ -0,0 +1,186 @@
// Copyright 2018 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! QUIC Header Protection.
//!
//! See draft-ietf-quic-tls.
use crate::cipher::aes::encrypt_block;
use crate::cipher::block;
use crate::cipher::chacha::encrypt_block_chacha20;
use crate::cipher::key::SymmetricCipherKey;
use crate::hkdf::KeyType;
use crate::{derive_debug_via_id, error, hkdf};
/// A key for generating QUIC Header Protection masks.
pub struct HeaderProtectionKey {
    // Cipher key used to compute each mask.
    inner: SymmetricCipherKey,
    // Algorithm this key was constructed for.
    algorithm: &'static Algorithm,
}
impl From<hkdf::Okm<'_, &'static Algorithm>> for HeaderProtectionKey {
    /// Derives a header protection key from HKDF output keying material.
    fn from(okm: hkdf::Okm<&'static Algorithm>) -> Self {
        let mut key_bytes = [0; super::MAX_KEY_LEN];
        let algorithm = *okm.len();
        // The buffer is sliced to exactly `algorithm.key_len()` bytes, so the
        // unwraps below are expected to be infallible.
        let key_bytes = &mut key_bytes[..algorithm.key_len()];
        okm.fill(key_bytes).unwrap();
        Self::new(algorithm, key_bytes).unwrap()
    }
}
impl HeaderProtectionKey {
    /// Create a new header protection key.
    ///
    /// # Errors
    /// `error::Unspecified` when `key_bytes` length is not `algorithm.key_len`
    pub fn new(
        algorithm: &'static Algorithm,
        key_bytes: &[u8],
    ) -> Result<Self, error::Unspecified> {
        Ok(Self {
            inner: (algorithm.init)(key_bytes)?,
            algorithm,
        })
    }
    /// Generate a new QUIC Header Protection mask.
    ///
    /// # Errors
    /// `error::Unspecified` when `sample` length is not `self.algorithm().sample_len()`.
    #[inline]
    pub fn new_mask(&self, sample: &[u8]) -> Result<[u8; 5], error::Unspecified> {
        // `try_from` enforces `sample.len() == SAMPLE_LEN`; any other length
        // maps to `Unspecified`.
        let sample = <&[u8; SAMPLE_LEN]>::try_from(sample)?;
        cipher_new_mask(&self.inner, *sample)
    }
    /// The key's algorithm.
    #[inline]
    #[must_use]
    pub fn algorithm(&self) -> &'static Algorithm {
        self.algorithm
    }
}
// Header protection samples are the same length as the AEAD tag.
const SAMPLE_LEN: usize = super::TAG_LEN;
/// QUIC sample for new key masks
pub type Sample = [u8; SAMPLE_LEN];
/// A QUIC Header Protection Algorithm.
pub struct Algorithm {
    // Constructor for the underlying cipher key.
    init: fn(key: &[u8]) -> Result<SymmetricCipherKey, error::Unspecified>,
    // Required key length in bytes.
    key_len: usize,
    // Identity used for equality and Debug output.
    id: AlgorithmID,
}
impl KeyType for &'static Algorithm {
    /// Length of key material to derive; used by the HKDF-based `From` impl.
    #[inline]
    fn len(&self) -> usize {
        self.key_len()
    }
}
impl Algorithm {
    /// The length of the key.
    #[inline]
    #[must_use]
    pub fn key_len(&self) -> usize {
        self.key_len
    }
    /// The required sample length (equal to the AEAD tag length).
    #[inline]
    #[must_use]
    pub fn sample_len(&self) -> usize {
        SAMPLE_LEN
    }
}
derive_debug_via_id!(Algorithm);
// Identifies each public `Algorithm` constant; used for equality and Debug.
#[derive(Debug, Eq, PartialEq)]
#[allow(non_camel_case_types)]
enum AlgorithmID {
    AES_128,
    AES_256,
    CHACHA20,
}
impl PartialEq for Algorithm {
    // Algorithms compare equal iff their IDs match; the function pointer and
    // key length are deliberately not compared.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for Algorithm {}
/// AES-128 (16-byte key).
pub const AES_128: Algorithm = Algorithm {
    key_len: 16,
    init: SymmetricCipherKey::aes128,
    id: AlgorithmID::AES_128,
};
/// AES-256 (32-byte key).
pub const AES_256: Algorithm = Algorithm {
    key_len: 32,
    init: SymmetricCipherKey::aes256,
    id: AlgorithmID::AES_256,
};
/// `ChaCha20` (32-byte key).
pub const CHACHA20: Algorithm = Algorithm {
    key_len: 32,
    init: SymmetricCipherKey::chacha20,
    id: AlgorithmID::CHACHA20,
};
/// Computes a 5-byte header-protection mask from the given cipher key and
/// 16-byte `sample` taken from the packet ciphertext.
///
/// # Errors
/// `error::Unspecified` if the underlying cipher operation fails.
#[inline]
fn cipher_new_mask(
    cipher_key: &SymmetricCipherKey,
    sample: Sample,
) -> Result<[u8; 5], error::Unspecified> {
    let block = block::Block::from(sample);
    let encrypted_block = match cipher_key {
        // AES: the mask is the first five bytes of the encrypted sample block.
        SymmetricCipherKey::Aes128 { enc_key, .. }
        | SymmetricCipherKey::Aes192 { enc_key, .. }
        | SymmetricCipherKey::Aes256 { enc_key, .. } => encrypt_block(enc_key, block),
        // ChaCha20: the first four sample bytes select the block counter and
        // the remaining twelve are the nonce; the mask is the keystream
        // produced by encrypting a zero block.
        SymmetricCipherKey::ChaCha20 { raw_key } => {
            let plaintext = block.as_ref();
            let counter_bytes: &[u8; 4] = plaintext[0..=3]
                .try_into()
                .map_err(|_| error::Unspecified)?;
            let nonce: &[u8; 12] = plaintext[4..=15]
                .try_into()
                .map_err(|_| error::Unspecified)?;
            let input = block::Block::zero();
            // Read the counter as little-endian. This is equivalent on every
            // target to the previous `u32::from_ne_bytes(*counter_bytes).to_le()`
            // but states the intent directly.
            let counter = u32::from_le_bytes(*counter_bytes);
            encrypt_block_chacha20(raw_key, input, nonce, counter)?
        }
    };
    let mut out: [u8; 5] = [0; 5];
    out.copy_from_slice(&encrypted_block.as_ref()[..5]);
    Ok(out)
}
#[cfg(test)]
mod test {
    use crate::aead::quic::{Algorithm, HeaderProtectionKey};
    use crate::test;
    // Compile-time checks that the public types are Send + Sync.
    #[test]
    fn test_types() {
        test::compile_time_assert_send::<Algorithm>();
        test::compile_time_assert_sync::<Algorithm>();
        test::compile_time_assert_send::<HeaderProtectionKey>();
        test::compile_time_assert_sync::<HeaderProtectionKey>();
    }
}

263
vendor/aws-lc-rs/src/aead/rand_nonce.rs vendored Normal file
View File

@@ -0,0 +1,263 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::error::Unspecified;
use core::fmt::Debug;
use super::aead_ctx::AeadCtx;
use super::{
Aad, Algorithm, AlgorithmID, Nonce, Tag, UnboundKey, AES_128_GCM_SIV, AES_256_GCM_SIV,
};
/// AEAD Cipher key using a randomized nonce.
///
/// `RandomizedNonceKey` handles generation of random nonce values.
///
/// The following algorithms are supported:
/// * `AES_128_GCM`
/// * `AES_256_GCM`
/// * `AES_128_GCM_SIV`
/// * `AES_256_GCM_SIV`
///
/// Prefer this type in place of `LessSafeKey`, `OpeningKey`, `SealingKey`.
pub struct RandomizedNonceKey {
    // The AEAD context used for all seal/open operations.
    key: UnboundKey,
    // The algorithm this key was constructed for.
    algorithm: &'static Algorithm,
}
impl RandomizedNonceKey {
/// New Random Nonce Sequence
/// # Errors
pub fn new(algorithm: &'static Algorithm, key_bytes: &[u8]) -> Result<Self, Unspecified> {
let ctx = match algorithm.id {
AlgorithmID::AES_128_GCM => AeadCtx::aes_128_gcm_randnonce(
key_bytes,
algorithm.tag_len(),
algorithm.nonce_len(),
),
AlgorithmID::AES_256_GCM => AeadCtx::aes_256_gcm_randnonce(
key_bytes,
algorithm.tag_len(),
algorithm.nonce_len(),
),
AlgorithmID::AES_128_GCM_SIV => {
AeadCtx::aes_128_gcm_siv(key_bytes, algorithm.tag_len())
}
AlgorithmID::AES_256_GCM_SIV => {
AeadCtx::aes_256_gcm_siv(key_bytes, algorithm.tag_len())
}
AlgorithmID::AES_192_GCM | AlgorithmID::CHACHA20_POLY1305 => return Err(Unspecified),
}?;
Ok(Self {
key: UnboundKey::from(ctx),
algorithm,
})
}
/// Authenticates and decrypts (“opens”) data in place.
//
// aad is the additional authenticated data (AAD), if any.
//
// On input, in_out must be the ciphertext followed by the tag. When open_in_place() returns Ok(plaintext),
// the input ciphertext has been overwritten by the plaintext; plaintext will refer to the plaintext without the tag.
///
/// # Errors
/// `error::Unspecified` when ciphertext is invalid.
#[inline]
#[allow(clippy::needless_pass_by_value)]
pub fn open_in_place<'in_out, A>(
&self,
nonce: Nonce,
aad: Aad<A>,
in_out: &'in_out mut [u8],
) -> Result<&'in_out mut [u8], Unspecified>
where
A: AsRef<[u8]>,
{
self.key.open_within(nonce, aad.as_ref(), in_out, 0..)
}
/// Encrypts and signs (“seals”) data in place, appending the tag to the
/// resulting ciphertext.
///
/// `key.seal_in_place_append_tag(aad, in_out)` is equivalent to:
///
/// ```skip
/// key.seal_in_place_separate_tag(aad, in_out.as_mut())
/// .map(|tag| in_out.extend(tag.as_ref()))
/// ```
///
/// The Nonce used for the operation is randomly generated, and returned to the caller.
///
/// # Errors
/// `error::Unspecified` if encryption operation fails.
#[inline]
#[allow(clippy::needless_pass_by_value)]
pub fn seal_in_place_append_tag<'a, A, InOut>(
&self,
aad: Aad<A>,
in_out: &'a mut InOut,
) -> Result<Nonce, Unspecified>
where
A: AsRef<[u8]>,
InOut: AsMut<[u8]> + for<'in_out> Extend<&'in_out u8>,
{
let nonce = if self.algorithm == &AES_128_GCM_SIV || self.algorithm == &AES_256_GCM_SIV {
let mut nonce = vec![0u8; self.algorithm.nonce_len()];
crate::rand::fill(&mut nonce[..])?;
Some(Nonce::try_assume_unique_for_key(nonce.as_slice())?)
} else {
None
};
self.key
.seal_in_place_append_tag(nonce, aad.as_ref(), in_out)
}
/// Encrypts and signs (“seals”) data in place.
///
/// `aad` is the additional authenticated data (AAD), if any. This is
/// authenticated but not encrypted. The type `A` could be a byte slice
/// `&[u8]`, a byte array `[u8; N]` for some constant `N`, `Vec<u8>`, etc.
/// If there is no AAD then use `Aad::empty()`.
///
/// The plaintext is given as the input value of `in_out`. `seal_in_place()`
/// will overwrite the plaintext with the ciphertext and return the tag.
/// For most protocols, the caller must append the tag to the ciphertext.
/// The tag will be `self.algorithm.tag_len()` bytes long.
///
/// The Nonce used for the operation is randomly generated, and returned to the caller.
///
/// # Errors
/// `error::Unspecified` if encryption operation fails.
#[inline]
#[allow(clippy::needless_pass_by_value)]
pub fn seal_in_place_separate_tag<A>(
&self,
aad: Aad<A>,
in_out: &mut [u8],
) -> Result<(Nonce, Tag), Unspecified>
where
A: AsRef<[u8]>,
{
let nonce = if self.algorithm == &AES_128_GCM_SIV || self.algorithm == &AES_256_GCM_SIV {
let mut nonce = vec![0u8; self.algorithm.nonce_len()];
crate::rand::fill(&mut nonce[..])?;
Some(Nonce::try_assume_unique_for_key(nonce.as_slice())?)
} else {
None
};
self.key
.seal_in_place_separate_tag(nonce, aad.as_ref(), in_out)
}
/// The key's AEAD algorithm.
#[inline]
#[must_use]
pub fn algorithm(&self) -> &'static Algorithm {
self.algorithm
}
}
#[allow(clippy::missing_fields_in_debug)]
impl Debug for RandomizedNonceKey {
    // The `key` field is intentionally omitted from the output, presumably to
    // keep key material out of debug logs.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("RandomizedNonceKey")
            .field("algorithm", &self.algorithm)
            .finish()
    }
}
#[cfg(test)]
mod tests {
    use super::{Aad, RandomizedNonceKey};
    use crate::aead::{
        AES_128_GCM, AES_128_GCM_SIV, AES_256_GCM, AES_256_GCM_SIV, CHACHA20_POLY1305,
    };
    use crate::test::from_hex;
    use paste::paste;
    const TEST_128_BIT_KEY: &[u8] = &[
        0xb0, 0x37, 0x9f, 0xf8, 0xfb, 0x8e, 0xa6, 0x31, 0xf4, 0x1c, 0xe6, 0x3e, 0xb5, 0xc5, 0x20,
        0x7c,
    ];
    const TEST_256_BIT_KEY: &[u8] = &[
        0x56, 0xd8, 0x96, 0x68, 0xbd, 0x96, 0xeb, 0xff, 0x5e, 0xa2, 0x0b, 0x34, 0xf2, 0x79, 0x84,
        0x6e, 0x2b, 0x13, 0x01, 0x3d, 0xab, 0x1d, 0xa4, 0x07, 0x5a, 0x16, 0xd5, 0x0b, 0x53, 0xb0,
        0xcc, 0x88,
    ];
    // Two-arm macro: the 3-argument form asserts construction fails for an
    // unsupported algorithm; the 5-argument form round-trips a message through
    // both seal APIs and checks tag/nonce lengths.
    macro_rules! test_randnonce {
        ($name:ident, $alg:expr, $key:expr) => {
            paste! {
                #[test]
                fn [<test_ $name _randnonce_unsupported>]() {
                    assert!(RandomizedNonceKey::new($alg, $key).is_err());
                }
            }
        };
        ($name:ident, $alg:expr, $key:expr, $expect_tag_len:expr, $expect_nonce_len:expr) => {
            paste! {
                #[test]
                fn [<test_ $name _randnonce>]() {
                    let plaintext = from_hex("00112233445566778899aabbccddeeff").unwrap();
                    let rand_nonce_key =
                        RandomizedNonceKey::new($alg, $key).unwrap();
                    assert_eq!($alg, rand_nonce_key.algorithm());
                    assert_eq!(*$expect_tag_len, $alg.tag_len());
                    assert_eq!(*$expect_nonce_len, $alg.nonce_len());
                    let mut in_out = Vec::from(plaintext.as_slice());
                    let nonce = rand_nonce_key
                        .seal_in_place_append_tag(Aad::empty(), &mut in_out)
                        .unwrap();
                    assert_ne!(plaintext, in_out[..plaintext.len()]);
                    rand_nonce_key
                        .open_in_place(nonce, Aad::empty(), &mut in_out)
                        .unwrap();
                    assert_eq!(plaintext, in_out[..plaintext.len()]);
                    let mut in_out = Vec::from(plaintext.as_slice());
                    let (nonce, tag) = rand_nonce_key
                        .seal_in_place_separate_tag(Aad::empty(), &mut in_out)
                        .unwrap();
                    assert_ne!(plaintext, in_out[..plaintext.len()]);
                    in_out.extend(tag.as_ref());
                    rand_nonce_key
                        .open_in_place(nonce, Aad::empty(), &mut in_out)
                        .unwrap();
                    assert_eq!(plaintext, in_out[..plaintext.len()]);
                }
            }
        };
    }
    test_randnonce!(aes_128_gcm, &AES_128_GCM, TEST_128_BIT_KEY, &16, &12);
    test_randnonce!(aes_256_gcm, &AES_256_GCM, TEST_256_BIT_KEY, &16, &12);
    test_randnonce!(
        aes_128_gcm_siv,
        &AES_128_GCM_SIV,
        TEST_128_BIT_KEY,
        &16,
        &12
    );
    test_randnonce!(
        aes_256_gcm_siv,
        &AES_256_GCM_SIV,
        TEST_256_BIT_KEY,
        &16,
        &12
    );
    // ChaCha20-Poly1305 is not supported by RandomizedNonceKey.
    test_randnonce!(chacha20_poly1305, &CHACHA20_POLY1305, TEST_256_BIT_KEY);
}

265
vendor/aws-lc-rs/src/aead/tests/fips.rs vendored Normal file
View File

@@ -0,0 +1,265 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![cfg(debug_assertions)]
mod chacha20_poly1305_openssh;
mod quic;
use crate::aead::nonce_sequence::Counter64Builder;
use crate::aead::{
Aad, BoundKey, Nonce, OpeningKey, RandomizedNonceKey, SealingKey, TlsProtocolId,
TlsRecordOpeningKey, TlsRecordSealingKey, UnboundKey, AES_128_GCM, AES_256_GCM,
CHACHA20_POLY1305,
};
use crate::fips::{assert_fips_status_indicator, FipsServiceStatus};
// Fixed, arbitrary test vectors shared by the FIPS indicator tests below.
const TEST_KEY_128_BIT: [u8; 16] = [
    0x9f, 0xd9, 0x41, 0xc3, 0xa6, 0xfe, 0xb9, 0x26, 0x2a, 0x35, 0xa7, 0x44, 0xbb, 0xc0, 0x3a, 0x6a,
];
const TEST_KEY_256_BIT: [u8; 32] = [
    0xd8, 0x32, 0x58, 0xa9, 0x5a, 0x62, 0x6c, 0x99, 0xc4, 0xe6, 0xb5, 0x3f, 0x97, 0x90, 0x62, 0xbe,
    0x71, 0x0f, 0xd5, 0xe1, 0xd4, 0xfe, 0x95, 0xb3, 0x03, 0x46, 0xa5, 0x8e, 0x36, 0xad, 0x18, 0xe3,
];
const TEST_NONCE_96_BIT: [u8; 12] = [
    0xe4, 0x39, 0x17, 0x95, 0x86, 0xcd, 0xcd, 0x5a, 0x1b, 0x46, 0x7b, 0x1d,
];
const TEST_MESSAGE: &[u8] = "test message".as_bytes();
// Generates a test that seals and opens TEST_MESSAGE through the
// SealingKey/OpeningKey (NonceSequence) API, asserting the expected FIPS
// service status for the seal ($seal_expect) and open ($open_expect) paths.
// Both the append-tag and separate-tag seal variants are exercised.
macro_rules! nonce_sequence_api {
    ($name:ident, $alg:expr, $key:expr, $seal_expect:path, $open_expect:path) => {
        #[test]
        fn $name() {
            {
                let mut key = SealingKey::new(
                    UnboundKey::new($alg, $key).unwrap(),
                    Counter64Builder::new().build(),
                );
                let mut in_out = Vec::from(TEST_MESSAGE);
                assert_fips_status_indicator!(
                    key.seal_in_place_append_tag(Aad::empty(), &mut in_out),
                    $seal_expect
                )
                .unwrap();
                let mut key = OpeningKey::new(
                    UnboundKey::new($alg, $key).unwrap(),
                    Counter64Builder::new().build(),
                );
                let result = assert_fips_status_indicator!(
                    key.open_in_place(Aad::empty(), &mut in_out),
                    $open_expect
                )
                .unwrap();
                assert_eq!(TEST_MESSAGE, result);
            }
            {
                let mut key = SealingKey::new(
                    UnboundKey::new($alg, $key).unwrap(),
                    Counter64Builder::new().build(),
                );
                let mut in_out = Vec::from(TEST_MESSAGE);
                let tag = assert_fips_status_indicator!(
                    key.seal_in_place_separate_tag(Aad::empty(), &mut in_out),
                    $seal_expect
                )
                .unwrap();
                in_out.extend(tag.as_ref().iter());
                let mut key = OpeningKey::new(
                    UnboundKey::new($alg, $key).unwrap(),
                    Counter64Builder::new().build(),
                );
                let result = assert_fips_status_indicator!(
                    key.open_in_place(Aad::empty(), &mut in_out),
                    $open_expect
                )
                .unwrap();
                assert_eq!(TEST_MESSAGE, result);
            }
        }
    };
}
// AES-GCM: sealing with a caller-supplied nonce sequence is non-approved
// while opening is approved; ChaCha20-Poly1305 is non-approved either way.
nonce_sequence_api!(
    aes_gcm_128_nonce_sequence_api,
    &AES_128_GCM,
    &TEST_KEY_128_BIT[..],
    FipsServiceStatus::NonApproved,
    FipsServiceStatus::Approved
);
nonce_sequence_api!(
    aes_gcm_256_nonce_sequence_api,
    &AES_256_GCM,
    &TEST_KEY_256_BIT[..],
    FipsServiceStatus::NonApproved,
    FipsServiceStatus::Approved
);
nonce_sequence_api!(
    chacha20_poly1305_nonce_sequence_api,
    &CHACHA20_POLY1305,
    &TEST_KEY_256_BIT[..],
    FipsServiceStatus::NonApproved,
    FipsServiceStatus::NonApproved
);
// Generates a test that round-trips TEST_MESSAGE through RandomizedNonceKey
// (both seal variants), asserting the Approved FIPS status; the `false` arm
// instead asserts that key construction fails for unsupported algorithms.
macro_rules! randnonce_api {
    ($name:ident, $alg:expr, $key:expr) => {
        #[test]
        fn $name() {
            let key = RandomizedNonceKey::new($alg, $key).unwrap();
            {
                let mut in_out = Vec::from(TEST_MESSAGE);
                let nonce = assert_fips_status_indicator!(
                    key.seal_in_place_append_tag(Aad::empty(), &mut in_out),
                    FipsServiceStatus::Approved
                )
                .unwrap();
                let in_out = assert_fips_status_indicator!(
                    key.open_in_place(nonce, Aad::empty(), &mut in_out),
                    FipsServiceStatus::Approved
                )
                .unwrap();
                assert_eq!(TEST_MESSAGE, in_out);
            }
            {
                let mut in_out = Vec::from(TEST_MESSAGE);
                let (nonce, tag) = assert_fips_status_indicator!(
                    key.seal_in_place_separate_tag(Aad::empty(), &mut in_out),
                    FipsServiceStatus::Approved
                )
                .unwrap();
                in_out.extend(tag.as_ref().iter());
                let in_out = assert_fips_status_indicator!(
                    key.open_in_place(nonce, Aad::empty(), &mut in_out),
                    FipsServiceStatus::Approved
                )
                .unwrap();
                assert_eq!(TEST_MESSAGE, in_out);
            }
        }
    };
    // Match for unsupported variants
    ($name:ident, $alg:expr, $key:expr, false) => {
        #[test]
        fn $name() {
            assert!(RandomizedNonceKey::new($alg, $key).is_err());
        }
    };
}
// RandomizedNonceKey is approved for AES-GCM; ChaCha20-Poly1305 is
// unsupported and construction must fail.
randnonce_api!(
    aes_gcm_128_randnonce_api,
    &AES_128_GCM,
    &TEST_KEY_128_BIT[..]
);
randnonce_api!(
    aes_gcm_256_randnonce_api,
    &AES_256_GCM,
    &TEST_KEY_256_BIT[..]
);
randnonce_api!(
    chacha20_poly1305_randnonce_api,
    &CHACHA20_POLY1305,
    &TEST_KEY_256_BIT[..],
    false
);
// Generates a test sealing and opening TEST_MESSAGE through the TLS record
// keys with a fixed nonce, asserting the Approved FIPS status; the `false`
// arm asserts that construction fails for unsupported algorithms.
macro_rules! tls_nonce_api {
    ($name:ident, $alg:expr, $proto:expr, $key:expr) => {
        #[test]
        fn $name() {
            let mut key = TlsRecordSealingKey::new($alg, $proto, $key).unwrap();
            let mut in_out = Vec::from(TEST_MESSAGE);
            assert_fips_status_indicator!(
                key.seal_in_place_append_tag(
                    Nonce::from(&TEST_NONCE_96_BIT),
                    Aad::empty(),
                    &mut in_out,
                ),
                FipsServiceStatus::Approved
            )
            .unwrap();
            let key = TlsRecordOpeningKey::new($alg, $proto, $key).unwrap();
            let in_out = assert_fips_status_indicator!(
                key.open_in_place(Nonce::from(&TEST_NONCE_96_BIT), Aad::empty(), &mut in_out),
                FipsServiceStatus::Approved
            )
            .unwrap();
            assert_eq!(in_out, TEST_MESSAGE);
        }
    };
    // Match for unsupported variants
    ($name:ident, $alg:expr, $proto:expr, $key:expr, false) => {
        #[test]
        fn $name() {
            assert!(TlsRecordSealingKey::new($alg, $proto, $key).is_err());
            assert!(TlsRecordOpeningKey::new($alg, $proto, $key).is_err());
        }
    };
}
tls_nonce_api!(
    aes_128_tls12_nonce_api,
    &AES_128_GCM,
    TlsProtocolId::TLS12,
    &TEST_KEY_128_BIT
);
tls_nonce_api!(
    aes_256_tls12_nonce_api,
    &AES_256_GCM,
    TlsProtocolId::TLS12,
    &TEST_KEY_256_BIT
);
tls_nonce_api!(
    aes_128_tls13_nonce_api,
    &AES_128_GCM,
    TlsProtocolId::TLS13,
    &TEST_KEY_128_BIT
);
tls_nonce_api!(
    aes_256_tls13_nonce_api,
    &AES_256_GCM,
    TlsProtocolId::TLS13,
    &TEST_KEY_256_BIT
);
// ChaCha20-Poly1305 is unsupported by the TLS record keys: construction must
// fail for both protocol versions. (Test names fixed: "chaca20" -> "chacha20".)
tls_nonce_api!(
    chacha20_poly1305_tls12_nonce_api,
    &CHACHA20_POLY1305,
    TlsProtocolId::TLS12,
    &TEST_KEY_256_BIT,
    false
);
tls_nonce_api!(
    chacha20_poly1305_tls13_nonce_api,
    &CHACHA20_POLY1305,
    TlsProtocolId::TLS13,
    &TEST_KEY_256_BIT,
    false
);

View File

@@ -0,0 +1,60 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aead::chacha20_poly1305_openssh::{OpeningKey, SealingKey};
use crate::fips::{assert_fips_status_indicator, FipsServiceStatus};
use super::TEST_MESSAGE;
#[test]
fn test() {
    let key_bytes = &[42u8; 64];
    let key = SealingKey::new(key_bytes);
    // Build an SSH-style packet: 4-byte big-endian length prefix, then payload.
    let mut message: Vec<u8> = Vec::new();
    #[allow(clippy::cast_possible_truncation)]
    message.extend_from_slice({
        let len = TEST_MESSAGE.len() as u32;
        &[
            ((len & 0xFF00_0000) >> 24) as u8,
            ((len & 0xFF_0000) >> 16) as u8,
            ((len & 0xFF00) >> 8) as u8,
            (len & 0xFF) as u8,
        ]
    });
    message.extend_from_slice(TEST_MESSAGE);
    let mut tag = [0u8; 16];
    // The OpenSSH ChaCha20-Poly1305 construction is not FIPS-approved.
    assert_fips_status_indicator!(
        key.seal_in_place(1024, &mut message, &mut tag),
        FipsServiceStatus::NonApproved
    );
    let mut encrypted_packet_length = [0u8; 4];
    encrypted_packet_length.copy_from_slice(&message[0..4]);
    let key = OpeningKey::new(key_bytes);
    let packet_length = assert_fips_status_indicator!(
        key.decrypt_packet_length(1024, encrypted_packet_length),
        FipsServiceStatus::NonApproved
    );
    #[allow(clippy::cast_possible_truncation)]
    let expected_packet_length = TEST_MESSAGE.len() as u32;
    // Reassemble the big-endian length and confirm it round-tripped.
    assert_eq!(
        expected_packet_length,
        (u32::from(packet_length[0]) << 24)
            | (u32::from(packet_length[1]) << 16)
            | (u32::from(packet_length[2]) << 8)
            | u32::from(packet_length[3])
    );
    let message = assert_fips_status_indicator!(
        key.open_in_place(1024, &mut message, &tag).unwrap(),
        FipsServiceStatus::NonApproved
    );
    assert_eq!(TEST_MESSAGE, message);
}

View File

@@ -0,0 +1,37 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aead::quic::{HeaderProtectionKey, AES_128, AES_256, CHACHA20};
use crate::fips::{assert_fips_status_indicator, FipsServiceStatus};
use super::{TEST_KEY_128_BIT, TEST_KEY_256_BIT};
// Generates a test asserting the FIPS service status reported when producing
// a QUIC header-protection mask with the given algorithm and key.
macro_rules! quic_api {
    ($name:ident, $alg:expr, $key:expr, $expect:path) => {
        #[test]
        fn $name() {
            let key = HeaderProtectionKey::new($alg, $key).unwrap();
            assert_fips_status_indicator!(key.new_mask(&[42u8; 16]), $expect).unwrap();
        }
    };
}
// AES-based header protection is approved; ChaCha20 is not.
quic_api!(
    aes_128,
    &AES_128,
    &TEST_KEY_128_BIT,
    FipsServiceStatus::Approved
);
quic_api!(
    aes_256,
    &AES_256,
    &TEST_KEY_256_BIT,
    FipsServiceStatus::Approved
);
quic_api!(
    chacha20,
    &CHACHA20,
    &TEST_KEY_256_BIT,
    FipsServiceStatus::NonApproved
);

454
vendor/aws-lc-rs/src/aead/tls.rs vendored Normal file
View File

@@ -0,0 +1,454 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use super::aead_ctx::{self, AeadCtx};
use super::{Aad, Algorithm, AlgorithmID, Nonce, Tag, UnboundKey};
use crate::error::Unspecified;
use core::fmt::Debug;
use core::ops::RangeFrom;
/// The Transport Layer Security (TLS) protocol version.
// Marked non_exhaustive so future protocol versions can be added without a
// breaking change.
#[allow(clippy::module_name_repetitions)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
#[non_exhaustive]
pub enum TlsProtocolId {
    /// TLS 1.2 (RFC 5246)
    TLS12,
    /// TLS 1.3 (RFC 8446)
    TLS13,
}
/// AEAD Encryption key used for TLS protocol record encryption.
///
/// This type encapsulates encryption operations for TLS AEAD algorithms.
/// It validates that the provided nonce values are monotonically increasing for each invocation.
///
/// The following algorithms are supported:
/// * `AES_128_GCM`
/// * `AES_256_GCM`
///
/// Prefer this type in place of `LessSafeKey`, `OpeningKey`, `SealingKey` for TLS protocol implementations.
#[allow(clippy::module_name_repetitions)]
pub struct TlsRecordSealingKey {
    // The TLS specific construction for TLS ciphers in AWS-LC are not thread-safe!
    // The choice here was either wrap the underlying EVP_AEAD_CTX in a Mutex as done here,
    // or force this type to !Sync. Since this is an implementation detail of AWS-LC
    // we have opted to manage this behavior internally.
    key: UnboundKey,
    protocol: TlsProtocolId,
}
impl TlsRecordSealingKey {
    /// New TLS record sealing key. Only supports `AES_128_GCM` and `AES_256_GCM`.
    ///
    /// # Errors
    /// * `Unspecified`: Returned if the length of `key_bytes` does not match the chosen algorithm,
    /// or if an unsupported algorithm is provided.
    pub fn new(
        algorithm: &'static Algorithm,
        protocol: TlsProtocolId,
        key_bytes: &[u8],
    ) -> Result<Self, Unspecified> {
        // Select the TLS-specific sealing construction for the (cipher, protocol) pair.
        let ctx = match (algorithm.id, protocol) {
            (AlgorithmID::AES_128_GCM, TlsProtocolId::TLS12) => AeadCtx::aes_128_gcm_tls12(
                key_bytes,
                algorithm.tag_len(),
                aead_ctx::AeadDirection::Seal,
            ),
            (AlgorithmID::AES_128_GCM, TlsProtocolId::TLS13) => AeadCtx::aes_128_gcm_tls13(
                key_bytes,
                algorithm.tag_len(),
                aead_ctx::AeadDirection::Seal,
            ),
            (AlgorithmID::AES_256_GCM, TlsProtocolId::TLS12) => AeadCtx::aes_256_gcm_tls12(
                key_bytes,
                algorithm.tag_len(),
                aead_ctx::AeadDirection::Seal,
            ),
            (AlgorithmID::AES_256_GCM, TlsProtocolId::TLS13) => AeadCtx::aes_256_gcm_tls13(
                key_bytes,
                algorithm.tag_len(),
                aead_ctx::AeadDirection::Seal,
            ),
            // No TLS record construction exists for the remaining AEAD algorithms.
            (
                AlgorithmID::AES_128_GCM_SIV
                | AlgorithmID::AES_192_GCM
                | AlgorithmID::AES_256_GCM_SIV
                | AlgorithmID::CHACHA20_POLY1305,
                _,
            ) => Err(Unspecified),
        }?;
        Ok(Self {
            key: UnboundKey::from(ctx),
            protocol,
        })
    }
    /// Accepts a `Nonce` and `Aad` construction that is unique for this key and
    /// TLS record sealing operation for the configured TLS protocol version.
    ///
    /// `nonce` must be unique and incremented per each sealing operation,
    /// otherwise an error is returned.
    ///
    /// # Errors
    /// `error::Unspecified` if encryption operation fails.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    pub fn seal_in_place_append_tag<A, InOut>(
        &mut self,
        nonce: Nonce,
        aad: Aad<A>,
        in_out: &mut InOut,
    ) -> Result<(), Unspecified>
    where
        A: AsRef<[u8]>,
        InOut: AsMut<[u8]> + for<'in_out> Extend<&'in_out u8>,
    {
        // The inner key echoes back the nonce we supplied; it is discarded here.
        self.key
            .seal_in_place_append_tag(Some(nonce), aad.as_ref(), in_out)
            .map(|_| ())
    }
    /// Encrypts and signs (“seals”) data in place.
    ///
    /// `aad` is the additional authenticated data (AAD), if any. This is
    /// authenticated but not encrypted. The type `A` could be a byte slice
    /// `&[u8]`, a byte array `[u8; N]` for some constant `N`, `Vec<u8>`, etc.
    /// If there is no AAD then use `Aad::empty()`.
    ///
    /// The plaintext is given as the input value of `in_out`. `seal_in_place()`
    /// will overwrite the plaintext with the ciphertext and return the tag.
    /// For most protocols, the caller must append the tag to the ciphertext.
    /// The tag will be `self.algorithm.tag_len()` bytes long.
    ///
    /// `nonce` must be unique and incremented per each sealing operation,
    /// otherwise an error is returned.
    ///
    /// # Errors
    /// `error::Unspecified` if encryption operation fails.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    pub fn seal_in_place_separate_tag<A>(
        &mut self,
        nonce: Nonce,
        aad: Aad<A>,
        in_out: &mut [u8],
    ) -> Result<Tag, Unspecified>
    where
        A: AsRef<[u8]>,
    {
        // Only the tag is surfaced; the echoed nonce is discarded.
        self.key
            .seal_in_place_separate_tag(Some(nonce), aad.as_ref(), in_out)
            .map(|(_, tag)| tag)
    }
    /// The key's AEAD algorithm.
    #[inline]
    #[must_use]
    pub fn algorithm(&self) -> &'static Algorithm {
        self.key.algorithm()
    }
    /// The key's associated `TlsProtocolId`.
    #[must_use]
    pub fn tls_protocol_id(&self) -> TlsProtocolId {
        self.protocol
    }
}
#[allow(clippy::missing_fields_in_debug)]
impl Debug for TlsRecordSealingKey {
    /// Renders the key and protocol fields; `UnboundKey`'s own `Debug`
    /// determines how much key detail is exposed.
    fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut builder = fmt.debug_struct("TlsRecordSealingKey");
        builder.field("key", &self.key);
        builder.field("protocol", &self.protocol);
        builder.finish()
    }
}
/// AEAD key used for TLS protocol record decryption.
///
/// This type encapsulates decryption operations for TLS AEAD algorithms.
///
/// The following algorithms are supported:
/// * `AES_128_GCM`
/// * `AES_256_GCM`
///
/// Prefer this type in place of `LessSafeKey`, `OpeningKey`, `SealingKey` for TLS protocol implementations.
#[allow(clippy::module_name_repetitions)]
pub struct TlsRecordOpeningKey {
    // The TLS specific construction for TLS ciphers in AWS-LC are not thread-safe!
    // The choice here was either wrap the underlying EVP_AEAD_CTX in a Mutex,
    // or force this type to !Sync. Since this is an implementation detail of AWS-LC
    // we have opted to manage this behavior internally.
    // NOTE(review): the opening methods take `&self`; confirm the Open-direction
    // context tolerates concurrent use, or that exclusivity is enforced elsewhere.
    key: UnboundKey,
    protocol: TlsProtocolId,
}
impl TlsRecordOpeningKey {
    /// New TLS record opening key. Only supports `AES_128_GCM` and `AES_256_GCM` Algorithms.
    ///
    /// # Errors
    /// * `Unspecified`: Returned if the length of `key_bytes` does not match the chosen algorithm,
    /// or if an unsupported algorithm is provided.
    pub fn new(
        algorithm: &'static Algorithm,
        protocol: TlsProtocolId,
        key_bytes: &[u8],
    ) -> Result<Self, Unspecified> {
        // Select the TLS-specific opening construction for the (cipher, protocol) pair.
        let ctx = match (algorithm.id, protocol) {
            (AlgorithmID::AES_128_GCM, TlsProtocolId::TLS12) => AeadCtx::aes_128_gcm_tls12(
                key_bytes,
                algorithm.tag_len(),
                aead_ctx::AeadDirection::Open,
            ),
            (AlgorithmID::AES_128_GCM, TlsProtocolId::TLS13) => AeadCtx::aes_128_gcm_tls13(
                key_bytes,
                algorithm.tag_len(),
                aead_ctx::AeadDirection::Open,
            ),
            (AlgorithmID::AES_256_GCM, TlsProtocolId::TLS12) => AeadCtx::aes_256_gcm_tls12(
                key_bytes,
                algorithm.tag_len(),
                aead_ctx::AeadDirection::Open,
            ),
            (AlgorithmID::AES_256_GCM, TlsProtocolId::TLS13) => AeadCtx::aes_256_gcm_tls13(
                key_bytes,
                algorithm.tag_len(),
                aead_ctx::AeadDirection::Open,
            ),
            // No TLS record construction exists for the remaining AEAD algorithms.
            (
                AlgorithmID::AES_128_GCM_SIV
                | AlgorithmID::AES_192_GCM
                | AlgorithmID::AES_256_GCM_SIV
                | AlgorithmID::CHACHA20_POLY1305,
                _,
            ) => Err(Unspecified),
        }?;
        Ok(Self {
            key: UnboundKey::from(ctx),
            protocol,
        })
    }
    /// See [`super::OpeningKey::open_in_place()`] for details.
    ///
    /// # Errors
    /// `error::Unspecified` when ciphertext is invalid.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    pub fn open_in_place<'in_out, A>(
        &self,
        nonce: Nonce,
        aad: Aad<A>,
        in_out: &'in_out mut [u8],
    ) -> Result<&'in_out mut [u8], Unspecified>
    where
        A: AsRef<[u8]>,
    {
        // Equivalent to `open_within` with an empty ciphertext prefix.
        self.key.open_within(nonce, aad.as_ref(), in_out, 0..)
    }
    /// See [`super::OpeningKey::open_within()`] for details.
    ///
    /// # Errors
    /// `error::Unspecified` when ciphertext is invalid.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    pub fn open_within<'in_out, A>(
        &self,
        nonce: Nonce,
        aad: Aad<A>,
        in_out: &'in_out mut [u8],
        ciphertext_and_tag: RangeFrom<usize>,
    ) -> Result<&'in_out mut [u8], Unspecified>
    where
        A: AsRef<[u8]>,
    {
        self.key
            .open_within(nonce, aad.as_ref(), in_out, ciphertext_and_tag)
    }
    /// The key's AEAD algorithm.
    #[inline]
    #[must_use]
    pub fn algorithm(&self) -> &'static Algorithm {
        self.key.algorithm()
    }
    /// The key's associated `TlsProtocolId`.
    #[must_use]
    pub fn tls_protocol_id(&self) -> TlsProtocolId {
        self.protocol
    }
}
#[allow(clippy::missing_fields_in_debug)]
impl Debug for TlsRecordOpeningKey {
    /// Renders the key and protocol fields; `UnboundKey`'s own `Debug`
    /// determines how much key detail is exposed.
    fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut builder = fmt.debug_struct("TlsRecordOpeningKey");
        builder.field("key", &self.key);
        builder.field("protocol", &self.protocol);
        builder.finish()
    }
}
#[cfg(test)]
mod tests {
    use super::{TlsProtocolId, TlsRecordOpeningKey, TlsRecordSealingKey};
    use crate::aead::{Aad, Nonce, AES_128_GCM, AES_256_GCM, CHACHA20_POLY1305};
    use crate::test::from_hex;
    use paste::paste;
    // Fixed test key material for the 128- and 256-bit AES-GCM variants.
    const TEST_128_BIT_KEY: &[u8] = &[
        0xb0, 0x37, 0x9f, 0xf8, 0xfb, 0x8e, 0xa6, 0x31, 0xf4, 0x1c, 0xe6, 0x3e, 0xb5, 0xc5, 0x20,
        0x7c,
    ];
    const TEST_256_BIT_KEY: &[u8] = &[
        0x56, 0xd8, 0x96, 0x68, 0xbd, 0x96, 0xeb, 0xff, 0x5e, 0xa2, 0x0b, 0x34, 0xf2, 0x79, 0x84,
        0x6e, 0x2b, 0x13, 0x01, 0x3d, 0xab, 0x1d, 0xa4, 0x07, 0x5a, 0x16, 0xd5, 0x0b, 0x53, 0xb0,
        0xcc, 0x88,
    ];
    // One sealing attempt: the nonce to use and whether sealing must fail
    // (the TLS construction rejects non-monotonic nonces).
    struct TlsNonceTestCase {
        nonce: &'static str,
        expect_err: bool,
    }
    // Cases run in order against the SAME sealing key: ...cc3 then ...cc4 are
    // increasing (accepted); ...cc2 then goes backwards (rejected).
    const TLS_NONCE_TEST_CASES: &[TlsNonceTestCase] = &[
        TlsNonceTestCase {
            nonce: "9fab40177c900aad9fc28cc3",
            expect_err: false,
        },
        TlsNonceTestCase {
            nonce: "9fab40177c900aad9fc28cc4",
            expect_err: false,
        },
        TlsNonceTestCase {
            nonce: "9fab40177c900aad9fc28cc2",
            expect_err: true,
        },
    ];
    // Two arms:
    // * 4-arg form: algorithm/protocol pair is unsupported — both key
    //   constructors must fail.
    // * 6-arg form: full round-trip (seal, open_in_place, open_within) across
    //   TLS_NONCE_TEST_CASES, checking tag and nonce lengths.
    macro_rules! test_tls_aead {
        ($name:ident, $alg:expr, $proto:expr, $key:expr) => {
            paste! {
                #[test]
                fn [<test_ $name _tls_aead_unsupported>]() {
                    assert!(TlsRecordSealingKey::new($alg, $proto, $key).is_err());
                    assert!(TlsRecordOpeningKey::new($alg, $proto, $key).is_err());
                }
            }
        };
        ($name:ident, $alg:expr, $proto:expr, $key:expr, $expect_tag_len:expr, $expect_nonce_len:expr) => {
            paste! {
                #[test]
                fn [<test_ $name>]() {
                    let mut sealing_key =
                        TlsRecordSealingKey::new($alg, $proto, $key).unwrap();
                    let opening_key =
                        TlsRecordOpeningKey::new($alg, $proto, $key).unwrap();
                    for case in TLS_NONCE_TEST_CASES {
                        let plaintext = from_hex("00112233445566778899aabbccddeeff").unwrap();
                        assert_eq!($alg, sealing_key.algorithm());
                        assert_eq!(*$expect_tag_len, $alg.tag_len());
                        assert_eq!(*$expect_nonce_len, $alg.nonce_len());
                        let mut in_out = Vec::from(plaintext.as_slice());
                        let nonce = from_hex(case.nonce).unwrap();
                        let nonce_bytes = nonce.as_slice();
                        let result = sealing_key.seal_in_place_append_tag(
                            Nonce::try_assume_unique_for_key(nonce_bytes).unwrap(),
                            Aad::empty(),
                            &mut in_out,
                        );
                        // An expected failure ends the test early: later cases
                        // would reuse a key whose nonce state is unchanged.
                        match (result, case.expect_err) {
                            (Ok(()), true) => panic!("expected error for seal_in_place_append_tag"),
                            (Ok(()), false) => {}
                            (Err(_), true) => return,
                            (Err(e), false) => panic!("{e}"),
                        }
                        assert_ne!(plaintext, in_out[..plaintext.len()]);
                        // copy ciphertext with prefix, to exercise `open_within`
                        let mut offset_cipher_text = vec![ 1, 2, 3, 4 ];
                        offset_cipher_text.extend_from_slice(&in_out);
                        opening_key
                            .open_in_place(
                                Nonce::try_assume_unique_for_key(nonce_bytes).unwrap(),
                                Aad::empty(),
                                &mut in_out,
                            )
                            .unwrap();
                        assert_eq!(plaintext, in_out[..plaintext.len()]);
                        opening_key
                            .open_within(
                                Nonce::try_assume_unique_for_key(nonce_bytes).unwrap(),
                                Aad::empty(),
                                &mut offset_cipher_text,
                                4..)
                            .unwrap();
                        assert_eq!(plaintext, offset_cipher_text[..plaintext.len()]);
                    }
                }
            }
        };
    }
    test_tls_aead!(
        aes_128_gcm_tls12,
        &AES_128_GCM,
        TlsProtocolId::TLS12,
        TEST_128_BIT_KEY,
        &16,
        &12
    );
    test_tls_aead!(
        aes_128_gcm_tls13,
        &AES_128_GCM,
        TlsProtocolId::TLS13,
        TEST_128_BIT_KEY,
        &16,
        &12
    );
    test_tls_aead!(
        aes_256_gcm_tls12,
        &AES_256_GCM,
        TlsProtocolId::TLS12,
        TEST_256_BIT_KEY,
        &16,
        &12
    );
    test_tls_aead!(
        aes_256_gcm_tls13,
        &AES_256_GCM,
        TlsProtocolId::TLS13,
        TEST_256_BIT_KEY,
        &16,
        &12
    );
    // ChaCha20-Poly1305 has no TLS record construction — constructor must fail.
    test_tls_aead!(
        chacha20_poly1305_tls12,
        &CHACHA20_POLY1305,
        TlsProtocolId::TLS12,
        TEST_256_BIT_KEY
    );
    test_tls_aead!(
        chacha20_poly1305_tls13,
        &CHACHA20_POLY1305,
        TlsProtocolId::TLS13,
        TEST_256_BIT_KEY
    );
}

513
vendor/aws-lc-rs/src/aead/unbound_key.rs vendored Normal file
View File

@@ -0,0 +1,513 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use super::aead_ctx::AeadCtx;
use super::{
Algorithm, Nonce, Tag, AES_128_GCM, AES_128_GCM_SIV, AES_192_GCM, AES_256_GCM, AES_256_GCM_SIV,
CHACHA20_POLY1305, MAX_KEY_LEN, MAX_TAG_LEN, NONCE_LEN,
};
use crate::aws_lc::{
EVP_AEAD_CTX_open, EVP_AEAD_CTX_open_gather, EVP_AEAD_CTX_seal, EVP_AEAD_CTX_seal_scatter,
};
use crate::error::Unspecified;
use crate::fips::indicator_check;
use crate::hkdf;
use crate::iv::FixedLength;
use core::fmt::Debug;
use core::mem::MaybeUninit;
use core::ops::RangeFrom;
use core::ptr::null;
/// The maximum length of a nonce returned by our AEAD API.
const MAX_NONCE_LEN: usize = NONCE_LEN;
/// The maximum required tag buffer needed if using AWS-LC generated nonce construction
/// (the tag and nonce are returned together in one buffer by `seal_scatter`).
const MAX_TAG_NONCE_BUFFER_LEN: usize = MAX_TAG_LEN + MAX_NONCE_LEN;
/// An AEAD key without a designated role or nonce sequence.
pub struct UnboundKey {
    // The raw AWS-LC AEAD context for the chosen algorithm.
    ctx: AeadCtx,
    // Cached so `algorithm()` does not need to re-derive it from `ctx`.
    algorithm: &'static Algorithm,
}
#[allow(clippy::missing_fields_in_debug)]
impl Debug for UnboundKey {
    /// Debug output deliberately shows only the algorithm, never key material.
    fn fmt(&self, formatter: &mut core::fmt::Formatter) -> Result<(), core::fmt::Error> {
        let mut out = formatter.debug_struct("UnboundKey");
        out.field("algorithm", &self.algorithm);
        out.finish()
    }
}
impl UnboundKey {
    /// Constructs an `UnboundKey`.
    /// # Errors
    /// `error::Unspecified` if `key_bytes.len() != algorithm.key_len()`.
    pub fn new(algorithm: &'static Algorithm, key_bytes: &[u8]) -> Result<Self, Unspecified> {
        Ok(Self {
            ctx: (algorithm.init)(key_bytes, algorithm.tag_len())?,
            algorithm,
        })
    }
    /// Opens (decrypts and authenticates) `in_out[ciphertext_and_tag]` in place,
    /// then shifts the recovered plaintext to the front of `in_out` and returns
    /// the plaintext slice.
    #[inline]
    pub(crate) fn open_within<'in_out>(
        &self,
        nonce: Nonce,
        aad: &[u8],
        in_out: &'in_out mut [u8],
        ciphertext_and_tag: RangeFrom<usize>,
    ) -> Result<&'in_out mut [u8], Unspecified> {
        let in_prefix_len = ciphertext_and_tag.start;
        // Reject ranges past the end of the buffer, or too short to hold a tag.
        let ciphertext_and_tag_len = in_out.len().checked_sub(in_prefix_len).ok_or(Unspecified)?;
        let ciphertext_len = ciphertext_and_tag_len
            .checked_sub(self.algorithm().tag_len())
            .ok_or(Unspecified)?;
        self.check_per_nonce_max_bytes(ciphertext_len)?;
        match self.ctx {
            // Random-nonce constructions need the gather entry point, which
            // accepts the tag and nonce together.
            AeadCtx::AES_128_GCM_RANDNONCE(_) | AeadCtx::AES_256_GCM_RANDNONCE(_) => {
                self.open_combined_randnonce(nonce, aad, &mut in_out[in_prefix_len..])
            }
            _ => self.open_combined(nonce, aad.as_ref(), &mut in_out[in_prefix_len..]),
        }?;
        // shift the plaintext to the left
        in_out.copy_within(in_prefix_len..in_prefix_len + ciphertext_len, 0);
        // `ciphertext_len` is also the plaintext length.
        Ok(&mut in_out[..ciphertext_len])
    }
    /// Decrypts `in_ciphertext` (authenticated against `in_tag` and `aad`) into
    /// `out_plaintext`, which must be exactly `in_ciphertext.len()` bytes.
    #[inline]
    pub(crate) fn open_separate_gather(
        &self,
        nonce: &Nonce,
        aad: &[u8],
        in_ciphertext: &[u8],
        in_tag: &[u8],
        out_plaintext: &mut [u8],
    ) -> Result<(), Unspecified> {
        self.check_per_nonce_max_bytes(in_ciphertext.len())?;
        // ensure that the lengths match
        {
            let actual = in_ciphertext.len();
            let expected = out_plaintext.len();
            if actual != expected {
                return Err(Unspecified);
            }
        }
        // SAFETY: all pointers and lengths are derived from live Rust slices,
        // and `out_plaintext` was checked above to match the ciphertext length.
        unsafe {
            let aead_ctx = self.ctx.as_ref();
            let nonce = nonce.as_ref();
            if 1 != EVP_AEAD_CTX_open_gather(
                aead_ctx.as_const_ptr(),
                out_plaintext.as_mut_ptr(),
                nonce.as_ptr(),
                nonce.len(),
                in_ciphertext.as_ptr(),
                in_ciphertext.len(),
                in_tag.as_ptr(),
                in_tag.len(),
                aad.as_ptr(),
                aad.len(),
            ) {
                return Err(Unspecified);
            }
            Ok(())
        }
    }
    /// Seals `in_out` in place, appending the tag. Returns the nonce used;
    /// when `nonce` is `None` the random-nonce construction supplies one.
    #[inline]
    pub(crate) fn seal_in_place_append_tag<'a, InOut>(
        &self,
        nonce: Option<Nonce>,
        aad: &[u8],
        in_out: &'a mut InOut,
    ) -> Result<Nonce, Unspecified>
    where
        InOut: AsMut<[u8]> + for<'in_out> Extend<&'in_out u8>,
    {
        self.check_per_nonce_max_bytes(in_out.as_mut().len())?;
        match nonce {
            Some(nonce) => self.seal_combined(nonce, aad, in_out),
            None => self.seal_combined_randnonce(aad, in_out),
        }
    }
    /// Seals `in_out` in place, returning the tag separately. Returns the nonce
    /// used; when `nonce` is `None` the random-nonce construction supplies one.
    #[inline]
    pub(crate) fn seal_in_place_separate_tag(
        &self,
        nonce: Option<Nonce>,
        aad: &[u8],
        in_out: &mut [u8],
    ) -> Result<(Nonce, Tag), Unspecified> {
        self.check_per_nonce_max_bytes(in_out.len())?;
        match nonce {
            Some(nonce) => self.seal_separate(nonce, aad, in_out),
            None => self.seal_separate_randnonce(aad, in_out),
        }
    }
    /// Seals `in_out` in place; the ciphertext for `extra_in` plus the tag are
    /// scattered into `extra_out_and_tag`, which must be exactly
    /// `extra_in.len() + tag_len` bytes.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    pub(crate) fn seal_in_place_separate_scatter(
        &self,
        nonce: Nonce,
        aad: &[u8],
        in_out: &mut [u8],
        extra_in: &[u8],
        extra_out_and_tag: &mut [u8],
    ) -> Result<(), Unspecified> {
        self.check_per_nonce_max_bytes(in_out.len())?;
        // ensure that the extra lengths match
        {
            let actual = extra_in.len() + self.algorithm().tag_len();
            let expected = extra_out_and_tag.len();
            if actual != expected {
                return Err(Unspecified);
            }
        }
        let nonce = nonce.as_ref();
        let mut out_tag_len = extra_out_and_tag.len();
        // SAFETY: pointers/lengths come from live slices; the output buffer
        // size was validated above.
        if 1 != unsafe {
            EVP_AEAD_CTX_seal_scatter(
                self.ctx.as_ref().as_const_ptr(),
                in_out.as_mut_ptr(),
                extra_out_and_tag.as_mut_ptr(),
                &mut out_tag_len,
                extra_out_and_tag.len(),
                nonce.as_ptr(),
                nonce.len(),
                in_out.as_ptr(),
                in_out.len(),
                extra_in.as_ptr(),
                extra_in.len(),
                aad.as_ptr(),
                aad.len(),
            )
        } {
            return Err(Unspecified);
        }
        Ok(())
    }
    /// The key's AEAD algorithm.
    #[inline]
    #[must_use]
    pub fn algorithm(&self) -> &'static Algorithm {
        self.algorithm
    }
    /// Rejects inputs longer than the algorithm's per-nonce input limit
    /// (`max_input_len`).
    #[inline]
    pub(crate) fn check_per_nonce_max_bytes(&self, in_out_len: usize) -> Result<(), Unspecified> {
        if in_out_len as u64 > self.algorithm().max_input_len {
            return Err(Unspecified);
        }
        Ok(())
    }
    /// Opens a combined `ciphertext || tag` buffer in place with the caller's nonce.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    fn open_combined(
        &self,
        nonce: Nonce,
        aad: &[u8],
        in_out: &mut [u8],
    ) -> Result<(), Unspecified> {
        let nonce = nonce.as_ref();
        debug_assert_eq!(nonce.len(), self.algorithm().nonce_len());
        // Callers (`open_within`) have already verified `in_out.len() >= tag_len`,
        // so this subtraction cannot underflow on that path.
        let plaintext_len = in_out.len() - self.algorithm().tag_len();
        let mut out_len = MaybeUninit::<usize>::uninit();
        // SAFETY: pointers and lengths come from live slices; input and output
        // pointers alias intentionally for in-place operation.
        if 1 != indicator_check!(unsafe {
            EVP_AEAD_CTX_open(
                self.ctx.as_ref().as_const_ptr(),
                in_out.as_mut_ptr(),
                out_len.as_mut_ptr(),
                plaintext_len,
                nonce.as_ptr(),
                nonce.len(),
                in_out.as_ptr(),
                plaintext_len + self.algorithm().tag_len(),
                aad.as_ptr(),
                aad.len(),
            )
        }) {
            return Err(Unspecified);
        }
        Ok(())
    }
    /// Opens a buffer sealed by an AWS-LC random-nonce construction, which
    /// expects `tag || nonce` as the "tag" argument of the gather call.
    #[inline]
    #[allow(clippy::needless_pass_by_value)]
    fn open_combined_randnonce(
        &self,
        nonce: Nonce,
        aad: &[u8],
        in_out: &mut [u8],
    ) -> Result<(), Unspecified> {
        let nonce = nonce.as_ref();
        let alg_nonce_len = self.algorithm().nonce_len();
        let alg_tag_len = self.algorithm().tag_len();
        debug_assert_eq!(nonce.len(), alg_nonce_len);
        debug_assert!(alg_tag_len + alg_nonce_len <= MAX_TAG_NONCE_BUFFER_LEN);
        let plaintext_len = in_out.len() - alg_tag_len;
        // Assemble tag || nonce into a stack buffer for the gather entry point.
        let mut tag_buffer = [0u8; MAX_TAG_NONCE_BUFFER_LEN];
        tag_buffer[..alg_tag_len]
            .copy_from_slice(&in_out[plaintext_len..plaintext_len + alg_tag_len]);
        tag_buffer[alg_tag_len..alg_tag_len + alg_nonce_len].copy_from_slice(nonce);
        let tag_slice = &tag_buffer[0..alg_tag_len + alg_nonce_len];
        // SAFETY: pointers/lengths come from live slices; the explicit nonce
        // argument is null/0 because the nonce travels inside `tag_slice`.
        if 1 != indicator_check!(unsafe {
            EVP_AEAD_CTX_open_gather(
                self.ctx.as_ref().as_const_ptr(),
                in_out.as_mut_ptr(),
                null(),
                0,
                in_out.as_ptr(),
                plaintext_len,
                tag_slice.as_ptr(),
                tag_slice.len(),
                aad.as_ptr(),
                aad.len(),
            )
        }) {
            return Err(Unspecified);
        }
        Ok(())
    }
    /// Seals `in_out` in place with the caller's nonce, extending the buffer by
    /// `tag_len` bytes so the tag can be written after the ciphertext.
    #[inline]
    fn seal_combined<InOut>(
        &self,
        nonce: Nonce,
        aad: &[u8],
        in_out: &mut InOut,
    ) -> Result<Nonce, Unspecified>
    where
        InOut: AsMut<[u8]> + for<'in_out> Extend<&'in_out u8>,
    {
        let plaintext_len = in_out.as_mut().len();
        let alg_tag_len = self.algorithm().tag_len();
        debug_assert!(alg_tag_len <= MAX_TAG_LEN);
        // Reserve tag space at the end; EVP_AEAD_CTX_seal overwrites it.
        let tag_buffer = [0u8; MAX_TAG_LEN];
        in_out.extend(tag_buffer[..alg_tag_len].iter());
        let mut out_len = MaybeUninit::<usize>::uninit();
        let mut_in_out = in_out.as_mut();
        {
            let nonce = nonce.as_ref();
            debug_assert_eq!(nonce.len(), self.algorithm().nonce_len());
            // SAFETY: pointers/lengths come from the (already extended) live
            // slice; input and output alias intentionally for in-place sealing.
            if 1 != indicator_check!(unsafe {
                EVP_AEAD_CTX_seal(
                    self.ctx.as_ref().as_const_ptr(),
                    mut_in_out.as_mut_ptr(),
                    out_len.as_mut_ptr(),
                    plaintext_len + alg_tag_len,
                    nonce.as_ptr(),
                    nonce.len(),
                    mut_in_out.as_ptr(),
                    plaintext_len,
                    aad.as_ptr(),
                    aad.len(),
                )
            }) {
                return Err(Unspecified);
            }
        }
        Ok(nonce)
    }
    /// Seals `in_out` in place letting AWS-LC generate the nonce; the scatter
    /// call returns `tag || nonce`, from which the nonce is recovered and the
    /// tag is appended to `in_out`.
    #[inline]
    fn seal_combined_randnonce<InOut>(
        &self,
        aad: &[u8],
        in_out: &mut InOut,
    ) -> Result<Nonce, Unspecified>
    where
        InOut: AsMut<[u8]> + for<'in_out> Extend<&'in_out u8>,
    {
        let mut tag_buffer = [0u8; MAX_TAG_NONCE_BUFFER_LEN];
        let mut out_tag_len = MaybeUninit::<usize>::uninit();
        {
            let plaintext_len = in_out.as_mut().len();
            let in_out = in_out.as_mut();
            // SAFETY: pointers/lengths come from live slices; null/0 nonce and
            // extra_in arguments select the construction's generated nonce.
            if 1 != indicator_check!(unsafe {
                EVP_AEAD_CTX_seal_scatter(
                    self.ctx.as_ref().as_const_ptr(),
                    in_out.as_mut_ptr(),
                    tag_buffer.as_mut_ptr(),
                    out_tag_len.as_mut_ptr(),
                    tag_buffer.len(),
                    null(),
                    0,
                    in_out.as_ptr(),
                    plaintext_len,
                    null(),
                    0,
                    aad.as_ptr(),
                    aad.len(),
                )
            }) {
                return Err(Unspecified);
            }
        }
        let tag_len = self.algorithm().tag_len();
        let nonce_len = self.algorithm().nonce_len();
        // The generated nonce sits immediately after the tag in `tag_buffer`.
        let nonce = Nonce(FixedLength::<NONCE_LEN>::try_from(
            &tag_buffer[tag_len..tag_len + nonce_len],
        )?);
        in_out.extend(&tag_buffer[..tag_len]);
        Ok(nonce)
    }
    /// Seals `in_out` in place with the caller's nonce, returning the tag
    /// separately rather than appending it.
    #[inline]
    fn seal_separate(
        &self,
        nonce: Nonce,
        aad: &[u8],
        in_out: &mut [u8],
    ) -> Result<(Nonce, Tag), Unspecified> {
        let mut tag = [0u8; MAX_TAG_LEN];
        let mut out_tag_len = MaybeUninit::<usize>::uninit();
        {
            let nonce = nonce.as_ref();
            debug_assert_eq!(nonce.len(), self.algorithm().nonce_len());
            // SAFETY: pointers/lengths come from live slices; the tag is
            // scattered into the local `tag` buffer.
            if 1 != indicator_check!(unsafe {
                EVP_AEAD_CTX_seal_scatter(
                    self.ctx.as_ref().as_const_ptr(),
                    in_out.as_mut_ptr(),
                    tag.as_mut_ptr(),
                    out_tag_len.as_mut_ptr(),
                    tag.len(),
                    nonce.as_ptr(),
                    nonce.len(),
                    in_out.as_ptr(),
                    in_out.len(),
                    null(),
                    0usize,
                    aad.as_ptr(),
                    aad.len(),
                )
            }) {
                return Err(Unspecified);
            }
        }
        Ok((nonce, Tag(tag, unsafe { out_tag_len.assume_init() })))
    }
    /// Seals `in_out` in place letting AWS-LC generate the nonce; the scatter
    /// call returns `tag || nonce`, which is split into the returned pair.
    #[inline]
    fn seal_separate_randnonce(
        &self,
        aad: &[u8],
        in_out: &mut [u8],
    ) -> Result<(Nonce, Tag), Unspecified> {
        let mut tag_buffer = [0u8; MAX_TAG_NONCE_BUFFER_LEN];
        debug_assert!(
            self.algorithm().tag_len() + self.algorithm().nonce_len() <= tag_buffer.len()
        );
        let mut out_tag_len = MaybeUninit::<usize>::uninit();
        // SAFETY: pointers/lengths come from live slices; null/0 nonce and
        // extra_in arguments select the construction's generated nonce.
        if 1 != indicator_check!(unsafe {
            EVP_AEAD_CTX_seal_scatter(
                self.ctx.as_ref().as_const_ptr(),
                in_out.as_mut_ptr(),
                tag_buffer.as_mut_ptr(),
                out_tag_len.as_mut_ptr(),
                tag_buffer.len(),
                null(),
                0,
                in_out.as_ptr(),
                in_out.len(),
                null(),
                0usize,
                aad.as_ptr(),
                aad.len(),
            )
        }) {
            return Err(Unspecified);
        }
        let tag_len = self.algorithm().tag_len();
        let nonce_len = self.algorithm().nonce_len();
        // The generated nonce sits immediately after the tag in `tag_buffer`.
        let nonce = Nonce(FixedLength::<NONCE_LEN>::try_from(
            &tag_buffer[tag_len..tag_len + nonce_len],
        )?);
        let mut tag = [0u8; MAX_TAG_LEN];
        tag.copy_from_slice(&tag_buffer[..tag_len]);
        Ok((nonce, Tag(tag, tag_len)))
    }
}
impl From<AeadCtx> for UnboundKey {
    /// Maps each AWS-LC context variant to its public `Algorithm`, so the key
    /// can answer `algorithm()` without re-inspecting the context.
    fn from(value: AeadCtx) -> Self {
        let algorithm = match value {
            AeadCtx::AES_128_GCM(_)
            | AeadCtx::AES_128_GCM_TLS12(_)
            | AeadCtx::AES_128_GCM_TLS13(_)
            | AeadCtx::AES_128_GCM_RANDNONCE(_) => &AES_128_GCM,
            AeadCtx::AES_192_GCM(_) => &AES_192_GCM,
            AeadCtx::AES_128_GCM_SIV(_) => &AES_128_GCM_SIV,
            AeadCtx::AES_256_GCM(_)
            | AeadCtx::AES_256_GCM_RANDNONCE(_)
            | AeadCtx::AES_256_GCM_TLS12(_)
            | AeadCtx::AES_256_GCM_TLS13(_) => &AES_256_GCM,
            AeadCtx::AES_256_GCM_SIV(_) => &AES_256_GCM_SIV,
            AeadCtx::CHACHA20_POLY1305(_) => &CHACHA20_POLY1305,
        };
        Self {
            ctx: value,
            algorithm,
        }
    }
}
impl From<hkdf::Okm<'_, &'static Algorithm>> for UnboundKey {
    /// Builds a key from HKDF output keying material sized for the algorithm.
    fn from(okm: hkdf::Okm<&'static Algorithm>) -> Self {
        let mut key_bytes = [0; MAX_KEY_LEN];
        let key_bytes = &mut key_bytes[..okm.len().key_len];
        let algorithm = *okm.len();
        // `fill` and `new` are expected to always succeed for a supported
        // algorithm's `key_len` — presumably within HKDF's output limit; a
        // failure here would indicate an internal inconsistency.
        okm.fill(key_bytes).unwrap();
        Self::new(algorithm, key_bytes).unwrap()
    }
}

902
vendor/aws-lc-rs/src/agreement.rs vendored Normal file
View File

@@ -0,0 +1,902 @@
// Copyright 2015-2017 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Key Agreement: ECDH, including X25519.
//!
//! # Example
//!
//! Note that this example uses X25519, but ECDH using NIST P-256/P-384 is done
//! exactly the same way, just substituting
//! `agreement::ECDH_P256`/`agreement::ECDH_P384` for `agreement::X25519`.
//!
//! ```
//! use aws_lc_rs::{agreement, rand};
//!
//! let rng = rand::SystemRandom::new();
//!
//! let my_private_key = agreement::EphemeralPrivateKey::generate(&agreement::X25519, &rng)?;
//!
//! // Make `my_public_key` a byte slice containing my public key. In a real
//! // application, this would be sent to the peer in an encoded protocol
//! // message.
//! let my_public_key = my_private_key.compute_public_key()?;
//!
//! let peer_public_key = {
//! // In a real application, the peer public key would be parsed out of a
//! // protocol message. Here we just generate one.
//! let peer_public_key = {
//! let peer_private_key =
//! agreement::EphemeralPrivateKey::generate(&agreement::X25519, &rng)?;
//! peer_private_key.compute_public_key()?
//! };
//!
//! agreement::UnparsedPublicKey::new(&agreement::X25519, peer_public_key)
//! };
//!
//! agreement::agree_ephemeral(
//! my_private_key,
//! &peer_public_key,
//! aws_lc_rs::error::Unspecified,
//! |_key_material| {
//! // In a real application, we'd apply a KDF to the key material and the
//! // public keys (as recommended in RFC 7748) and then derive session
//! // keys from the result. We omit all that here.
//! Ok(())
//! },
//! )?;
//!
//! # Ok::<(), aws_lc_rs::error::Unspecified>(())
//! ```
mod ephemeral;
use crate::ec::encoding::sec1::{
marshal_sec1_private_key, marshal_sec1_public_point, marshal_sec1_public_point_into_buffer,
parse_sec1_private_bn, parse_sec1_public_point,
};
#[cfg(not(feature = "fips"))]
use crate::ec::verify_evp_key_nid;
use crate::ec::{evp_key_generate, validate_ec_evp_key};
use crate::error::{KeyRejected, Unspecified};
use crate::hex;
pub use ephemeral::{agree_ephemeral, EphemeralPrivateKey};
use crate::aws_lc::{
i2d_ECPrivateKey, EVP_PKEY_get0_EC_KEY, NID_X9_62_prime256v1, NID_secp384r1, NID_secp521r1,
EVP_PKEY, EVP_PKEY_EC, EVP_PKEY_X25519, NID_X25519,
};
use crate::buffer::Buffer;
use crate::ec;
use crate::ec::encoding::rfc5915::parse_rfc5915_private_key;
use crate::encoding::{
AsBigEndian, AsDer, Curve25519SeedBin, EcPrivateKeyBin, EcPrivateKeyRfc5915Der,
EcPublicKeyCompressedBin, EcPublicKeyUncompressedBin, Pkcs8V1Der, PublicKeyX509Der,
};
use crate::evp_pkey::No_EVP_PKEY_CTX_consumer;
use crate::pkcs8::Version;
use crate::ptr::LcPtr;
use core::fmt;
use core::fmt::{Debug, Formatter};
use core::ptr::null_mut;
// Internal identifier for each supported key agreement algorithm.
#[allow(non_camel_case_types)]
#[derive(PartialEq, Eq)]
enum AlgorithmID {
    ECDH_P256,
    ECDH_P384,
    ECDH_P521,
    X25519,
}
impl AlgorithmID {
    /// The AWS-LC numeric identifier (NID) for the curve.
    #[inline]
    const fn nid(&self) -> i32 {
        match self {
            AlgorithmID::ECDH_P256 => NID_X9_62_prime256v1,
            AlgorithmID::ECDH_P384 => NID_secp384r1,
            AlgorithmID::ECDH_P521 => NID_secp521r1,
            AlgorithmID::X25519 => NID_X25519,
        }
    }
    // Uncompressed public key length in bytes
    #[inline]
    const fn pub_key_len(&self) -> usize {
        match self {
            AlgorithmID::ECDH_P256 => ec::uncompressed_public_key_size_bytes(256),
            AlgorithmID::ECDH_P384 => ec::uncompressed_public_key_size_bytes(384),
            AlgorithmID::ECDH_P521 => ec::uncompressed_public_key_size_bytes(521),
            // X25519 public keys are raw 32-byte values (no SEC1 point encoding).
            AlgorithmID::X25519 => 32,
        }
    }
    /// Fixed-length big-endian private key size in bytes.
    #[inline]
    const fn private_key_len(&self) -> usize {
        match self {
            AlgorithmID::ECDH_P256 | AlgorithmID::X25519 => 32,
            AlgorithmID::ECDH_P384 => 48,
            AlgorithmID::ECDH_P521 => 66,
        }
    }
}
impl Debug for AlgorithmID {
    /// Writes a `curve: <name>` description for this identifier.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        f.write_str(match self {
            AlgorithmID::ECDH_P256 => "curve: P256",
            AlgorithmID::ECDH_P384 => "curve: P384",
            AlgorithmID::ECDH_P521 => "curve: P521",
            AlgorithmID::X25519 => "curve: Curve25519",
        })
    }
}
/// A key agreement algorithm.
#[derive(PartialEq, Eq)]
pub struct Algorithm {
    // The curve (or X25519) this algorithm operates over.
    id: AlgorithmID,
}
impl Debug for Algorithm {
    /// Writes e.g. `Algorithm { curve: P256 }`, delegating to `AlgorithmID`'s `Debug`.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        // `write!` formats directly into `f`, avoiding the intermediate `String`
        // allocated by the previous `f.write_str(&format!(…))` form.
        write!(f, "Algorithm {{ {:?} }}", self.id)
    }
}
/// ECDH using the NSA Suite B P-256 (secp256r1) curve.
pub const ECDH_P256: Algorithm = Algorithm {
    id: AlgorithmID::ECDH_P256,
};
/// ECDH using the NSA Suite B P-384 (secp384r1) curve.
pub const ECDH_P384: Algorithm = Algorithm {
    id: AlgorithmID::ECDH_P384,
};
/// ECDH using the NSA Suite B P-521 (secp521r1) curve.
pub const ECDH_P521: Algorithm = Algorithm {
    id: AlgorithmID::ECDH_P521,
};
/// X25519 (ECDH using Curve25519) as described in [RFC 7748].
///
/// Everything is as described in RFC 7748. Key agreement will fail if the
/// result of the X25519 operation is zero; see the notes on the
/// "all-zero value" in [RFC 7748 section 6.1].
///
/// [RFC 7748]: https://tools.ietf.org/html/rfc7748
/// [RFC 7748 section 6.1]: https://tools.ietf.org/html/rfc7748#section-6.1
pub const X25519: Algorithm = Algorithm {
    id: AlgorithmID::X25519,
};
// Wraps the AWS-LC `EVP_PKEY` handle, tagged by agreement algorithm.
#[allow(non_camel_case_types)]
enum KeyInner {
    ECDH_P256(LcPtr<EVP_PKEY>),
    ECDH_P384(LcPtr<EVP_PKEY>),
    ECDH_P521(LcPtr<EVP_PKEY>),
    X25519(LcPtr<EVP_PKEY>),
}
impl Clone for KeyInner {
    // Clones the `LcPtr` handle while preserving the algorithm variant.
    fn clone(&self) -> KeyInner {
        match self {
            KeyInner::ECDH_P256(evp_pkey) => KeyInner::ECDH_P256(evp_pkey.clone()),
            KeyInner::ECDH_P384(evp_pkey) => KeyInner::ECDH_P384(evp_pkey.clone()),
            KeyInner::ECDH_P521(evp_pkey) => KeyInner::ECDH_P521(evp_pkey.clone()),
            KeyInner::X25519(evp_pkey) => KeyInner::X25519(evp_pkey.clone()),
        }
    }
}
/// A private key for use (only) with `agree`. The
/// signature of `agree` allows `PrivateKey` to be
/// used for more than one key agreement.
pub struct PrivateKey {
    inner_key: KeyInner,
}
impl KeyInner {
    /// The public `Algorithm` constant matching this key's variant.
    #[inline]
    fn algorithm(&self) -> &'static Algorithm {
        match self {
            KeyInner::ECDH_P256(..) => &ECDH_P256,
            KeyInner::ECDH_P384(..) => &ECDH_P384,
            KeyInner::ECDH_P521(..) => &ECDH_P521,
            KeyInner::X25519(..) => &X25519,
        }
    }
    /// Borrows the underlying `EVP_PKEY` handle regardless of variant.
    fn get_evp_pkey(&self) -> &LcPtr<EVP_PKEY> {
        match self {
            KeyInner::ECDH_P256(evp_pkey)
            | KeyInner::ECDH_P384(evp_pkey)
            | KeyInner::ECDH_P521(evp_pkey)
            | KeyInner::X25519(evp_pkey) => evp_pkey,
        }
    }
}
// See EVP_PKEY documentation here:
// https://github.com/aws/aws-lc/blob/125af14c57451565b875fbf1282a38a6ecf83782/include/openssl/evp.h#L83-L89
// An |EVP_PKEY| object represents a public or private key. A given object may
// be used concurrently on multiple threads by non-mutating functions, provided
// no other thread is concurrently calling a mutating function. Unless otherwise
// documented, functions which take a |const| pointer are non-mutating and
// functions which take a non-|const| pointer are mutating.
// SAFETY: relies on the AWS-LC guarantee quoted above — `&self` accessors only
// invoke non-mutating |EVP_PKEY| operations. NOTE(review): confirm this holds
// for every `&self` method on `PrivateKey` when adding new ones.
unsafe impl Send for PrivateKey {}
unsafe impl Sync for PrivateKey {}
impl Debug for PrivateKey {
    /// Writes e.g. `PrivateKey { algorithm: Algorithm { curve: P256 } }`;
    /// never exposes key material.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        // `write!` formats directly into `f`, avoiding the intermediate `String`
        // allocated by the previous `f.write_str(&format!(…))` form.
        write!(
            f,
            "PrivateKey {{ algorithm: {:?} }}",
            self.inner_key.algorithm()
        )
    }
}
impl PrivateKey {
fn new(alg: &'static Algorithm, evp_pkey: LcPtr<EVP_PKEY>) -> Self {
match alg.id {
AlgorithmID::X25519 => Self {
inner_key: KeyInner::X25519(evp_pkey),
},
AlgorithmID::ECDH_P256 => Self {
inner_key: KeyInner::ECDH_P256(evp_pkey),
},
AlgorithmID::ECDH_P384 => Self {
inner_key: KeyInner::ECDH_P384(evp_pkey),
},
AlgorithmID::ECDH_P521 => Self {
inner_key: KeyInner::ECDH_P521(evp_pkey),
},
}
}
#[inline]
/// Generate a new private key for the given algorithm.
// # FIPS
// Use this function with one of the following algorithms:
// * `ECDH_P256`
// * `ECDH_P384`
// * `ECDH_P521`
//
/// # Errors
/// `error::Unspecified` when operation fails due to internal error.
pub fn generate(alg: &'static Algorithm) -> Result<Self, Unspecified> {
let evp_pkey = match alg.id {
AlgorithmID::X25519 => generate_x25519()?,
_ => evp_key_generate(alg.id.nid())?,
};
Ok(Self::new(alg, evp_pkey))
}
/// Deserializes a DER-encoded private key structure to produce a `agreement::PrivateKey`.
///
/// This function is typically used to deserialize RFC 5915 encoded private keys, but it will
/// attempt to automatically detect other key formats. This function supports unencrypted
/// PKCS#8 `PrivateKeyInfo` structures as well as key type specific formats.
///
/// X25519 keys are not supported. See `PrivateKey::as_der`.
///
/// # Errors
/// `error::KeyRejected` if parsing failed or key otherwise unacceptable.
///
/// # Panics
pub fn from_private_key_der(
alg: &'static Algorithm,
key_bytes: &[u8],
) -> Result<Self, KeyRejected> {
if AlgorithmID::X25519 == alg.id {
return Err(KeyRejected::invalid_encoding());
}
let evp_pkey = LcPtr::<EVP_PKEY>::parse_rfc5208_private_key(key_bytes, EVP_PKEY_EC)
.or(parse_rfc5915_private_key(key_bytes, alg.id.nid()))?;
#[cfg(not(feature = "fips"))]
verify_evp_key_nid(&evp_pkey.as_const(), alg.id.nid())?;
#[cfg(feature = "fips")]
validate_ec_evp_key(&evp_pkey.as_const(), alg.id.nid())?;
Ok(Self::new(alg, evp_pkey))
}
/// Constructs an ECDH key from private key bytes
///
/// The private key must encoded as a big-endian fixed-length integer. For
/// example, a P-256 private key must be 32 bytes prefixed with leading
/// zeros as needed.
///
/// # Errors
/// `error::KeyRejected` if parsing failed or key otherwise unacceptable.
pub fn from_private_key(
alg: &'static Algorithm,
key_bytes: &[u8],
) -> Result<Self, KeyRejected> {
if key_bytes.len() != alg.id.private_key_len() {
return Err(KeyRejected::wrong_algorithm());
}
let evp_pkey = if AlgorithmID::X25519 == alg.id {
LcPtr::<EVP_PKEY>::parse_raw_private_key(key_bytes, EVP_PKEY_X25519)?
} else {
parse_sec1_private_bn(key_bytes, alg.id.nid())?
};
Ok(Self::new(alg, evp_pkey))
}
#[cfg(any(test, dev_tests_only))]
#[allow(missing_docs, clippy::missing_errors_doc)]
pub fn generate_for_test(
    alg: &'static Algorithm,
    rng: &mut dyn crate::rand::SecureRandom,
) -> Result<Self, Unspecified> {
    // Draw exactly `private_key_len()` bytes from the caller-supplied RNG,
    // then defer to the curve-specific constructor.
    match alg.id {
        AlgorithmID::X25519 => {
            let mut seed = [0u8; AlgorithmID::X25519.private_key_len()];
            rng.mut_fill(&mut seed)?;
            Self::from_x25519_private_key(&seed)
        }
        AlgorithmID::ECDH_P256 => {
            let mut seed = [0u8; AlgorithmID::ECDH_P256.private_key_len()];
            rng.mut_fill(&mut seed)?;
            Self::from_p256_private_key(&seed)
        }
        AlgorithmID::ECDH_P384 => {
            let mut seed = [0u8; AlgorithmID::ECDH_P384.private_key_len()];
            rng.mut_fill(&mut seed)?;
            Self::from_p384_private_key(&seed)
        }
        AlgorithmID::ECDH_P521 => {
            let mut seed = [0u8; AlgorithmID::ECDH_P521.private_key_len()];
            rng.mut_fill(&mut seed)?;
            Self::from_p521_private_key(&seed)
        }
    }
}
#[cfg(any(test, dev_tests_only))]
fn from_x25519_private_key(
    priv_key: &[u8; AlgorithmID::X25519.private_key_len()],
) -> Result<Self, Unspecified> {
    // X25519 private keys are raw scalars with no DER framing.
    let evp_pkey = LcPtr::<EVP_PKEY>::parse_raw_private_key(priv_key, EVP_PKEY_X25519)?;
    Ok(Self {
        inner_key: KeyInner::X25519(evp_pkey),
    })
}
#[cfg(any(test, dev_tests_only))]
fn from_p256_private_key(priv_key: &[u8]) -> Result<Self, Unspecified> {
    // Interpret the bytes as a big-endian scalar on the P-256 curve.
    Ok(Self {
        inner_key: KeyInner::ECDH_P256(parse_sec1_private_bn(priv_key, ECDH_P256.id.nid())?),
    })
}
#[cfg(any(test, dev_tests_only))]
fn from_p384_private_key(priv_key: &[u8]) -> Result<Self, Unspecified> {
    // Interpret the bytes as a big-endian scalar on the P-384 curve.
    Ok(Self {
        inner_key: KeyInner::ECDH_P384(parse_sec1_private_bn(priv_key, ECDH_P384.id.nid())?),
    })
}
#[cfg(any(test, dev_tests_only))]
fn from_p521_private_key(priv_key: &[u8]) -> Result<Self, Unspecified> {
    // Interpret the bytes as a big-endian scalar on the P-521 curve.
    Ok(Self {
        inner_key: KeyInner::ECDH_P521(parse_sec1_private_bn(priv_key, ECDH_P521.id.nid())?),
    })
}
/// Computes the public key from the private key.
///
/// # Errors
/// `error::Unspecified` when operation fails due to internal error.
pub fn compute_public_key(&self) -> Result<PublicKey, Unspecified> {
    match &self.inner_key {
        KeyInner::ECDH_P256(evp_pkey)
        | KeyInner::ECDH_P384(evp_pkey)
        | KeyInner::ECDH_P521(evp_pkey) => {
            // NIST curves: marshal the uncompressed (`false`) SEC1 point into a
            // fixed-size buffer; `len` records how many bytes are valid.
            let mut public_key = [0u8; MAX_PUBLIC_KEY_LEN];
            let len = marshal_sec1_public_point_into_buffer(&mut public_key, evp_pkey, false)?;
            Ok(PublicKey {
                inner_key: self.inner_key.clone(),
                key_bytes: public_key,
                len,
            })
        }
        KeyInner::X25519(priv_key) => {
            // X25519: the raw public key bytes are written directly to the buffer.
            let mut buffer = [0u8; MAX_PUBLIC_KEY_LEN];
            let out_len = priv_key
                .as_const()
                .marshal_raw_public_to_buffer(&mut buffer)?;
            Ok(PublicKey {
                inner_key: self.inner_key.clone(),
                key_bytes: buffer,
                len: out_len,
            })
        }
    }
}
/// The algorithm for the private key.
#[inline]
#[must_use]
pub fn algorithm(&self) -> &'static Algorithm {
    // Delegates to the inner key variant, which records its curve.
    self.inner_key.algorithm()
}
}
impl AsDer<EcPrivateKeyRfc5915Der<'static>> for PrivateKey {
    /// Serializes the key as a DER-encoded `ECPrivateKey` (RFC 5915) structure.
    ///
    /// X25519 is not supported.
    ///
    /// # Errors
    /// `error::Unspecified` if serialization failed.
    fn as_der(&self) -> Result<EcPrivateKeyRfc5915Der<'static>, Unspecified> {
        // X25519 keys have no RFC 5915 representation.
        if AlgorithmID::X25519 == self.inner_key.algorithm().id {
            return Err(Unspecified);
        }
        // `i2d_ECPrivateKey` allocates the output buffer and stores its address here.
        let mut outp = null_mut::<u8>();
        let ec_key = {
            self.inner_key
                .get_evp_pkey()
                .project_const_lifetime(unsafe {
                    |evp_pkey| EVP_PKEY_get0_EC_KEY(evp_pkey.as_const_ptr())
                })?
        };
        // A negative return value (error) fails the `usize` conversion and is
        // mapped to `Unspecified`.
        let length = usize::try_from(unsafe { i2d_ECPrivateKey(ec_key.as_const_ptr(), &mut outp) })
            .map_err(|_| Unspecified)?;
        // Take ownership of the allocation; `LcPtr::new` errors on a null pointer.
        let mut outp = LcPtr::new(outp)?;
        Ok(EcPrivateKeyRfc5915Der::take_from_slice(unsafe {
            core::slice::from_raw_parts_mut(outp.as_mut_ptr(), length)
        }))
    }
}
impl AsDer<Pkcs8V1Der<'static>> for PrivateKey {
    /// Serializes the key as a PKCS #8 private key structure.
    ///
    /// X25519 is not supported.
    ///
    /// # Errors
    /// `error::Unspecified` if serialization failed.
    fn as_der(&self) -> Result<Pkcs8V1Der<'static>, Unspecified> {
        // X25519 keys are not serialized through this path.
        if AlgorithmID::X25519 == self.inner_key.algorithm().id {
            return Err(Unspecified);
        }
        let evp_pkey = self.inner_key.get_evp_pkey();
        let der = evp_pkey.as_const().marshal_rfc5208_private_key(Version::V1)?;
        Ok(Pkcs8V1Der::new(der))
    }
}
impl AsBigEndian<EcPrivateKeyBin<'static>> for PrivateKey {
    /// Exposes the private key encoded as a big-endian fixed-length integer.
    ///
    /// X25519 is not supported.
    ///
    /// # Errors
    /// `error::Unspecified` if serialization failed.
    fn as_be_bytes(&self) -> Result<EcPrivateKeyBin<'static>, Unspecified> {
        // Only the NIST curves have a SEC1 big-endian integer encoding.
        match self.inner_key.algorithm().id {
            AlgorithmID::X25519 => Err(Unspecified),
            _ => {
                let buffer = marshal_sec1_private_key(self.inner_key.get_evp_pkey())?;
                Ok(EcPrivateKeyBin::new(buffer))
            }
        }
    }
}
impl AsBigEndian<Curve25519SeedBin<'static>> for PrivateKey {
    /// Exposes the seed encoded as a big-endian fixed-length integer.
    ///
    /// Only X25519 is supported.
    ///
    /// # Errors
    /// `error::Unspecified` if serialization failed.
    fn as_be_bytes(&self) -> Result<Curve25519SeedBin<'static>, Unspecified> {
        // This representation only exists for X25519 keys.
        if self.inner_key.algorithm().id != AlgorithmID::X25519 {
            return Err(Unspecified);
        }
        let seed = self
            .inner_key
            .get_evp_pkey()
            .as_const()
            .marshal_raw_private_key()?;
        Ok(Curve25519SeedBin::new(seed))
    }
}
// Generates a fresh X25519 key pair wrapped in an `EVP_PKEY`.
pub(crate) fn generate_x25519() -> Result<LcPtr<EVP_PKEY>, Unspecified> {
    LcPtr::<EVP_PKEY>::generate(EVP_PKEY_X25519, No_EVP_PKEY_CTX_consumer)
}
// Upper bound on the serialized public key length across all supported curves.
const MAX_PUBLIC_KEY_LEN: usize = ec::PUBLIC_KEY_MAX_LEN;
/// A public key for key agreement.
pub struct PublicKey {
    // The key handle plus the algorithm identifying its curve.
    inner_key: KeyInner,
    // Fixed-size storage for the serialized key; only `key_bytes[..len]` is valid.
    key_bytes: [u8; MAX_PUBLIC_KEY_LEN],
    len: usize,
}
impl PublicKey {
    /// The algorithm for the public key.
    #[must_use]
    pub fn algorithm(&self) -> &'static Algorithm {
        // The inner key records which curve it belongs to.
        self.inner_key.algorithm()
    }
}
// See EVP_PKEY documentation here:
// https://github.com/aws/aws-lc/blob/125af14c57451565b875fbf1282a38a6ecf83782/include/openssl/evp.h#L83-L89
// An |EVP_PKEY| object represents a public or private key. A given object may
// be used concurrently on multiple threads by non-mutating functions, provided
// no other thread is concurrently calling a mutating function. Unless otherwise
// documented, functions which take a |const| pointer are non-mutating and
// functions which take a non-|const| pointer are mutating.
// SAFETY: per the AWS-LC contract above, `PublicKey` exposes only non-mutating
// operations on the underlying `EVP_PKEY`, so it may be sent and shared across threads.
unsafe impl Send for PublicKey {}
unsafe impl Sync for PublicKey {}
impl Debug for PublicKey {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Renders the algorithm plus the hex-encoded valid key bytes; the exact
        // output shape is relied upon by the unit tests.
        write!(
            f,
            "PublicKey {{ algorithm: {:?}, bytes: \"{}\" }}",
            self.inner_key.algorithm(),
            hex::encode(&self.key_bytes[0..self.len])
        )
    }
}
impl AsRef<[u8]> for PublicKey {
    /// Serializes the public key in an uncompressed form (X9.62) using the
    /// Octet-String-to-Elliptic-Curve-Point algorithm in
    /// [SEC 1: Elliptic Curve Cryptography, Version 2.0].
    fn as_ref(&self) -> &[u8] {
        // Only the first `len` bytes of the fixed-size buffer are meaningful.
        &self.key_bytes[..self.len]
    }
}
impl Clone for PublicKey {
fn clone(&self) -> Self {
PublicKey {
inner_key: self.inner_key.clone(),
key_bytes: self.key_bytes,
len: self.len,
}
}
}
impl AsDer<PublicKeyX509Der<'static>> for PublicKey {
    /// Provides the public key as a DER-encoded (X.509) `SubjectPublicKeyInfo` structure.
    /// # Errors
    /// Returns an error if the public key fails to marshal to X.509.
    fn as_der(&self) -> Result<PublicKeyX509Der<'static>, crate::error::Unspecified> {
        // Every variant wraps an EVP_PKEY, so marshaling is uniform.
        match &self.inner_key {
            KeyInner::X25519(evp_pkey)
            | KeyInner::ECDH_P256(evp_pkey)
            | KeyInner::ECDH_P384(evp_pkey)
            | KeyInner::ECDH_P521(evp_pkey) => {
                let spki = evp_pkey.as_const().marshal_rfc5280_public_key()?;
                Ok(PublicKeyX509Der::from(Buffer::new(spki)))
            }
        }
    }
}
impl AsBigEndian<EcPublicKeyCompressedBin<'static>> for PublicKey {
    /// Provides the public key elliptic curve point to a compressed point format.
    /// # Errors
    /// Returns an error if the underlying implementation is unable to marshal the public key to this format.
    fn as_be_bytes(&self) -> Result<EcPublicKeyCompressedBin<'static>, crate::error::Unspecified> {
        // Compressed SEC1 points only exist for the NIST curves.
        match &self.inner_key {
            KeyInner::X25519(_) => Err(Unspecified),
            KeyInner::ECDH_P256(evp_pkey)
            | KeyInner::ECDH_P384(evp_pkey)
            | KeyInner::ECDH_P521(evp_pkey) => {
                let point = marshal_sec1_public_point(evp_pkey, true)?;
                Ok(EcPublicKeyCompressedBin::new(point))
            }
        }
    }
}
impl AsBigEndian<EcPublicKeyUncompressedBin<'static>> for PublicKey {
    /// Provides the public key elliptic curve point in an uncompressed point format.
    ///
    /// Equivalent to [`PublicKey::as_ref`] for ECDH key types, except that it provides you a copy instead of a reference.
    ///
    /// # Errors
    /// Returns an error if the underlying implementation is unable to marshal the public key to this format.
    fn as_be_bytes(
        &self,
    ) -> Result<EcPublicKeyUncompressedBin<'static>, crate::error::Unspecified> {
        // X25519 keys are raw bytes, not SEC1 points, so there is no
        // "uncompressed" form for them.
        if self.algorithm().id == AlgorithmID::X25519 {
            return Err(Unspecified);
        }
        // `key_bytes[..len]` already holds the uncompressed point; copy it out
        // directly rather than zero-filling a buffer and overwriting it.
        Ok(EcPublicKeyUncompressedBin::new(
            self.key_bytes[0..self.len].to_vec(),
        ))
    }
}
/// An unparsed, possibly malformed, public key for key agreement.
#[derive(Clone)]
pub struct UnparsedPublicKey<B: AsRef<[u8]>> {
    // Expected algorithm; checked against the parsed key during agreement.
    alg: &'static Algorithm,
    // Caller-provided encoding; not validated until parse time.
    bytes: B,
}
// `Copy` is only available when the byte container itself is `Copy` (e.g. `&[u8]`).
impl<B: Copy + AsRef<[u8]>> Copy for UnparsedPublicKey<B> {}
impl<B: Debug + AsRef<[u8]>> Debug for UnparsedPublicKey<B> {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        // Hex-encode the raw bytes; the output shape is asserted by unit tests.
        write!(
            f,
            "UnparsedPublicKey {{ algorithm: {:?}, bytes: {:?} }}",
            self.alg,
            hex::encode(self.bytes.as_ref())
        )
    }
}
impl<B: AsRef<[u8]>> UnparsedPublicKey<B> {
    /// Constructs a new `UnparsedPublicKey`.
    // No validation occurs here; the bytes are checked when the key is parsed.
    pub fn new(algorithm: &'static Algorithm, bytes: B) -> Self {
        UnparsedPublicKey {
            alg: algorithm,
            bytes,
        }
    }
    /// The agreement algorithm associated with this public key
    pub fn algorithm(&self) -> &'static Algorithm {
        self.alg
    }
    /// The bytes provided for this public key
    pub fn bytes(&self) -> &B {
        &self.bytes
    }
}
/// A parsed public key for key agreement.
///
/// This represents a public key that has been successfully parsed and validated
/// from its encoded form. The key can be used with the `agree` function to
/// perform key agreement operations.
#[derive(Debug, Clone)]
pub struct ParsedPublicKey {
    // Which encoding the key was detected in (X.509, SEC1 point, raw, ...).
    format: ParsedPublicKeyFormat,
    // The curve's NID, recorded at parse time; drives `alg()`.
    nid: i32,
    // The parsed AWS-LC key object.
    key: LcPtr<EVP_PKEY>,
    // The original encoded bytes, retained for `AsRef<[u8]>`.
    bytes: Box<[u8]>,
}
// See EVP_PKEY documentation here:
// https://github.com/aws/aws-lc/blob/125af14c57451565b875fbf1282a38a6ecf83782/include/openssl/evp.h#L83-L89
// An |EVP_PKEY| object represents a public or private key. A given object may
// be used concurrently on multiple threads by non-mutating functions, provided
// no other thread is concurrently calling a mutating function. Unless otherwise
// documented, functions which take a |const| pointer are non-mutating and
// functions which take a non-|const| pointer are mutating.
// SAFETY: per the AWS-LC contract above, concurrent use from multiple threads
// is sound as long as no mutating function is invoked concurrently.
unsafe impl Send for ParsedPublicKey {}
unsafe impl Sync for ParsedPublicKey {}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
/// The format of a parsed public key.
///
/// This is used to distinguish between different types of public key formats
/// supported by *aws-lc-rs*.
#[non_exhaustive]
pub enum ParsedPublicKeyFormat {
    /// The key is in an X.509 SubjectPublicKeyInfo format.
    X509,
    /// The key is in an uncompressed form (X9.62).
    Uncompressed,
    /// The key is in a compressed form (SEC 1: Elliptic Curve Cryptography, Version 2.0).
    Compressed,
    /// The key is in a hybrid form (SEC 1: Elliptic Curve Cryptography, Version 2.0).
    Hybrid,
    /// The key is in a raw form. (X25519 only)
    Raw,
    /// The key is in an unknown format.
    ///
    /// Assigned when a SEC1 point parses successfully but its leading byte is
    /// not one of the recognized point-form tags.
    Unknown,
}
/// Accessors for a parsed public key.
impl ParsedPublicKey {
    // The curve NID recorded when the key was parsed.
    fn nid(&self) -> i32 {
        self.nid
    }
    /// The format of the data the public key was parsed from.
    #[must_use]
    pub fn format(&self) -> ParsedPublicKeyFormat {
        self.format
    }
    // Mutable access to the underlying EVP_PKEY; `agree` requires a mutable
    // peer key handle.
    pub(crate) fn mut_key(&mut self) -> &mut LcPtr<EVP_PKEY> {
        &mut self.key
    }
    /// The algorithm of the public key.
    #[must_use]
    #[allow(non_upper_case_globals)]
    pub fn alg(&self) -> &'static Algorithm {
        // `ParsedPublicKey::new` only ever stores one of these four NIDs, so
        // the catch-all arm is unreachable by construction.
        match self.nid() {
            NID_X25519 => &X25519,
            NID_X9_62_prime256v1 => &ECDH_P256,
            NID_secp384r1 => &ECDH_P384,
            NID_secp521r1 => &ECDH_P521,
            _ => unreachable!("Unreachable agreement algorithm nid: {}", self.nid()),
        }
    }
}
impl ParsedPublicKey {
    // Parses `bytes` as a public key for the curve identified by `nid`,
    // auto-detecting the encoding and recording which format matched.
    #[allow(non_upper_case_globals)]
    pub(crate) fn new(bytes: impl AsRef<[u8]>, nid: i32) -> Result<Self, KeyRejected> {
        // Keep a private copy of the original encoding for `AsRef<[u8]>`.
        let bytes = bytes.as_ref().to_vec().into_boxed_slice();
        if bytes.is_empty() {
            return Err(KeyRejected::unspecified());
        }
        match nid {
            NID_X25519 => {
                // X25519: try X.509 SubjectPublicKeyInfo first, then fall back
                // to the raw byte form.
                let format: ParsedPublicKeyFormat;
                let key = if let Ok(evp_pkey) =
                    LcPtr::<EVP_PKEY>::parse_rfc5280_public_key(&bytes, EVP_PKEY_X25519)
                {
                    format = ParsedPublicKeyFormat::X509;
                    evp_pkey
                } else {
                    format = ParsedPublicKeyFormat::Raw;
                    try_parse_x25519_public_key_raw_bytes(&bytes)?
                };
                Ok(ParsedPublicKey {
                    format,
                    nid,
                    key,
                    bytes,
                })
            }
            NID_X9_62_prime256v1 | NID_secp384r1 | NID_secp521r1 => {
                // NIST curves: try X.509 first, then a bare SEC1 point whose
                // leading byte identifies the point form.
                let format: ParsedPublicKeyFormat;
                let key = if let Ok(evp_pkey) =
                    LcPtr::<EVP_PKEY>::parse_rfc5280_public_key(&bytes, EVP_PKEY_EC)
                {
                    // Confirm the embedded key uses the expected curve.
                    validate_ec_evp_key(&evp_pkey.as_const(), nid)?;
                    format = ParsedPublicKeyFormat::X509;
                    evp_pkey
                } else if let Ok(evp_pkey) = parse_sec1_public_point(&bytes, nid) {
                    // Indexing `bytes[0]` is safe: emptiness was rejected above.
                    format = match bytes[0] {
                        0x02 | 0x03 => ParsedPublicKeyFormat::Compressed,
                        0x04 => ParsedPublicKeyFormat::Uncompressed,
                        0x06 | 0x07 => ParsedPublicKeyFormat::Hybrid,
                        _ => ParsedPublicKeyFormat::Unknown,
                    };
                    evp_pkey
                } else {
                    return Err(KeyRejected::invalid_encoding());
                };
                Ok(ParsedPublicKey {
                    format,
                    nid,
                    key,
                    bytes,
                })
            }
            _ => Err(KeyRejected::unspecified()),
        }
    }
}
impl AsRef<[u8]> for ParsedPublicKey {
    /// Returns the original bytes from which this key was parsed.
    fn as_ref(&self) -> &[u8] {
        // `bytes` stores the caller's original encoding verbatim.
        &self.bytes
    }
}
impl<B: AsRef<[u8]>> UnparsedPublicKey<B> {
    // Parses and validates the key bytes against this key's declared algorithm.
    #[allow(dead_code)]
    fn parse(&self) -> Result<ParsedPublicKey, KeyRejected> {
        ParsedPublicKey::new(&self.bytes, self.alg.id.nid())
    }
}
impl<B: AsRef<[u8]>> TryFrom<&UnparsedPublicKey<B>> for ParsedPublicKey {
    type Error = KeyRejected;
    // Borrowing conversion; the unparsed key remains usable afterwards.
    fn try_from(upk: &UnparsedPublicKey<B>) -> Result<Self, Self::Error> {
        upk.parse()
    }
}
impl<B: AsRef<[u8]>> TryFrom<UnparsedPublicKey<B>> for ParsedPublicKey {
    type Error = KeyRejected;
    // Consuming conversion; mirrors the by-reference impl.
    fn try_from(upk: UnparsedPublicKey<B>) -> Result<Self, Self::Error> {
        upk.parse()
    }
}
/// Performs a key agreement with a private key and the given public key.
///
/// `my_private_key` is the private key to use. Only a reference to the key
/// is required, allowing the key to continue to be used.
///
/// `peer_public_key` is the peer's public key. `agree` will return
/// `Err(error_value)` if it does not match `my_private_key's` algorithm/curve.
/// `agree` verifies that it is encoded in the standard form for the
/// algorithm and that the key is *valid*; see the algorithm's documentation for
/// details on how keys are to be encoded and what constitutes a valid key for
/// that algorithm.
///
/// `error_value` is the value to return if an error occurs before `kdf` is
/// called, e.g. when decoding of the peer's public key fails or when the public
/// key is otherwise invalid.
///
/// After the key agreement is done, `agree` calls `kdf` with the raw
/// key material from the key agreement operation and then returns what `kdf`
/// returns.
// # FIPS
// Use this function with one of the following key algorithms:
// * `ECDH_P256`
// * `ECDH_P384`
// * `ECDH_P521`
//
/// # Errors
/// `error_value` on internal failure.
#[inline]
#[allow(clippy::missing_panics_doc)]
pub fn agree<B: TryInto<ParsedPublicKey>, F, R, E>(
    my_private_key: &PrivateKey,
    peer_public_key: B,
    error_value: E,
    kdf: F,
) -> Result<R, E>
where
    F: FnOnce(&[u8]) -> Result<R, E>,
{
    let expected_alg = my_private_key.algorithm();
    match peer_public_key.try_into() {
        Ok(mut peer_pub_key) => {
            // The peer's curve must match our private key's curve.
            if peer_pub_key.alg() != expected_alg {
                return Err(error_value);
            }
            // Derive the raw shared secret, then hand it to the caller's KDF.
            let shared_secret = my_private_key
                .inner_key
                .get_evp_pkey()
                .agree(peer_pub_key.mut_key())
                .or(Err(error_value))?;
            kdf(shared_secret.as_ref())
        }
        // The peer key failed to parse/validate.
        Err(_) => Err(error_value),
    }
}
// Interprets `key_bytes` as a raw X25519 public key; the length must equal
// the algorithm's expected public key length.
fn try_parse_x25519_public_key_raw_bytes(key_bytes: &[u8]) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
    if key_bytes.len() == X25519.id.pub_key_len() {
        LcPtr::<EVP_PKEY>::parse_raw_public_key(key_bytes, EVP_PKEY_X25519)
    } else {
        Err(KeyRejected::invalid_encoding())
    }
}
#[cfg(test)]
mod agreement_tests;
#[cfg(test)]
mod parsed_public_key_tests;

View File

@@ -0,0 +1,482 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::agreement::{
agree, Algorithm, PrivateKey, PublicKey, UnparsedPublicKey, ECDH_P256, ECDH_P384, ECDH_P521,
X25519,
};
use crate::encoding::{
AsBigEndian, AsDer, Curve25519SeedBin, EcPrivateKeyBin, EcPrivateKeyRfc5915Der,
EcPublicKeyCompressedBin, EcPublicKeyUncompressedBin, Pkcs8V1Der, PublicKeyX509Der,
};
use crate::{rand, test};
#[test]
// Known-answer test for X25519 agreement using fixed private/public/shared
// values, plus a round-trip of the seed through its big-endian encoding.
fn test_agreement_x25519() {
    let alg = &X25519;
    let peer_public = UnparsedPublicKey::new(
        alg,
        test::from_dirty_hex("e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c"),
    );
    let my_private =
        test::from_dirty_hex("a546e36bf0527c9d3b16154b82465edd62144c0ac1fc5a18506a2244ba449ac4");
    // `FixedSliceRandom` makes the "generated" key deterministic.
    let my_private = {
        let mut rng = test::rand::FixedSliceRandom { bytes: &my_private };
        PrivateKey::generate_for_test(alg, &mut rng).unwrap()
    };
    let my_public =
        test::from_dirty_hex("1c9fd88f45606d932a80c71824ae151d15d73e77de38e8e000852e614fae7019");
    let output =
        test::from_dirty_hex("c3da55379de9c6908e94ea4df28d084f32eccf03491c71f754b4075577a28552");
    assert_eq!(my_private.algorithm(), alg);
    // Round-trip: export the seed and re-import it as a private key.
    let be_private_key_buffer: Curve25519SeedBin = my_private.as_be_bytes().unwrap();
    let be_private_key =
        PrivateKey::from_private_key(&X25519, be_private_key_buffer.as_ref()).unwrap();
    {
        let result = agree(&be_private_key, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    let computed_public = my_private.compute_public_key().unwrap();
    assert_eq!(computed_public.as_ref(), &my_public[..]);
    assert_eq!(computed_public.algorithm(), alg);
    // Agreement with the same key must be repeatable.
    {
        let result = agree(&my_private, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    {
        let result = agree(&my_private, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
}
#[test]
// Empty and wrong-length key bytes must be rejected by every constructor and
// by `agree` itself, for every supported algorithm.
fn test_agreement_invalid_keys() {
    fn test_with_key(alg: &'static Algorithm, my_private_key: &PrivateKey, test_key: &[u8]) {
        assert!(PrivateKey::from_private_key(alg, test_key).is_err());
        assert!(PrivateKey::from_private_key_der(alg, test_key).is_err());
        assert!(agree(
            my_private_key,
            UnparsedPublicKey::new(alg, test_key),
            (),
            |_| Ok(())
        )
        .is_err());
    }
    let alg_variants: [&'static Algorithm; 4] = [&X25519, &ECDH_P256, &ECDH_P384, &ECDH_P521];
    for alg in alg_variants {
        let my_private_key = PrivateKey::generate(alg).unwrap();
        let empty_key = [];
        test_with_key(alg, &my_private_key, &empty_key);
        // 31 bytes is shorter than any supported curve's key length.
        let wrong_size_key: [u8; 31] = [
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
            24, 25, 26, 27, 28, 29, 30,
        ];
        test_with_key(alg, &my_private_key, &wrong_size_key);
    }
}
#[test]
// Known-answer test for P-256 ECDH, also exercising the big-endian, RFC 5915
// DER, and PKCS#8 private-key round-trips.
fn test_agreement_ecdh_p256() {
    let alg = &ECDH_P256;
    let peer_public = UnparsedPublicKey::new(
        alg,
        test::from_dirty_hex(
            "04D12DFB5289C8D4F81208B70270398C342296970A0BCCB74C736FC7554494BF6356FBF3CA366CC23E8157854C13C58D6AAC23F046ADA30F8353E74F33039872AB",
        ),
    );
    assert_eq!(peer_public.algorithm(), alg);
    assert_eq!(peer_public.bytes(), &peer_public.bytes);
    let my_private =
        test::from_dirty_hex("C88F01F510D9AC3F70A292DAA2316DE544E9AAB8AFE84049C62A9C57862D1433");
    // `FixedSliceRandom` makes the "generated" key deterministic.
    let my_private = {
        let mut rng = test::rand::FixedSliceRandom { bytes: &my_private };
        PrivateKey::generate_for_test(alg, &mut rng).unwrap()
    };
    let my_public = test::from_dirty_hex(
        "04DAD0B65394221CF9B051E1FECA5787D098DFE637FC90B9EF945D0C37725811805271A0461CDB8252D61F1C456FA3E59AB1F45B33ACCF5F58389E0577B8990BB3",
    );
    let output =
        test::from_dirty_hex("D6840F6B42F6EDAFD13116E0E12565202FEF8E9ECE7DCE03812464D04B9442DE");
    assert_eq!(my_private.algorithm(), alg);
    // Round-trip through the big-endian fixed-length encoding.
    let be_private_key_buffer: EcPrivateKeyBin = my_private.as_be_bytes().unwrap();
    let be_private_key =
        PrivateKey::from_private_key(&ECDH_P256, be_private_key_buffer.as_ref()).unwrap();
    {
        let result = agree(&be_private_key, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    // Round-trip through RFC 5915 DER.
    let der_private_key_buffer: EcPrivateKeyRfc5915Der = my_private.as_der().unwrap();
    let der_private_key =
        PrivateKey::from_private_key_der(&ECDH_P256, der_private_key_buffer.as_ref()).unwrap();
    {
        let result = agree(&der_private_key, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    // Round-trip through PKCS#8 v1 DER.
    let pkcs8_private_key_buffer: Pkcs8V1Der = my_private.as_der().unwrap();
    let pkcs8_private_key =
        PrivateKey::from_private_key_der(&ECDH_P256, pkcs8_private_key_buffer.as_ref()).unwrap();
    {
        let result = agree(&pkcs8_private_key, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    let computed_public = my_private.compute_public_key().unwrap();
    assert_eq!(computed_public.as_ref(), &my_public[..]);
    assert_eq!(computed_public.algorithm(), alg);
    // Agreement with the same key must be repeatable.
    {
        let result = agree(&my_private, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    {
        let result = agree(&my_private, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
}
#[test]
// Known-answer test for P-384 ECDH, exercising big-endian and RFC 5915 DER
// private-key round-trips.
fn test_agreement_ecdh_p384() {
    let alg = &ECDH_P384;
    let peer_public = UnparsedPublicKey::new(
        alg,
        test::from_dirty_hex(
            "04E558DBEF53EECDE3D3FCCFC1AEA08A89A987475D12FD950D83CFA41732BC509D0D1AC43A0336DEF96FDA41D0774A3571DCFBEC7AACF3196472169E838430367F66EEBE3C6E70C416DD5F0C68759DD1FFF83FA40142209DFF5EAAD96DB9E6386C",
        ),
    );
    let my_private = test::from_dirty_hex(
        "099F3C7034D4A2C699884D73A375A67F7624EF7C6B3C0F160647B67414DCE655E35B538041E649EE3FAEF896783AB194",
    );
    // `FixedSliceRandom` makes the "generated" key deterministic.
    let my_private = {
        let mut rng = test::rand::FixedSliceRandom { bytes: &my_private };
        PrivateKey::generate_for_test(alg, &mut rng).unwrap()
    };
    let my_public = test::from_dirty_hex(
        "04667842D7D180AC2CDE6F74F37551F55755C7645C20EF73E31634FE72B4C55EE6DE3AC808ACB4BDB4C88732AEE95F41AA9482ED1FC0EEB9CAFC4984625CCFC23F65032149E0E144ADA024181535A0F38EEB9FCFF3C2C947DAE69B4C634573A81C",
    );
    let output = test::from_dirty_hex(
        "11187331C279962D93D604243FD592CB9D0A926F422E47187521287E7156C5C4D603135569B9E9D09CF5D4A270F59746",
    );
    assert_eq!(my_private.algorithm(), alg);
    // Round-trip through the big-endian fixed-length encoding.
    let be_private_key_buffer: EcPrivateKeyBin = my_private.as_be_bytes().unwrap();
    let be_private_key =
        PrivateKey::from_private_key(&ECDH_P384, be_private_key_buffer.as_ref()).unwrap();
    {
        let result = agree(&be_private_key, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    // Round-trip through RFC 5915 DER.
    let der_private_key_buffer: EcPrivateKeyRfc5915Der = my_private.as_der().unwrap();
    let der_private_key =
        PrivateKey::from_private_key_der(&ECDH_P384, der_private_key_buffer.as_ref()).unwrap();
    {
        let result = agree(&der_private_key, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    let computed_public = my_private.compute_public_key().unwrap();
    assert_eq!(computed_public.as_ref(), &my_public[..]);
    assert_eq!(computed_public.algorithm(), alg);
    {
        let result = agree(&my_private, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
}
#[test]
// Known-answer test for P-521 ECDH, exercising big-endian and RFC 5915 DER
// private-key round-trips.
fn test_agreement_ecdh_p521() {
    let alg = &ECDH_P521;
    let peer_public = UnparsedPublicKey::new(
        alg,
        test::from_dirty_hex(
            "0401a32099b02c0bd85371f60b0dd20890e6c7af048c8179890fda308b359dbbc2b7a832bb8c6526c4af99a7ea3f0b3cb96ae1eb7684132795c478ad6f962e4a6f446d017627357b39e9d7632a1370b3e93c1afb5c851b910eb4ead0c9d387df67cde85003e0e427552f1cd09059aad0262e235cce5fba8cedc4fdc1463da76dcd4b6d1a46",
        ),
    );
    let my_private = test::from_dirty_hex(
        "00df14b1f1432a7b0fb053965fd8643afee26b2451ecb6a8a53a655d5fbe16e4c64ce8647225eb11e7fdcb23627471dffc5c2523bd2ae89957cba3a57a23933e5a78",
    );
    // `FixedSliceRandom` makes the "generated" key deterministic.
    let my_private = {
        let mut rng = test::rand::FixedSliceRandom { bytes: &my_private };
        PrivateKey::generate_for_test(alg, &mut rng).unwrap()
    };
    let my_public = test::from_dirty_hex(
        "04004e8583bbbb2ecd93f0714c332dff5ab3bc6396e62f3c560229664329baa5138c3bb1c36428abd4e23d17fcb7a2cfcc224b2e734c8941f6f121722d7b6b9415457601cf0874f204b0363f020864672fadbf87c8811eb147758b254b74b14fae742159f0f671a018212bbf25b8519e126d4cad778cfff50d288fd39ceb0cac635b175ec0",
    );
    let output = test::from_dirty_hex(
        "01aaf24e5d47e4080c18c55ea35581cd8da30f1a079565045d2008d51b12d0abb4411cda7a0785b15d149ed301a3697062f42da237aa7f07e0af3fd00eb1800d9c41",
    );
    assert_eq!(my_private.algorithm(), alg);
    // Round-trip through the big-endian fixed-length encoding.
    let be_private_key_buffer: EcPrivateKeyBin = my_private.as_be_bytes().unwrap();
    let be_private_key =
        PrivateKey::from_private_key(&ECDH_P521, be_private_key_buffer.as_ref()).unwrap();
    {
        let result = agree(&be_private_key, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    // Round-trip through RFC 5915 DER.
    let der_private_key_buffer: EcPrivateKeyRfc5915Der = my_private.as_der().unwrap();
    let der_private_key =
        PrivateKey::from_private_key_der(&ECDH_P521, der_private_key_buffer.as_ref()).unwrap();
    {
        let result = agree(&der_private_key, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    let computed_public = my_private.compute_public_key().unwrap();
    assert_eq!(computed_public.as_ref(), &my_public[..]);
    assert_eq!(computed_public.algorithm(), alg);
    // Agreement with the same key must be repeatable.
    {
        let result = agree(&my_private, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    {
        let result = agree(&my_private, &peer_public, (), |key_material| {
            assert_eq!(key_material, &output[..]);
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
}
#[test]
// Verifies trait implementations (Debug/Clone/Copy/Send/Sync) and their exact
// Debug output formats for the key-agreement types.
fn agreement_traits() {
    use crate::test;
    use regex::{self, Regex};
    let mut rng = rand::SystemRandom::new();
    let private_key = PrivateKey::generate_for_test(&ECDH_P256, &mut rng).unwrap();
    test::compile_time_assert_send::<PrivateKey>();
    test::compile_time_assert_sync::<PrivateKey>();
    assert_eq!(
        format!("{:?}", &private_key),
        "PrivateKey { algorithm: Algorithm { curve: P256 } }"
    );
    let ephemeral_private_key = PrivateKey::generate_for_test(&ECDH_P256, &mut rng).unwrap();
    test::compile_time_assert_send::<PrivateKey>();
    test::compile_time_assert_sync::<PrivateKey>();
    assert_eq!(
        format!("{:?}", &ephemeral_private_key),
        "PrivateKey { algorithm: Algorithm { curve: P256 } }"
    );
    let public_key = private_key.compute_public_key().unwrap();
    // The key bytes vary per run, so match the Debug output with a regex.
    let pubkey_re = Regex::new(
        "PublicKey \\{ algorithm: Algorithm \\{ curve: P256 \\}, bytes: \"[0-9a-f]+\" \\}",
    )
    .unwrap();
    let pubkey_debug = format!("{:?}", &public_key);
    assert!(
        pubkey_re.is_match(&pubkey_debug),
        "pubkey_debug: {pubkey_debug}"
    );
    #[allow(clippy::redundant_clone)]
    let pubkey_clone = public_key.clone();
    assert_eq!(public_key.as_ref(), pubkey_clone.as_ref());
    assert_eq!(pubkey_debug, format!("{:?}", &pubkey_clone));
    test::compile_time_assert_clone::<PublicKey>();
    test::compile_time_assert_send::<PublicKey>();
    test::compile_time_assert_sync::<PublicKey>();
    // Verify `PublicKey` implements `Debug`.
    //
    // TODO: Test the actual output.
    let _: &dyn core::fmt::Debug = &public_key;
    test::compile_time_assert_clone::<UnparsedPublicKey<&[u8]>>();
    test::compile_time_assert_copy::<UnparsedPublicKey<&[u8]>>();
    test::compile_time_assert_sync::<UnparsedPublicKey<&[u8]>>();
    test::compile_time_assert_clone::<UnparsedPublicKey<Vec<u8>>>();
    test::compile_time_assert_sync::<UnparsedPublicKey<Vec<u8>>>();
    let bytes = [0x01, 0x02, 0x03];
    // `&[u8]` containers are `Copy`, so assignment copies rather than moves.
    let unparsed_public_key = UnparsedPublicKey::new(&X25519, &bytes);
    let unparsed_pubkey_clone = unparsed_public_key;
    assert_eq!(
        format!("{unparsed_public_key:?}"),
        r#"UnparsedPublicKey { algorithm: Algorithm { curve: Curve25519 }, bytes: "010203" }"#
    );
    assert_eq!(
        format!("{unparsed_pubkey_clone:?}"),
        r#"UnparsedPublicKey { algorithm: Algorithm { curve: Curve25519 }, bytes: "010203" }"#
    );
    let unparsed_public_key = UnparsedPublicKey::new(&X25519, Vec::from(bytes));
    #[allow(clippy::redundant_clone)]
    let unparsed_pubkey_clone = unparsed_public_key.clone();
    assert_eq!(
        format!("{unparsed_public_key:?}"),
        r#"UnparsedPublicKey { algorithm: Algorithm { curve: Curve25519 }, bytes: "010203" }"#
    );
    assert_eq!(
        format!("{unparsed_pubkey_clone:?}"),
        r#"UnparsedPublicKey { algorithm: Algorithm { curve: Curve25519 }, bytes: "010203" }"#
    );
}
#[test]
// Exercise randomized agreement for every supported algorithm.
fn test_agreement_random() {
    for alg in [&ECDH_P256, &ECDH_P384, &ECDH_P521, &X25519] {
        test_agreement_random_helper(alg);
    }
}
// Generates two fresh key pairs, performs agreement in both directions across
// every supported public-key encoding, and checks all shared secrets match.
fn test_agreement_random_helper(alg: &'static Algorithm) {
    let peer_private = PrivateKey::generate(alg).unwrap();
    let my_private = PrivateKey::generate(alg).unwrap();
    let peer_public_keys = public_key_formats_helper(&peer_private.compute_public_key().unwrap());
    let my_public_keys = public_key_formats_helper(&my_private.compute_public_key().unwrap());
    let mut results: Vec<Vec<u8>> = Vec::new();
    // My private key against every encoding of the peer's public key.
    for peer_public in peer_public_keys {
        let peer_public = UnparsedPublicKey::new(alg, peer_public);
        let result = agree(&my_private, &peer_public, (), |key_material| {
            results.push(key_material.to_vec());
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    // And the reverse direction.
    for my_public in my_public_keys {
        let my_public = UnparsedPublicKey::new(alg, my_public);
        let result = agree(&peer_private, &my_public, (), |key_material| {
            results.push(key_material.to_vec());
            Ok(())
        });
        assert_eq!(result, Ok(()));
    }
    // NIST curves have 4 encodings (raw, compressed, uncompressed, X.509);
    // X25519 only has raw and X.509.
    let key_types_tested = match alg.id {
        crate::agreement::AlgorithmID::ECDH_P256
        | crate::agreement::AlgorithmID::ECDH_P384
        | crate::agreement::AlgorithmID::ECDH_P521 => 4,
        crate::agreement::AlgorithmID::X25519 => 2,
    };
    assert_eq!(results.len(), key_types_tested * 2); // Multiplied by two because we tested the other direction
    assert_eq!(results[0..key_types_tested], results[key_types_tested..]);
}
// Collects every supported serialization of `public_key` so agreement can be
// exercised against each encoding.
fn public_key_formats_helper(public_key: &PublicKey) -> Vec<Vec<u8>> {
    let mut encodings: Vec<Vec<u8>> = vec![public_key.as_ref().into()];
    // Compressed/uncompressed SEC1 points only exist for the NIST curves.
    if matches!(
        public_key.algorithm().id,
        crate::agreement::AlgorithmID::ECDH_P256
            | crate::agreement::AlgorithmID::ECDH_P384
            | crate::agreement::AlgorithmID::ECDH_P521
    ) {
        let compressed = AsBigEndian::<EcPublicKeyCompressedBin>::as_be_bytes(public_key).unwrap();
        encodings.push(compressed.as_ref().into());
        let uncompressed =
            AsBigEndian::<EcPublicKeyUncompressedBin>::as_be_bytes(public_key).unwrap();
        encodings.push(uncompressed.as_ref().into());
    }
    // X.509 SubjectPublicKeyInfo applies to all algorithms.
    let x509 = AsDer::<PublicKeyX509Der>::as_der(public_key).unwrap();
    encodings.push(x509.as_ref().into());
    encodings
}
#[test]
// The public key must remain serializable after its originating private key
// is dropped.
fn private_key_drop() {
    let private_key = PrivateKey::generate(&ECDH_P256).unwrap();
    let public_key = private_key.compute_public_key().unwrap();
    // PublicKey maintains a reference counted pointer to private keys EVP_PKEY so we test that with drop
    drop(private_key);
    let _ = AsBigEndian::<EcPublicKeyCompressedBin>::as_be_bytes(&public_key).unwrap();
    let _ = AsBigEndian::<EcPublicKeyUncompressedBin>::as_be_bytes(&public_key).unwrap();
    let _ = AsDer::<PublicKeyX509Der>::as_der(&public_key).unwrap();
}

View File

@@ -0,0 +1,504 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::agreement::{agree, Algorithm, ParsedPublicKey, PrivateKey, PublicKey};
use crate::error::Unspecified;
use crate::rand::SecureRandom;
use core::fmt;
use core::fmt::{Debug, Formatter};
/// An ephemeral private key for use (only) with `agree_ephemeral`. The
/// signature of `agree_ephemeral` ensures that a `PrivateKey` can be
/// used for at most one key agreement.
#[allow(clippy::module_name_repetitions)]
pub struct EphemeralPrivateKey(PrivateKey);
impl Debug for EphemeralPrivateKey {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        // Only the algorithm is printed; key material is never exposed.
        write!(
            f,
            "EphemeralPrivateKey {{ algorithm: {:?} }}",
            self.0.inner_key.algorithm()
        )
    }
}
impl EphemeralPrivateKey {
    #[inline]
    /// Generate a new ephemeral private key for the given algorithm.
    ///
    /// # *ring* Compatibility
    /// Our implementation ignores the `SecureRandom` parameter.
    // # FIPS
    // Use this function with one of the following algorithms:
    // * `ECDH_P256`
    // * `ECDH_P384`
    // * `ECDH_P521`
    //
    /// # Errors
    /// `error::Unspecified` when operation fails due to internal error.
    pub fn generate(alg: &'static Algorithm, _rng: &dyn SecureRandom) -> Result<Self, Unspecified> {
        Ok(Self(PrivateKey::generate(alg)?))
    }
    #[cfg(any(test, dev_tests_only))]
    #[allow(missing_docs, clippy::missing_errors_doc)]
    pub fn generate_for_test(
        alg: &'static Algorithm,
        rng: &mut dyn SecureRandom,
    ) -> Result<Self, Unspecified> {
        Ok(Self(PrivateKey::generate_for_test(alg, rng)?))
    }
    /// Computes the public key from the private key.
    ///
    /// # Errors
    /// `error::Unspecified` when operation fails due to internal error.
    pub fn compute_public_key(&self) -> Result<PublicKey, Unspecified> {
        // Delegates to the wrapped `PrivateKey`.
        self.0.compute_public_key()
    }
    /// The algorithm for the private key.
    #[inline]
    #[must_use]
    pub fn algorithm(&self) -> &'static Algorithm {
        self.0.algorithm()
    }
}
/// Performs a key agreement with an ephemeral private key and the given public
/// key.
///
/// `my_private_key` is the ephemeral private key to use. Since it is moved, it
/// will not be usable after calling `agree_ephemeral`, thus guaranteeing that
/// the key is used for only one key agreement.
///
/// `peer_public_key` is the peer's public key. `agree_ephemeral` will return
/// `Err(error_value)` if it does not match `my_private_key`'s algorithm/curve.
/// `agree_ephemeral` verifies that it is encoded in the standard form for the
/// algorithm and that the key is *valid*; see the algorithm's documentation for
/// details on how keys are to be encoded and what constitutes a valid key for
/// that algorithm.
///
/// `error_value` is the value to return if an error occurs before `kdf` is
/// called, e.g. when decoding of the peer's public key fails or when the public
/// key is otherwise invalid.
///
/// After the key agreement is done, `agree_ephemeral` calls `kdf` with the raw
/// key material from the key agreement operation and then returns what `kdf`
/// returns.
// # FIPS
// Use this function with one of the following key algorithms:
// * `ECDH_P256`
// * `ECDH_P384`
// * `ECDH_P521`
//
/// # Errors
/// `error_value` on internal failure.
#[inline]
#[allow(clippy::needless_pass_by_value)]
#[allow(clippy::missing_panics_doc)]
#[allow(clippy::module_name_repetitions)]
pub fn agree_ephemeral<B: TryInto<ParsedPublicKey>, F, R, E>(
    my_private_key: EphemeralPrivateKey,
    peer_public_key: B,
    error_value: E,
    kdf: F,
) -> Result<R, E>
where
    F: FnOnce(&[u8]) -> Result<R, E>,
{
    // `my_private_key` is consumed here; delegate to the reusable-key
    // implementation with its inner `PrivateKey`.
    agree(&my_private_key.0, peer_public_key, error_value, kdf)
}
#[cfg(test)]
mod tests {
use crate::agreement::{AlgorithmID, PublicKey};
use crate::encoding::{
AsBigEndian, AsDer, EcPublicKeyCompressedBin, EcPublicKeyUncompressedBin, PublicKeyX509Der,
};
use crate::error::Unspecified;
use crate::{agreement, rand, test, test_file};
#[test]
fn test_agreement_ecdh_x25519_rfc_iterated() {
fn expect_iterated_x25519(
expected_result: &str,
range: core::ops::Range<usize>,
k: &mut Vec<u8>,
u: &mut Vec<u8>,
) {
for _ in range {
let new_k = x25519(k, u);
u.clone_from(k);
*k = new_k;
}
assert_eq!(&from_hex(expected_result), k);
}
let mut k = from_hex("0900000000000000000000000000000000000000000000000000000000000000");
let mut u = k.clone();
expect_iterated_x25519(
"422c8e7a6227d7bca1350b3e2bb7279f7897b87bb6854b783c60e80311ae3079",
0..1,
&mut k,
&mut u,
);
expect_iterated_x25519(
"684cf59ba83309552800ef566f2f4d3c1c3887c49360e3875f2eb94d99532c51",
1..1_000,
&mut k,
&mut u,
);
// The spec gives a test vector for 1,000,000 iterations but it takes
// too long to do 1,000,000 iterations by default right now. This
// 10,000 iteration vector is self-computed.
#[cfg(not(disable_slow_tests))]
expect_iterated_x25519(
"2c125a20f639d504a7703d2e223c79a79de48c4ee8c23379aa19a62ecd211815",
1_000..10_000,
&mut k,
&mut u,
);
/*
expect_iterated_x25519(
"7c3911e0ab2586fd864497297e575e6f3bc601c0883c30df5f4dd2d24f665424",
10_000..1_000_000,
&mut k,
&mut u,
);
*/
}
#[test]
fn test_agreement_x25519() {
let alg = &agreement::X25519;
let peer_public = agreement::UnparsedPublicKey::new(
alg,
test::from_dirty_hex(
"e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c",
),
);
let my_private = test::from_dirty_hex(
"a546e36bf0527c9d3b16154b82465edd62144c0ac1fc5a18506a2244ba449ac4",
);
let my_private = {
let mut rng = test::rand::FixedSliceRandom { bytes: &my_private };
agreement::EphemeralPrivateKey::generate_for_test(alg, &mut rng).unwrap()
};
let my_public = test::from_dirty_hex(
"1c9fd88f45606d932a80c71824ae151d15d73e77de38e8e000852e614fae7019",
);
let output = test::from_dirty_hex(
"c3da55379de9c6908e94ea4df28d084f32eccf03491c71f754b4075577a28552",
);
assert_eq!(my_private.algorithm(), alg);
let computed_public = my_private.compute_public_key().unwrap();
assert_eq!(computed_public.as_ref(), &my_public[..]);
assert_eq!(computed_public.algorithm(), alg);
let result = agreement::agree_ephemeral(my_private, &peer_public, (), |key_material| {
assert_eq!(key_material, &output[..]);
Ok(())
});
assert_eq!(result, Ok(()));
}
#[test]
fn test_agreement_ecdh_p256() {
let alg = &agreement::ECDH_P256;
let peer_public = agreement::UnparsedPublicKey::new(
alg,
test::from_dirty_hex(
"04D12DFB5289C8D4F81208B70270398C342296970A0BCCB74C736FC7554494BF6356FBF3CA366CC23E8157854C13C58D6AAC23F046ADA30F8353E74F33039872AB",
),
);
assert_eq!(peer_public.algorithm(), alg);
assert_eq!(peer_public.bytes(), &peer_public.bytes);
let my_private = test::from_dirty_hex(
"C88F01F510D9AC3F70A292DAA2316DE544E9AAB8AFE84049C62A9C57862D1433",
);
let my_private = {
let mut rng = test::rand::FixedSliceRandom { bytes: &my_private };
agreement::EphemeralPrivateKey::generate_for_test(alg, &mut rng).unwrap()
};
let my_public = test::from_dirty_hex(
"04DAD0B65394221CF9B051E1FECA5787D098DFE637FC90B9EF945D0C37725811805271A0461CDB8252D61F1C456FA3E59AB1F45B33ACCF5F58389E0577B8990BB3",
);
let output = test::from_dirty_hex(
"D6840F6B42F6EDAFD13116E0E12565202FEF8E9ECE7DCE03812464D04B9442DE",
);
assert_eq!(my_private.algorithm(), alg);
let computed_public = my_private.compute_public_key().unwrap();
assert_eq!(computed_public.as_ref(), &my_public[..]);
assert_eq!(computed_public.algorithm(), alg);
let result = agreement::agree_ephemeral(my_private, &peer_public, (), |key_material| {
assert_eq!(key_material, &output[..]);
Ok(())
});
assert_eq!(result, Ok(()));
}
#[test]
fn test_agreement_ecdh_p384() {
let alg = &agreement::ECDH_P384;
let peer_public = agreement::UnparsedPublicKey::new(
alg,
test::from_dirty_hex(
"04E558DBEF53EECDE3D3FCCFC1AEA08A89A987475D12FD950D83CFA41732BC509D0D1AC43A0336DEF96FDA41D0774A3571DCFBEC7AACF3196472169E838430367F66EEBE3C6E70C416DD5F0C68759DD1FFF83FA40142209DFF5EAAD96DB9E6386C",
),
);
let my_private = test::from_dirty_hex(
"099F3C7034D4A2C699884D73A375A67F7624EF7C6B3C0F160647B67414DCE655E35B538041E649EE3FAEF896783AB194",
);
let my_private = {
let mut rng = test::rand::FixedSliceRandom { bytes: &my_private };
agreement::EphemeralPrivateKey::generate_for_test(alg, &mut rng).unwrap()
};
let my_public = test::from_dirty_hex(
"04667842D7D180AC2CDE6F74F37551F55755C7645C20EF73E31634FE72B4C55EE6DE3AC808ACB4BDB4C88732AEE95F41AA9482ED1FC0EEB9CAFC4984625CCFC23F65032149E0E144ADA024181535A0F38EEB9FCFF3C2C947DAE69B4C634573A81C",
);
let output = test::from_dirty_hex(
"11187331C279962D93D604243FD592CB9D0A926F422E47187521287E7156C5C4D603135569B9E9D09CF5D4A270F59746",
);
assert_eq!(my_private.algorithm(), alg);
let computed_public = my_private.compute_public_key().unwrap();
assert_eq!(computed_public.as_ref(), &my_public[..]);
assert_eq!(computed_public.algorithm(), alg);
let result = agreement::agree_ephemeral(my_private, &peer_public, (), |key_material| {
assert_eq!(key_material, &output[..]);
Ok(())
});
assert_eq!(result, Ok(()));
}
#[test]
fn test_agreement_ecdh_p521() {
let alg = &agreement::ECDH_P521;
let peer_public = agreement::UnparsedPublicKey::new(
alg,
test::from_dirty_hex(
"0401a32099b02c0bd85371f60b0dd20890e6c7af048c8179890fda308b359dbbc2b7a832bb8c6526c4af99a7ea3f0b3cb96ae1eb7684132795c478ad6f962e4a6f446d017627357b39e9d7632a1370b3e93c1afb5c851b910eb4ead0c9d387df67cde85003e0e427552f1cd09059aad0262e235cce5fba8cedc4fdc1463da76dcd4b6d1a46",
),
);
let my_private = test::from_dirty_hex(
"00df14b1f1432a7b0fb053965fd8643afee26b2451ecb6a8a53a655d5fbe16e4c64ce8647225eb11e7fdcb23627471dffc5c2523bd2ae89957cba3a57a23933e5a78",
);
let my_private = {
let mut rng = test::rand::FixedSliceRandom { bytes: &my_private };
agreement::EphemeralPrivateKey::generate_for_test(alg, &mut rng).unwrap()
};
let my_public = test::from_dirty_hex(
"04004e8583bbbb2ecd93f0714c332dff5ab3bc6396e62f3c560229664329baa5138c3bb1c36428abd4e23d17fcb7a2cfcc224b2e734c8941f6f121722d7b6b9415457601cf0874f204b0363f020864672fadbf87c8811eb147758b254b74b14fae742159f0f671a018212bbf25b8519e126d4cad778cfff50d288fd39ceb0cac635b175ec0",
);
let output = test::from_dirty_hex(
"01aaf24e5d47e4080c18c55ea35581cd8da30f1a079565045d2008d51b12d0abb4411cda7a0785b15d149ed301a3697062f42da237aa7f07e0af3fd00eb1800d9c41",
);
assert_eq!(my_private.algorithm(), alg);
let computed_public = my_private.compute_public_key().unwrap();
assert_eq!(computed_public.as_ref(), &my_public[..]);
assert_eq!(computed_public.algorithm(), alg);
let result = agreement::agree_ephemeral(my_private, &peer_public, (), |key_material| {
assert_eq!(key_material, &output[..]);
Ok(())
});
assert_eq!(result, Ok(()));
}
#[test]
fn agreement_traits() {
    use crate::test;
    let mut rng = rand::SystemRandom::new();
    let ephemeral_private_key =
        agreement::EphemeralPrivateKey::generate_for_test(&agreement::ECDH_P256, &mut rng)
            .unwrap();
    // The key type must be transferable/shareable across threads.
    test::compile_time_assert_send::<agreement::EphemeralPrivateKey>();
    test::compile_time_assert_sync::<agreement::EphemeralPrivateKey>();
    // Debug output must not leak key material — only the curve is shown.
    assert_eq!(
        format!("{:?}", &ephemeral_private_key),
        "EphemeralPrivateKey { algorithm: Algorithm { curve: P256 } }"
    );
}
// Asserts that `computed_public` serializes to `expected_public_key_bytes`
// in the serialization format named by the test vector (`expected_format`).
fn check_computed_public_key(
    algorithm: &AlgorithmID,
    expected_format: &str,
    expected_public_key_bytes: &[u8],
    computed_public: &PublicKey,
) {
    match (algorithm, expected_format) {
        // X.509 SubjectPublicKeyInfo DER applies to every algorithm.
        (_, "X509") => {
            let der = AsDer::<PublicKeyX509Der>::as_der(computed_public)
                .expect("serialize to uncompressed format");
            assert_eq!(
                expected_public_key_bytes,
                der.as_ref(),
                "hex: {:x?}",
                der.as_ref()
            );
        }
        // Compressed points are only exercised for the NIST curves here.
        (
            AlgorithmID::ECDH_P256 | AlgorithmID::ECDH_P384 | AlgorithmID::ECDH_P521,
            "COMPRESSED",
        ) => {
            let bin = AsBigEndian::<EcPublicKeyCompressedBin>::as_be_bytes(computed_public)
                .expect("serialize to compressed format");
            assert_eq!(expected_public_key_bytes, bin.as_ref());
        }
        // Uncompressed is the default form, so it must also match `as_ref`.
        (
            AlgorithmID::ECDH_P256 | AlgorithmID::ECDH_P384 | AlgorithmID::ECDH_P521,
            "UNCOMPRESSED" | "",
        ) => {
            let bin = AsBigEndian::<EcPublicKeyUncompressedBin>::as_be_bytes(computed_public)
                .expect("serialize to uncompressed format");
            assert_eq!(expected_public_key_bytes, bin.as_ref());
            assert_eq!(expected_public_key_bytes, computed_public.as_ref());
        }
        // X25519 public keys have a single raw 32-byte representation.
        (AlgorithmID::X25519, "") => {
            assert_eq!(expected_public_key_bytes, computed_public.as_ref());
        }
        (ai, pf) => {
            panic!("Unexpected PeerFormat={pf:?} for {ai:?}")
        }
    }
}
#[test]
fn agreement_agree_ephemeral() {
let rng = rand::SystemRandom::new();
test::run(
test_file!("data/agreement_tests.txt"),
|section, test_case| {
assert_eq!(section, "");
let curve_name = test_case.consume_string("Curve");
let alg = alg_from_curve_name(&curve_name);
let peer_public =
agreement::UnparsedPublicKey::new(alg, test_case.consume_bytes("PeerQ"));
let myq_format = test_case
.consume_optional_string("MyQFormat")
.unwrap_or_default();
if test_case.consume_optional_string("Error").is_none() {
let my_private_bytes = test_case.consume_bytes("D");
let my_private = {
let mut rng = test::rand::FixedSliceRandom {
bytes: &my_private_bytes,
};
agreement::EphemeralPrivateKey::generate_for_test(alg, &mut rng)?
};
let my_public = test_case.consume_bytes("MyQ");
let output = test_case.consume_bytes("Output");
assert_eq!(my_private.algorithm(), alg);
let computed_public = my_private.compute_public_key().unwrap();
check_computed_public_key(&alg.id, &myq_format, &my_public, &computed_public);
assert_eq!(my_private.algorithm(), alg);
let result =
agreement::agree_ephemeral(my_private, &peer_public, (), |key_material| {
assert_eq!(key_material, &output[..]);
Ok(())
});
assert_eq!(
result,
Ok(()),
"Failed on private key: {:?}",
test::to_hex(my_private_bytes)
);
} else {
fn kdf_not_called(_: &[u8]) -> Result<(), ()> {
panic!(
"The KDF was called during ECDH when the peer's \
public key is invalid."
);
}
let dummy_private_key = agreement::EphemeralPrivateKey::generate(alg, &rng)?;
assert!(agreement::agree_ephemeral(
dummy_private_key,
&peer_public,
(),
kdf_not_called
)
.is_err());
}
Ok(())
},
);
}
/// Decodes `s` as hex, panicking (with the offending input) on failure.
fn from_hex(s: &str) -> Vec<u8> {
    test::from_hex(s).unwrap_or_else(|msg| panic!("{msg} in {s}"))
}
/// Maps a test-vector curve name to the corresponding agreement algorithm.
///
/// # Panics
/// Panics when `curve_name` is not one of the supported curves.
fn alg_from_curve_name(curve_name: &str) -> &'static agreement::Algorithm {
    // A `match` over the string makes the name→algorithm table explicit and
    // is the idiomatic replacement for an `if`/`else if` chain.
    match curve_name {
        "P-256" => &agreement::ECDH_P256,
        "P-384" => &agreement::ECDH_P384,
        "P-521" => &agreement::ECDH_P521,
        "X25519" => &agreement::X25519,
        _ => panic!("Unsupported curve: {curve_name}"),
    }
}
// Convenience wrapper over `try_x25519` that panics on failure.
fn x25519(private_key: &[u8], public_key: &[u8]) -> Vec<u8> {
    try_x25519(private_key, public_key).unwrap()
}
// Performs a single X25519 agreement with a fixed private scalar and a raw
// peer public key, returning the shared secret bytes.
fn try_x25519(private_key: &[u8], public_key: &[u8]) -> Result<Vec<u8>, Unspecified> {
    let mut rng = test::rand::FixedSliceRandom { bytes: private_key };
    let private_key =
        agreement::EphemeralPrivateKey::generate_for_test(&agreement::X25519, &mut rng)?;
    let public_key = agreement::UnparsedPublicKey::new(&agreement::X25519, public_key);
    agreement::agree_ephemeral(private_key, public_key, Unspecified, |agreed_value| {
        Ok(Vec::from(agreed_value))
    })
}
}

View File

@@ -0,0 +1,190 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::agreement::{
agree, ParsedPublicKey, ParsedPublicKeyFormat, PrivateKey, UnparsedPublicKey, ECDH_P256,
ECDH_P384, ECDH_P521, X25519,
};
use crate::encoding::{AsBigEndian, AsDer, EcPublicKeyCompressedBin, PublicKeyX509Der};
use crate::test;
#[test]
fn test_types() {
test::compile_time_assert_send::<UnparsedPublicKey<&[u8]>>();
test::compile_time_assert_sync::<UnparsedPublicKey<&[u8]>>();
test::compile_time_assert_send::<UnparsedPublicKey<Vec<u8>>>();
test::compile_time_assert_sync::<UnparsedPublicKey<Vec<u8>>>();
test::compile_time_assert_clone::<UnparsedPublicKey<&[u8]>>();
test::compile_time_assert_clone::<UnparsedPublicKey<Vec<u8>>>();
test::compile_time_assert_send::<ParsedPublicKey>();
test::compile_time_assert_sync::<ParsedPublicKey>();
test::compile_time_assert_clone::<ParsedPublicKey>();
}
#[test]
fn test_parsed_public_key_x25519_raw() {
let raw_key =
test::from_dirty_hex("e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c");
let parsed = ParsedPublicKey::new(&raw_key, X25519.id.nid()).unwrap();
assert_eq!(&raw_key, parsed.as_ref());
assert_eq!(parsed.format(), ParsedPublicKeyFormat::Raw);
assert_eq!(parsed.alg(), &X25519);
}
#[test]
fn test_parsed_public_key_x25519_x509() {
let private_key = PrivateKey::generate(&X25519).unwrap();
let public_key = private_key.compute_public_key().unwrap();
let x509_der: PublicKeyX509Der = public_key.as_der().unwrap();
let parsed = ParsedPublicKey::new(x509_der.as_ref(), X25519.id.nid()).unwrap();
assert_eq!(parsed.format(), ParsedPublicKeyFormat::X509);
assert_eq!(parsed.alg(), &X25519);
}
#[test]
fn test_parsed_public_key_p256_uncompressed() {
let uncompressed_key = test::from_dirty_hex(
"04D12DFB5289C8D4F81208B70270398C342296970A0BCCB74C736FC7554494BF6356FBF3CA366CC23E8157854C13C58D6AAC23F046ADA30F8353E74F33039872AB",
);
let parsed = ParsedPublicKey::new(&uncompressed_key, ECDH_P256.id.nid()).unwrap();
assert_eq!(parsed.format(), ParsedPublicKeyFormat::Uncompressed);
assert_eq!(parsed.alg(), &ECDH_P256);
}
#[test]
fn test_parsed_public_key_p256_compressed() {
let private_key = PrivateKey::generate(&ECDH_P256).unwrap();
let public_key = private_key.compute_public_key().unwrap();
let compressed: EcPublicKeyCompressedBin = public_key.as_be_bytes().unwrap();
let parsed = ParsedPublicKey::new(compressed.as_ref(), ECDH_P256.id.nid()).unwrap();
assert_eq!(parsed.format(), ParsedPublicKeyFormat::Compressed);
assert_eq!(parsed.alg(), &ECDH_P256);
}
#[test]
fn test_parsed_public_key_p256_x509() {
let private_key = PrivateKey::generate(&ECDH_P256).unwrap();
let public_key = private_key.compute_public_key().unwrap();
let x509_der: PublicKeyX509Der = public_key.as_der().unwrap();
let parsed = ParsedPublicKey::new(x509_der.as_ref(), ECDH_P256.id.nid()).unwrap();
assert_eq!(parsed.format(), ParsedPublicKeyFormat::X509);
assert_eq!(parsed.alg(), &ECDH_P256);
}
#[test]
fn test_parsed_public_key_p384() {
let private_key = PrivateKey::generate(&ECDH_P384).unwrap();
let public_key = private_key.compute_public_key().unwrap();
let x509_der: PublicKeyX509Der = public_key.as_der().unwrap();
let parsed = ParsedPublicKey::new(x509_der.as_ref(), ECDH_P384.id.nid()).unwrap();
assert_eq!(parsed.format(), ParsedPublicKeyFormat::X509);
assert_eq!(parsed.alg(), &ECDH_P384);
}
#[test]
fn test_parsed_public_key_p521() {
let private_key = PrivateKey::generate(&ECDH_P521).unwrap();
let public_key = private_key.compute_public_key().unwrap();
let x509_der: PublicKeyX509Der = public_key.as_der().unwrap();
let parsed = ParsedPublicKey::new(x509_der.as_ref(), ECDH_P521.id.nid()).unwrap();
assert_eq!(parsed.format(), ParsedPublicKeyFormat::X509);
assert_eq!(parsed.alg(), &ECDH_P521);
}
#[test]
fn test_parsed_public_key_invalid_empty() {
let empty_key = [];
assert!(ParsedPublicKey::new(empty_key, X25519.id.nid()).is_err());
assert!(ParsedPublicKey::new(empty_key, ECDH_P256.id.nid()).is_err());
}
#[test]
fn test_parsed_public_key_invalid_nid() {
let raw_key =
test::from_dirty_hex("e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c");
assert!(ParsedPublicKey::new(&raw_key, 999).is_err());
}
#[test]
fn test_unparsed_to_parsed_conversion() {
let raw_key =
test::from_dirty_hex("e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c");
let unparsed = UnparsedPublicKey::new(&X25519, raw_key);
let parsed: ParsedPublicKey = (&unparsed).try_into().unwrap();
assert_eq!(parsed.format(), ParsedPublicKeyFormat::Raw);
assert_eq!(parsed.alg(), &X25519);
let parsed: ParsedPublicKey = unparsed.try_into().unwrap();
assert_eq!(parsed.format(), ParsedPublicKeyFormat::Raw);
assert_eq!(parsed.alg(), &X25519);
}
#[test]
fn test_agree_with_parsed_public_key() {
let my_private = PrivateKey::generate(&X25519).unwrap();
let peer_private = PrivateKey::generate(&X25519).unwrap();
let peer_public = peer_private.compute_public_key().unwrap();
let parsed = ParsedPublicKey::new(peer_public.as_ref(), X25519.id.nid()).unwrap();
let result = agree(&my_private, parsed, (), |_key_material| Ok(()));
assert!(result.is_ok());
}
#[test]
fn test_agree_with_parsed_public_key_algorithm_mismatch() {
let my_private = PrivateKey::generate(&ECDH_P256).unwrap();
let peer_private = PrivateKey::generate(&X25519).unwrap();
let peer_public = peer_private.compute_public_key().unwrap();
let parsed = ParsedPublicKey::new(peer_public.as_ref(), X25519.id.nid()).unwrap();
let result = agree(&my_private, parsed, "error", |_key_material| Ok(()));
assert_eq!(result, Err("error"));
}
#[test]
fn test_parsed_public_key_debug() {
let raw_key =
test::from_dirty_hex("e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c");
let parsed = ParsedPublicKey::new(&raw_key, X25519.id.nid()).unwrap();
let debug_str = format!("{parsed:?}");
assert!(debug_str.contains("ParsedPublicKey"));
}
#[test]
fn test_parsed_public_key_format_debug() {
assert_eq!(format!("{:?}", ParsedPublicKeyFormat::Raw), "Raw");
assert_eq!(format!("{:?}", ParsedPublicKeyFormat::X509), "X509");
assert_eq!(
format!("{:?}", ParsedPublicKeyFormat::Compressed),
"Compressed"
);
assert_eq!(
format!("{:?}", ParsedPublicKeyFormat::Uncompressed),
"Uncompressed"
);
}
#[test]
fn test_parsed_public_key_send_sync() {
fn assert_send<T: Send>() {}
fn assert_sync<T: Sync>() {}
assert_send::<ParsedPublicKey>();
assert_sync::<ParsedPublicKey>();
}

View File

@@ -0,0 +1,58 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![cfg(debug_assertions)]
use crate::{
agreement::{
agree_ephemeral, EphemeralPrivateKey, UnparsedPublicKey, ECDH_P256, ECDH_P384, ECDH_P521,
X25519,
},
error::Unspecified,
fips::{assert_fips_status_indicator, FipsServiceStatus},
rand::SystemRandom,
};
macro_rules! agree_ephemeral_api {
($name:ident, $alg:expr, $expect:path) => {
#[test]
fn $name() {
let rng = SystemRandom::new();
let alice_private =
assert_fips_status_indicator!(EphemeralPrivateKey::generate($alg, &rng), $expect)
.unwrap();
let bob_private =
assert_fips_status_indicator!(EphemeralPrivateKey::generate($alg, &rng), $expect)
.unwrap();
let alice_public = alice_private.compute_public_key().unwrap();
let alice_public = UnparsedPublicKey::new($alg, alice_public.as_ref());
let bob_public = bob_private.compute_public_key().unwrap();
let bob_public = UnparsedPublicKey::new($alg, bob_public.as_ref());
let alice_secret = assert_fips_status_indicator!(
agree_ephemeral(alice_private, &bob_public, Unspecified, |secret| {
Ok(Vec::from(secret))
}),
$expect
)
.unwrap();
let bob_secret = assert_fips_status_indicator!(
agree_ephemeral(bob_private, &alice_public, Unspecified, |secret| {
Ok(Vec::from(secret))
}),
$expect
)
.unwrap();
assert_eq!(alice_secret, bob_secret);
}
};
}
agree_ephemeral_api!(ecdh_p256, &ECDH_P256, FipsServiceStatus::Approved);
agree_ephemeral_api!(ecdh_p384, &ECDH_P384, FipsServiceStatus::Approved);
agree_ephemeral_api!(ecdh_p521, &ECDH_P521, FipsServiceStatus::Approved);
agree_ephemeral_api!(x25519, &X25519, FipsServiceStatus::NonApproved);

49
vendor/aws-lc-rs/src/bn.rs vendored Normal file
View File

@@ -0,0 +1,49 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{BN_bin2bn, BN_bn2bin, BN_new, BN_num_bytes, BN_set_u64, BIGNUM};
use crate::ptr::{ConstPointer, DetachableLcPtr, LcPtr};
use core::ptr::null_mut;
impl TryFrom<&[u8]> for LcPtr<BIGNUM> {
    type Error = ();
    // Parses a big-endian byte string into a freshly allocated BIGNUM.
    fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
        // SAFETY: `bytes` supplies a valid pointer/length pair; passing
        // `null_mut()` asks `BN_bin2bn` to allocate a new BIGNUM, and the
        // (possibly null) result is checked by `LcPtr::new`.
        unsafe { LcPtr::new(BN_bin2bn(bytes.as_ptr(), bytes.len(), null_mut())) }
    }
}
impl TryFrom<&[u8]> for DetachableLcPtr<BIGNUM> {
    type Error = ();
    // Same parse as above, but wrapped in a pointer type whose ownership can
    // later be detached (e.g. when handed off to another aws-lc structure).
    fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
        // SAFETY: see the `LcPtr<BIGNUM>` conversion above.
        unsafe { DetachableLcPtr::new(BN_bin2bn(bytes.as_ptr(), bytes.len(), null_mut())) }
    }
}
impl TryFrom<u64> for DetachableLcPtr<BIGNUM> {
    type Error = ();
    // Builds a BIGNUM holding `value`.
    fn try_from(value: u64) -> Result<Self, Self::Error> {
        // SAFETY: `BN_new`'s result is null-checked by `DetachableLcPtr::new`;
        // `BN_set_u64` reports success with 1, so any other value is an error.
        unsafe {
            let mut bn = DetachableLcPtr::new(BN_new())?;
            if 1 != BN_set_u64(bn.as_mut_ptr(), value) {
                return Err(());
            }
            Ok(bn)
        }
    }
}
impl ConstPointer<'_, BIGNUM> {
    /// Serializes the `BIGNUM` to its minimal big-endian byte representation.
    pub(crate) fn to_be_bytes(&self) -> Vec<u8> {
        unsafe {
            let bn_bytes = BN_num_bytes(self.as_const_ptr()) as usize;
            // Zero-initialize instead of `with_capacity` + `set_len`: this
            // avoids handing out (and asserting validity of) uninitialized
            // memory (clippy::uninit_vec) for a negligible one-time cost;
            // `BN_bn2bin` then overwrites the buffer.
            let mut byte_vec = vec![0u8; bn_bytes];
            let out_bytes = BN_bn2bin(self.as_const_ptr(), byte_vec.as_mut_ptr());
            debug_assert_eq!(out_bytes, bn_bytes);
            // `BN_bn2bin` returns the number of bytes written; trim in case it
            // wrote fewer than `BN_num_bytes` reported.
            byte_vec.truncate(out_bytes);
            byte_vec
        }
    }
}

72
vendor/aws-lc-rs/src/buffer.rs vendored Normal file
View File

@@ -0,0 +1,72 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! This module exposes a buffer type used in crate APIs returning private keys and other "private"
//! contents.
#![allow(clippy::module_name_repetitions)]
use alloc::borrow::Cow;
use core::fmt;
use core::marker::PhantomData;
use zeroize::Zeroize;
/// This is a buffer type for some data exposed by various APIs in this crate.
///
/// `T` acts as a discriminant between different kinds of data.
///
/// The buffer will be zeroed on drop if it is owned.
pub struct Buffer<'a, T>(Cow<'a, [u8]>, PhantomData<T>);
impl<T> Drop for Buffer<'_, T> {
    fn drop(&mut self) {
        // Only owned bytes are wiped; a borrowed `Cow` does not own the
        // underlying storage, so scrubbing it is the lender's responsibility.
        if let Cow::Owned(b) = &mut self.0 {
            b.zeroize();
        }
    }
}
impl<'a, T> Buffer<'a, T> {
    // Takes ownership of `owned`; the bytes are zeroized on drop.
    pub(crate) fn new(owned: Vec<u8>) -> Buffer<'a, T> {
        Buffer(Cow::Owned(owned), PhantomData)
    }
    // Copies `slice` into an owned buffer and scrubs the source in place so
    // secret material does not linger in the caller's storage.
    pub(crate) fn take_from_slice(slice: &mut [u8]) -> Buffer<'a, T> {
        let owned = slice.to_vec();
        slice.zeroize();
        Buffer(Cow::Owned(owned), PhantomData)
    }
}
impl<T> fmt::Debug for Buffer<'_, T> {
    // Deliberately opaque: never print (potentially secret) buffer contents.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        f.write_str("Buffer(...)")
    }
}
impl<T> AsRef<[u8]> for Buffer<'_, T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Owned construction exposes exactly the bytes passed in.
    #[test]
    fn test_new() {
        let data = vec![1, 2, 3];
        let buffer: Buffer<u8> = Buffer::new(data);
        assert_eq!(&[1, 2, 3], buffer.as_ref());
    }

    /// Taking from a slice copies the contents and wipes the source.
    #[test]
    fn test_take_from_slice() {
        let mut source = [1, 2, 3];
        let buffer: Buffer<u8> = Buffer::take_from_slice(&mut source);
        assert_eq!(&[1, 2, 3], buffer.as_ref());
        assert_eq!([0, 0, 0], source);
    }
}

107
vendor/aws-lc-rs/src/cbb.rs vendored Normal file
View File

@@ -0,0 +1,107 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{CBB_cleanup, CBB_finish, CBB_init, CBB_init_fixed, CBB};
use crate::error::Unspecified;
use crate::ptr::LcPtr;
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ptr::null_mut;
// An owned wrapper around aws-lc's CBB (crypto byte builder). The lifetime
// parameter ties a fixed-buffer CBB to its borrowed backing storage.
pub(crate) struct LcCBB<'a>(CBB, PhantomData<&'a CBB>);
impl LcCBB<'static> {
    // Creates a growable CBB that owns its backing allocation.
    pub(crate) fn new(initial_capacity: usize) -> LcCBB<'static> {
        let mut cbb = MaybeUninit::<CBB>::uninit();
        // NOTE(review): CBB_init's return value (allocation failure) is
        // ignored here — confirm downstream CBB_* calls fail cleanly in
        // that case.
        let cbb = unsafe {
            CBB_init(cbb.as_mut_ptr(), initial_capacity);
            cbb.assume_init()
        };
        Self(cbb, PhantomData)
    }
    // Finalizes the CBB and copies the serialized bytes into a `Vec`.
    pub(crate) fn into_vec(mut self) -> Result<Vec<u8>, Unspecified> {
        let mut out_data = null_mut::<u8>();
        let mut out_len: usize = 0;
        if 1 != unsafe { CBB_finish(self.as_mut_ptr(), &mut out_data, &mut out_len) } {
            return Err(Unspecified);
        }
        // `LcPtr` takes ownership of the malloc'd output and frees it on drop.
        let out_data = LcPtr::new(out_data)?;
        let slice = unsafe { std::slice::from_raw_parts(out_data.as_const_ptr(), out_len) };
        // `to_vec()` copies the data into a new `Vec`
        Ok(slice.to_vec())
    }
}
impl<'a> LcCBB<'a> {
    // Creates a fixed-capacity CBB writing directly into `buffer`; writes
    // fail (rather than grow) once `buffer` is full.
    pub(crate) fn new_from_slice(buffer: &'a mut [u8]) -> LcCBB<'a> {
        let mut cbb = MaybeUninit::<CBB>::uninit();
        let cbb = unsafe {
            CBB_init_fixed(cbb.as_mut_ptr(), buffer.as_mut_ptr(), buffer.len());
            cbb.assume_init()
        };
        Self(cbb, PhantomData)
    }
    // Finalizes a fixed CBB, returning the number of bytes written into the
    // caller-provided buffer.
    pub(crate) fn finish(mut self) -> Result<usize, Unspecified> {
        let mut pkcs8_bytes_ptr = null_mut::<u8>();
        let mut out_len: usize = 0;
        if 1 != unsafe { CBB_finish(self.as_mut_ptr(), &mut pkcs8_bytes_ptr, &mut out_len) } {
            return Err(Unspecified);
        }
        Ok(out_len)
    }
}
impl LcCBB<'_> {
    // Raw pointer for passing to aws-lc CBB_* functions.
    pub(crate) fn as_mut_ptr(&mut self) -> *mut CBB {
        &mut self.0
    }
}
impl Drop for LcCBB<'_> {
    fn drop(&mut self) {
        // Releases any allocation still held if the CBB was never finished.
        unsafe {
            CBB_cleanup(&mut self.0);
        }
    }
}
#[cfg(test)]
mod tests {
    use super::LcCBB;
    use crate::aws_lc::CBB_add_asn1_bool;
    // An owned CBB with sufficient initial capacity serializes correctly.
    #[test]
    fn dynamic_vec() {
        let mut cbb = LcCBB::new(4);
        assert_eq!(1, unsafe { CBB_add_asn1_bool(cbb.as_mut_ptr(), 1) });
        let vec = cbb.into_vec().expect("be copied to buffer");
        assert_eq!(vec.as_slice(), &[1, 1, 255]);
    }
    // An owned CBB grows beyond its initial capacity as needed.
    #[test]
    fn dynamic_buffer_grows() {
        let mut cbb = LcCBB::new(1);
        assert_eq!(1, unsafe { CBB_add_asn1_bool(cbb.as_mut_ptr(), 1) });
        let vec = cbb.into_vec().expect("be copied to buffer");
        assert_eq!(vec.as_slice(), &[1, 1, 255]);
    }
    // A fixed CBB writes into the caller's buffer and reports the length.
    #[test]
    fn fixed_buffer() {
        let mut buffer = [0u8; 4];
        let mut cbb = LcCBB::new_from_slice(&mut buffer);
        assert_eq!(1, unsafe { CBB_add_asn1_bool(cbb.as_mut_ptr(), 1) });
        let out_len = cbb.finish().expect("cbb finishable");
        assert_eq!(&buffer[..out_len], &[1, 1, 255]);
    }
    // A fixed CBB must fail (not reallocate) when the buffer is too small.
    #[test]
    fn fixed_buffer_no_growth() {
        let mut buffer = [0u8; 1];
        let mut cbb = LcCBB::new_from_slice(&mut buffer);
        assert_ne!(1, unsafe { CBB_add_asn1_bool(cbb.as_mut_ptr(), 1) });
    }
}

13
vendor/aws-lc-rs/src/cbs.rs vendored Normal file
View File

@@ -0,0 +1,13 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{CBS_init, CBS};
use core::mem::MaybeUninit;
#[inline]
#[allow(non_snake_case)]
/// Builds a `CBS` (crypto byte string) cursor over `data`.
///
/// NOTE(review): the returned `CBS` holds a raw pointer into `data` but
/// carries no lifetime, so callers must ensure `data` outlives every use of
/// the returned value — confirm all call sites uphold this.
pub fn build_CBS(data: &[u8]) -> CBS {
    let mut cbs = MaybeUninit::<CBS>::uninit();
    // SAFETY: `CBS_init` fully initializes `cbs` with the pointer/length pair,
    // so `assume_init` afterwards is sound.
    unsafe { CBS_init(cbs.as_mut_ptr(), data.as_ptr(), data.len()) };
    unsafe { cbs.assume_init() }
}

1190
vendor/aws-lc-rs/src/cipher.rs vendored Normal file

File diff suppressed because it is too large Load Diff

342
vendor/aws-lc-rs/src/cipher/aes.rs vendored Normal file
View File

@@ -0,0 +1,342 @@
// Copyright 2018 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{
AES_cbc_encrypt, AES_cfb128_encrypt, AES_ctr128_encrypt, AES_ecb_encrypt, AES_DECRYPT,
AES_ENCRYPT, AES_KEY,
};
use crate::cipher::block::Block;
use crate::error::Unspecified;
use crate::fips::indicator_check;
use zeroize::Zeroize;
use super::{DecryptionContext, EncryptionContext, OperatingMode, SymmetricCipherKey};
/// Length of an AES-128 key in bytes.
pub const AES_128_KEY_LEN: usize = 16;
/// Length of an AES-192 key in bytes.
pub const AES_192_KEY_LEN: usize = 24;
/// Length of an AES-256 key in bytes.
pub const AES_256_KEY_LEN: usize = 32;
/// The number of bytes for an AES-CBC initialization vector (IV)
pub const AES_CBC_IV_LEN: usize = 16;
/// The number of bytes for an AES-CTR initialization vector (IV)
pub const AES_CTR_IV_LEN: usize = 16;
/// The number of bytes for an AES-CFB initialization vector (IV)
pub const AES_CFB_IV_LEN: usize = 16;
/// The number of bytes in an AES block (the same for all AES key sizes).
pub const AES_BLOCK_LEN: usize = 16;
// Encrypts a single 16-byte block in place with the raw AES cipher and
// returns it.
#[inline]
pub(crate) fn encrypt_block(aes_key: &AES_KEY, mut block: Block) -> Block {
    {
        // Scope the mutable borrow so `block` can be returned afterwards.
        let block_ref = block.as_mut();
        debug_assert_eq!(block_ref.len(), AES_BLOCK_LEN);
        // One-block ECB is exactly one raw AES block encryption.
        aes_ecb_encrypt(aes_key, block_ref);
    }
    block
}
/// Encrypts `in_out` in place using AES-CTR; the initial counter block comes
/// from `context`, and the returned `DecryptionContext` preserves it for the
/// decrypting side.
///
/// # Errors
/// `Unspecified` if the IV cannot be extracted from `context`.
pub(super) fn encrypt_ctr_mode(
    key: &SymmetricCipherKey,
    context: EncryptionContext,
    in_out: &mut [u8],
) -> Result<DecryptionContext, Unspecified> {
    // Only AES keys can reach this mode; any other variant is a caller bug.
    let (SymmetricCipherKey::Aes128 { enc_key, .. }
    | SymmetricCipherKey::Aes192 { enc_key, .. }
    | SymmetricCipherKey::Aes256 { enc_key, .. }) = &key
    else {
        unreachable!()
    };
    // Copy the counter block out of the context; it is advanced in place.
    let mut iv = {
        let mut iv = [0u8; AES_CTR_IV_LEN];
        iv.copy_from_slice((&context).try_into()?);
        iv
    };
    // Scratch block that holds keystream material during encryption.
    let mut buffer = [0u8; AES_BLOCK_LEN];
    aes_ctr128_encrypt(enc_key, &mut iv, &mut buffer, in_out);
    // Scrub both the counter and the keystream scratch block (previously only
    // `iv` was wiped): leftover keystream XORed with the ciphertext would
    // reveal plaintext bytes if it leaked.
    iv.zeroize();
    buffer.zeroize();
    Ok(context.into())
}
// CTR decryption is identical to encryption (XOR with the same keystream),
// so this simply delegates and returns the buffer for caller convenience.
pub(super) fn decrypt_ctr_mode<'in_out>(
    key: &SymmetricCipherKey,
    context: DecryptionContext,
    in_out: &'in_out mut [u8],
) -> Result<&'in_out mut [u8], Unspecified> {
    // it's the same in CTR, just providing a nice named wrapper to match
    encrypt_ctr_mode(key, context.into(), in_out).map(|_| in_out)
}
// Encrypts `in_out` in place using AES-CBC; the IV comes from `context` and
// the returned `DecryptionContext` preserves it for the decrypting side.
pub(super) fn encrypt_cbc_mode(
    key: &SymmetricCipherKey,
    context: EncryptionContext,
    in_out: &mut [u8],
) -> Result<DecryptionContext, Unspecified> {
    // Only AES keys can reach this mode; any other variant is a caller bug.
    let (SymmetricCipherKey::Aes128 { enc_key, .. }
    | SymmetricCipherKey::Aes192 { enc_key, .. }
    | SymmetricCipherKey::Aes256 { enc_key, .. }) = &key
    else {
        unreachable!()
    };
    // Copy the IV into a local buffer; it is advanced in place as chaining
    // state while encrypting.
    let mut iv = {
        let mut iv = [0u8; AES_CBC_IV_LEN];
        iv.copy_from_slice((&context).try_into()?);
        iv
    };
    aes_cbc_encrypt(enc_key, &mut iv, in_out);
    // Scrub the chaining state from the stack.
    iv.zeroize();
    Ok(context.into())
}
// Decrypts `in_out` in place using AES-CBC with the IV carried in `context`,
// returning the same buffer on success.
#[allow(clippy::needless_pass_by_value)]
pub(super) fn decrypt_cbc_mode<'in_out>(
    key: &SymmetricCipherKey,
    context: DecryptionContext,
    in_out: &'in_out mut [u8],
) -> Result<&'in_out mut [u8], Unspecified> {
    // CBC decryption uses the separately expanded decryption key schedule.
    let (SymmetricCipherKey::Aes128 { dec_key, .. }
    | SymmetricCipherKey::Aes192 { dec_key, .. }
    | SymmetricCipherKey::Aes256 { dec_key, .. }) = &key
    else {
        unreachable!()
    };
    // Copy the IV into a local buffer; it is advanced in place as chaining
    // state while decrypting.
    let mut iv = {
        let mut iv = [0u8; AES_CBC_IV_LEN];
        iv.copy_from_slice((&context).try_into()?);
        iv
    };
    aes_cbc_decrypt(dec_key, &mut iv, in_out);
    // Scrub the chaining state from the stack.
    iv.zeroize();
    Ok(in_out)
}
// Encrypts `in_out` in place using AES-CFB (currently CFB128 only); the IV
// comes from `context` and is preserved in the returned `DecryptionContext`.
#[allow(clippy::needless_pass_by_value)]
pub(super) fn encrypt_cfb_mode(
    key: &SymmetricCipherKey,
    mode: OperatingMode,
    context: EncryptionContext,
    in_out: &mut [u8],
) -> Result<DecryptionContext, Unspecified> {
    // Only AES keys can reach this mode; any other variant is a caller bug.
    let (SymmetricCipherKey::Aes128 { enc_key, .. }
    | SymmetricCipherKey::Aes192 { enc_key, .. }
    | SymmetricCipherKey::Aes256 { enc_key, .. }) = &key
    else {
        unreachable!()
    };
    // Copy the IV into a local buffer; it is advanced in place as feedback
    // state while encrypting.
    let mut iv = {
        let mut iv = [0u8; AES_CFB_IV_LEN];
        iv.copy_from_slice((&context).try_into()?);
        iv
    };
    // Dispatch on the CFB segment size; only CFB128 is wired up today.
    let cfb_encrypt: fn(&AES_KEY, &mut [u8], &mut [u8]) = match mode {
        // TODO: Hopefully support CFB1, and CFB8
        OperatingMode::CFB128 => aes_cfb128_encrypt,
        _ => unreachable!(),
    };
    cfb_encrypt(enc_key, &mut iv, in_out);
    // Scrub the feedback state from the stack.
    iv.zeroize();
    Ok(context.into())
}
/// Decrypts `in_out` in-place with AES-CFB, using the IV carried by `context`.
///
/// Note: CFB decryption still uses the AES *encryption* key schedule
/// (`enc_key`); the direction is selected inside `aes_cfb128_decrypt` via the
/// `AES_DECRYPT` flag. Only `OperatingMode::CFB128` is supported.
///
/// # Errors
/// Returns `Unspecified` if an IV of length `AES_CFB_IV_LEN` cannot be
/// extracted from `context`.
#[allow(clippy::needless_pass_by_value)]
pub(super) fn decrypt_cfb_mode<'in_out>(
    key: &SymmetricCipherKey,
    mode: OperatingMode,
    context: DecryptionContext,
    in_out: &'in_out mut [u8],
) -> Result<&'in_out mut [u8], Unspecified> {
    let (SymmetricCipherKey::Aes128 { enc_key, .. }
    | SymmetricCipherKey::Aes192 { enc_key, .. }
    | SymmetricCipherKey::Aes256 { enc_key, .. }) = &key
    else {
        unreachable!()
    };
    let mut iv = {
        let mut iv = [0u8; AES_CFB_IV_LEN];
        iv.copy_from_slice((&context).try_into()?);
        iv
    };
    // Dispatch on the CFB segment size; only CFB128 is wired up today.
    let cfb_decrypt: fn(&AES_KEY, &mut [u8], &mut [u8]) = match mode {
        // TODO: Hopefully support CFB1, and CFB8
        OperatingMode::CFB128 => aes_cfb128_decrypt,
        _ => unreachable!(),
    };
    cfb_decrypt(enc_key, &mut iv, in_out);
    iv.zeroize();
    Ok(in_out)
}
/// Encrypts `in_out` in-place with AES-ECB, one 16-byte block at a time.
///
/// ECB takes no IV, so `context` must be `EncryptionContext::None`; anything
/// else is a programming error. `in_out.len()` must already be a multiple of
/// `AES_BLOCK_LEN` (validated by the caller, per the comment below).
#[allow(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
pub(super) fn encrypt_ecb_mode(
    key: &SymmetricCipherKey,
    context: EncryptionContext,
    in_out: &mut [u8],
) -> Result<DecryptionContext, Unspecified> {
    if !matches!(context, EncryptionContext::None) {
        unreachable!();
    }
    let (SymmetricCipherKey::Aes128 { enc_key, .. }
    | SymmetricCipherKey::Aes192 { enc_key, .. }
    | SymmetricCipherKey::Aes256 { enc_key, .. }) = &key
    else {
        unreachable!()
    };
    // Encrypt each full 16-byte block independently (the defining — and
    // dangerous — property of ECB).
    let mut in_out_iter = in_out.chunks_exact_mut(AES_BLOCK_LEN);
    for block in in_out_iter.by_ref() {
        aes_ecb_encrypt(enc_key, block);
    }
    // This is a sanity check that should not happen. We validate in `encrypt` that in_out.len() % block_len == 0
    // for this mode.
    debug_assert!(in_out_iter.into_remainder().is_empty());
    Ok(context.into())
}
/// Decrypts `in_out` in-place with AES-ECB, one 16-byte block at a time.
///
/// ECB takes no IV, so `context` must be `DecryptionContext::None`; anything
/// else is a programming error. `in_out.len()` must already be a multiple of
/// `AES_BLOCK_LEN` (validated by the caller, per the comment below).
#[allow(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
pub(super) fn decrypt_ecb_mode<'in_out>(
    key: &SymmetricCipherKey,
    context: DecryptionContext,
    in_out: &'in_out mut [u8],
) -> Result<&'in_out mut [u8], Unspecified> {
    if !matches!(context, DecryptionContext::None) {
        unreachable!();
    }
    // ECB decryption uses the AES decryption key schedule.
    let (SymmetricCipherKey::Aes128 { dec_key, .. }
    | SymmetricCipherKey::Aes192 { dec_key, .. }
    | SymmetricCipherKey::Aes256 { dec_key, .. }) = &key
    else {
        unreachable!()
    };
    {
        let mut in_out_iter = in_out.chunks_exact_mut(AES_BLOCK_LEN);
        for block in in_out_iter.by_ref() {
            aes_ecb_decrypt(dec_key, block);
        }
        // This is a sanity check that should not fail. We validate in `decrypt` that in_out.len() % block_len == 0 for
        // this mode.
        debug_assert!(in_out_iter.into_remainder().is_empty());
    }
    Ok(in_out)
}
/// Thin FFI wrapper: encrypts a single AES block in-place via `AES_ecb_encrypt`.
///
/// Callers pass exactly one `AES_BLOCK_LEN`-byte block (see the
/// `chunks_exact_mut(AES_BLOCK_LEN)` loops in the ECB mode functions above).
fn aes_ecb_encrypt(key: &AES_KEY, in_out: &mut [u8]) {
    // SAFETY: FFI call. Source and destination pointers both come from
    // `in_out`, which is a live, exclusively-borrowed buffer; the C function
    // supports fully-overlapping (in-place) operation here.
    indicator_check!(unsafe {
        AES_ecb_encrypt(in_out.as_ptr(), in_out.as_mut_ptr(), key, AES_ENCRYPT);
    });
}
/// Thin FFI wrapper: decrypts a single AES block in-place via `AES_ecb_encrypt`
/// with the `AES_DECRYPT` direction flag.
fn aes_ecb_decrypt(key: &AES_KEY, in_out: &mut [u8]) {
    // SAFETY: FFI call. Source and destination pointers both come from
    // `in_out`, a live exclusively-borrowed buffer (in-place operation).
    indicator_check!(unsafe {
        AES_ecb_encrypt(in_out.as_ptr(), in_out.as_mut_ptr(), key, AES_DECRYPT);
    });
}
/// Thin FFI wrapper over `AES_ctr128_encrypt`: XORs `in_out` in-place with the
/// AES-CTR keystream derived from `key` and the counter block in `iv`.
///
/// `block_buffer` is scratch space the C API uses to hold the most recent
/// encrypted counter block; it is zeroized before returning since it contains
/// keystream material. `num` starts at 0, indicating no partial keystream
/// block is carried over from a previous call.
fn aes_ctr128_encrypt(key: &AES_KEY, iv: &mut [u8], block_buffer: &mut [u8], in_out: &mut [u8]) {
    let mut num: u32 = 0;
    // SAFETY: FFI call. `in_out` provides matching source/destination pointers
    // and length for an in-place transform; `iv` and `block_buffer` are live
    // mutable buffers the C function updates as it advances the counter.
    indicator_check!(unsafe {
        AES_ctr128_encrypt(
            in_out.as_ptr(),
            in_out.as_mut_ptr(),
            in_out.len(),
            key,
            iv.as_mut_ptr(),
            block_buffer.as_mut_ptr(),
            &mut num,
        );
    });
    // Scrub leftover keystream bytes from the scratch block.
    Zeroize::zeroize(block_buffer);
}
/// Thin FFI wrapper over `AES_cbc_encrypt` (encrypt direction).
///
/// `iv` is updated in place by the C function as blocks are chained; callers
/// are responsible for zeroizing it afterwards.
fn aes_cbc_encrypt(key: &AES_KEY, iv: &mut [u8], in_out: &mut [u8]) {
    // SAFETY: FFI call. `in_out` provides matching source/destination pointers
    // and length (in-place), and `iv` is a live mutable IV buffer.
    indicator_check!(unsafe {
        AES_cbc_encrypt(
            in_out.as_ptr(),
            in_out.as_mut_ptr(),
            in_out.len(),
            key,
            iv.as_mut_ptr(),
            AES_ENCRYPT,
        );
    });
}
/// Thin FFI wrapper over `AES_cbc_encrypt` with the `AES_DECRYPT` flag.
///
/// `iv` is updated in place by the C function as blocks are chained; callers
/// are responsible for zeroizing it afterwards.
fn aes_cbc_decrypt(key: &AES_KEY, iv: &mut [u8], in_out: &mut [u8]) {
    // SAFETY: FFI call. `in_out` provides matching source/destination pointers
    // and length (in-place), and `iv` is a live mutable IV buffer.
    indicator_check!(unsafe {
        AES_cbc_encrypt(
            in_out.as_ptr(),
            in_out.as_mut_ptr(),
            in_out.len(),
            key,
            iv.as_mut_ptr(),
            AES_DECRYPT,
        );
    });
}
/// Thin FFI wrapper over `AES_cfb128_encrypt` (encrypt direction).
///
/// `num` starts at 0 (no partial feedback block carried over); `iv` is
/// updated in place as the feedback register advances.
fn aes_cfb128_encrypt(key: &AES_KEY, iv: &mut [u8], in_out: &mut [u8]) {
    let mut num: i32 = 0;
    // SAFETY: FFI call. `in_out` provides matching source/destination pointers
    // and length (in-place), and `iv` is a live mutable feedback buffer.
    indicator_check!(unsafe {
        AES_cfb128_encrypt(
            in_out.as_ptr(),
            in_out.as_mut_ptr(),
            in_out.len(),
            key,
            iv.as_mut_ptr(),
            &mut num,
            AES_ENCRYPT,
        );
    });
}
/// Thin FFI wrapper over `AES_cfb128_encrypt` with the `AES_DECRYPT` flag.
///
/// Note the same C entry point serves both directions; only the flag differs.
/// `num` starts at 0; `iv` is updated in place as the feedback register
/// advances.
fn aes_cfb128_decrypt(key: &AES_KEY, iv: &mut [u8], in_out: &mut [u8]) {
    let mut num: i32 = 0;
    // SAFETY: FFI call. `in_out` provides matching source/destination pointers
    // and length (in-place), and `iv` is a live mutable feedback buffer.
    indicator_check!(unsafe {
        AES_cfb128_encrypt(
            in_out.as_ptr(),
            in_out.as_mut_ptr(),
            in_out.len(),
            key,
            iv.as_mut_ptr(),
            &mut num,
            AES_DECRYPT,
        );
    });
}

74
vendor/aws-lc-rs/src/cipher/block.rs vendored Normal file
View File

@@ -0,0 +1,74 @@
// Copyright 2018 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
/// An array of 16 bytes that can (in the `x86_64` and `AAarch64` ABIs, at least)
/// be efficiently passed by value and returned by value (i.e. in registers),
/// and which meets the alignment requirements of `u32` and `u64` (at least)
/// for the target.
#[repr(C)]
#[derive(Copy, Clone)]
pub(crate) struct Block {
    // Stored as two u64 words (16 bytes total) purely for alignment and
    // register-passing efficiency; all access goes through the byte views below.
    subblocks: [u64; 2],
}
/// Block length
pub(crate) const BLOCK_LEN: usize = 16;
impl Block {
    /// Returns an all-zero block.
    #[inline]
    pub(crate) fn zero() -> Self {
        Self { subblocks: [0, 0] }
    }
}
impl From<[u8; BLOCK_LEN]> for Block {
    #[inline]
    fn from(bytes: [u8; BLOCK_LEN]) -> Self {
        // SAFETY: `Block` is `#[repr(C)]` with a single `[u64; 2]` field, so it
        // is exactly 16 bytes, and every bit pattern is a valid `u64` pair.
        unsafe { core::mem::transmute(bytes) }
    }
}
impl AsRef<[u8; BLOCK_LEN]> for Block {
    #[allow(clippy::transmute_ptr_to_ptr)]
    #[inline]
    fn as_ref(&self) -> &[u8; BLOCK_LEN] {
        // SAFETY: same size (16 bytes); `u8` has weaker alignment than `Block`,
        // and every bit pattern is a valid `[u8; 16]`.
        unsafe { core::mem::transmute(self) }
    }
}
impl AsMut<[u8; BLOCK_LEN]> for Block {
    #[allow(clippy::transmute_ptr_to_ptr)]
    #[inline]
    fn as_mut(&mut self) -> &mut [u8; BLOCK_LEN] {
        // SAFETY: as in `as_ref`; writes through the byte view produce valid
        // `u64` values since all bit patterns are valid.
        unsafe { core::mem::transmute(self) }
    }
}
#[cfg(test)]
mod tests {
    /// Cloning a `Block` must yield a byte-for-byte identical copy.
    #[test]
    fn test_block_clone() {
        use super::Block;
        let original = Block::from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
        #[allow(clippy::clone_on_copy)]
        let copy = original.clone();
        // Compare the whole byte views at once rather than element-wise.
        assert_eq!(original.as_ref(), copy.as_ref());
    }
    /// Same as above, but exercising the mutable byte view.
    #[test]
    fn test_block_clone_mut_ref() {
        use super::Block;
        let mut original = Block::from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
        #[allow(clippy::clone_on_copy)]
        let mut copy = original.clone();
        assert_eq!(original.as_mut(), copy.as_mut());
    }
}

185
vendor/aws-lc-rs/src/cipher/chacha.rs vendored Normal file
View File

@@ -0,0 +1,185 @@
// Copyright 2016 Brian Smith.
// Portions Copyright (c) 2016, Google Inc.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::CRYPTO_chacha_20;
use crate::cipher::block::{Block, BLOCK_LEN};
use zeroize::Zeroize;
use crate::error;
// ChaCha20 key size: 256 bits.
pub(crate) const KEY_LEN: usize = 32usize;
// ChaCha20 nonce size: 96 bits (12 bytes), per the IETF variant.
pub(crate) const NONCE_LEN: usize = 96 / 8;
/// A raw 256-bit ChaCha20 key; zeroized on drop.
pub(crate) struct ChaCha20Key(pub(super) [u8; KEY_LEN]);
impl From<[u8; KEY_LEN]> for ChaCha20Key {
    fn from(bytes: [u8; KEY_LEN]) -> Self {
        ChaCha20Key(bytes)
    }
}
impl Drop for ChaCha20Key {
    fn drop(&mut self) {
        // Scrub key material from memory when the key goes out of scope.
        self.0.zeroize();
    }
}
#[allow(clippy::needless_pass_by_value)]
impl ChaCha20Key {
    /// Encrypts (or, equivalently, decrypts) `in_out` in-place with this key,
    /// the given nonce, and initial block counter `ctr`.
    #[inline]
    pub(crate) fn encrypt_in_place(&self, nonce: &[u8; NONCE_LEN], in_out: &mut [u8], ctr: u32) {
        encrypt_in_place_chacha20(self, nonce, in_out, ctr);
    }
}
/// Encrypts a single 16-byte block with ChaCha20 and returns the ciphertext
/// block, leaving the input untouched.
///
/// Marks the FIPS service status as unapproved (ChaCha20 is not a FIPS
/// approved algorithm).
///
/// # Errors
/// Propagates `Unspecified` from `encrypt_chacha20` (output shorter than
/// input — cannot happen here since both buffers are `BLOCK_LEN`).
#[inline]
#[allow(clippy::needless_pass_by_value)]
pub(crate) fn encrypt_block_chacha20(
    key: &ChaCha20Key,
    block: Block,
    nonce: &[u8; NONCE_LEN],
    counter: u32,
) -> Result<Block, error::Unspecified> {
    let mut cipher_text = [0u8; BLOCK_LEN];
    encrypt_chacha20(
        key,
        block.as_ref().as_slice(),
        &mut cipher_text,
        nonce,
        counter,
    )?;
    crate::fips::set_fips_service_status_unapproved();
    Ok(Block::from(cipher_text))
}
/// Encrypts `plaintext` into `ciphertext` with ChaCha20 using `key`, `nonce`,
/// and the initial block `counter`.
///
/// # Errors
/// Returns `Unspecified` if `ciphertext` is shorter than `plaintext`.
#[inline]
pub(crate) fn encrypt_chacha20(
    key: &ChaCha20Key,
    plaintext: &[u8],
    ciphertext: &mut [u8],
    nonce: &[u8; NONCE_LEN],
    counter: u32,
) -> Result<(), error::Unspecified> {
    // Guard before the FFI call: the C function writes `plaintext.len()`
    // bytes into `ciphertext`.
    if ciphertext.len() < plaintext.len() {
        return Err(error::Unspecified);
    }
    let key_bytes = &key.0;
    // SAFETY: FFI call. The length check above guarantees the destination can
    // hold `plaintext.len()` bytes; key and nonce are fixed-size arrays of the
    // lengths the C API expects (32 and 12 bytes).
    unsafe {
        CRYPTO_chacha_20(
            ciphertext.as_mut_ptr(),
            plaintext.as_ptr(),
            plaintext.len(),
            key_bytes.as_ptr(),
            nonce.as_ptr(),
            counter,
        );
    };
    Ok(())
}
/// Encrypts (or decrypts — the operation is symmetric) `in_out` in-place with
/// ChaCha20, then marks the FIPS service status as unapproved.
#[inline]
pub(crate) fn encrypt_in_place_chacha20(
    key: &ChaCha20Key,
    nonce: &[u8; NONCE_LEN],
    in_out: &mut [u8],
    counter: u32,
) {
    let key_bytes = &key.0;
    // SAFETY: FFI call. Source and destination are the same live buffer
    // (in-place operation), with the length taken from that buffer; key and
    // nonce are fixed-size arrays of the lengths the C API expects.
    unsafe {
        CRYPTO_chacha_20(
            in_out.as_mut_ptr(),
            in_out.as_ptr(),
            in_out.len(),
            key_bytes.as_ptr(),
            nonce.as_ptr(),
            counter,
        );
    }
    crate::fips::set_fips_service_status_unapproved();
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{test, test_file};
    const MAX_ALIGNMENT: usize = 15;
    // Verifies the encryption is successful when done on overlapping buffers.
    //
    // On some branches of the 32-bit x86 and ARM assembly code the in-place
    // operation fails in some situations where the input/output buffers are
    // not exactly overlapping. Such failures are dependent not only on the
    // degree of overlapping but also the length of the data. `encrypt_within`
    // works around that.
    #[test]
    fn chacha20_test() {
        // Reuse a buffer to avoid slowing down the tests with allocations.
        let mut buf = vec![0u8; 1300];
        test::run(
            test_file!("data/chacha_tests.txt"),
            move |section, test_case| {
                assert_eq!(section, "");
                let key = test_case.consume_bytes("Key");
                let key: &[u8; KEY_LEN] = key.as_slice().try_into()?;
                let key = ChaCha20Key::from(*key);
                #[allow(clippy::cast_possible_truncation)]
                let ctr = test_case.consume_usize("Ctr") as u32;
                let nonce: [u8; NONCE_LEN] = test_case.consume_bytes("Nonce").try_into().unwrap();
                let input = test_case.consume_bytes("Input");
                let output = test_case.consume_bytes("Output");
                // Run the test case over all prefixes of the input because the
                // behavior of ChaCha20 implementation changes dependent on the
                // length of the input.
                for len in 0..=input.len() {
                    chacha20_test_case_inner(
                        &key,
                        nonce,
                        ctr,
                        &input[..len],
                        &output[..len],
                        &mut buf,
                    );
                }
                Ok(())
            },
        );
    }
    /// Runs one key/nonce/counter vector at every buffer alignment offset in
    /// `0..=MAX_ALIGNMENT`, asserting the in-place output matches `expected`.
    fn chacha20_test_case_inner(
        key: &ChaCha20Key,
        nonce: [u8; NONCE_LEN],
        ctr: u32,
        input: &[u8],
        expected: &[u8],
        buf: &mut [u8],
    ) {
        // Straightforward encryption into disjoint buffers is computed
        // correctly.
        const ARBITRARY: u8 = 123;
        for alignment in 0..=MAX_ALIGNMENT {
            buf[..alignment].fill(ARBITRARY);
            // Fix: offset the working slice by `alignment` so each iteration
            // actually exercises a different pointer alignment. Previously the
            // slice always started at offset 0, which made the loop (and the
            // ARBITRARY prefix fill) a no-op for its stated purpose.
            let buf = &mut buf[alignment..][..input.len()];
            buf.copy_from_slice(input);
            let nonce = &nonce;
            key.encrypt_in_place(nonce, buf, ctr);
            assert_eq!(
                &buf[..input.len()],
                expected,
                "Failed on alignment: {alignment}",
            );
        }
    }
}

162
vendor/aws-lc-rs/src/cipher/key.rs vendored Normal file
View File

@@ -0,0 +1,162 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{AES_set_decrypt_key, AES_set_encrypt_key, AES_KEY};
use crate::cipher::block::Block;
use crate::cipher::chacha::ChaCha20Key;
use crate::cipher::{AES_128_KEY_LEN, AES_192_KEY_LEN, AES_256_KEY_LEN};
use crate::error::Unspecified;
use core::mem::{size_of, MaybeUninit};
use core::ptr::copy_nonoverlapping;
// TODO: Uncomment when MSRV >= 1.64
// use core::ffi::c_uint;
use std::os::raw::c_uint;
use zeroize::Zeroize;
/// Key material for the symmetric ciphers in this module: expanded AES key
/// schedules (both encrypt and decrypt) or a raw ChaCha20 key.
pub(crate) enum SymmetricCipherKey {
    Aes128 { enc_key: AES_KEY, dec_key: AES_KEY },
    Aes192 { enc_key: AES_KEY, dec_key: AES_KEY },
    Aes256 { enc_key: AES_KEY, dec_key: AES_KEY },
    ChaCha20 { raw_key: ChaCha20Key },
}
// SAFETY: the variants own their key data by value. NOTE(review): assumes
// `AES_KEY` holds no thread-affine state — consistent with its use here as
// plain expanded-key data; confirm against the aws-lc bindings.
unsafe impl Send for SymmetricCipherKey {}
// The AES_KEY value is only used as a `*const AES_KEY` in calls to `AES_encrypt`.
// SAFETY: shared references only ever read the key material (see note above),
// so concurrent shared access is sound.
unsafe impl Sync for SymmetricCipherKey {}
impl Drop for SymmetricCipherKey {
    /// Zeroizes key material on drop so expanded AES schedules don't linger
    /// in memory.
    fn drop(&mut self) {
        // Aes128Key, Aes256Key and ChaCha20Key implement Drop separately.
        match self {
            SymmetricCipherKey::Aes128 { enc_key, dec_key }
            | SymmetricCipherKey::Aes192 { enc_key, dec_key }
            | SymmetricCipherKey::Aes256 { enc_key, dec_key } => unsafe {
                // SAFETY: reinterpret each AES_KEY as a byte array of the same
                // size so `zeroize` can scrub it. The pointers come from live
                // exclusive references, so the casts and `as_mut` are sound;
                // writing zero bytes over an FFI key-schedule struct is fine
                // since it is about to be dropped.
                let enc_bytes: &mut [u8; size_of::<AES_KEY>()] = (enc_key as *mut AES_KEY)
                    .cast::<[u8; size_of::<AES_KEY>()]>()
                    .as_mut()
                    .unwrap();
                enc_bytes.zeroize();
                let dec_bytes: &mut [u8; size_of::<AES_KEY>()] = (dec_key as *mut AES_KEY)
                    .cast::<[u8; size_of::<AES_KEY>()]>()
                    .as_mut()
                    .unwrap();
                dec_bytes.zeroize();
            },
            // ChaCha20Key zeroizes itself in its own Drop impl.
            SymmetricCipherKey::ChaCha20 { .. } => {}
        }
    }
}
impl SymmetricCipherKey {
    /// Expands `key_bytes` into AES encryption and decryption key schedules.
    ///
    /// Returns `Unspecified` if either `AES_set_encrypt_key` or
    /// `AES_set_decrypt_key` reports failure (non-zero return).
    fn aes(key_bytes: &[u8]) -> Result<(AES_KEY, AES_KEY), Unspecified> {
        let mut enc_key = MaybeUninit::<AES_KEY>::uninit();
        let mut dec_key = MaybeUninit::<AES_KEY>::uninit();
        #[allow(clippy::cast_possible_truncation)]
        // SAFETY: FFI call; `key_bytes` pointer plus bit length describe a
        // valid buffer, and `enc_key` points to writable AES_KEY storage.
        if unsafe {
            0 != AES_set_encrypt_key(
                key_bytes.as_ptr(),
                (key_bytes.len() * 8) as c_uint,
                enc_key.as_mut_ptr(),
            )
        } {
            return Err(Unspecified);
        }
        #[allow(clippy::cast_possible_truncation)]
        // SAFETY: as above, for the decryption key schedule.
        if unsafe {
            0 != AES_set_decrypt_key(
                key_bytes.as_ptr(),
                (key_bytes.len() * 8) as c_uint,
                dec_key.as_mut_ptr(),
            )
        } {
            return Err(Unspecified);
        }
        // SAFETY: both FFI calls returned success, so both key schedules have
        // been fully initialized by the C library.
        unsafe { Ok((enc_key.assume_init(), dec_key.assume_init())) }
    }
    /// Builds an AES-128 key; `key_bytes` must be exactly 16 bytes.
    pub(crate) fn aes128(key_bytes: &[u8]) -> Result<Self, Unspecified> {
        if key_bytes.len() != AES_128_KEY_LEN {
            return Err(Unspecified);
        }
        let (enc_key, dec_key) = SymmetricCipherKey::aes(key_bytes)?;
        Ok(SymmetricCipherKey::Aes128 { enc_key, dec_key })
    }
    /// Builds an AES-192 key; `key_bytes` must be exactly 24 bytes.
    pub(crate) fn aes192(key_bytes: &[u8]) -> Result<Self, Unspecified> {
        if key_bytes.len() != AES_192_KEY_LEN {
            return Err(Unspecified);
        }
        let (enc_key, dec_key) = SymmetricCipherKey::aes(key_bytes)?;
        Ok(SymmetricCipherKey::Aes192 { enc_key, dec_key })
    }
    /// Builds an AES-256 key; `key_bytes` must be exactly 32 bytes.
    pub(crate) fn aes256(key_bytes: &[u8]) -> Result<Self, Unspecified> {
        if key_bytes.len() != AES_256_KEY_LEN {
            return Err(Unspecified);
        }
        let (enc_key, dec_key) = SymmetricCipherKey::aes(key_bytes)?;
        Ok(SymmetricCipherKey::Aes256 { enc_key, dec_key })
    }
    /// Builds a ChaCha20 key; `key_bytes` must be exactly 32 bytes.
    pub(crate) fn chacha20(key_bytes: &[u8]) -> Result<Self, Unspecified> {
        if key_bytes.len() != 32 {
            return Err(Unspecified);
        }
        let mut kb = MaybeUninit::<[u8; 32]>::uninit();
        // SAFETY: the length check above guarantees `key_bytes` has exactly 32
        // bytes, and `kb` is 32 bytes of writable storage; the copy fully
        // initializes it before `assume_init`.
        unsafe {
            copy_nonoverlapping(key_bytes.as_ptr(), kb.as_mut_ptr().cast(), 32);
            Ok(SymmetricCipherKey::ChaCha20 {
                raw_key: ChaCha20Key(kb.assume_init()),
            })
        }
    }
    /// Encrypts a single block with the AES encryption key schedule.
    ///
    /// # Panics
    /// Panics if called on a `ChaCha20` key, which has no block-encrypt
    /// operation.
    #[allow(dead_code)]
    #[inline]
    pub(crate) fn encrypt_block(&self, block: Block) -> Block {
        match self {
            SymmetricCipherKey::Aes128 { enc_key, .. }
            | SymmetricCipherKey::Aes192 { enc_key, .. }
            | SymmetricCipherKey::Aes256 { enc_key, .. } => {
                super::aes::encrypt_block(enc_key, block)
            }
            SymmetricCipherKey::ChaCha20 { .. } => panic!("Unsupported algorithm!"),
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::cipher::block::{Block, BLOCK_LEN};
    use crate::cipher::key::SymmetricCipherKey;
    use crate::test::from_hex;
    /// FIPS-197 Appendix C.1 known-answer test for a single AES-128 block.
    #[test]
    fn test_encrypt_block_aes_128() {
        let key = from_hex("000102030405060708090a0b0c0d0e0f").unwrap();
        let input = from_hex("00112233445566778899aabbccddeeff").unwrap();
        let expected_result = from_hex("69c4e0d86a7b0430d8cdb78070b4c55a").unwrap();
        let input_block: [u8; BLOCK_LEN] = <[u8; BLOCK_LEN]>::try_from(input).unwrap();
        let aes128 = SymmetricCipherKey::aes128(key.as_slice()).unwrap();
        let result = aes128.encrypt_block(Block::from(input_block));
        assert_eq!(expected_result.as_slice(), result.as_ref());
    }
    /// FIPS-197 Appendix C.3 known-answer test for a single AES-256 block.
    #[test]
    fn test_encrypt_block_aes_256() {
        let key =
            from_hex("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f").unwrap();
        let input = from_hex("00112233445566778899aabbccddeeff").unwrap();
        let expected_result = from_hex("8ea2b7ca516745bfeafc49904b496089").unwrap();
        let input_block: [u8; BLOCK_LEN] = <[u8; BLOCK_LEN]>::try_from(input).unwrap();
        // Fix: local was misleadingly named `aes128` even though it holds an
        // AES-256 key.
        let aes256 = SymmetricCipherKey::aes256(key.as_slice()).unwrap();
        let result = aes256.encrypt_block(Block::from(input_block));
        assert_eq!(expected_result.as_slice(), result.as_ref());
    }
}

537
vendor/aws-lc-rs/src/cipher/padded.rs vendored Normal file
View File

@@ -0,0 +1,537 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::cipher;
use crate::cipher::key::SymmetricCipherKey;
use crate::cipher::{
Algorithm, DecryptionContext, EncryptionContext, OperatingMode, UnboundCipherKey,
MAX_CIPHER_BLOCK_LEN,
};
use crate::error::Unspecified;
use core::fmt::Debug;
/// The cipher block padding strategy.
#[non_exhaustive]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(crate) enum PaddingStrategy {
    /// ISO 10126 padding. For compatibility purposes only. Applies non-random PKCS7 padding.
    ISO10126,
    /// PKCS#7 Padding. ([See RFC 5652](https://datatracker.ietf.org/doc/html/rfc5652#section-6.3))
    PKCS7,
}
impl PaddingStrategy {
    /// Appends padding to `in_out` so its length becomes a multiple of
    /// `block_len`. A full block of padding is added when the input is
    /// already block-aligned (padding_size == block_len).
    ///
    /// # Errors
    /// Returns `Unspecified` if the padding size does not fit in a `u8`
    /// (i.e. `block_len` > 255).
    fn add_padding<InOut>(self, block_len: usize, in_out: &mut InOut) -> Result<(), Unspecified>
    where
        InOut: AsMut<[u8]> + for<'in_out> Extend<&'in_out u8>,
    {
        match self {
            // PKCS7 padding can be unpadded as ISO 10126 padding
            PaddingStrategy::ISO10126 | PaddingStrategy::PKCS7 => {
                let mut padding_buffer = [0u8; MAX_CIPHER_BLOCK_LEN];
                let in_out_len = in_out.as_mut().len();
                // This implements PKCS#7 padding scheme, used by aws-lc if we were using EVP_CIPHER API's
                let remainder = in_out_len % block_len;
                let padding_size = block_len - remainder;
                let v: u8 = padding_size.try_into().map_err(|_| Unspecified)?;
                padding_buffer.fill(v);
                // Possible heap allocation here :(
                in_out.extend(padding_buffer[0..padding_size].iter());
            }
        }
        Ok(())
    }
    /// Strips padding from `in_out`, returning the unpadded prefix.
    ///
    /// NOTE(review): the padding validation below is not constant-time (it
    /// returns on the first mismatching byte), so in an online setting it
    /// could act as a padding oracle — presumably acceptable for this API's
    /// intended use; confirm with the crate's threat model.
    ///
    /// # Errors
    /// Returns `Unspecified` if the buffer is shorter than one block, the
    /// final byte is not a valid padding length, or (PKCS#7 only) the
    /// padding bytes are inconsistent.
    fn remove_padding(self, block_len: usize, in_out: &mut [u8]) -> Result<&mut [u8], Unspecified> {
        if in_out.is_empty() || in_out.len() < block_len {
            return Err(Unspecified);
        }
        match self {
            PaddingStrategy::ISO10126 => {
                let padding: u8 = in_out[in_out.len() - 1];
                if padding == 0 || padding as usize > block_len {
                    return Err(Unspecified);
                }
                // ISO 10126 padding is a random padding scheme, so we cannot verify the padding bytes
                let final_len = in_out.len() - padding as usize;
                Ok(&mut in_out[0..final_len])
            }
            PaddingStrategy::PKCS7 => {
                let block_size: u8 = block_len.try_into().map_err(|_| Unspecified)?;
                let padding: u8 = in_out[in_out.len() - 1];
                if padding == 0 || padding > block_size {
                    return Err(Unspecified);
                }
                // PKCS#7 requires every padding byte to equal the pad length.
                for item in in_out.iter().skip(in_out.len() - padding as usize) {
                    if *item != padding {
                        return Err(Unspecified);
                    }
                }
                let final_len = in_out.len() - padding as usize;
                Ok(&mut in_out[0..final_len])
            }
        }
    }
}
/// A cipher encryption key that performs block padding.
pub struct PaddedBlockEncryptingKey {
    // The cipher algorithm (e.g. AES-128 / AES-256) this key was built for.
    algorithm: &'static Algorithm,
    // The expanded key material.
    key: SymmetricCipherKey,
    // Block cipher mode of operation (CBC or ECB for padded keys).
    mode: OperatingMode,
    // Padding scheme applied before encryption.
    padding: PaddingStrategy,
}
impl PaddedBlockEncryptingKey {
    /// Constructs a new `PaddedBlockEncryptingKey` cipher with chaining block cipher (CBC) mode.
    /// Plaintext data is padded following the PKCS#7 scheme.
    ///
    // # FIPS
    // Use this function with an `UnboundCipherKey` constructed with one of the following algorithms:
    // * `AES_128`
    // * `AES_256`
    //
    /// # Errors
    /// * [`Unspecified`]: Returned if there is an error constructing a `PaddedBlockEncryptingKey`.
    pub fn cbc_pkcs7(key: UnboundCipherKey) -> Result<Self, Unspecified> {
        Self::new(key, OperatingMode::CBC, PaddingStrategy::PKCS7)
    }
    /// Constructs a new `PaddedBlockEncryptingKey` cipher with electronic code book (ECB) mode.
    /// Plaintext data is padded following the PKCS#7 scheme.
    ///
    /// # ☠️ DANGER ☠️
    /// Offered for compatibility purposes only. This is an extremely dangerous mode, and
    /// very likely not what you want to use.
    ///
    /// # Errors
    /// * [`Unspecified`]: Returned if there is an error constructing a `PaddedBlockEncryptingKey`.
    pub fn ecb_pkcs7(key: UnboundCipherKey) -> Result<Self, Unspecified> {
        Self::new(key, OperatingMode::ECB, PaddingStrategy::PKCS7)
    }
    // Shared constructor: consumes the unbound key and binds it to a mode and
    // padding strategy.
    #[allow(clippy::unnecessary_wraps)]
    fn new(
        key: UnboundCipherKey,
        mode: OperatingMode,
        padding: PaddingStrategy,
    ) -> Result<PaddedBlockEncryptingKey, Unspecified> {
        let algorithm = key.algorithm();
        let key = key.try_into()?;
        Ok(Self {
            algorithm,
            key,
            mode,
            padding,
        })
    }
    /// Returns the cipher algorithm.
    #[must_use]
    pub fn algorithm(&self) -> &Algorithm {
        self.algorithm
    }
    /// Returns the cipher operating mode.
    #[must_use]
    pub fn mode(&self) -> OperatingMode {
        self.mode
    }
    /// Pads and encrypts data provided in `in_out` in-place.
    /// Returns a reference to the encrypted data.
    ///
    /// # Errors
    /// * [`Unspecified`]: Returned if encryption fails.
    pub fn encrypt<InOut>(&self, in_out: &mut InOut) -> Result<DecryptionContext, Unspecified>
    where
        InOut: AsMut<[u8]> + for<'a> Extend<&'a u8>,
    {
        // Generate a fresh (e.g. random-IV) encryption context for this call.
        let context = self.algorithm.new_encryption_context(self.mode)?;
        self.less_safe_encrypt(in_out, context)
    }
    /// Pads and encrypts data provided in `in_out` in-place.
    /// Returns a reference to the encrypted data.
    ///
    /// # Errors
    /// * [`Unspecified`]: Returned if encryption fails.
    pub fn less_safe_encrypt<InOut>(
        &self,
        in_out: &mut InOut,
        context: EncryptionContext,
    ) -> Result<DecryptionContext, Unspecified>
    where
        InOut: AsMut<[u8]> + for<'a> Extend<&'a u8>,
    {
        // Reject contexts that don't match this algorithm/mode pairing
        // (e.g. an IV for ECB, or a wrong-sized IV).
        if !self
            .algorithm()
            .is_valid_encryption_context(self.mode, &context)
        {
            return Err(Unspecified);
        }
        // Pad first so the cipher sees a block-aligned buffer.
        self.padding
            .add_padding(self.algorithm().block_len(), in_out)?;
        cipher::encrypt(
            self.algorithm(),
            &self.key,
            self.mode,
            in_out.as_mut(),
            context,
        )
    }
}
impl Debug for PaddedBlockEncryptingKey {
    /// Formats the key's metadata for diagnostics; the key material itself is
    /// intentionally omitted (`finish_non_exhaustive` renders a trailing `..`).
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut builder = f.debug_struct("PaddedBlockEncryptingKey");
        builder.field("algorithm", &self.algorithm);
        builder.field("mode", &self.mode);
        builder.field("padding", &self.padding);
        builder.finish_non_exhaustive()
    }
}
/// A cipher decryption key that performs block padding.
pub struct PaddedBlockDecryptingKey {
    // The cipher algorithm (e.g. AES-128 / AES-256) this key was built for.
    algorithm: &'static Algorithm,
    // The expanded key material.
    key: SymmetricCipherKey,
    // Block cipher mode of operation (CBC or ECB for padded keys).
    mode: OperatingMode,
    // Padding scheme stripped after decryption.
    padding: PaddingStrategy,
}
impl PaddedBlockDecryptingKey {
    /// Constructs a new `PaddedBlockDecryptingKey` cipher with chaining block cipher (CBC) mode.
    /// Decrypted data is unpadded following the PKCS#7 scheme.
    ///
    // # FIPS
    // Use this function with an `UnboundCipherKey` constructed with one of the following algorithms:
    // * `AES_128`
    // * `AES_256`
    //
    /// # Errors
    /// * [`Unspecified`]: Returned if there is an error constructing the `PaddedBlockDecryptingKey`.
    pub fn cbc_pkcs7(key: UnboundCipherKey) -> Result<Self, Unspecified> {
        Self::new(key, OperatingMode::CBC, PaddingStrategy::PKCS7)
    }
    /// Constructs a new `PaddedBlockDecryptingKey` cipher with chaining block cipher (CBC) mode.
    /// Decrypted data is unpadded following the ISO 10126 scheme
    /// (compatible with PKCS#7 and ANSI X.923).
    ///
    /// Offered for compatibility purposes only.
    ///
    // # FIPS
    // Use this function with an `UnboundCipherKey` constructed with one of the following algorithms:
    // * `AES_128`
    // * `AES_256`
    //
    /// # Errors
    /// * [`Unspecified`]: Returned if there is an error constructing the `PaddedBlockDecryptingKey`.
    pub fn cbc_iso10126(key: UnboundCipherKey) -> Result<Self, Unspecified> {
        Self::new(key, OperatingMode::CBC, PaddingStrategy::ISO10126)
    }
    /// Constructs a new `PaddedBlockDecryptingKey` cipher with electronic code book (ECB) mode.
    /// Decrypted data is unpadded following the PKCS#7 scheme.
    ///
    /// # ☠️ DANGER ☠️
    /// Offered for compatibility purposes only. This is an extremely dangerous mode, and
    /// very likely not what you want to use.
    ///
    // # FIPS
    // Use this function with an `UnboundCipherKey` constructed with one of the following algorithms:
    // * `AES_128`
    // * `AES_256`
    //
    /// # Errors
    /// * [`Unspecified`]: Returned if there is an error constructing the `PaddedBlockDecryptingKey`.
    pub fn ecb_pkcs7(key: UnboundCipherKey) -> Result<Self, Unspecified> {
        Self::new(key, OperatingMode::ECB, PaddingStrategy::PKCS7)
    }
    // Shared constructor: consumes the unbound key and binds it to a mode and
    // padding strategy.
    #[allow(clippy::unnecessary_wraps)]
    fn new(
        key: UnboundCipherKey,
        mode: OperatingMode,
        padding: PaddingStrategy,
    ) -> Result<PaddedBlockDecryptingKey, Unspecified> {
        let algorithm = key.algorithm();
        let key = key.try_into()?;
        Ok(PaddedBlockDecryptingKey {
            algorithm,
            key,
            mode,
            padding,
        })
    }
    /// Returns the cipher algorithm.
    #[must_use]
    pub fn algorithm(&self) -> &Algorithm {
        self.algorithm
    }
    /// Returns the cipher operating mode.
    #[must_use]
    pub fn mode(&self) -> OperatingMode {
        self.mode
    }
    /// Decrypts and unpads data provided in `in_out` in-place.
    /// Returns a reference to the decrypted data.
    ///
    /// # Errors
    /// * [`Unspecified`]: Returned if decryption fails.
    pub fn decrypt<'in_out>(
        &self,
        in_out: &'in_out mut [u8],
        context: DecryptionContext,
    ) -> Result<&'in_out mut [u8], Unspecified> {
        // Reject contexts that don't match this algorithm/mode pairing.
        if !self
            .algorithm()
            .is_valid_decryption_context(self.mode, &context)
        {
            return Err(Unspecified);
        }
        let block_len = self.algorithm().block_len();
        let padding = self.padding;
        // Decrypt first, then strip the padding from the plaintext.
        let mut in_out = cipher::decrypt(self.algorithm, &self.key, self.mode, in_out, context)?;
        in_out = padding.remove_padding(block_len, in_out)?;
        Ok(in_out)
    }
}
impl Debug for PaddedBlockDecryptingKey {
    /// Formats the key's metadata for diagnostics; the key material itself is
    /// intentionally omitted (`finish_non_exhaustive` renders a trailing `..`).
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut builder = f.debug_struct("PaddedBlockDecryptingKey");
        builder.field("algorithm", &self.algorithm);
        builder.field("mode", &self.mode);
        builder.field("padding", &self.padding);
        builder.finish_non_exhaustive()
    }
}
// Round-trip and known-answer tests for the padded block cipher keys.
#[cfg(test)]
mod tests {
    use crate::cipher::padded::PaddingStrategy;
    use crate::cipher::{
        Algorithm, EncryptionContext, OperatingMode, PaddedBlockDecryptingKey,
        PaddedBlockEncryptingKey, UnboundCipherKey, AES_128, AES_256,
    };
    use crate::iv::FixedLength;
    use crate::test::from_hex;
    // Encrypt-then-decrypt round trip for an n-byte input of bytes 0..n,
    // asserting the recovered plaintext matches the input.
    fn helper_test_padded_cipher_n_bytes(
        key: &[u8],
        alg: &'static Algorithm,
        mode: OperatingMode,
        padding: PaddingStrategy,
        n: usize,
    ) {
        let mut input: Vec<u8> = Vec::with_capacity(n);
        for i in 0..n {
            let byte: u8 = i.try_into().unwrap();
            input.push(byte);
        }
        let cipher_key = UnboundCipherKey::new(alg, key).unwrap();
        let encrypting_key = PaddedBlockEncryptingKey::new(cipher_key, mode, padding).unwrap();
        let mut in_out = input.clone();
        let decrypt_iv = encrypting_key.encrypt(&mut in_out).unwrap();
        if n > 5 {
            // There's no more than a 1 in 2^48 chance that this will fail randomly
            assert_ne!(input.as_slice(), in_out);
        }
        let cipher_key2 = UnboundCipherKey::new(alg, key).unwrap();
        let decrypting_key = PaddedBlockDecryptingKey::new(cipher_key2, mode, padding).unwrap();
        let plaintext = decrypting_key.decrypt(&mut in_out, decrypt_iv).unwrap();
        assert_eq!(input.as_slice(), plaintext);
    }
    // ISO 10126 unpadding: only the final length byte is validated, so the
    // arbitrary fill bytes before it are ignored.
    #[test]
    fn test_unpad_iso10126() {
        let mut input = from_hex("01020304050607fedcba9805").unwrap();
        let padding = PaddingStrategy::ISO10126;
        let block_len = 8;
        let unpadded = padding.remove_padding(block_len, &mut input).unwrap();
        assert_eq!(unpadded, &mut [1, 2, 3, 4, 5, 6, 7]);
    }
    // Round-trip every input length from 0 through 50 (covers empty, partial,
    // exact-block, and multi-block cases).
    #[test]
    fn test_aes_128_cbc() {
        let key = from_hex("000102030405060708090a0b0c0d0e0f").unwrap();
        for i in 0..=50 {
            helper_test_padded_cipher_n_bytes(
                key.as_slice(),
                &AES_128,
                OperatingMode::CBC,
                PaddingStrategy::PKCS7,
                i,
            );
        }
    }
    #[test]
    fn test_aes_256_cbc() {
        let key =
            from_hex("000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f").unwrap();
        for i in 0..=50 {
            helper_test_padded_cipher_n_bytes(
                key.as_slice(),
                &AES_256,
                OperatingMode::CBC,
                PaddingStrategy::PKCS7,
                i,
            );
        }
    }
    // Known-answer test generator: encrypts `$plaintext` under `$key`/`$iv`
    // and compares against `$ciphertext`, then round-trips back.
    macro_rules! padded_cipher_kat {
        ($name:ident, $alg:expr, $mode:expr, $padding:expr, $key:literal, $iv: literal, $plaintext:literal, $ciphertext:literal) => {
            #[test]
            fn $name() {
                let key = from_hex($key).unwrap();
                let input = from_hex($plaintext).unwrap();
                let expected_ciphertext = from_hex($ciphertext).unwrap();
                let mut iv = from_hex($iv).unwrap();
                // Convert the decoded IV Vec into a fixed-size array; the
                // array length is derived from the hex literal's length.
                let iv = {
                    let slice = iv.as_mut_slice();
                    let mut iv = [0u8; $iv.len() / 2];
                    {
                        let x = iv.as_mut_slice();
                        x.copy_from_slice(slice);
                    }
                    iv
                };
                let ec = EncryptionContext::Iv128(FixedLength::from(iv));
                let alg = $alg;
                let unbound_key = UnboundCipherKey::new(alg, &key).unwrap();
                let encrypting_key =
                    PaddedBlockEncryptingKey::new(unbound_key, $mode, $padding).unwrap();
                let mut in_out = input.clone();
                let context = encrypting_key.less_safe_encrypt(&mut in_out, ec).unwrap();
                if ($padding == PaddingStrategy::ISO10126) {
                    // This padding scheme is technically non-deterministic in nature if the padding is more than one
                    // byte. So just validate the input length of in_out is no longer the plaintext.
                    assert_ne!(input, in_out[..input.len()]);
                } else {
                    assert_eq!(expected_ciphertext, in_out);
                }
                let unbound_key2 = UnboundCipherKey::new(alg, &key).unwrap();
                let decrypting_key =
                    PaddedBlockDecryptingKey::new(unbound_key2, $mode, $padding).unwrap();
                let plaintext = decrypting_key.decrypt(&mut in_out, context).unwrap();
                assert_eq!(input.as_slice(), plaintext);
            }
        };
    }
    padded_cipher_kat!(
        test_iv_aes_128_cbc_16_bytes,
        &AES_128,
        OperatingMode::CBC,
        PaddingStrategy::PKCS7,
        "000102030405060708090a0b0c0d0e0f",
        "00000000000000000000000000000000",
        "00112233445566778899aabbccddeeff",
        "69c4e0d86a7b0430d8cdb78070b4c55a9e978e6d16b086570ef794ef97984232"
    );
    padded_cipher_kat!(
        test_iv_aes_256_cbc_15_bytes,
        &AES_256,
        OperatingMode::CBC,
        PaddingStrategy::PKCS7,
        "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f",
        "00000000000000000000000000000000",
        "00112233445566778899aabbccddee",
        "2ddfb635a651a43f582997966840ca0c"
    );
    padded_cipher_kat!(
        test_openssl_aes_128_cbc_15_bytes,
        &AES_128,
        OperatingMode::CBC,
        PaddingStrategy::PKCS7,
        "053304bb3899e1d99db9d29343ea782d",
        "b5313560244a4822c46c2a0c9d0cf7fd",
        "a3e4c990356c01f320043c3d8d6f43",
        "ad96993f248bd6a29760ec7ccda95ee1"
    );
    padded_cipher_kat!(
        test_openssl_aes_128_cbc_iso10126_15_bytes,
        &AES_128,
        OperatingMode::CBC,
        PaddingStrategy::ISO10126,
        "053304bb3899e1d99db9d29343ea782d",
        "b5313560244a4822c46c2a0c9d0cf7fd",
        "a3e4c990356c01f320043c3d8d6f43",
        "ad96993f248bd6a29760ec7ccda95ee1"
    );
    padded_cipher_kat!(
        test_openssl_aes_128_cbc_iso10126_16_bytes,
        &AES_128,
        OperatingMode::CBC,
        PaddingStrategy::ISO10126,
        "053304bb3899e1d99db9d29343ea782d",
        "b83452fc9c80215a6ecdc505b5154c90",
        "736e65616b7920726163636f6f6e7321",
        "44563399c6bb2133e013161dc5bd4fa8ce83ef997ddb04bbbbe3632b68e9cde0"
    );
    padded_cipher_kat!(
        test_openssl_aes_128_cbc_16_bytes,
        &AES_128,
        OperatingMode::CBC,
        PaddingStrategy::PKCS7,
        "95af71f1c63e4a1d0b0b1a27fb978283",
        "89e40797dca70197ff87d3dbb0ef2802",
        "aece7b5e3c3df1ffc9802d2dfe296dc7",
        "301b5dab49fb11e919d0d39970d06739301919743304f23f3cbc67d28564b25b"
    );
    padded_cipher_kat!(
        test_openssl_aes_256_cbc_15_bytes,
        &AES_256,
        OperatingMode::CBC,
        PaddingStrategy::PKCS7,
        "d369e03e9752784917cc7bac1db7399598d9555e691861d9dd7b3292a693ef57",
        "1399bb66b2f6ad99a7f064140eaaa885",
        "7385f5784b85bf0a97768ddd896d6d",
        "4351082bac9b4593ae8848cc9dfb5a01"
    );
    padded_cipher_kat!(
        test_openssl_aes_256_cbc_16_bytes,
        &AES_256,
        OperatingMode::CBC,
        PaddingStrategy::PKCS7,
        "d4a8206dcae01242f9db79a4ecfe277d0f7bb8ccbafd8f9809adb39f35aa9b41",
        "24f6076548fb9d93c8f7ed9f6e661ef9",
        "a39c1fdf77ea3e1f18178c0ec237c70a",
        "f1af484830a149ee0387b854d65fe87ca0e62efc1c8e6909d4b9ab8666470453"
    );
}

1614
vendor/aws-lc-rs/src/cipher/streaming.rs vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,189 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![cfg(debug_assertions)]
use crate::cipher::{
DecryptingKey, EncryptingKey, PaddedBlockDecryptingKey, PaddedBlockEncryptingKey,
StreamingDecryptingKey, StreamingEncryptingKey, UnboundCipherKey, AES_128, AES_192, AES_256,
};
use crate::fips::{assert_fips_status_indicator, FipsServiceStatus};
// Fixed test keys (one per AES key size) used by every macro-generated test
// below. Values are arbitrary; they only need to be valid key lengths.
const TEST_KEY_128_BIT: [u8; 16] = [
    0x9f, 0xd9, 0x41, 0xc3, 0xa6, 0xfe, 0xb9, 0x26, 0x2a, 0x35, 0xa7, 0x44, 0xbb, 0xc0, 0x3a, 0x6a,
];
const TEST_KEY_192_BIT: [u8; 24] = [
    0x50, 0x2a, 0x6a, 0xb3, 0x69, 0x84, 0xaf, 0x26, 0x8b, 0xf4, 0x23, 0xc7, 0xf5, 0x09, 0x20, 0x52,
    0x07, 0xfc, 0x15, 0x52, 0xaf, 0x4a, 0x91, 0xe5,
];
const TEST_KEY_256_BIT: [u8; 32] = [
    0xd8, 0x32, 0x58, 0xa9, 0x5a, 0x62, 0x6c, 0x99, 0xc4, 0xe6, 0xb5, 0x3f, 0x97, 0x90, 0x62, 0xbe,
    0x71, 0x0f, 0xd5, 0xe1, 0xd4, 0xfe, 0x95, 0xb3, 0x03, 0x46, 0xa5, 0x8e, 0x36, 0xad, 0x18, 0xe3,
];
// Short plaintext used for every round trip.
const TEST_MESSAGE: &str = "test message";
// Generates a test that round-trips TEST_MESSAGE through the one-shot (block)
// encrypt/decrypt API, asserting both operations report FIPS-approved status.
macro_rules! block_api {
    ($name:ident, $alg:expr, $encrypt_mode:path, $decrypt_mode:path, $key:expr) => {
        #[test]
        fn $name() {
            let key = $encrypt_mode(UnboundCipherKey::new($alg, $key).unwrap()).unwrap();
            let mut in_out = Vec::from(TEST_MESSAGE);
            let context = assert_fips_status_indicator!(
                key.encrypt(&mut in_out),
                FipsServiceStatus::Approved
            )
            .unwrap();
            let key = $decrypt_mode(UnboundCipherKey::new($alg, $key).unwrap()).unwrap();
            let in_out = assert_fips_status_indicator!(
                key.decrypt(&mut in_out, context),
                FipsServiceStatus::Approved
            )
            .unwrap();
            assert_eq!(TEST_MESSAGE.as_bytes(), in_out);
        }
    };
}
// Generates a test that round-trips TEST_MESSAGE through the streaming
// (update/finish) encrypt/decrypt API, asserting each `finish` call reports
// FIPS-approved status. Output buffers are oversized by one block to leave
// room for padding.
macro_rules! streaming_api {
    ($name:ident, $alg:expr, $encrypt_mode:path, $decrypt_mode:path, $key:expr) => {
        #[test]
        fn $name() {
            let mut key = $encrypt_mode(UnboundCipherKey::new($alg, $key).unwrap()).unwrap();
            let input = TEST_MESSAGE.as_bytes();
            let mut encrypt_output = vec![0u8; TEST_MESSAGE.len() + $alg.block_len()];
            let mut buffer_update = key.update(&input, &mut encrypt_output).unwrap();
            // Track how many ciphertext bytes `update` + `finish` produced.
            let outlen = buffer_update.written().len();
            let (context, buffer_update) = assert_fips_status_indicator!(
                key.finish(buffer_update.remainder_mut()),
                FipsServiceStatus::Approved
            )
            .unwrap();
            let outlen = outlen + buffer_update.written().len();
            let ciphertext = &encrypt_output[0..outlen];
            let mut decrypt_output = vec![0u8; outlen + $alg.block_len()];
            let mut key =
                $decrypt_mode(UnboundCipherKey::new($alg, $key).unwrap(), context).unwrap();
            let mut buffer_update = key.update(ciphertext, &mut decrypt_output).unwrap();
            let outlen = buffer_update.written().len();
            let buffer_update = assert_fips_status_indicator!(
                key.finish(buffer_update.remainder_mut()),
                FipsServiceStatus::Approved
            )
            .unwrap();
            let outlen = outlen + buffer_update.written().len();
            let plaintext = &decrypt_output[0..outlen];
            assert_eq!(TEST_MESSAGE.as_bytes(), plaintext);
        }
    };
}
// Streaming (incremental) encrypt/decrypt FIPS-indicator tests for
// AES-128/192/256 in CBC-PKCS7 and CTR modes.
streaming_api!(
    streaming_aes_128_cbc_pkcs7,
    &AES_128,
    StreamingEncryptingKey::cbc_pkcs7,
    StreamingDecryptingKey::cbc_pkcs7,
    &TEST_KEY_128_BIT
);
streaming_api!(
    streaming_aes_128_ctr,
    &AES_128,
    StreamingEncryptingKey::ctr,
    StreamingDecryptingKey::ctr,
    &TEST_KEY_128_BIT
);
streaming_api!(
    streaming_aes_192_cbc_pkcs7,
    &AES_192,
    StreamingEncryptingKey::cbc_pkcs7,
    StreamingDecryptingKey::cbc_pkcs7,
    &TEST_KEY_192_BIT
);
streaming_api!(
    streaming_aes_192_ctr,
    &AES_192,
    StreamingEncryptingKey::ctr,
    StreamingDecryptingKey::ctr,
    &TEST_KEY_192_BIT
);
streaming_api!(
    streaming_aes_256_cbc_pkcs7,
    &AES_256,
    StreamingEncryptingKey::cbc_pkcs7,
    StreamingDecryptingKey::cbc_pkcs7,
    &TEST_KEY_256_BIT
);
streaming_api!(
    streaming_aes_256_ctr,
    &AES_256,
    StreamingEncryptingKey::ctr,
    StreamingDecryptingKey::ctr,
    &TEST_KEY_256_BIT
);
// One-shot (block API) encrypt/decrypt FIPS-indicator tests for the same
// algorithm/mode matrix.
block_api!(
    block_aes_128_cbc_pkcs7,
    &AES_128,
    PaddedBlockEncryptingKey::cbc_pkcs7,
    PaddedBlockDecryptingKey::cbc_pkcs7,
    &TEST_KEY_128_BIT
);
block_api!(
    block_aes_128_ctr,
    &AES_128,
    EncryptingKey::ctr,
    DecryptingKey::ctr,
    &TEST_KEY_128_BIT
);
block_api!(
    block_aes_192_cbc_pkcs7,
    &AES_192,
    PaddedBlockEncryptingKey::cbc_pkcs7,
    PaddedBlockDecryptingKey::cbc_pkcs7,
    &TEST_KEY_192_BIT
);
block_api!(
    block_aes_192_ctr,
    &AES_192,
    EncryptingKey::ctr,
    DecryptingKey::ctr,
    &TEST_KEY_192_BIT
);
block_api!(
    block_aes_256_cbc_pkcs7,
    &AES_256,
    PaddedBlockEncryptingKey::cbc_pkcs7,
    PaddedBlockDecryptingKey::cbc_pkcs7,
    &TEST_KEY_256_BIT
);
block_api!(
    block_aes_256_ctr,
    &AES_256,
    EncryptingKey::ctr,
    DecryptingKey::ctr,
    &TEST_KEY_256_BIT
);

758
vendor/aws-lc-rs/src/cmac.rs vendored Normal file
View File

@@ -0,0 +1,758 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! CMAC is specified in [RFC 4493] and [NIST SP 800-38B].
//!
//! After a `Key` is constructed, it can be used for multiple signing or
//! verification operations. Separating the construction of the key from the
//! rest of the CMAC operation allows the per-key precomputation to be done
//! only once, instead of it being done in every CMAC operation.
//!
//! Frequently all the data to be signed in a message is available in a single
//! contiguous piece. In that case, the module-level `sign` function can be
//! used. Otherwise, if the input is in multiple parts, `Context` should be
//! used.
//!
//! # Examples:
//!
//! ## Signing a value and verifying it wasn't tampered with
//!
//! ```
//! use aws_lc_rs::cmac;
//!
//! let key = cmac::Key::generate(cmac::AES_128)?;
//!
//! let msg = "hello, world";
//!
//! let tag = cmac::sign(&key, msg.as_bytes())?;
//!
//! // [We give access to the message to an untrusted party, and they give it
//! // back to us. We need to verify they didn't tamper with it.]
//!
//! cmac::verify(&key, msg.as_bytes(), tag.as_ref())?;
//!
//! # Ok::<(), aws_lc_rs::error::Unspecified>(())
//! ```
//!
//! ## Using the one-shot API:
//!
//! ```
//! use aws_lc_rs::{cmac, rand};
//!
//! let msg = "hello, world";
//!
//! // The sender generates a secure key value and signs the message with it.
//! // Note that in a real protocol, a key agreement protocol would be used to
//! // derive `key_value`.
//! let rng = rand::SystemRandom::new();
//! let key_value: [u8; 16] = rand::generate(&rng)?.expose();
//!
//! let s_key = cmac::Key::new(cmac::AES_128, key_value.as_ref())?;
//! let tag = cmac::sign(&s_key, msg.as_bytes())?;
//!
//! // The receiver (somehow!) knows the key value, and uses it to verify the
//! // integrity of the message.
//! let v_key = cmac::Key::new(cmac::AES_128, key_value.as_ref())?;
//! cmac::verify(&v_key, msg.as_bytes(), tag.as_ref())?;
//!
//! # Ok::<(), aws_lc_rs::error::Unspecified>(())
//! ```
//!
//! ## Using the multi-part API:
//! ```
//! use aws_lc_rs::{cmac, rand};
//!
//! let parts = ["hello", ", ", "world"];
//!
//! // The sender generates a secure key value and signs the message with it.
//! // Note that in a real protocol, a key agreement protocol would be used to
//! // derive `key_value`.
//! let rng = rand::SystemRandom::new();
//! let key_value: [u8; 32] = rand::generate(&rng)?.expose();
//!
//! let s_key = cmac::Key::new(cmac::AES_256, key_value.as_ref())?;
//! let mut s_ctx = cmac::Context::with_key(&s_key);
//! for part in &parts {
//! s_ctx.update(part.as_bytes())?;
//! }
//! let tag = s_ctx.sign()?;
//!
//! // The receiver (somehow!) knows the key value, and uses it to verify the
//! // integrity of the message.
//! let v_key = cmac::Key::new(cmac::AES_256, key_value.as_ref())?;
//! let mut msg = Vec::<u8>::new();
//! for part in &parts {
//! msg.extend(part.as_bytes());
//! }
//! cmac::verify(&v_key, &msg.as_ref(), tag.as_ref())?;
//!
//! # Ok::<(), aws_lc_rs::error::Unspecified>(())
//! ```
//! [RFC 4493]: https://tools.ietf.org/html/rfc4493
//! [NIST SP 800-38B]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-38b.pdf
use crate::aws_lc::{
CMAC_CTX_copy, CMAC_CTX_new, CMAC_Final, CMAC_Init, CMAC_Update, EVP_aes_128_cbc,
EVP_aes_192_cbc, EVP_aes_256_cbc, EVP_des_ede3_cbc, CMAC_CTX, EVP_CIPHER,
};
use crate::error::Unspecified;
use crate::fips::indicator_check;
use crate::ptr::{ConstPointer, LcPtr};
use crate::{constant_time, rand};
use core::mem::MaybeUninit;
use core::ptr::null_mut;
/// Identifies which underlying block cipher a CMAC [`Algorithm`] uses.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum AlgorithmId {
    Aes128,
    Aes192,
    Aes256,
    Tdes,
}
/// A CMAC algorithm.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Algorithm {
    id: AlgorithmId,
    key_len: usize,
    tag_len: usize,
}
impl Algorithm {
    /// The key length for this CMAC algorithm.
    #[inline]
    #[must_use]
    pub fn key_len(&self) -> usize {
        let Self { key_len, .. } = self;
        *key_len
    }
    /// The tag length for this CMAC algorithm.
    #[inline]
    #[must_use]
    pub fn tag_len(&self) -> usize {
        let Self { tag_len, .. } = self;
        *tag_len
    }
}
impl AlgorithmId {
    // Maps this algorithm id to the corresponding AWS-LC `EVP_CIPHER`.
    // The `EVP_*` accessors return pointers to static cipher descriptions,
    // so the `unwrap` on `new_static` is expected to succeed (it would only
    // fail on a null pointer).
    fn evp_cipher(&self) -> ConstPointer<'_, EVP_CIPHER> {
        unsafe {
            ConstPointer::new_static(match self {
                AlgorithmId::Aes128 => EVP_aes_128_cbc(),
                AlgorithmId::Aes192 => EVP_aes_192_cbc(),
                AlgorithmId::Aes256 => EVP_aes_256_cbc(),
                AlgorithmId::Tdes => EVP_des_ede3_cbc(),
            })
            .unwrap()
        }
    }
}
/// CMAC using AES-128.
pub const AES_128: Algorithm = Algorithm {
    id: AlgorithmId::Aes128,
    key_len: 16,
    tag_len: 16,
};
/// CMAC using AES-192.
pub const AES_192: Algorithm = Algorithm {
    id: AlgorithmId::Aes192,
    key_len: 24,
    tag_len: 16,
};
/// CMAC using AES-256.
pub const AES_256: Algorithm = Algorithm {
    id: AlgorithmId::Aes256,
    key_len: 32,
    tag_len: 16,
};
/// CMAC using 3DES (Triple DES). Obsolete; retained for legacy
/// interoperability only. Note the 8-byte tag (the 3DES block size).
pub const TDES_FOR_LEGACY_USE_ONLY: Algorithm = Algorithm {
    id: AlgorithmId::Tdes,
    key_len: 24,
    tag_len: 8,
};
/// Maximum CMAC tag length (AES block size).
const MAX_CMAC_TAG_LEN: usize = 16;
/// A CMAC tag.
///
/// For a given tag `t`, use `t.as_ref()` to get the tag value as a byte slice.
#[derive(Clone, Copy, Debug)]
pub struct Tag {
    bytes: [u8; MAX_CMAC_TAG_LEN],
    len: usize,
}
impl AsRef<[u8]> for Tag {
    /// Returns the tag value: the first `len` bytes of the fixed-size buffer.
    #[inline]
    fn as_ref(&self) -> &[u8] {
        let Tag { bytes, len } = self;
        &bytes[..*len]
    }
}
/// A key to use for CMAC signing.
//
// # FIPS
// Use this type with one of the following algorithms:
// * `AES_128`
// * `AES_256`
#[derive(Clone)]
pub struct Key {
    // Which CMAC variant this key was created for.
    algorithm: Algorithm,
    // AWS-LC CMAC context initialized with the key material. `Clone` on Key
    // deep-copies this context (see `impl Clone for LcPtr<CMAC_CTX>` below),
    // so clones never share a mutable CMAC_CTX.
    ctx: LcPtr<CMAC_CTX>,
}
// Deep-copy support for the CMAC context; `#[derive(Clone)]` on `Key`
// relies on this. A fresh CMAC_CTX is allocated and the full internal state
// is copied into it, so the clone is independent of the original.
impl Clone for LcPtr<CMAC_CTX> {
    fn clone(&self) -> Self {
        // Allocation/copy failure here is treated as unrecoverable (OOM or
        // corrupted context), hence expect/assert rather than a Result.
        let mut new_ctx = LcPtr::new(unsafe { CMAC_CTX_new() }).expect("CMAC_CTX_new failed");
        unsafe {
            assert!(
                1 == CMAC_CTX_copy(new_ctx.as_mut_ptr(), self.as_const_ptr()),
                "CMAC_CTX_copy failed"
            );
        }
        new_ctx
    }
}
// NOTE(review): soundness relies on `Key` exclusively owning its CMAC_CTX
// (clones deep-copy the context, so no aliasing of the raw pointer) — confirm
// no other alias of the pointer exists outside this module.
unsafe impl Send for Key {}
// All uses of *mut CMAC_CTX require the creation of a Context, which will clone the Key.
unsafe impl Sync for Key {}
#[allow(clippy::missing_fields_in_debug)]
impl core::fmt::Debug for Key {
    /// Shows only the algorithm; key material and the raw context are
    /// deliberately omitted from the debug output.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> Result<(), core::fmt::Error> {
        let mut builder = f.debug_struct("Key");
        builder.field("algorithm", &self.algorithm);
        builder.finish()
    }
}
impl Key {
    /// Generate a CMAC signing key using the given algorithm with a
    /// random value.
    ///
    //
    // # FIPS
    // Use this type with one of the following algorithms:
    // * `AES_128`
    // * `AES_256`
    //
    /// # Errors
    /// `error::Unspecified` if random generation or key construction fails.
    pub fn generate(algorithm: Algorithm) -> Result<Self, Unspecified> {
        // Draw exactly `key_len` random bytes, then defer to `new` for the
        // length check and context initialization.
        let mut key_bytes = vec![0u8; algorithm.key_len()];
        rand::fill(&mut key_bytes)?;
        Self::new(algorithm, &key_bytes)
    }
    /// Construct a CMAC signing key using the given algorithm and key value.
    ///
    /// `key_value` should be a value generated using a secure random number
    /// generator or derived from a random key by a key derivation function.
    ///
    /// # Errors
    /// `error::Unspecified` if the key length doesn't match the algorithm or if CMAC context
    /// initialization fails.
    pub fn new(algorithm: Algorithm, key_value: &[u8]) -> Result<Self, Unspecified> {
        // Reject keys of the wrong size for the chosen algorithm up front.
        if key_value.len() != algorithm.key_len() {
            return Err(Unspecified);
        }
        let mut ctx = LcPtr::new(unsafe { CMAC_CTX_new() })?;
        unsafe {
            let cipher = algorithm.id.evp_cipher();
            // The trailing null pointer is the ENGINE parameter; null selects
            // the default implementation.
            if 1 != CMAC_Init(
                ctx.as_mut_ptr(),
                key_value.as_ptr().cast(),
                key_value.len(),
                cipher.as_const_ptr(),
                null_mut(),
            ) {
                return Err(Unspecified);
            }
        }
        Ok(Self { algorithm, ctx })
    }
    /// The algorithm for the key.
    #[inline]
    #[must_use]
    pub fn algorithm(&self) -> Algorithm {
        self.algorithm
    }
}
/// A context for multi-step (Init-Update-Finish) CMAC signing.
///
/// Use `sign` for single-step CMAC signing.
pub struct Context {
    // Cloned from the caller's Key (see `with_key`), so this Context owns its
    // own CMAC_CTX copy and can be mutated independently.
    key: Key,
}
impl Clone for Context {
    /// Duplicates the context, including the accumulated CMAC state
    /// (via the deep `Key`/`CMAC_CTX` clone).
    fn clone(&self) -> Self {
        let key = self.key.clone();
        Self { key }
    }
}
// NOTE(review): sound for the same reason as `Key: Send` above — the Context
// exclusively owns its (cloned) Key and therefore its CMAC_CTX pointer.
unsafe impl Send for Context {}
impl core::fmt::Debug for Context {
    /// Shows only the key's algorithm; the CMAC state itself is omitted.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> Result<(), core::fmt::Error> {
        let mut builder = f.debug_struct("Context");
        builder.field("algorithm", &self.key.algorithm);
        builder.finish()
    }
}
impl Context {
    /// Constructs a new CMAC signing context using the given key.
    #[inline]
    #[must_use]
    pub fn with_key(key: &Key) -> Self {
        // Cloning the Key deep-copies its CMAC_CTX, so this Context can be
        // updated without affecting the caller's Key.
        Self { key: key.clone() }
    }
    /// Updates the CMAC with all the data in `data`. `update` may be called
    /// zero or more times until `sign` is called.
    ///
    /// # Errors
    /// `error::Unspecified` if the CMAC cannot be updated.
    pub fn update(&mut self, data: &[u8]) -> Result<(), Unspecified> {
        unsafe {
            // SAFETY: the context pointer is exclusively owned by this
            // Context, and the data pointer/length come from a live slice.
            if 1 != CMAC_Update(self.key.ctx.as_mut_ptr(), data.as_ptr(), data.len()) {
                return Err(Unspecified);
            }
        }
        Ok(())
    }
    /// Finalizes the CMAC calculation and returns the CMAC value. `sign`
    /// consumes the context so it cannot be (mis-)used after `sign` has been
    /// called.
    ///
    /// It is generally not safe to implement CMAC verification by comparing
    /// the return value of `sign` to a tag. Use `verify` for verification
    /// instead.
    ///
    //
    // # FIPS
    // Use this method with one of the following algorithms:
    // * `AES_128`
    // * `AES_256`
    //
    /// # Errors
    /// `error::Unspecified` if the CMAC calculation cannot be finalized.
    ///
    /// # Panics
    /// Panics if the CMAC tag length exceeds the maximum allowed length, indicating memory corruption.
    pub fn sign(mut self) -> Result<Tag, Unspecified> {
        // `output` is sized for the largest supported tag; `internal_sign`
        // returns the sub-slice that was actually written.
        let mut output = [0u8; MAX_CMAC_TAG_LEN];
        let output_len = {
            let result = internal_sign(&mut self, &mut output)?;
            result.len()
        };
        Ok(Tag {
            bytes: output,
            len: output_len,
        })
    }
    /// Finalizes the CMAC calculation and verifies whether the resulting value
    /// equals the provided `tag`.
    ///
    /// `verify` consumes the context so it cannot be (mis-)used after `verify`
    /// has been called.
    ///
    /// The verification is done in constant time to prevent timing attacks.
    ///
    /// # Errors
    /// `error::Unspecified` if the tag does not match or if CMAC calculation fails.
    //
    // # FIPS
    // Use this function with one of the following algorithms:
    // * `AES_128`
    // * `AES_256`
    #[inline]
    pub fn verify(mut self, tag: &[u8]) -> Result<(), Unspecified> {
        let mut output = [0u8; MAX_CMAC_TAG_LEN];
        let output_len = {
            let result = internal_sign(&mut self, &mut output)?;
            result.len()
        };
        // Compare the freshly computed tag to the caller's in constant time.
        constant_time::verify_slices_are_equal(&output[0..output_len], tag)
    }
}
// Finalizes the CMAC computation held in `ctx`, writing the tag into the
// front of `output` and returning the written sub-slice. Callers either pass
// a MAX_CMAC_TAG_LEN buffer or pre-check the buffer length against the
// algorithm's tag length (see `sign_to_buffer`).
pub(crate) fn internal_sign<'in_out>(
    ctx: &mut Context,
    output: &'in_out mut [u8],
) -> Result<&'in_out mut [u8], Unspecified> {
    let mut out_len = MaybeUninit::<usize>::uninit();
    if 1 != indicator_check!(unsafe {
        CMAC_Final(
            ctx.key.ctx.as_mut_ptr(),
            output.as_mut_ptr(),
            out_len.as_mut_ptr(),
        )
    }) {
        return Err(Unspecified);
    }
    // SAFETY: CMAC_Final returned success, so it initialized `out_len`.
    let actual_len = unsafe { out_len.assume_init() };
    // This indicates a memory corruption.
    debug_assert!(
        actual_len <= MAX_CMAC_TAG_LEN,
        "CMAC tag length {actual_len} exceeds maximum {MAX_CMAC_TAG_LEN}"
    );
    // A length that disagrees with the algorithm's tag length is treated as
    // an error rather than silently truncated/extended.
    if actual_len != ctx.key.algorithm.tag_len() {
        return Err(Unspecified);
    }
    Ok(&mut output[0..actual_len])
}
/// Calculates the CMAC of `data` using the key `key` in one step.
///
/// Use `Context` to calculate CMACs where the input is in multiple parts.
///
/// It is generally not safe to implement CMAC verification by comparing the
/// return value of `sign` to a tag. Use `verify` for verification instead.
//
// # FIPS
// Use this function with one of the following algorithms:
// * `AES_128`
// * `AES_256`
//
/// # Errors
/// `error::Unspecified` if the CMAC calculation fails.
#[inline]
pub fn sign(key: &Key, data: &[u8]) -> Result<Tag, Unspecified> {
    // One-shot signing is just the multi-part flow with a single update.
    let mut signing_ctx = Context::with_key(key);
    signing_ctx.update(data)?;
    signing_ctx.sign()
}
/// Calculates the CMAC of `data` using the key `key` in one step, writing the
/// result into the provided `output` buffer.
///
/// Use `Context` to calculate CMACs where the input is in multiple parts.
///
/// The `output` buffer must be at least as large as the algorithm's tag length
/// (obtainable via `key.algorithm().tag_len()`). The returned slice will be a
/// sub-slice of `output` containing exactly the tag bytes.
///
/// It is generally not safe to implement CMAC verification by comparing the
/// return value of `sign_to_buffer` to a tag. Use `verify` for verification instead.
//
// # FIPS
// Use this function with one of the following algorithms:
// * `AES_128`
// * `AES_256`
//
/// # Errors
/// `error::Unspecified` if the output buffer is too small or if the CMAC calculation fails.
#[inline]
pub fn sign_to_buffer<'out>(
    key: &Key,
    data: &[u8],
    output: &'out mut [u8],
) -> Result<&'out mut [u8], Unspecified> {
    // Guard: the caller's buffer must be able to hold a full tag.
    let required = key.algorithm().tag_len();
    if output.len() < required {
        return Err(Unspecified);
    }
    let mut cmac_ctx = Context::with_key(key);
    cmac_ctx.update(data)?;
    internal_sign(&mut cmac_ctx, output)
}
/// Calculates the CMAC of `data` using the signing key `key`, and verifies
/// whether the resultant value equals `tag`, in one step.
///
/// The verification is done in constant time to prevent timing attacks.
///
/// # Errors
/// `error::Unspecified` if the tag does not match or if CMAC calculation fails.
//
// # FIPS
// Use this function with one of the following algorithms:
// * `AES_128`
// * `AES_256`
#[inline]
pub fn verify(key: &Key, data: &[u8], tag: &[u8]) -> Result<(), Unspecified> {
    // Recompute the tag into a stack buffer, then compare in constant time.
    let mut computed = [0u8; MAX_CMAC_TAG_LEN];
    let written = sign_to_buffer(key, data, &mut computed)?.len();
    constant_time::verify_slices_are_equal(&computed[..written], tag)
}
#[cfg(test)]
mod tests {
    use super::*;
    #[cfg(feature = "fips")]
    mod fips;
    // Sign/verify round-trip across every supported algorithm; a tampered
    // message must be rejected.
    #[test]
    fn cmac_basic_test() {
        for &algorithm in &[AES_128, AES_192, AES_256, TDES_FOR_LEGACY_USE_ONLY] {
            let key = Key::generate(algorithm).unwrap();
            let data = b"hello, world";
            let tag = sign(&key, data).unwrap();
            assert!(verify(&key, data, tag.as_ref()).is_ok());
            assert!(verify(&key, b"hello, worle", tag.as_ref()).is_err());
        }
    }
    // Make sure that `Key::generate` and `verify` aren't completely wacky.
    #[test]
    pub fn cmac_signing_key_coverage() {
        const HELLO_WORLD_GOOD: &[u8] = b"hello, world";
        const HELLO_WORLD_BAD: &[u8] = b"hello, worle";
        for algorithm in &[AES_128, AES_192, AES_256, TDES_FOR_LEGACY_USE_ONLY] {
            let key = Key::generate(*algorithm).unwrap();
            let tag = sign(&key, HELLO_WORLD_GOOD).unwrap();
            println!("{key:?}");
            assert!(verify(&key, HELLO_WORLD_GOOD, tag.as_ref()).is_ok());
            assert!(verify(&key, HELLO_WORLD_BAD, tag.as_ref()).is_err());
        }
    }
    // Exercises Clone for Context/Tag: a cloned in-progress context must
    // produce the same tag as the original.
    #[test]
    fn cmac_coverage() {
        // Something would have gone horribly wrong for this to not pass, but we test this so our
        // coverage reports will look better.
        assert_ne!(AES_128, AES_256);
        assert_ne!(AES_192, AES_256);
        for &alg in &[AES_128, AES_192, AES_256, TDES_FOR_LEGACY_USE_ONLY] {
            // Clone after updating context with message, then check if the final Tag is the same.
            let key_bytes = vec![0u8; alg.key_len()];
            let key = Key::new(alg, &key_bytes).unwrap();
            let mut ctx = Context::with_key(&key);
            ctx.update(b"hello, world").unwrap();
            let ctx_clone = ctx.clone();
            let orig_tag = ctx.sign().unwrap();
            let clone_tag = ctx_clone.sign().unwrap();
            assert_eq!(orig_tag.as_ref(), clone_tag.as_ref());
            assert_eq!(orig_tag.clone().as_ref(), clone_tag.as_ref());
        }
    }
    // Multi-part updates must produce the same tag as one-shot signing.
    #[test]
    fn cmac_context_test() {
        let key = Key::generate(AES_192).unwrap();
        let mut ctx = Context::with_key(&key);
        ctx.update(b"hello").unwrap();
        ctx.update(b", ").unwrap();
        ctx.update(b"world").unwrap();
        let tag1 = ctx.sign().unwrap();
        let tag2 = sign(&key, b"hello, world").unwrap();
        assert_eq!(tag1.as_ref(), tag2.as_ref());
    }
    #[test]
    fn cmac_multi_part_test() {
        let parts = ["hello", ", ", "world"];
        for &algorithm in &[AES_128, AES_256] {
            let key = Key::generate(algorithm).unwrap();
            // Multi-part signing
            let mut ctx = Context::with_key(&key);
            for part in &parts {
                ctx.update(part.as_bytes()).unwrap();
            }
            let tag = ctx.sign().unwrap();
            // Verification with concatenated message
            let mut msg = Vec::<u8>::new();
            for part in &parts {
                msg.extend(part.as_bytes());
            }
            assert!(verify(&key, &msg, tag.as_ref()).is_ok());
        }
    }
    #[test]
    fn cmac_key_new_test() {
        // Test Key::new with explicit key values
        let key_128 = [0u8; 16];
        let key_192 = [0u8; 24];
        let key_256 = [0u8; 32];
        let key_3des = [0u8; 24];
        let k1 = Key::new(AES_128, &key_128).unwrap();
        let k2 = Key::new(AES_192, &key_192).unwrap();
        let k3 = Key::new(AES_256, &key_256).unwrap();
        let k4 = Key::new(TDES_FOR_LEGACY_USE_ONLY, &key_3des).unwrap();
        let data = b"test message";
        // All should produce valid tags
        let _ = sign(&k1, data).unwrap();
        let _ = sign(&k2, data).unwrap();
        let _ = sign(&k3, data).unwrap();
        let _ = sign(&k4, data).unwrap();
    }
    #[test]
    fn cmac_key_new_wrong_length_test() {
        let key_256 = [0u8; 32];
        // Wrong key length should return error
        assert!(Key::new(AES_128, &key_256).is_err());
    }
    // Pins the published key/tag sizes for each algorithm constant.
    #[test]
    fn cmac_algorithm_properties() {
        assert_eq!(AES_128.key_len(), 16);
        assert_eq!(AES_128.tag_len(), 16);
        assert_eq!(AES_192.key_len(), 24);
        assert_eq!(AES_192.tag_len(), 16);
        assert_eq!(AES_256.key_len(), 32);
        assert_eq!(AES_256.tag_len(), 16);
        assert_eq!(TDES_FOR_LEGACY_USE_ONLY.key_len(), 24);
        assert_eq!(TDES_FOR_LEGACY_USE_ONLY.tag_len(), 8);
    }
    #[test]
    fn cmac_empty_data() {
        let key = Key::generate(AES_128).unwrap();
        // CMAC should work with empty data
        let tag = sign(&key, b"").unwrap();
        assert!(verify(&key, b"", tag.as_ref()).is_ok());
        // Context version
        let ctx = Context::with_key(&key);
        let tag2 = ctx.sign().unwrap();
        assert_eq!(tag.as_ref(), tag2.as_ref());
    }
    #[test]
    fn des_ede3_cmac_test() {
        let key = Key::generate(TDES_FOR_LEGACY_USE_ONLY).unwrap();
        let data = b"test data for 3DES CMAC";
        let tag = sign(&key, data).unwrap();
        assert_eq!(tag.as_ref().len(), 8); // 3DES block size
        assert!(verify(&key, data, tag.as_ref()).is_ok());
    }
    // `sign_to_buffer` must agree with `sign` for exact-size and oversized buffers.
    #[test]
    fn cmac_sign_to_buffer_test() {
        for &algorithm in &[AES_128, AES_192, AES_256, TDES_FOR_LEGACY_USE_ONLY] {
            let key = Key::generate(algorithm).unwrap();
            let data = b"hello, world";
            // Test with exact size buffer
            let mut output = vec![0u8; algorithm.tag_len()];
            let result = sign_to_buffer(&key, data, &mut output).unwrap();
            assert_eq!(result.len(), algorithm.tag_len());
            // Verify the tag matches sign()
            let tag = sign(&key, data).unwrap();
            assert_eq!(result, tag.as_ref());
            // Test with larger buffer
            let mut large_output = vec![0u8; algorithm.tag_len() + 10];
            let result2 = sign_to_buffer(&key, data, &mut large_output).unwrap();
            assert_eq!(result2.len(), algorithm.tag_len());
            assert_eq!(result2, tag.as_ref());
        }
    }
    #[test]
    fn cmac_sign_to_buffer_too_small_test() {
        let key = Key::generate(AES_128).unwrap();
        let data = b"hello";
        // Buffer too small should fail
        let mut small_buffer = vec![0u8; AES_128.tag_len() - 1];
        assert!(sign_to_buffer(&key, data, &mut small_buffer).is_err());
        // Empty buffer should fail
        let mut empty_buffer = vec![];
        assert!(sign_to_buffer(&key, data, &mut empty_buffer).is_err());
    }
    #[test]
    fn cmac_context_verify_test() {
        for &algorithm in &[AES_128, AES_192, AES_256, TDES_FOR_LEGACY_USE_ONLY] {
            let key = Key::generate(algorithm).unwrap();
            let data = b"hello, world";
            // Generate a valid tag
            let tag = sign(&key, data).unwrap();
            // Verify with Context::verify
            let mut ctx = Context::with_key(&key);
            ctx.update(data).unwrap();
            assert!(ctx.verify(tag.as_ref()).is_ok());
            // Verify with wrong tag should fail
            let mut ctx2 = Context::with_key(&key);
            ctx2.update(data).unwrap();
            let wrong_tag = vec![0u8; algorithm.tag_len()];
            assert!(ctx2.verify(&wrong_tag).is_err());
            // Verify with different data should fail
            let mut ctx3 = Context::with_key(&key);
            ctx3.update(b"wrong data").unwrap();
            assert!(ctx3.verify(tag.as_ref()).is_err());
        }
    }
    #[test]
    fn cmac_context_verify_multipart_test() {
        let key = Key::generate(AES_256).unwrap();
        let parts = ["hello", ", ", "world"];
        // Create tag from concatenated message
        let mut full_msg = Vec::new();
        for part in &parts {
            full_msg.extend_from_slice(part.as_bytes());
        }
        let tag = sign(&key, &full_msg).unwrap();
        // Verify using multi-part context
        let mut ctx = Context::with_key(&key);
        for part in &parts {
            ctx.update(part.as_bytes()).unwrap();
        }
        assert!(ctx.verify(tag.as_ref()).is_ok());
        // Verify with missing part should fail
        let mut ctx2 = Context::with_key(&key);
        ctx2.update(parts[0].as_bytes()).unwrap();
        ctx2.update(parts[1].as_bytes()).unwrap();
        // Missing parts[2]
        assert!(ctx2.verify(tag.as_ref()).is_err());
    }
}

45
vendor/aws-lc-rs/src/cmac/tests/fips.rs vendored Normal file
View File

@@ -0,0 +1,45 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![cfg(debug_assertions)]
use crate::cmac::{sign, verify, Key, AES_128, AES_192, AES_256, TDES_FOR_LEGACY_USE_ONLY};
use crate::fips::{assert_fips_status_indicator, FipsServiceStatus};
use crate::rand::{self, SystemRandom};
// Shared plaintext for every FIPS-indicator CMAC test below.
const TEST_MESSAGE: &str = "test message";
// Generates a test asserting that both `sign` and `verify` with `$alg` report
// the expected FIPS service-indicator status. Per the invocations below, only
// AES-128 and AES-256 are expected to be `Approved`.
macro_rules! cmac_api {
    ($name:ident, $alg:expr, $key_len:expr, $expect:path) => {
        #[test]
        fn $name() -> Result<(), Box<dyn std::error::Error>> {
            let rng = SystemRandom::new();
            let key_value: [u8; $key_len] = rand::generate(&rng).unwrap().expose();
            let s_key = Key::new($alg, key_value.as_ref()).unwrap();
            let tag =
                assert_fips_status_indicator!(sign(&s_key, TEST_MESSAGE.as_bytes())?, $expect);
            let v_key = Key::new($alg, key_value.as_ref()).unwrap();
            assert_fips_status_indicator!(
                verify(&v_key, TEST_MESSAGE.as_bytes(), tag.as_ref())?,
                $expect
            );
            Ok(())
        }
    };
}
cmac_api!(aes_128, AES_128, 16, FipsServiceStatus::Approved);
cmac_api!(aes_192, AES_192, 24, FipsServiceStatus::NonApproved);
cmac_api!(aes_256, AES_256, 32, FipsServiceStatus::Approved);
cmac_api!(
    tdes,
    TDES_FOR_LEGACY_USE_ONLY,
    24,
    FipsServiceStatus::NonApproved
);

29
vendor/aws-lc-rs/src/constant_time.rs vendored Normal file
View File

@@ -0,0 +1,29 @@
// Copyright 2015-2022 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Constant-time operations.
use crate::aws_lc::CRYPTO_memcmp;
use crate::error;
/// Returns `Ok(())` if `a == b` and `Err(error::Unspecified)` otherwise.
///
/// The comparison of `a` and `b` is done in constant time with respect to the
/// contents of each, but NOT in constant time with respect to the lengths of
/// `a` and `b`.
///
/// # Errors
/// `error::Unspecified` when `a` and `b` differ.
#[inline]
pub fn verify_slices_are_equal(a: &[u8], b: &[u8]) -> Result<(), error::Unspecified> {
if a.len() != b.len() {
return Err(error::Unspecified);
}
let result = unsafe { CRYPTO_memcmp(a.as_ptr().cast(), b.as_ptr().cast(), a.len()) };
match result {
0 => Ok(()),
_ => Err(error::Unspecified),
}
}

60
vendor/aws-lc-rs/src/debug.rs vendored Normal file
View File

@@ -0,0 +1,60 @@
// Copyright 2018 Trent Clarke.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
// Generates an implementation of the Debug trait for a type that defers to the
// Debug implementation for a given field.
#![allow(missing_docs)]
// Implements `Debug` for `$typename` by delegating to the `Debug` impl of its
// `id` field; the type must have a field literally named `id`.
macro_rules! derive_debug_via_id {
    ($typename:ident) => {
        impl ::core::fmt::Debug for $typename {
            fn fmt(&self, f: &mut ::core::fmt::Formatter) -> Result<(), ::core::fmt::Error> {
                ::core::fmt::Debug::fmt(&self.id, f)
            }
        }
    };
}
pub(crate) use derive_debug_via_id;
// Implements `Debug` for `$type` by printing only the named `$field`,
// optionally under an explicit `$typename` label (defaults to the stringified
// type name).
#[allow(unused_macros)]
macro_rules! derive_debug_via_field {
    ($type:ty, $field:ident) => {
        derive_debug_via_field!($type, stringify!($type), $field);
    };
    ($type:ty, $typename:expr, $field:ident) => {
        impl ::core::fmt::Debug for $type {
            fn fmt(&self, f: &mut ::core::fmt::Formatter) -> Result<(), ::core::fmt::Error> {
                f.debug_struct($typename)
                    .field(stringify!($field), &self.$field)
                    .finish()
            }
        }
    };
}
// Generates an implementation of the Debug trait for a type that outputs the
// hex encoding of the byte slice representation of the value.
#[allow(unused_macros)]
macro_rules! derive_debug_self_as_ref_hex_bytes {
    ($typename:ident) => {
        impl ::core::fmt::Debug for $typename {
            fn fmt(&self, f: &mut ::core::fmt::Formatter) -> Result<(), ::core::fmt::Error> {
                // NOTE(review): relies on `crate::debug::write_hex_tuple`,
                // which is not visible in this chunk — confirm it exists
                // alongside `write_hex_bytes`.
                crate::debug::write_hex_tuple(f, stringify!($typename), self)
            }
        }
    };
}
// Writes `bytes` to `fmt` as lowercase, zero-padded, two-digit hex
// (e.g. [0x0f, 0xa0] -> "0fa0"), propagating any formatter error.
pub(crate) fn write_hex_bytes(
    fmt: &mut core::fmt::Formatter,
    bytes: &[u8],
) -> Result<(), core::fmt::Error> {
    bytes.iter().try_for_each(|byte| write!(fmt, "{byte:02x}"))
}

497
vendor/aws-lc-rs/src/digest.rs vendored Normal file
View File

@@ -0,0 +1,497 @@
// Copyright 2015-2019 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! SHA-2 and the legacy SHA-1 digest algorithm.
//!
//! If all the data is available in a single contiguous slice then the `digest`
//! function should be used. Otherwise, the digest can be calculated in
//! multiple steps using `Context`.
//! # Example
//!
//! ```
//! use aws_lc_rs::digest;
//!
//! // Using `digest::digest`
//! let one_shot = digest::digest(&digest::SHA384, b"hello, world");
//!
//! // Using `digest::Context`
//! let mut ctx = digest::Context::new(&digest::SHA384);
//! ctx.update(b"hello");
//! ctx.update(b", ");
//! ctx.update(b"world");
//! let multi_part = ctx.finish();
//!
//! assert_eq!(&one_shot.as_ref(), &multi_part.as_ref());
//! ```
#![allow(non_snake_case)]
use crate::fips::indicator_check;
use crate::{debug, derive_debug_via_id};
pub(crate) mod digest_ctx;
mod sha;
use crate::aws_lc::{
EVP_DigestFinal, EVP_DigestUpdate, EVP_sha1, EVP_sha224, EVP_sha256, EVP_sha384, EVP_sha3_256,
EVP_sha3_384, EVP_sha3_512, EVP_sha512, EVP_sha512_256, EVP_MD,
};
use crate::error::Unspecified;
use crate::ptr::ConstPointer;
use core::ffi::c_uint;
use core::mem::MaybeUninit;
use digest_ctx::DigestContext;
pub use sha::{
SHA1_FOR_LEGACY_USE_ONLY, SHA1_OUTPUT_LEN, SHA224, SHA224_OUTPUT_LEN, SHA256,
SHA256_OUTPUT_LEN, SHA384, SHA384_OUTPUT_LEN, SHA3_256, SHA3_384, SHA3_512, SHA512, SHA512_256,
SHA512_256_OUTPUT_LEN, SHA512_OUTPUT_LEN,
};
/// A context for multi-step (Init-Update-Finish) digest calculations.
//
// # FIPS
// Context must be used with one of the following algorithms:
// * `SHA1_FOR_LEGACY_USE_ONLY`
// * `SHA224`
// * `SHA256`
// * `SHA384`
// * `SHA512`
// * `SHA512_256`
#[derive(Clone)]
pub struct Context {
    /// The context's algorithm.
    pub(crate) algorithm: &'static Algorithm,
    // Wrapper around the AWS-LC EVP digest context used for incremental hashing.
    digest_ctx: DigestContext,
    // The spec specifies that SHA-1 and SHA-256 support up to
    // 2^64-1 bits of input. SHA-384 and SHA-512 support up to
    // 2^128-1 bits.
    // Implementations of `digest` only support up
    // to 2^64-1 bits of input, which should be sufficient enough for
    // practical use cases.
    //
    // Running total of input bytes fed via `update`.
    msg_len: u64,
    // Set when msg_len has reached exactly `algorithm.max_input_len`.
    max_input_reached: bool,
}
impl Context {
    /// Constructs a new context.
    ///
    /// # Panics
    ///
    /// `new` panics if it fails to initialize an aws-lc digest context for the given
    /// algorithm.
    #[must_use]
    pub fn new(algorithm: &'static Algorithm) -> Self {
        Self {
            algorithm,
            digest_ctx: DigestContext::new(algorithm).unwrap(),
            msg_len: 0u64,
            max_input_reached: false,
        }
    }
    /// Updates the message to digest with all the data in `data`.
    ///
    /// # Panics
    /// Panics if this update would push the total input length past the
    /// algorithm's maximum allowed input, or overflow the internal `u64`
    /// length counter.
    #[inline]
    pub fn update(&mut self, data: &[u8]) {
        Self::try_update(self, data).expect("digest update failed");
    }
    /// Fallible form of `update`: tracks the running input length and feeds
    /// `data` into the underlying EVP digest context.
    #[inline]
    fn try_update(&mut self, data: &[u8]) -> Result<(), Unspecified> {
        // Check if the message has reached the algorithm's maximum allowed input,
        // or overflowed the msg_len counter. This bookkeeping is plain safe Rust,
        // so it stays outside the `unsafe` block below (keep `unsafe` minimal).
        let (msg_len, overflowed) = self.msg_len.overflowing_add(data.len() as u64);
        if overflowed || msg_len > self.algorithm.max_input_len {
            return Err(Unspecified);
        }
        self.msg_len = msg_len;
        self.max_input_reached = self.msg_len == self.algorithm.max_input_len;
        // Doesn't require boundary_check! guard
        // SAFETY: FFI call only. The digest context pointer is valid for the
        // lifetime of `self`, and the data pointer/length come from a live slice.
        if 1 != unsafe {
            EVP_DigestUpdate(
                self.digest_ctx.as_mut_ptr(),
                data.as_ptr().cast(),
                data.len(),
            )
        } {
            return Err(Unspecified);
        }
        Ok(())
    }
    /// Finalizes the digest calculation and returns the digest value.
    ///
    /// `finish` consumes the context so it cannot be (mis-)used after `finish`
    /// has been called.
    ///
    /// # Panics
    /// Panics if the digest is unable to be finalized
    #[inline]
    #[must_use]
    pub fn finish(self) -> Digest {
        Self::try_finish(self).expect("EVP_DigestFinal failed")
    }
    /// Fallible form of `finish`: writes the final digest into a
    /// `MAX_OUTPUT_LEN` buffer and records the algorithm's output length.
    #[inline]
    fn try_finish(mut self) -> Result<Digest, Unspecified> {
        let mut output = [0u8; MAX_OUTPUT_LEN];
        let mut out_len = MaybeUninit::<c_uint>::uninit();
        if 1 != indicator_check!(unsafe {
            EVP_DigestFinal(
                self.digest_ctx.as_mut_ptr(),
                output.as_mut_ptr(),
                out_len.as_mut_ptr(),
            )
        }) {
            return Err(Unspecified);
        }
        Ok(Digest {
            algorithm: self.algorithm,
            message: output,
            len: self.algorithm.output_len,
        })
    }
    /// The algorithm that this context is using.
    #[inline]
    #[must_use]
    pub fn algorithm(&self) -> &'static Algorithm {
        self.algorithm
    }
}
/// Returns the digest of `data` using the given digest algorithm.
///
// # FIPS
// This function must only be used with one of the following algorithms:
// * `SHA1_FOR_LEGACY_USE_ONLY`
// * `SHA224`
// * `SHA256`
// * `SHA384`
// * `SHA512`
// * `SHA512_256`
//
/// # Examples:
///
/// ```
/// # {
/// use aws_lc_rs::{digest, test};
/// let expected_hex = "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b";
/// let expected: Vec<u8> = test::from_hex(expected_hex).unwrap();
/// let actual = digest::digest(&digest::SHA256, b"hello, world");
///
/// assert_eq!(&expected, &actual.as_ref());
/// # }
/// ```
#[inline]
#[must_use]
pub fn digest(algorithm: &'static Algorithm, data: &[u8]) -> Digest {
    // The one-shot hash fills the front of a maximally-sized buffer; the
    // Digest records how many of those bytes are meaningful.
    let mut message = [0u8; MAX_OUTPUT_LEN];
    (algorithm.one_shot_hash)(data, &mut message);
    Digest {
        algorithm,
        message,
        len: algorithm.output_len,
    }
}
/// A calculated digest value.
///
/// Use [`Self::as_ref`] to get the value as a `&[u8]`.
#[derive(Clone, Copy)]
pub struct Digest {
    /// The trait `Copy` can't be implemented for dynamic arrays, so we set a
    /// fixed array and the appropriate length.
    message: [u8; MAX_OUTPUT_LEN],
    // Number of meaningful bytes at the front of `message`.
    len: usize,
    // The algorithm that produced (or was declared for) this digest.
    algorithm: &'static Algorithm,
}
impl Digest {
    /// Imports a digest value provided by an external source. This allows for
    /// the signing of content that might not be directly accessible.
    ///
    /// WARNING: Ensure that the digest is provided by a trusted source.
    /// When possible, prefer to directly compute the digest of content.
    ///
    /// # Errors
    /// Returns `Unspecified` if the imported value is the wrong length for the specified algorithm.
    pub fn import_less_safe(
        digest: &[u8],
        algorithm: &'static Algorithm,
    ) -> Result<Self, Unspecified> {
        // The imported bytes must be exactly one full digest for `algorithm`.
        if digest.len() != algorithm.output_len {
            return Err(Unspecified);
        }
        let mut message = [0u8; MAX_OUTPUT_LEN];
        message[..digest.len()].copy_from_slice(digest);
        Ok(Digest {
            message,
            len: digest.len(),
            algorithm,
        })
    }

    /// The algorithm that was used to calculate the digest value.
    #[inline]
    #[must_use]
    pub fn algorithm(&self) -> &'static Algorithm {
        self.algorithm
    }
}
impl AsRef<[u8]> for Digest {
    /// Borrows the digest bytes, truncated to the algorithm's output length.
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.message[0..self.len]
    }
}
impl core::fmt::Debug for Digest {
    // Renders as `<algorithm debug>:<hex of the digest bytes>`.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        write!(f, "{:?}:", self.algorithm)?;
        debug::write_hex_bytes(f, self.as_ref())
    }
}
/// A digest algorithm.
pub struct Algorithm {
    /// The length of a finalized digest.
    pub output_len: usize,
    /// The size of the chaining value of the digest function, in bytes. For
    /// non-truncated algorithms (SHA-1, SHA-256, SHA-512), this is equal to
    /// `output_len`. For truncated algorithms (e.g. SHA-224, SHA-384, SHA-512/256),
    /// this is equal to the length before truncation. This is mostly helpful
    /// for determining the size of an HMAC key that is appropriate for the
    /// digest algorithm.
    ///
    /// This field isn't actually used in *aws-lc-rs*, and is only
    /// kept for compatibility with the original *ring* implementation.
    #[deprecated]
    pub chaining_len: usize,
    /// The internal block length.
    pub block_len: usize,
    // max_input_len is computed as u64 instead of usize to prevent overflowing on 32-bit machines.
    max_input_len: u64,
    // One-shot hash function: writes this algorithm's digest of `msg` into
    // the front of `output` (callers pass MAX_OUTPUT_LEN-sized buffers).
    one_shot_hash: fn(msg: &[u8], output: &mut [u8]),
    // Identifies the algorithm; equality and Debug are derived from this.
    pub(crate) id: AlgorithmID,
}
unsafe impl Send for Algorithm {}
impl Algorithm {
    /// The length, in bytes, of a finalized digest.
    #[inline]
    #[must_use]
    pub fn output_len(&self) -> usize {
        self.output_len
    }

    /// The size of the chaining value of the digest function, in bytes. For
    /// non-truncated algorithms (SHA-1, SHA-256, SHA-512), this is equal to
    /// `output_len`. For truncated algorithms (e.g. SHA-224, SHA-384,
    /// SHA-512/256), this is equal to the length before truncation. This is
    /// mostly helpful for determining the size of an HMAC key that is
    /// appropriate for the digest algorithm.
    ///
    /// Kept only for compatibility with the original *ring* implementation;
    /// *aws-lc-rs* itself does not use it.
    // The outer allow silences clippy's warning about a deprecated function
    // reading a deprecated field.
    #[allow(deprecated)]
    #[deprecated]
    #[inline]
    #[must_use]
    pub fn chaining_len(&self) -> usize {
        self.chaining_len
    }

    /// The internal block length, in bytes.
    #[inline]
    #[must_use]
    pub fn block_len(&self) -> usize {
        self.block_len
    }
}
// Identifies a supported digest algorithm; used to select the matching
// AWS-LC `EVP_MD` (see `match_digest_type`) and to implement `PartialEq`
// and `Debug` for `Algorithm`.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) enum AlgorithmID {
    SHA1,
    SHA224,
    SHA256,
    SHA384,
    SHA512,
    SHA512_256,
    SHA3_256,
    SHA3_384,
    SHA3_512,
}
// Two `Algorithm`s are equal iff they identify the same digest algorithm;
// the remaining fields are fully determined by `id`.
impl PartialEq for Algorithm {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for Algorithm {}
// `Debug` output delegates to the `AlgorithmID` variant name (e.g. `SHA256`).
derive_debug_via_id!(Algorithm);
/// The maximum block length ([`Algorithm::block_len`]) of all the algorithms
/// in this module.
// NOTE(review): SHA3-256's block length is 136 bytes (`SHA3_256_BLOCK_LEN`),
// which exceeds 1024 / 8 = 128 — confirm whether the SHA-3 family is
// intentionally excluded from this bound.
pub const MAX_BLOCK_LEN: usize = 1024 / 8;
/// The maximum output length ([`Algorithm::output_len`]) of all the
/// algorithms in this module.
pub const MAX_OUTPUT_LEN: usize = 512 / 8;
/// The maximum chaining length ([`Algorithm::chaining_len`]) of all the
/// algorithms in this module.
pub const MAX_CHAINING_LEN: usize = MAX_OUTPUT_LEN;
/// Match digest types for `EVP_MD` functions.
///
/// Maps our algorithm identifier to the corresponding AWS-LC `EVP_MD`.
///
/// # Panics
/// Panics if AWS-LC returns a null `EVP_MD` for the identifier, which would
/// indicate a bug or a misbuilt libcrypto.
pub(crate) fn match_digest_type(algorithm_id: &AlgorithmID) -> ConstPointer<'_, EVP_MD> {
    unsafe {
        // The `EVP_sha*` functions return pointers to static method tables
        // (per AWS-LC's digest.h), so wrapping them as static const pointers
        // is sound.
        ConstPointer::new_static(match algorithm_id {
            AlgorithmID::SHA1 => EVP_sha1(),
            AlgorithmID::SHA224 => EVP_sha224(),
            AlgorithmID::SHA256 => EVP_sha256(),
            AlgorithmID::SHA384 => EVP_sha384(),
            AlgorithmID::SHA512 => EVP_sha512(),
            AlgorithmID::SHA512_256 => EVP_sha512_256(),
            AlgorithmID::SHA3_256 => EVP_sha3_256(),
            AlgorithmID::SHA3_384 => EVP_sha3_384(),
            AlgorithmID::SHA3_512 => EVP_sha3_512(),
        })
        .unwrap_or_else(|()| panic!("Digest algorithm not found: {algorithm_id:?}"))
    }
}
#[cfg(test)]
mod tests {
    use crate::digest;

    // FIPS service-indicator tests; only built when the feature is enabled.
    #[cfg(feature = "fips")]
    mod fips;

    // Exercises the maximum-input-length bound enforced by `Context::update`.
    mod max_input {
        extern crate alloc;
        use super::super::super::digest;
        use crate::digest::digest_ctx::DigestContext;
        use crate::digest::Digest;
        use alloc::vec;

        // Per algorithm, generates: one test that fills the context exactly
        // to `max_input_len` (must succeed) and two that exceed it by a full
        // block or a single byte (must panic inside `update`).
        macro_rules! max_input_tests {
            ( $algorithm_name:ident ) => {
                mod $algorithm_name {
                    use super::super::super::super::digest;
                    #[test]
                    fn max_input_test() {
                        super::max_input_test(&digest::$algorithm_name);
                    }
                    #[test]
                    #[should_panic(expected = "digest update failed")]
                    fn too_long_input_test_block() {
                        super::too_long_input_test_block(&digest::$algorithm_name);
                    }
                    #[test]
                    #[should_panic(expected = "digest update failed")]
                    fn too_long_input_test_byte() {
                        super::too_long_input_test_byte(&digest::$algorithm_name);
                    }
                }
            };
        }

        // Consumes the remaining capacity exactly; finishing must not panic.
        fn max_input_test(alg: &'static digest::Algorithm) {
            let mut context = nearly_full_context(alg);
            let next_input = vec![0u8; alg.block_len - 1];
            context.update(&next_input);
            let _: Digest = context.finish(); // no panic
        }
        // Overshoots the maximum by one byte via a full extra block.
        fn too_long_input_test_block(alg: &'static digest::Algorithm) {
            let mut context = nearly_full_context(alg);
            let next_input = vec![0u8; alg.block_len];
            context.update(&next_input);
            let _: Digest = context.finish(); // should panic
        }
        // Reaches the maximum exactly, then overshoots by a single byte.
        fn too_long_input_test_byte(alg: &'static digest::Algorithm) {
            let mut context = nearly_full_context(alg);
            let next_input = vec![0u8; alg.block_len - 1];
            context.update(&next_input); // no panic
            context.update(&[0]);
            let _: Digest = context.finish(); // should panic
        }
        // Builds a `Context` whose recorded `msg_len` leaves exactly
        // `block_len - 1` of remaining capacity, without hashing that much
        // actual data.
        fn nearly_full_context(alg: &'static digest::Algorithm) -> digest::Context {
            // Implementations of `digest` only support up
            // to 2^64-1 bits of input.
            let block_len = alg.block_len as u64;
            digest::Context {
                algorithm: alg,
                digest_ctx: DigestContext::new(alg).unwrap(),
                msg_len: alg.max_input_len - block_len + 1,
                max_input_reached: false,
            }
        }
        max_input_tests!(SHA1_FOR_LEGACY_USE_ONLY);
        max_input_tests!(SHA224);
        max_input_tests!(SHA256);
        max_input_tests!(SHA384);
        max_input_tests!(SHA512);
        max_input_tests!(SHA3_384);
        max_input_tests!(SHA3_512);
    }

    // Verifies that cloning a `Context` yields an independent context that
    // produces an identical digest, and that `Digest: Clone` round-trips.
    #[test]
    fn digest_coverage() {
        for alg in [
            &digest::SHA1_FOR_LEGACY_USE_ONLY,
            &digest::SHA224,
            &digest::SHA256,
            &digest::SHA384,
            &digest::SHA512,
            &digest::SHA3_384,
            &digest::SHA3_512,
        ] {
            // Clone after updating context with message, then check if the final Digest is the same.
            let mut ctx = digest::Context::new(alg);
            ctx.update(b"hello, world");
            let ctx_clone = ctx.clone();
            assert_eq!(ctx_clone.algorithm(), ctx.algorithm());
            let orig_digest = ctx.finish();
            let clone_digest = ctx_clone.finish();
            assert_eq!(orig_digest.algorithm(), clone_digest.algorithm());
            assert_eq!(orig_digest.as_ref(), clone_digest.as_ref());
            assert_eq!(orig_digest.clone().as_ref(), clone_digest.as_ref());
        }
    }

    // Round-trips a computed digest through `Digest::import_less_safe`.
    #[test]
    fn test_import_less_safe() {
        let digest = digest::digest(&digest::SHA256, b"hello, world");
        let digest_copy =
            digest::Digest::import_less_safe(digest.as_ref(), &digest::SHA256).unwrap();
        assert_eq!(digest.as_ref(), digest_copy.as_ref());
        assert_eq!(digest.algorithm, digest_copy.algorithm);
    }
}

View File

@@ -0,0 +1,72 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{
EVP_DigestInit_ex, EVP_MD_CTX_cleanup, EVP_MD_CTX_copy, EVP_MD_CTX_init, EVP_MD_CTX,
};
use crate::digest::{match_digest_type, Algorithm};
use crate::error::Unspecified;
use core::mem::MaybeUninit;
use core::ptr::null_mut;
pub(crate) struct DigestContext(EVP_MD_CTX);
impl DigestContext {
    /// Creates a context initialized for the given digest algorithm.
    ///
    /// # Errors
    /// Returns `Unspecified` if `EVP_DigestInit_ex` fails.
    pub fn new(algorithm: &'static Algorithm) -> Result<DigestContext, Unspecified> {
        let evp_md_type = match_digest_type(&algorithm.id);
        let mut dc = Self::new_uninit();
        unsafe {
            // Null ENGINE argument: use the default implementation.
            if 1 != EVP_DigestInit_ex(dc.as_mut_ptr(), evp_md_type.as_const_ptr(), null_mut()) {
                return Err(Unspecified);
            }
            Ok(dc)
        }
    }

    /// Creates a context with no digest type selected yet.
    pub fn new_uninit() -> DigestContext {
        let mut dc = MaybeUninit::<EVP_MD_CTX>::uninit();
        unsafe {
            // `EVP_MD_CTX_init` initializes the struct in place (per AWS-LC's
            // digest.h), so `assume_init` below is sound.
            EVP_MD_CTX_init(dc.as_mut_ptr());
            Self(dc.assume_init())
        }
    }

    // Mutable raw pointer for passing to `EVP_*` FFI calls.
    pub(crate) fn as_mut_ptr(&mut self) -> *mut EVP_MD_CTX {
        &mut self.0
    }

    // Const raw pointer for passing to `EVP_*` FFI calls.
    pub(crate) fn as_ptr(&self) -> *const EVP_MD_CTX {
        &self.0
    }
}
// SAFETY: the wrapped `EVP_MD_CTX` is exclusively owned by this value and is
// only mutated through `&mut self`. This presumes AWS-LC digest contexts hold
// no thread-affine state — NOTE(review): confirm against AWS-LC's documented
// thread-safety guarantees.
unsafe impl Send for DigestContext {}
unsafe impl Sync for DigestContext {}
impl Clone for DigestContext {
    /// Deep-copies the underlying `EVP_MD_CTX` via `try_clone`.
    ///
    /// # Panics
    /// Panics if `EVP_MD_CTX_copy` fails.
    fn clone(&self) -> Self {
        self.try_clone().expect("Unable to clone DigestContext")
    }
}
impl Drop for DigestContext {
    // Releases resources held inside the context. `EVP_MD_CTX_cleanup` frees
    // internal state only; the struct itself lives inline in this wrapper.
    fn drop(&mut self) {
        unsafe {
            EVP_MD_CTX_cleanup(self.as_mut_ptr());
        }
    }
}
impl DigestContext {
    /// Deep-copies the digest state into a new context.
    ///
    /// # Errors
    /// Returns a static message if `EVP_MD_CTX_copy` fails.
    fn try_clone(&self) -> Result<Self, &'static str> {
        let mut dc = MaybeUninit::<EVP_MD_CTX>::uninit();
        unsafe {
            // The first parameter of `EVP_MD_CTX_copy` should not be initialized.
            // https://github.com/aws/aws-lc/blob/98ccf4a316401112943bed604562102ad52efac6/include/openssl/digest.h#L280
            if 1 != EVP_MD_CTX_copy(dc.as_mut_ptr(), self.as_ptr()) {
                return Err("EVP_MD_CTX_copy failed");
            }
            // SAFETY: on success, `EVP_MD_CTX_copy` fully initialized `dc`.
            Ok(Self(dc.assume_init()))
        }
    }
}

255
vendor/aws-lc-rs/src/digest/sha.rs vendored Normal file
View File

@@ -0,0 +1,255 @@
// Copyright 2015-2022 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::digest::{Algorithm, AlgorithmID, Context};
/// The length of a block for SHA-1, in bytes.
const SHA1_BLOCK_LEN: usize = 512 / 8;
/// The length of the output of SHA-1, in bytes.
pub const SHA1_OUTPUT_LEN: usize = 160 / 8;
/// The length of the output of SHA-224, in bytes.
pub const SHA224_OUTPUT_LEN: usize = 224 / 8;
/// The length of the output of SHA-256, in bytes.
pub const SHA256_OUTPUT_LEN: usize = 256 / 8;
/// The length of a block for SHA-256-based algorithms, in bytes.
const SHA256_BLOCK_LEN: usize = 512 / 8;
/// The length of the output of SHA-384, in bytes.
pub const SHA384_OUTPUT_LEN: usize = 384 / 8;
/// The length of the output of SHA-512, in bytes.
pub const SHA512_OUTPUT_LEN: usize = 512 / 8;
/// The length of the output of SHA-512/256, in bytes.
pub const SHA512_256_OUTPUT_LEN: usize = 256 / 8;
/// The length of a block for SHA-512-based algorithms, in bytes.
const SHA512_BLOCK_LEN: usize = 1024 / 8;
/// The length of a block (the sponge "rate" in FIPS 202) for SHA3-256-based algorithms, in bytes.
const SHA3_256_BLOCK_LEN: usize = 136;
/// The length of a block (the sponge "rate" in FIPS 202) for SHA3-384-based algorithms, in bytes.
const SHA3_384_BLOCK_LEN: usize = 104;
/// The length of a block (the sponge "rate" in FIPS 202) for SHA3-512-based algorithms, in bytes.
const SHA3_512_BLOCK_LEN: usize = 72;
/// The length of the output of SHA3-256, in bytes.
pub const SHA3_256_OUTPUT_LEN: usize = 256 / 8;
/// The length of the output of SHA3-384, in bytes.
pub const SHA3_384_OUTPUT_LEN: usize = 384 / 8;
/// The length of the output of SHA3-512, in bytes.
pub const SHA3_512_OUTPUT_LEN: usize = 512 / 8;
/// SHA-1, SHA-224, and SHA-256 are limited to an input size of 2^64-1 bits.
/// SHA-384, SHA-512, and SHA-512/256 are limited to an input size of 2^128-1 bits according to the spec.
/// u64 is more than sufficient for practical use cases, so we limit the input length to 2^64-1 bits.
#[allow(clippy::cast_possible_truncation)]
const DIGEST_MAX_INPUT_LEN: u64 = u64::MAX;
/// SHA-1 as specified in [FIPS 180-4]. Deprecated.
///
/// [FIPS 180-4]: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
// `#[allow(deprecated)]` on these consts permits initializing the deprecated
// `chaining_len` field.
#[allow(deprecated)]
pub const SHA1_FOR_LEGACY_USE_ONLY: Algorithm = Algorithm {
    output_len: SHA1_OUTPUT_LEN,
    chaining_len: SHA1_OUTPUT_LEN,
    block_len: SHA1_BLOCK_LEN,
    max_input_len: DIGEST_MAX_INPUT_LEN,
    one_shot_hash: sha1_digest,
    id: AlgorithmID::SHA1,
};
/// SHA-224 as specified in [FIPS 180-4].
///
/// [FIPS 180-4]: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
#[allow(deprecated)]
pub const SHA224: Algorithm = Algorithm {
    output_len: SHA224_OUTPUT_LEN,
    // The chaining length is equivalent to the length before truncation.
    // SHA-224 is truncated from 256 bits so the chaining length is 256 bits, or 32 bytes.
    chaining_len: SHA256_OUTPUT_LEN,
    block_len: SHA256_BLOCK_LEN,
    max_input_len: DIGEST_MAX_INPUT_LEN,
    one_shot_hash: sha224_digest,
    id: AlgorithmID::SHA224,
};
/// SHA-256 as specified in [FIPS 180-4].
///
/// [FIPS 180-4]: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
#[allow(deprecated)]
pub const SHA256: Algorithm = Algorithm {
    output_len: SHA256_OUTPUT_LEN,
    chaining_len: SHA256_OUTPUT_LEN,
    block_len: SHA256_BLOCK_LEN,
    max_input_len: DIGEST_MAX_INPUT_LEN,
    one_shot_hash: sha256_digest,
    id: AlgorithmID::SHA256,
};
/// SHA-384 as specified in [FIPS 180-4].
///
/// [FIPS 180-4]: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
#[allow(deprecated)]
pub const SHA384: Algorithm = Algorithm {
    output_len: SHA384_OUTPUT_LEN,
    // The chaining length is equivalent to the length before truncation.
    // SHA-384 is truncated from 512 bits so the chaining length is 512 bits, or 64 bytes.
    chaining_len: SHA512_OUTPUT_LEN,
    block_len: SHA512_BLOCK_LEN,
    max_input_len: DIGEST_MAX_INPUT_LEN,
    one_shot_hash: sha384_digest,
    id: AlgorithmID::SHA384,
};
/// SHA-512 as specified in [FIPS 180-4].
///
/// [FIPS 180-4]: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
#[allow(deprecated)]
pub const SHA512: Algorithm = Algorithm {
    output_len: SHA512_OUTPUT_LEN,
    chaining_len: SHA512_OUTPUT_LEN,
    block_len: SHA512_BLOCK_LEN,
    max_input_len: DIGEST_MAX_INPUT_LEN,
    one_shot_hash: sha512_digest,
    id: AlgorithmID::SHA512,
};
/// SHA-512/256 as specified in [FIPS 180-4].
///
/// [FIPS 180-4]: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
#[allow(deprecated)]
pub const SHA512_256: Algorithm = Algorithm {
    output_len: SHA512_256_OUTPUT_LEN,
    // The chaining length is equivalent to the length before truncation.
    // SHA-512/256 is truncated from 512 bits so the chaining length is 512 bits, or 64 bytes.
    chaining_len: SHA512_OUTPUT_LEN,
    block_len: SHA512_BLOCK_LEN,
    max_input_len: DIGEST_MAX_INPUT_LEN,
    one_shot_hash: sha512_256_digest,
    id: AlgorithmID::SHA512_256,
};
/// SHA3-256 as specified in [FIPS 202].
///
/// [FIPS 202]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
// SHA-3 is not truncated, so the chaining length equals the output length.
#[allow(deprecated)]
pub const SHA3_256: Algorithm = Algorithm {
    output_len: SHA3_256_OUTPUT_LEN,
    chaining_len: SHA3_256_OUTPUT_LEN,
    block_len: SHA3_256_BLOCK_LEN,
    max_input_len: DIGEST_MAX_INPUT_LEN,
    one_shot_hash: sha3_256_digest,
    id: AlgorithmID::SHA3_256,
};
/// SHA3-384 as specified in [FIPS 202].
///
/// [FIPS 202]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
#[allow(deprecated)]
pub const SHA3_384: Algorithm = Algorithm {
    output_len: SHA3_384_OUTPUT_LEN,
    chaining_len: SHA3_384_OUTPUT_LEN,
    block_len: SHA3_384_BLOCK_LEN,
    max_input_len: DIGEST_MAX_INPUT_LEN,
    one_shot_hash: sha3_384_digest,
    id: AlgorithmID::SHA3_384,
};
/// SHA3-512 as specified in [FIPS 202].
///
/// [FIPS 202]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
#[allow(deprecated)]
pub const SHA3_512: Algorithm = Algorithm {
    output_len: SHA3_512_OUTPUT_LEN,
    chaining_len: SHA3_512_OUTPUT_LEN,
    block_len: SHA3_512_BLOCK_LEN,
    max_input_len: DIGEST_MAX_INPUT_LEN,
    one_shot_hash: sha3_512_digest,
    id: AlgorithmID::SHA3_512,
};
// One-shot wrappers over AWS-LC's single-call SHA-1/SHA-2 entry points.
// Each writes the algorithm's full digest to the front of `output`; callers
// (see `digest::digest`) pass MAX_OUTPUT_LEN-sized buffers.
fn sha1_digest(msg: &[u8], output: &mut [u8]) {
    // SAFETY: `output` must hold at least SHA1_OUTPUT_LEN (20) bytes.
    unsafe {
        aws_lc::SHA1(msg.as_ptr(), msg.len(), output.as_mut_ptr());
    }
}
fn sha224_digest(msg: &[u8], output: &mut [u8]) {
    // SAFETY: `output` must hold at least SHA224_OUTPUT_LEN (28) bytes.
    unsafe {
        aws_lc::SHA224(msg.as_ptr(), msg.len(), output.as_mut_ptr());
    }
}
fn sha256_digest(msg: &[u8], output: &mut [u8]) {
    // SAFETY: `output` must hold at least SHA256_OUTPUT_LEN (32) bytes.
    unsafe {
        aws_lc::SHA256(msg.as_ptr(), msg.len(), output.as_mut_ptr());
    }
}
fn sha384_digest(msg: &[u8], output: &mut [u8]) {
    // SAFETY: `output` must hold at least SHA384_OUTPUT_LEN (48) bytes.
    unsafe {
        aws_lc::SHA384(msg.as_ptr(), msg.len(), output.as_mut_ptr());
    }
}
fn sha512_digest(msg: &[u8], output: &mut [u8]) {
    // SAFETY: `output` must hold at least SHA512_OUTPUT_LEN (64) bytes.
    unsafe {
        aws_lc::SHA512(msg.as_ptr(), msg.len(), output.as_mut_ptr());
    }
}
fn sha512_256_digest(msg: &[u8], output: &mut [u8]) {
    // SAFETY: `output` must hold at least SHA512_256_OUTPUT_LEN (32) bytes.
    unsafe {
        aws_lc::SHA512_256(msg.as_ptr(), msg.len(), output.as_mut_ptr());
    }
}
// SHA-3 one-shot wrappers: no single-call binding is used here, so these run
// the streaming `Context` to completion and copy the digest into `output`
// (which must hold at least the algorithm's output length).
fn sha3_256_digest(msg: &[u8], output: &mut [u8]) {
    let mut context = Context::new(&SHA3_256);
    context.update(msg);
    output[..SHA3_256_OUTPUT_LEN].copy_from_slice(context.finish().as_ref());
}
fn sha3_384_digest(msg: &[u8], output: &mut [u8]) {
    let mut context = Context::new(&SHA3_384);
    context.update(msg);
    output[..SHA3_384_OUTPUT_LEN].copy_from_slice(context.finish().as_ref());
}
fn sha3_512_digest(msg: &[u8], output: &mut [u8]) {
    let mut context = Context::new(&SHA3_512);
    context.update(msg);
    output[..SHA3_512_OUTPUT_LEN].copy_from_slice(context.finish().as_ref());
}

View File

@@ -0,0 +1,43 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![cfg(debug_assertions)]
use crate::digest::{
Context, SHA1_FOR_LEGACY_USE_ONLY, SHA224, SHA256, SHA384, SHA3_256, SHA3_384, SHA3_512,
SHA512, SHA512_256,
};
use crate::fips::{assert_fips_status_indicator, FipsServiceStatus};
const TEST_MESSAGE: &str = "test message";

// Generates a test asserting the FIPS service-indicator transitions for the
// streaming digest API: construction and update leave the indicator unset;
// only finalization sets it.
macro_rules! digest_api {
    ($name:ident, $alg:expr, $expect:path) => {
        #[test]
        fn $name() {
            // Regardless of the algorithm you can always construct the context,
            // and the status will not change.
            let mut context =
                assert_fips_status_indicator!(Context::new($alg), FipsServiceStatus::Unset);
            // AWS-LC digest update API does not set the indicator.
            assert_fips_status_indicator!(
                context.update(TEST_MESSAGE.as_bytes()),
                FipsServiceStatus::Unset
            );
            // Finish API expected to set the service indicator.
            let _digest = assert_fips_status_indicator!(context.finish(), $expect);
        }
    };
}
digest_api!(sha1, &SHA1_FOR_LEGACY_USE_ONLY, FipsServiceStatus::Approved);
digest_api!(sha224, &SHA224, FipsServiceStatus::Approved);
digest_api!(sha256, &SHA256, FipsServiceStatus::Approved);
digest_api!(sha384, &SHA384, FipsServiceStatus::Approved);
digest_api!(sha512, &SHA512, FipsServiceStatus::Approved);
digest_api!(sha512_256, &SHA512_256, FipsServiceStatus::Approved);
digest_api!(sha3_256, &SHA3_256, FipsServiceStatus::Approved);
digest_api!(sha3_384, &SHA3_384, FipsServiceStatus::Approved);
digest_api!(sha3_512, &SHA3_512, FipsServiceStatus::Approved);

256
vendor/aws-lc-rs/src/ec.rs vendored Normal file
View File

@@ -0,0 +1,256 @@
// Copyright 2015-2016 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#[cfg(feature = "fips")]
use crate::aws_lc::EC_KEY_check_fips;
#[cfg(not(feature = "fips"))]
use crate::aws_lc::EC_KEY_check_key;
use crate::aws_lc::{
ECDSA_SIG_from_bytes, ECDSA_SIG_get0_r, ECDSA_SIG_get0_s, EC_GROUP_get_curve_name,
EC_KEY_get0_group, EC_group_p224, EC_group_p256, EC_group_p384, EC_group_p521,
EC_group_secp256k1, EVP_PKEY_CTX_set_ec_paramgen_curve_nid, EVP_PKEY_get0_EC_KEY,
NID_X9_62_prime256v1, NID_secp224r1, NID_secp256k1, NID_secp384r1, NID_secp521r1, EC_GROUP,
EC_KEY, EVP_PKEY, EVP_PKEY_EC,
};
use crate::ec::signature::AlgorithmID;
use crate::error::{KeyRejected, Unspecified};
#[cfg(feature = "fips")]
use crate::fips::indicator_check;
use crate::ptr::{ConstPointer, LcPtr};
use crate::signature::Signature;
use core::ffi::c_int;
use std::ptr::null;
pub(crate) mod encoding;
pub(crate) mod key_pair;
pub(crate) mod signature;
// P-521 has the largest field among the supported curves: 521-bit elements.
const ELEM_MAX_BITS: usize = 521;
// Field-element byte length, rounded up (66 bytes for P-521).
pub(crate) const ELEM_MAX_BYTES: usize = (ELEM_MAX_BITS + 7) / 8;
/// The maximum length, in bytes, of an encoded public key.
// One format octet plus the uncompressed x and y coordinates.
pub(crate) const PUBLIC_KEY_MAX_LEN: usize = 1 + (2 * ELEM_MAX_BYTES);
/// Checks that `ec_key`'s group matches the curve identified by
/// `expected_curve_nid`.
///
/// # Errors
/// `wrong_algorithm` if the key's curve NID differs from the expected one.
fn verify_ec_key_nid(
    ec_key: &ConstPointer<EC_KEY>,
    expected_curve_nid: i32,
) -> Result<(), KeyRejected> {
    let ec_group = ec_key
        .project_const_lifetime(unsafe { |ec_key| EC_KEY_get0_group(ec_key.as_const_ptr()) })?;
    let key_nid = unsafe { EC_GROUP_get_curve_name(ec_group.as_const_ptr()) };
    if key_nid != expected_curve_nid {
        return Err(KeyRejected::wrong_algorithm());
    }
    Ok(())
}
/// Checks only the curve NID of the EC key inside `evp_pkey` (no full key
/// validation). Compiled out under the `fips` feature, where callers use
/// [`validate_ec_evp_key`] instead.
///
/// # Errors
/// `wrong_algorithm` if the key is on a different curve.
#[inline]
#[cfg(not(feature = "fips"))]
pub(crate) fn verify_evp_key_nid(
    evp_pkey: &ConstPointer<EVP_PKEY>,
    expected_curve_nid: i32,
) -> Result<(), KeyRejected> {
    let ec_key = evp_pkey.project_const_lifetime(unsafe {
        |evp_pkey| EVP_PKEY_get0_EC_KEY(evp_pkey.as_const_ptr())
    })?;
    verify_ec_key_nid(&ec_key, expected_curve_nid)?;
    Ok(())
}
/// Verifies that `evp_pkey` holds an EC key on the expected curve and that
/// the key itself is internally consistent.
///
/// # Errors
/// `wrong_algorithm` for a curve mismatch; `inconsistent_components` if the
/// key fails AWS-LC's key check (`EC_KEY_check_key`, or `EC_KEY_check_fips`
/// with the service-indicator check under the `fips` feature).
#[inline]
pub(crate) fn validate_ec_evp_key(
    evp_pkey: &ConstPointer<EVP_PKEY>,
    expected_curve_nid: i32,
) -> Result<(), KeyRejected> {
    let ec_key = evp_pkey.project_const_lifetime(unsafe {
        |evp_pkey| EVP_PKEY_get0_EC_KEY(evp_pkey.as_const_ptr())
    })?;
    verify_ec_key_nid(&ec_key, expected_curve_nid)?;
    #[cfg(not(feature = "fips"))]
    if 1 != unsafe { EC_KEY_check_key(ec_key.as_const_ptr()) } {
        return Err(KeyRejected::inconsistent_components());
    }
    #[cfg(feature = "fips")]
    if 1 != indicator_check!(unsafe { EC_KEY_check_fips(ec_key.as_const_ptr()) }) {
        return Err(KeyRejected::inconsistent_components());
    }
    Ok(())
}
/// Generates a fresh EC key pair on the curve identified by `nid`.
///
/// # Errors
/// `Unspecified` if configuring the curve or generating the key fails.
#[inline]
pub(crate) fn evp_key_generate(nid: c_int) -> Result<LcPtr<EVP_PKEY>, Unspecified> {
    // Configure the keygen context with the requested curve before generation.
    let set_curve = |ctx| match unsafe { EVP_PKEY_CTX_set_ec_paramgen_curve_nid(ctx, nid) } {
        1 => Ok(()),
        _ => Err(()),
    };
    LcPtr::<EVP_PKEY>::generate(EVP_PKEY_EC, Some(set_curve))
}
/// Maps a curve NID to AWS-LC's corresponding static `EC_GROUP`.
///
/// # Errors
/// `Unspecified` for an unsupported NID: the match arm yields a null pointer,
/// which `ConstPointer::new_static` rejects.
#[inline]
#[allow(non_upper_case_globals)]
pub(crate) fn ec_group_from_nid(nid: i32) -> Result<ConstPointer<'static, EC_GROUP>, Unspecified> {
    Ok(unsafe {
        // The `EC_group_*` functions return pointers to static group objects,
        // hence the 'static lifetime.
        ConstPointer::new_static(match nid {
            NID_secp224r1 => EC_group_p224(),
            NID_X9_62_prime256v1 => EC_group_p256(),
            NID_secp384r1 => EC_group_p384(),
            NID_secp521r1 => EC_group_p521(),
            NID_secp256k1 => EC_group_secp256k1(),
            _ => {
                // OPENSSL_PUT_ERROR(EC, EC_R_UNKNOWN_GROUP);
                null()
            }
        })?
    })
}
/// Converts a DER-encoded ASN.1 ECDSA signature into the fixed-length
/// `r || s` form, left-padding each scalar with zeros to the curve's
/// private-key size.
///
/// # Errors
/// `Unspecified` if the bytes do not parse as an `ECDSA_SIG` or either
/// scalar cannot be extracted.
#[inline]
fn ecdsa_asn1_to_fixed(alg_id: &'static AlgorithmID, sig: &[u8]) -> Result<Signature, Unspecified> {
    let expected_number_size = alg_id.private_key_size();
    let ecdsa_sig = LcPtr::new(unsafe { ECDSA_SIG_from_bytes(sig.as_ptr(), sig.len()) })?;
    let r_bn = ecdsa_sig.project_const_lifetime(unsafe {
        |ecdsa_sig| ECDSA_SIG_get0_r(ecdsa_sig.as_const_ptr())
    })?;
    let r_buffer = r_bn.to_be_bytes();
    let s_bn = ecdsa_sig.project_const_lifetime(unsafe {
        |ecdsa_sig| ECDSA_SIG_get0_s(ecdsa_sig.as_const_ptr())
    })?;
    let s_buffer = s_bn.to_be_bytes();
    Ok(Signature::new(|slice| {
        // Big-endian scalars may be shorter than the fixed width; write each
        // right-aligned within its half so the leading bytes act as zero
        // padding (assumes `Signature::new` hands us a zeroed buffer —
        // NOTE(review): confirm in `crate::signature`).
        let (r_start, r_end) = (expected_number_size - r_buffer.len(), expected_number_size);
        let (s_start, s_end) = (
            2 * expected_number_size - s_buffer.len(),
            2 * expected_number_size,
        );
        slice[r_start..r_end].copy_from_slice(r_buffer.as_slice());
        slice[s_start..s_end].copy_from_slice(s_buffer.as_slice());
        2 * expected_number_size
    }))
}
/// Size, in bytes, of a SEC 1 compressed point on a curve whose field is
/// `curve_field_bits` wide: one format octet plus the x-coordinate.
#[inline]
pub(crate) const fn compressed_public_key_size_bytes(curve_field_bits: usize) -> usize {
    // Ceiling division of the field size to whole bytes.
    let field_bytes = (curve_field_bits + 7) / 8;
    1 + field_bytes
}
/// Size, in bytes, of a SEC 1 uncompressed point on a curve whose field is
/// `curve_field_bits` wide: one format octet plus the x and y coordinates.
#[inline]
pub(crate) const fn uncompressed_public_key_size_bytes(curve_field_bits: usize) -> usize {
    // Ceiling division of the field size to whole bytes, doubled for (x, y).
    let field_bytes = (curve_field_bits + 7) / 8;
    1 + 2 * field_bytes
}
#[cfg(test)]
mod tests {
    use crate::encoding::{
        AsBigEndian, AsDer, EcPublicKeyCompressedBin, EcPublicKeyUncompressedBin, PublicKeyX509Der,
    };
    use crate::signature::{
        EcdsaKeyPair, KeyPair, UnparsedPublicKey, ECDSA_P256_SHA256_FIXED,
        ECDSA_P256_SHA256_FIXED_SIGNING,
    };
    use crate::test::from_dirty_hex;
    use crate::{signature, test};

    // Parses a PKCS#8 P-256 private key, then checks the Debug rendering of
    // the key pair and the X.509 DER encoding of its public key.
    #[test]
    fn test_from_pkcs8() {
        let input = from_dirty_hex(
            r"308187020100301306072a8648ce3d020106082a8648ce3d030107046d306b0201010420090460075f15d
            2a256248000fb02d83ad77593dde4ae59fc5e96142dffb2bd07a14403420004cf0d13a3a7577231ea1b66cf4
            021cd54f21f4ac4f5f2fdd28e05bc7d2bd099d1374cd08d2ef654d6f04498db462f73e0282058dd661a4c9b0
            437af3f7af6e724",
        );
        let result = EcdsaKeyPair::from_pkcs8(&ECDSA_P256_SHA256_FIXED_SIGNING, &input);
        assert!(result.is_ok());
        let key_pair = result.unwrap();
        assert_eq!("EcdsaKeyPair { public_key: EcdsaPublicKey(\"04cf0d13a3a7577231ea1b66cf4021cd54f21f4ac4f5f2fdd28e05bc7d2bd099d1374cd08d2ef654d6f04498db462f73e0282058dd661a4c9b0437af3f7af6e724\") }",
            format!("{key_pair:?}"));
        assert_eq!(
            "EcdsaPrivateKey(ECDSA_P256)",
            format!("{:?}", key_pair.private_key())
        );
        let pub_key = key_pair.public_key();
        let der_pub_key: PublicKeyX509Der = pub_key.as_der().unwrap();
        assert_eq!(
            from_dirty_hex(
                r"3059301306072a8648ce3d020106082a8648ce3d03010703420004cf0d13a3a7577231ea1b66cf402
                1cd54f21f4ac4f5f2fdd28e05bc7d2bd099d1374cd08d2ef654d6f04498db462f73e0282058dd661a4c9
                b0437af3f7af6e724",
            )
            .as_slice(),
            der_pub_key.as_ref()
        );
    }

    // Verifies a known-answer ASN.1-encoded ECDSA-P256/SHA-256 signature.
    #[test]
    fn test_ecdsa_asn1_verify() {
        /*
        Curve = P-256
        Digest = SHA256
        Msg = ""
        Q = 0430345fd47ea21a11129be651b0884bfac698377611acc9f689458e13b9ed7d4b9d7599a68dcf125e7f31055ccb374cd04f6d6fd2b217438a63f6f667d50ef2f0
        Sig = 30440220341f6779b75e98bb42e01095dd48356cbf9002dc704ac8bd2a8240b88d3796c60220555843b1b4e264fe6ffe6e2b705a376c05c09404303ffe5d2711f3e3b3a010a1
        Result = P (0 )
        */
        let alg = &signature::ECDSA_P256_SHA256_ASN1;
        let msg = "";
        let public_key = from_dirty_hex(
            r"0430345fd47ea21a11129be651b0884bfac698377611acc9f689458e1
            3b9ed7d4b9d7599a68dcf125e7f31055ccb374cd04f6d6fd2b217438a63f6f667d50ef2f0",
        );
        let sig = from_dirty_hex(
            r"30440220341f6779b75e98bb42e01095dd48356cbf9002dc704ac8bd2a8240b8
            8d3796c60220555843b1b4e264fe6ffe6e2b705a376c05c09404303ffe5d2711f3e3b3a010a1",
        );
        let unparsed_pub_key = signature::UnparsedPublicKey::new(alg, &public_key);
        let actual_result = unparsed_pub_key.verify(msg.as_bytes(), &sig);
        assert!(actual_result.is_ok(), "Key: {}", test::to_hex(public_key));
    }

    // Signs with a fresh P-256 key and verifies against every public-key
    // encoding this crate supports (raw, compressed, uncompressed, X.509).
    #[test]
    fn public_key_formats() {
        const MESSAGE: &[u8] = b"message to be signed";
        let key_pair = EcdsaKeyPair::generate(&ECDSA_P256_SHA256_FIXED_SIGNING).unwrap();
        let public_key = key_pair.public_key();
        let as_ref_bytes = public_key.as_ref();
        let compressed = AsBigEndian::<EcPublicKeyCompressedBin>::as_be_bytes(public_key).unwrap();
        let uncompressed =
            AsBigEndian::<EcPublicKeyUncompressedBin>::as_be_bytes(public_key).unwrap();
        let pub_x509 = AsDer::<PublicKeyX509Der>::as_der(public_key).unwrap();
        // `as_ref()` is the uncompressed SEC 1 encoding; the compressed form
        // must start with 0x02/0x03, never the uncompressed tag 0x04.
        assert_eq!(as_ref_bytes, uncompressed.as_ref());
        assert_ne!(compressed.as_ref()[0], 0x04);
        let rng = crate::rand::SystemRandom::new();
        let signature = key_pair.sign(&rng, MESSAGE).unwrap();
        for pub_key_bytes in [
            as_ref_bytes,
            compressed.as_ref(),
            uncompressed.as_ref(),
            pub_x509.as_ref(),
        ] {
            UnparsedPublicKey::new(&ECDSA_P256_SHA256_FIXED, pub_key_bytes)
                .verify(MESSAGE, signature.as_ref())
                .unwrap();
        }
    }
}

273
vendor/aws-lc-rs/src/ec/encoding.rs vendored Normal file
View File

@@ -0,0 +1,273 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{EVP_PKEY, EVP_PKEY_EC};
use crate::ec::encoding::sec1::parse_sec1_public_point;
use crate::ec::validate_ec_evp_key;
use crate::error::KeyRejected;
use crate::ptr::LcPtr;
// [SEC 1](https://secg.org/sec1-v2.pdf)
//
// SEC 1: Elliptic Curve Cryptography, Version 2.0
pub(crate) mod sec1 {
use crate::aws_lc::{
point_conversion_form_t, BN_bn2cbb_padded, EC_GROUP_get_curve_name, EC_KEY_get0_group,
EC_KEY_get0_private_key, EC_KEY_get0_public_key, EC_KEY_new, EC_KEY_set_group,
EC_KEY_set_private_key, EC_KEY_set_public_key, EC_POINT_mul, EC_POINT_new,
EC_POINT_oct2point, EC_POINT_point2cbb, EVP_PKEY_assign_EC_KEY, EVP_PKEY_get0_EC_KEY,
EVP_PKEY_new, NID_X9_62_prime256v1, NID_secp256k1, NID_secp384r1, NID_secp521r1, BIGNUM,
EC_GROUP, EC_POINT, EVP_PKEY,
};
use crate::cbb::LcCBB;
use crate::ec::{
compressed_public_key_size_bytes, ec_group_from_nid, uncompressed_public_key_size_bytes,
validate_ec_evp_key, KeyRejected,
};
use crate::error::Unspecified;
use crate::ptr::{ConstPointer, DetachableLcPtr, LcPtr};
use std::ptr::{null, null_mut};
pub(crate) fn parse_sec1_public_point(
key_bytes: &[u8],
expected_curve_nid: i32,
) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
let ec_group = ec_group_from_nid(expected_curve_nid)?;
let mut ec_point = LcPtr::new(unsafe { EC_POINT_new(ec_group.as_const_ptr()) })?;
if 1 != unsafe {
EC_POINT_oct2point(
ec_group.as_const_ptr(),
ec_point.as_mut_ptr(),
key_bytes.as_ptr(),
key_bytes.len(),
null_mut(),
)
} {
return Err(KeyRejected::invalid_encoding());
}
from_ec_public_point(&ec_group, &ec_point)
}
#[inline]
fn from_ec_public_point(
ec_group: &ConstPointer<EC_GROUP>,
public_ec_point: &LcPtr<EC_POINT>,
) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
let nid = unsafe { EC_GROUP_get_curve_name(ec_group.as_const_ptr()) };
let mut ec_key = DetachableLcPtr::new(unsafe { EC_KEY_new() })?;
if 1 != unsafe { EC_KEY_set_group(ec_key.as_mut_ptr(), ec_group.as_const_ptr()) } {
return Err(KeyRejected::unexpected_error());
}
if 1 != unsafe {
EC_KEY_set_public_key(ec_key.as_mut_ptr(), public_ec_point.as_const_ptr())
} {
return Err(KeyRejected::inconsistent_components());
}
let mut pkey = LcPtr::new(unsafe { EVP_PKEY_new() })?;
if 1 != unsafe { EVP_PKEY_assign_EC_KEY(pkey.as_mut_ptr(), ec_key.as_mut_ptr()) } {
return Err(KeyRejected::unexpected_error());
}
ec_key.detach();
validate_ec_evp_key(&pkey.as_const(), nid)?;
Ok(pkey)
}
pub(crate) fn parse_sec1_private_bn(
priv_key: &[u8],
nid: i32,
) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
let ec_group = ec_group_from_nid(nid)?;
let priv_key = LcPtr::<BIGNUM>::try_from(priv_key)?;
let pkey = from_ec_private_bn(&ec_group, &priv_key.as_const())?;
Ok(pkey)
}
fn from_ec_private_bn(
ec_group: &ConstPointer<EC_GROUP>,
private_big_num: &ConstPointer<BIGNUM>,
) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
let mut ec_key = DetachableLcPtr::new(unsafe { EC_KEY_new() })?;
if 1 != unsafe { EC_KEY_set_group(ec_key.as_mut_ptr(), ec_group.as_const_ptr()) } {
return Err(KeyRejected::unexpected_error());
}
if 1 != unsafe {
EC_KEY_set_private_key(ec_key.as_mut_ptr(), private_big_num.as_const_ptr())
} {
return Err(KeyRejected::invalid_encoding());
}
let mut pub_key = LcPtr::new(unsafe { EC_POINT_new(ec_group.as_const_ptr()) })?;
if 1 != unsafe {
EC_POINT_mul(
ec_group.as_const_ptr(),
pub_key.as_mut_ptr(),
private_big_num.as_const_ptr(),
null(),
null(),
null_mut(),
)
} {
return Err(KeyRejected::unexpected_error());
}
if 1 != unsafe { EC_KEY_set_public_key(ec_key.as_mut_ptr(), pub_key.as_const_ptr()) } {
return Err(KeyRejected::unexpected_error());
}
let expected_curve_nid = unsafe { EC_GROUP_get_curve_name(ec_group.as_const_ptr()) };
let mut pkey = LcPtr::new(unsafe { EVP_PKEY_new() })?;
if 1 != unsafe { EVP_PKEY_assign_EC_KEY(pkey.as_mut_ptr(), ec_key.as_mut_ptr()) } {
return Err(KeyRejected::unexpected_error());
}
ec_key.detach();
// Validate the EC_KEY before returning it.
validate_ec_evp_key(&pkey.as_const(), expected_curve_nid)?;
Ok(pkey)
}
pub(crate) fn marshal_sec1_public_point(
evp_pkey: &LcPtr<EVP_PKEY>,
compressed: bool,
) -> Result<Vec<u8>, Unspecified> {
let pub_key_size = if compressed {
compressed_public_key_size_bytes(evp_pkey.as_const().key_size_bits())
} else {
uncompressed_public_key_size_bytes(evp_pkey.as_const().key_size_bits())
};
let mut cbb = LcCBB::new(pub_key_size);
marshal_sec1_public_point_into_cbb(&mut cbb, evp_pkey, compressed)?;
cbb.into_vec()
}
pub(crate) fn marshal_sec1_public_point_into_buffer(
buffer: &mut [u8],
evp_pkey: &LcPtr<EVP_PKEY>,
compressed: bool,
) -> Result<usize, Unspecified> {
let mut cbb = LcCBB::new_from_slice(buffer);
marshal_sec1_public_point_into_cbb(&mut cbb, evp_pkey, compressed)?;
cbb.finish()
}
/// Writes the SEC 1 encoding (compressed or uncompressed point format) of the
/// public key held by `evp_pkey` into `cbb`.
fn marshal_sec1_public_point_into_cbb(
    cbb: &mut LcCBB,
    evp_pkey: &LcPtr<EVP_PKEY>,
    compressed: bool,
) -> Result<(), Unspecified> {
    // Borrow the EC_KEY, its group, and its public point; the projections tie
    // the borrowed pointers' lifetimes to `evp_pkey`.
    let ec_key = evp_pkey.project_const_lifetime(unsafe {
        |evp_pkey| EVP_PKEY_get0_EC_KEY(evp_pkey.as_const_ptr())
    })?;
    let ec_group = ec_key
        .project_const_lifetime(unsafe { |ec_key| EC_KEY_get0_group(ec_key.as_const_ptr()) })?;
    let ec_point = ec_key.project_const_lifetime(unsafe {
        |ec_key| EC_KEY_get0_public_key(ec_key.as_const_ptr())
    })?;
    let point_conversion_form = if compressed {
        point_conversion_form_t::POINT_CONVERSION_COMPRESSED
    } else {
        point_conversion_form_t::POINT_CONVERSION_UNCOMPRESSED
    };
    // EC_POINT_point2cbb returns 1 on success; the trailing null is the
    // optional BN_CTX argument.
    if 1 != unsafe {
        EC_POINT_point2cbb(
            cbb.as_mut_ptr(),
            ec_group.as_const_ptr(),
            ec_point.as_const_ptr(),
            point_conversion_form,
            null_mut(),
        )
    } {
        return Err(Unspecified);
    }
    Ok(())
}
/// Serializes the private scalar of `evp_pkey` as a big-endian, zero-padded
/// integer whose length is fixed by the key's curve.
pub(crate) fn marshal_sec1_private_key(
    evp_pkey: &LcPtr<EVP_PKEY>,
) -> Result<Vec<u8>, Unspecified> {
    let ec_key = evp_pkey.project_const_lifetime(unsafe {
        |evp_pkey| EVP_PKEY_get0_EC_KEY(evp_pkey.as_const_ptr())
    })?;
    let ec_group = ec_key
        .project_const_lifetime(unsafe { |ec_key| EC_KEY_get0_group(ec_key.as_const_ptr()) })?;
    let nid = unsafe { EC_GROUP_get_curve_name(ec_group.as_const_ptr()) };
    // Fixed encoding length in bytes per supported curve; any other curve is
    // rejected outright.
    #[allow(non_upper_case_globals)]
    let key_size: usize = match nid {
        NID_X9_62_prime256v1 | NID_secp256k1 => Ok(32usize),
        NID_secp384r1 => Ok(48usize),
        NID_secp521r1 => Ok(66usize),
        _ => Err(Unspecified),
    }?;
    let private_bn = ec_key.project_const_lifetime(unsafe {
        |ec_key| EC_KEY_get0_private_key(ec_key.as_const_ptr())
    })?;
    let mut cbb = LcCBB::new(key_size);
    // BN_bn2cbb_padded left-pads with zeros to exactly `key_size` bytes.
    if 1 != unsafe { BN_bn2cbb_padded(cbb.as_mut_ptr(), key_size, private_bn.as_const_ptr()) } {
        return Err(Unspecified);
    }
    cbb.into_vec()
}
}
pub(crate) mod rfc5915 {
    //! Conversions between `EVP_PKEY` EC keys and the RFC 5915 `ECPrivateKey`
    //! DER structure.
    use crate::aws_lc::{
        EC_KEY_get_enc_flags, EC_KEY_marshal_private_key, EC_KEY_parse_private_key,
        EVP_PKEY_get0_EC_KEY, EVP_PKEY_new, EVP_PKEY_set1_EC_KEY, EVP_PKEY,
    };
    use crate::cbb::LcCBB;
    use crate::cbs::build_CBS;
    use crate::ec::ec_group_from_nid;
    use crate::error::{KeyRejected, Unspecified};
    use crate::ptr::LcPtr;
    /// Parses an RFC 5915 `ECPrivateKey` DER structure, constraining the parse
    /// to the curve identified by `expected_curve_nid`.
    pub(crate) fn parse_rfc5915_private_key(
        key_bytes: &[u8],
        expected_curve_nid: i32,
    ) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
        let ec_group = ec_group_from_nid(expected_curve_nid)?;
        let mut cbs = build_CBS(key_bytes);
        // Passing the group to EC_KEY_parse_private_key pins the expected curve.
        let mut ec_key =
            LcPtr::new(unsafe { EC_KEY_parse_private_key(&mut cbs, ec_group.as_const_ptr()) })?;
        let mut evp_pkey = LcPtr::new(unsafe { EVP_PKEY_new() })?;
        // "set1" presumably increments the EC_KEY refcount, so `ec_key` is
        // still owned (and freed) by its LcPtr — confirm against AWS-LC docs.
        if 1 != unsafe { EVP_PKEY_set1_EC_KEY(evp_pkey.as_mut_ptr(), ec_key.as_mut_ptr()) } {
            return Err(KeyRejected::unexpected_error());
        }
        Ok(evp_pkey)
    }
    /// Serializes the EC key in `evp_pkey` as an RFC 5915 `ECPrivateKey` DER
    /// structure, preserving the key's existing encoding flags.
    pub(crate) fn marshal_rfc5915_private_key(
        evp_pkey: &LcPtr<EVP_PKEY>,
    ) -> Result<Vec<u8>, Unspecified> {
        let ec_key = evp_pkey.project_const_lifetime(unsafe {
            |evp_pkey| EVP_PKEY_get0_EC_KEY(evp_pkey.as_const_ptr())
        })?;
        let mut cbb = LcCBB::new(evp_pkey.as_const().key_size_bytes());
        let enc_flags = unsafe { EC_KEY_get_enc_flags(ec_key.as_const_ptr()) };
        if 1 != unsafe {
            EC_KEY_marshal_private_key(cbb.as_mut_ptr(), ec_key.as_const_ptr(), enc_flags)
        } {
            return Err(Unspecified);
        }
        cbb.into_vec()
    }
}
/// Parses an EC public key from either an X.509 `SubjectPublicKeyInfo`
/// (RFC 5280) document or a raw SEC 1 encoded point, then validates it
/// against `expected_curve_nid`.
///
/// # Errors
/// `KeyRejected` if neither format parses or the key fails curve validation.
pub(crate) fn parse_ec_public_key(
    key_bytes: &[u8],
    expected_curve_nid: i32,
) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
    // `or_else` defers the SEC 1 parse so it only runs when the RFC 5280
    // parse fails; the previous `or(..)` evaluated the fallback eagerly.
    LcPtr::<EVP_PKEY>::parse_rfc5280_public_key(key_bytes, EVP_PKEY_EC)
        .or_else(|_| parse_sec1_public_point(key_bytes, expected_curve_nid))
        .and_then(|key| validate_ec_evp_key(&key.as_const(), expected_curve_nid).map(|()| key))
}

354
vendor/aws-lc-rs/src/ec/key_pair.rs vendored Normal file
View File

@@ -0,0 +1,354 @@
// Copyright 2015-2016 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{EVP_PKEY, EVP_PKEY_EC};
use crate::digest::Digest;
use crate::ec::evp_key_generate;
use crate::ec::signature::{EcdsaSignatureFormat, EcdsaSigningAlgorithm, PublicKey};
#[cfg(feature = "fips")]
use crate::ec::validate_ec_evp_key;
#[cfg(not(feature = "fips"))]
use crate::ec::verify_evp_key_nid;
use core::fmt;
use core::fmt::{Debug, Formatter};
use crate::ec;
use crate::ec::encoding::rfc5915::{marshal_rfc5915_private_key, parse_rfc5915_private_key};
use crate::ec::encoding::sec1::{
marshal_sec1_private_key, parse_sec1_private_bn, parse_sec1_public_point,
};
use crate::encoding::{AsBigEndian, AsDer, EcPrivateKeyBin, EcPrivateKeyRfc5915Der};
use crate::error::{KeyRejected, Unspecified};
use crate::evp_pkey::No_EVP_PKEY_CTX_consumer;
use crate::pkcs8::{Document, Version};
use crate::ptr::LcPtr;
use crate::rand::SecureRandom;
use crate::signature::{KeyPair, Signature};
/// An ECDSA key pair, used for signing.
#[allow(clippy::module_name_repetitions)]
pub struct EcdsaKeyPair {
    algorithm: &'static EcdsaSigningAlgorithm,
    evp_pkey: LcPtr<EVP_PKEY>,
    // Cached public key, derived from `evp_pkey` at construction.
    pubkey: PublicKey,
}
impl Debug for EcdsaKeyPair {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        // Write straight into the formatter; the previous
        // `write_str(&format!(..))` allocated an intermediate String.
        // Only the public half is printed — private material never appears.
        write!(f, "EcdsaKeyPair {{ public_key: {:?} }}", self.pubkey)
    }
}
// SAFETY: NOTE(review) — these impls assert the wrapped `EVP_PKEY` may be
// sent/shared across threads; confirm AWS-LC's key refcounting is atomic
// before relying on this.
unsafe impl Send for EcdsaKeyPair {}
unsafe impl Sync for EcdsaKeyPair {}
impl KeyPair for EcdsaKeyPair {
    type PublicKey = PublicKey;
    #[inline]
    /// Provides the public key.
    fn public_key(&self) -> &Self::PublicKey {
        &self.pubkey
    }
}
impl EcdsaKeyPair {
    // Constructs the pair from an already-validated EVP_PKEY, deriving and
    // caching the public key.
    #[allow(clippy::needless_pass_by_value)]
    fn new(
        algorithm: &'static EcdsaSigningAlgorithm,
        evp_pkey: LcPtr<EVP_PKEY>,
    ) -> Result<Self, ()> {
        let pubkey = ec::signature::public_key_from_evp_pkey(&evp_pkey, algorithm)?;
        Ok(Self {
            algorithm,
            evp_pkey,
            pubkey,
        })
    }
    /// Generates a new key pair.
    ///
    /// # Errors
    /// `error::Unspecified` on internal error.
    ///
    pub fn generate(alg: &'static EcdsaSigningAlgorithm) -> Result<Self, Unspecified> {
        let evp_pkey = evp_key_generate(alg.0.id.nid())?;
        Ok(Self::new(alg, evp_pkey)?)
    }
    /// Constructs an ECDSA key pair by parsing an unencrypted PKCS#8 v1
    /// id-ecPublicKey `ECPrivateKey` key.
    ///
    /// # Errors
    /// `error::KeyRejected` if bytes do not encode an ECDSA key pair or if the key is otherwise not
    /// acceptable.
    pub fn from_pkcs8(
        alg: &'static EcdsaSigningAlgorithm,
        pkcs8: &[u8],
    ) -> Result<Self, KeyRejected> {
        // Includes a call to `EC_KEY_check_key`
        let evp_pkey = LcPtr::<EVP_PKEY>::parse_rfc5208_private_key(pkcs8, EVP_PKEY_EC)?;
        #[cfg(not(feature = "fips"))]
        verify_evp_key_nid(&evp_pkey.as_const(), alg.id.nid())?;
        #[cfg(feature = "fips")]
        validate_ec_evp_key(&evp_pkey.as_const(), alg.id.nid())?;
        let key_pair = Self::new(alg, evp_pkey)?;
        Ok(key_pair)
    }
    /// Generates a new key pair and returns the key pair serialized as a
    /// PKCS#8 v1 document.
    ///
    /// # *ring* Compatibility
    /// Our implementation ignores the `SecureRandom` parameter.
    ///
    /// # Errors
    /// `error::Unspecified` on internal error.
    pub fn generate_pkcs8(
        alg: &'static EcdsaSigningAlgorithm,
        _rng: &dyn SecureRandom,
    ) -> Result<Document, Unspecified> {
        let key_pair = Self::generate(alg)?;
        key_pair.to_pkcs8v1()
    }
    /// Serializes this `EcdsaKeyPair` into a PKCS#8 v1 document.
    ///
    /// # Errors
    /// `error::Unspecified` on internal error.
    ///
    pub fn to_pkcs8v1(&self) -> Result<Document, Unspecified> {
        Ok(Document::new(
            self.evp_pkey
                .as_const()
                .marshal_rfc5208_private_key(Version::V1)?,
        ))
    }
    /// Constructs an ECDSA key pair from the private key and public key bytes
    ///
    /// The private key must encoded as a big-endian fixed-length integer. For
    /// example, a P-256 private key must be 32 bytes prefixed with leading
    /// zeros as needed.
    ///
    /// The public key is encoding in uncompressed form using the
    /// Octet-String-to-Elliptic-Curve-Point algorithm in
    /// [SEC 1: Elliptic Curve Cryptography, Version 2.0].
    ///
    /// This is intended for use by code that deserializes key pairs. It is
    /// recommended to use `EcdsaKeyPair::from_pkcs8()` (with a PKCS#8-encoded
    /// key) instead.
    ///
    /// [SEC 1: Elliptic Curve Cryptography, Version 2.0]:
    /// http://www.secg.org/sec1-v2.pdf
    ///
    /// # Errors
    /// `error::KeyRejected` if parsing failed or key otherwise unacceptable.
    pub fn from_private_key_and_public_key(
        alg: &'static EcdsaSigningAlgorithm,
        private_key: &[u8],
        public_key: &[u8],
    ) -> Result<Self, KeyRejected> {
        let priv_evp_pkey = parse_sec1_private_bn(private_key, alg.id.nid())?;
        let pub_evp_pkey = parse_sec1_public_point(public_key, alg.id.nid())?;
        // EVP_PKEY_cmp only compares params and public key
        if !priv_evp_pkey.eq(&pub_evp_pkey) {
            return Err(KeyRejected::inconsistent_components());
        }
        let key_pair = Self::new(alg, priv_evp_pkey)?;
        Ok(key_pair)
    }
    /// Deserializes a DER-encoded private key structure to produce a `EcdsaKeyPair`.
    ///
    /// This function is typically used to deserialize RFC 5915 encoded private keys, but it will
    /// attempt to automatically detect other key formats. This function supports unencrypted
    /// PKCS#8 `PrivateKeyInfo` structures as well as key type specific formats.
    ///
    /// See `EcdsaPrivateKey::as_der`.
    ///
    /// # Errors
    /// `error::KeyRejected` if parsing failed or key otherwise unacceptable.
    ///
    /// # Panics
    pub fn from_private_key_der(
        alg: &'static EcdsaSigningAlgorithm,
        private_key: &[u8],
    ) -> Result<Self, KeyRejected> {
        // Try PKCS#8 first; only attempt the RFC 5915 parse when that fails.
        // `or_else` avoids the eager second parse the previous `or(..)` did.
        let evp_pkey = LcPtr::<EVP_PKEY>::parse_rfc5208_private_key(private_key, EVP_PKEY_EC)
            .or_else(|_| parse_rfc5915_private_key(private_key, alg.id.nid()))?;
        #[cfg(not(feature = "fips"))]
        verify_evp_key_nid(&evp_pkey.as_const(), alg.id.nid())?;
        #[cfg(feature = "fips")]
        validate_ec_evp_key(&evp_pkey.as_const(), alg.id.nid())?;
        Ok(Self::new(alg, evp_pkey)?)
    }
    /// Access functions related to the private key.
    #[must_use]
    pub fn private_key(&self) -> PrivateKey<'_> {
        PrivateKey(self)
    }
    /// [`EcdsaSigningAlgorithm`] which was used to create this [`EcdsaKeyPair`]
    #[must_use]
    pub fn algorithm(&self) -> &'static EcdsaSigningAlgorithm {
        self.algorithm
    }
    /// Returns a signature for the message.
    ///
    /// # *ring* Compatibility
    /// Our implementation ignores the `SecureRandom` parameter.
    ///
    /// # Errors
    /// `error::Unspecified` on internal error.
    //
    // # FIPS
    // The following conditions must be met:
    // * NIST Elliptic Curves: P256, P384, P521
    // * Digest Algorithms: SHA256, SHA384, SHA512
    #[inline]
    pub fn sign(&self, _rng: &dyn SecureRandom, message: &[u8]) -> Result<Signature, Unspecified> {
        let out_sig = self.evp_pkey.sign(
            message,
            Some(self.algorithm.digest),
            No_EVP_PKEY_CTX_consumer,
        )?;
        Ok(match self.algorithm.sig_format {
            EcdsaSignatureFormat::ASN1 => Signature::new(|slice| {
                slice[..out_sig.len()].copy_from_slice(&out_sig);
                out_sig.len()
            }),
            EcdsaSignatureFormat::Fixed => ec::ecdsa_asn1_to_fixed(self.algorithm.id, &out_sig)?,
        })
    }
    /// Returns a signature for the message corresponding to the provided digest.
    ///
    /// # Errors
    /// `error::Unspecified` on internal error.
    //
    // # FIPS
    // Not allowed.
    #[inline]
    pub fn sign_digest(&self, digest: &Digest) -> Result<Signature, Unspecified> {
        // Reject a mismatched digest algorithm before performing the
        // (relatively expensive) signing operation; the previous code signed
        // first and only then checked, discarding the result.
        if self.algorithm.digest != digest.algorithm() {
            return Err(Unspecified);
        }
        let out_sig = self
            .evp_pkey
            .sign_digest(digest, No_EVP_PKEY_CTX_consumer)?;
        Ok(match self.algorithm.sig_format {
            EcdsaSignatureFormat::ASN1 => Signature::new(|slice| {
                slice[..out_sig.len()].copy_from_slice(&out_sig);
                out_sig.len()
            }),
            EcdsaSignatureFormat::Fixed => ec::ecdsa_asn1_to_fixed(self.algorithm.id, &out_sig)?,
        })
    }
}
/// Elliptic curve private key.
pub struct PrivateKey<'a>(&'a EcdsaKeyPair);
impl Debug for PrivateKey<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Only the algorithm id is revealed — key material is never printed.
        // `write!` avoids the intermediate String the old `format!` built.
        write!(f, "EcdsaPrivateKey({:?})", self.0.algorithm.id)
    }
}
impl AsBigEndian<EcPrivateKeyBin<'static>> for PrivateKey<'_> {
    /// Exposes the private key encoded as a big-endian fixed-length integer.
    ///
    /// For most use-cases, `EcdsaKeyPair::to_pkcs8()` should be preferred.
    ///
    /// # Errors
    /// `error::Unspecified` if serialization failed.
    fn as_be_bytes(&self) -> Result<EcPrivateKeyBin<'static>, Unspecified> {
        let buffer = marshal_sec1_private_key(&self.0.evp_pkey)?;
        Ok(EcPrivateKeyBin::new(buffer))
    }
}
impl AsDer<EcPrivateKeyRfc5915Der<'static>> for PrivateKey<'_> {
    /// Serializes the key as a DER-encoded `ECPrivateKey` (RFC 5915) structure.
    ///
    /// # Errors
    /// `error::Unspecified` if serialization failed.
    fn as_der(&self) -> Result<EcPrivateKeyRfc5915Der<'static>, Unspecified> {
        let bytes = marshal_rfc5915_private_key(&self.0.evp_pkey)?;
        Ok(EcPrivateKeyRfc5915Der::new(bytes))
    }
}
#[cfg(test)]
mod tests {
    use crate::encoding::AsDer;
    use crate::signature::{
        EcdsaKeyPair, ECDSA_P256K1_SHA256_ASN1_SIGNING, ECDSA_P256_SHA256_FIXED_SIGNING,
        ECDSA_P384_SHA3_384_FIXED_SIGNING, ECDSA_P521_SHA512_FIXED_SIGNING,
    };
    // A key marshalled for one curve must be rejected when parsed under any
    // other supported algorithm.
    #[test]
    fn test_reject_wrong_curve() {
        let supported_algs = [
            &ECDSA_P256_SHA256_FIXED_SIGNING,
            &ECDSA_P384_SHA3_384_FIXED_SIGNING,
            &ECDSA_P521_SHA512_FIXED_SIGNING,
            &ECDSA_P256K1_SHA256_ASN1_SIGNING,
        ];
        for marshal_alg in supported_algs {
            let key_pair = EcdsaKeyPair::generate(marshal_alg).unwrap();
            let key_pair_doc = key_pair.to_pkcs8v1().unwrap();
            let key_pair_bytes = key_pair_doc.as_ref();
            for parse_alg in supported_algs {
                if parse_alg == marshal_alg {
                    continue;
                }
                let result = EcdsaKeyPair::from_private_key_der(parse_alg, key_pair_bytes);
                assert!(result.is_err());
            }
        }
    }
    // Round-trips one key through both PKCS#8 (RFC 5208) and ECPrivateKey
    // (RFC 5915) encodings and checks all three pairs compare equal.
    #[test]
    fn test_from_private_key_der() {
        let key_pair = EcdsaKeyPair::generate(&ECDSA_P256_SHA256_FIXED_SIGNING).unwrap();
        let bytes_5208 = key_pair.to_pkcs8v1().unwrap();
        let bytes_5915 = key_pair.private_key().as_der().unwrap();
        let key_pair_5208 = EcdsaKeyPair::from_private_key_der(
            &ECDSA_P256_SHA256_FIXED_SIGNING,
            bytes_5208.as_ref(),
        )
        .unwrap();
        let key_pair_5915 = EcdsaKeyPair::from_private_key_der(
            &ECDSA_P256_SHA256_FIXED_SIGNING,
            bytes_5915.as_ref(),
        )
        .unwrap();
        assert_eq!(key_pair.evp_pkey, key_pair_5208.evp_pkey);
        assert_eq!(key_pair.evp_pkey, key_pair_5915.evp_pkey);
        assert_eq!(key_pair_5208.evp_pkey, key_pair_5915.evp_pkey);
        assert_eq!(key_pair_5915.algorithm, &ECDSA_P256_SHA256_FIXED_SIGNING);
    }
}

335
vendor/aws-lc-rs/src/ec/signature.rs vendored Normal file
View File

@@ -0,0 +1,335 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{
ECDSA_SIG_new, ECDSA_SIG_set0, ECDSA_SIG_to_bytes, NID_X9_62_prime256v1, NID_secp256k1,
NID_secp384r1, NID_secp521r1, BIGNUM, ECDSA_SIG, EVP_PKEY,
};
use crate::digest::Digest;
use crate::ec::compressed_public_key_size_bytes;
use crate::ec::encoding::parse_ec_public_key;
use crate::ec::encoding::sec1::marshal_sec1_public_point;
use crate::encoding::{
AsBigEndian, AsDer, EcPublicKeyCompressedBin, EcPublicKeyUncompressedBin, PublicKeyX509Der,
};
use crate::error::Unspecified;
use crate::evp_pkey::No_EVP_PKEY_CTX_consumer;
use crate::ptr::{DetachableLcPtr, LcPtr};
use crate::signature::{ParsedPublicKey, ParsedVerificationAlgorithm, VerificationAlgorithm};
use crate::{digest, sealed};
use core::fmt;
use core::fmt::{Debug, Formatter};
use std::mem::MaybeUninit;
use std::ops::Deref;
use std::ptr::null_mut;
#[cfg(feature = "ring-sig-verify")]
use untrusted::Input;
/// An ECDSA verification algorithm.
#[derive(Debug, Eq, PartialEq)]
pub struct EcdsaVerificationAlgorithm {
    // Curve identifier.
    pub(crate) id: &'static AlgorithmID,
    // Digest applied to the message for sign/verify.
    pub(crate) digest: &'static digest::Algorithm,
    // Expected signature wire format (ASN.1 DER or fixed-width `r || s`).
    pub(crate) sig_format: EcdsaSignatureFormat,
}
/// An ECDSA signing algorithm.
#[derive(Debug, Eq, PartialEq)]
pub struct EcdsaSigningAlgorithm(pub(crate) &'static EcdsaVerificationAlgorithm);
impl Deref for EcdsaSigningAlgorithm {
    type Target = EcdsaVerificationAlgorithm;
    #[inline]
    // A signing algorithm transparently exposes its verification parameters.
    fn deref(&self) -> &Self::Target {
        self.0
    }
}
impl sealed::Sealed for EcdsaVerificationAlgorithm {}
impl sealed::Sealed for EcdsaSigningAlgorithm {}
// How an ECDSA signature is encoded on the wire.
#[derive(Debug, Eq, PartialEq)]
pub(crate) enum EcdsaSignatureFormat {
    // ASN.1 DER encoding of (r, s).
    ASN1,
    // Fixed-width big-endian `r || s` concatenation.
    Fixed,
}
// Identifies the ECDSA curves supported by this crate.
#[derive(Debug, Eq, PartialEq)]
#[allow(non_camel_case_types)]
pub(crate) enum AlgorithmID {
    ECDSA_P256,
    ECDSA_P384,
    ECDSA_P521,
    ECDSA_P256K1,
}
impl AlgorithmID {
    /// Returns the AWS-LC NID constant for this curve.
    #[inline]
    pub(crate) fn nid(&'static self) -> i32 {
        match self {
            AlgorithmID::ECDSA_P256 => NID_X9_62_prime256v1,
            AlgorithmID::ECDSA_P384 => NID_secp384r1,
            AlgorithmID::ECDSA_P521 => NID_secp521r1,
            AlgorithmID::ECDSA_P256K1 => NID_secp256k1,
        }
    }
    /// Size in bytes of the curve's private scalar in fixed-width encoding.
    pub(crate) fn private_key_size(&self) -> usize {
        match self {
            AlgorithmID::ECDSA_P256 | AlgorithmID::ECDSA_P256K1 => 32,
            AlgorithmID::ECDSA_P384 => 48,
            AlgorithmID::ECDSA_P521 => 66,
        }
    }
    // Compressed public key length in bytes
    #[inline]
    #[allow(dead_code)]
    const fn compressed_pub_key_len(&self) -> usize {
        match self {
            AlgorithmID::ECDSA_P256 | AlgorithmID::ECDSA_P256K1 => {
                compressed_public_key_size_bytes(256)
            }
            AlgorithmID::ECDSA_P384 => compressed_public_key_size_bytes(384),
            AlgorithmID::ECDSA_P521 => compressed_public_key_size_bytes(521),
        }
    }
}
/// Elliptic curve public key.
#[derive(Clone)]
pub struct PublicKey {
    #[allow(dead_code)]
    algorithm: &'static EcdsaSigningAlgorithm,
    evp_pkey: LcPtr<EVP_PKEY>,
    // Uncompressed SEC 1 encoding of the point, cached at construction.
    octets: Box<[u8]>,
}
/// Builds a [`PublicKey`] from `evp_pkey`, cloning the key handle and caching
/// the uncompressed SEC 1 encoding of its point.
pub(crate) fn public_key_from_evp_pkey(
    evp_pkey: &LcPtr<EVP_PKEY>,
    algorithm: &'static EcdsaSigningAlgorithm,
) -> Result<PublicKey, Unspecified> {
    // `false` requests the uncompressed point format.
    let pub_key_bytes = marshal_sec1_public_point(evp_pkey, false)?;
    Ok(PublicKey {
        evp_pkey: evp_pkey.clone(),
        algorithm,
        octets: pub_key_bytes.into_boxed_slice(),
    })
}
impl AsDer<PublicKeyX509Der<'static>> for PublicKey {
    /// Provides the public key as a DER-encoded (X.509) `SubjectPublicKeyInfo` structure.
    /// # Errors
    /// Returns an error if the public key fails to marshal to X.509.
    fn as_der(&self) -> Result<PublicKeyX509Der<'static>, Unspecified> {
        let der = self.evp_pkey.as_const().marshal_rfc5280_public_key()?;
        Ok(PublicKeyX509Der::new(der))
    }
}
impl AsBigEndian<EcPublicKeyCompressedBin<'static>> for PublicKey {
    /// Provides the public key elliptic curve point to a compressed point bytes format.
    /// # Errors
    /// Returns an error if the public key fails to marshal.
    fn as_be_bytes(&self) -> Result<EcPublicKeyCompressedBin<'static>, crate::error::Unspecified> {
        // `true` requests the compressed point form.
        let pub_point = marshal_sec1_public_point(&self.evp_pkey, true)?;
        Ok(EcPublicKeyCompressedBin::new(pub_point))
    }
}
impl AsBigEndian<EcPublicKeyUncompressedBin<'static>> for PublicKey {
    /// Provides the public key elliptic curve point to an uncompressed point bytes format.
    /// # Errors
    /// Returns an error if the public key fails to marshal.
    fn as_be_bytes(
        &self,
    ) -> Result<EcPublicKeyUncompressedBin<'static>, crate::error::Unspecified> {
        // `octets` already holds the uncompressed encoding (see
        // `public_key_from_evp_pkey`); `to_vec` replaces the previous manual
        // zero-fill-then-copy of an identically-sized buffer.
        Ok(EcPublicKeyUncompressedBin::new(self.octets.to_vec()))
    }
}
impl Debug for PublicKey {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        // Write directly into the formatter instead of allocating an
        // intermediate String via `format!`.
        write!(
            f,
            "EcdsaPublicKey(\"{}\")",
            crate::hex::encode(self.octets.as_ref())
        )
    }
}
impl AsRef<[u8]> for PublicKey {
    #[inline]
    /// Serializes the public key in an uncompressed form (X9.62) using the
    /// Octet-String-to-Elliptic-Curve-Point algorithm in
    /// [SEC 1: Elliptic Curve Cryptography, Version 2.0].
    fn as_ref(&self) -> &[u8] {
        // Returns the encoding cached in `octets`; no re-serialization occurs.
        self.octets.as_ref()
    }
}
// SAFETY: NOTE(review) — these impls assert the wrapped `EVP_PKEY` handle is
// safe to send/share across threads; confirm AWS-LC refcounting is atomic.
unsafe impl Send for PublicKey {}
unsafe impl Sync for PublicKey {}
impl VerificationAlgorithm for EcdsaVerificationAlgorithm {
    #[inline]
    #[cfg(feature = "ring-sig-verify")]
    // ring-compatible entry point: unwraps the `Input`s and delegates.
    fn verify(
        &self,
        public_key: Input<'_>,
        msg: Input<'_>,
        signature: Input<'_>,
    ) -> Result<(), Unspecified> {
        self.verify_sig(
            public_key.as_slice_less_safe(),
            msg.as_slice_less_safe(),
            signature.as_slice_less_safe(),
        )
    }
    /// Parses `public_key` (X.509 or SEC 1) for this algorithm's curve, then
    /// verifies `signature` over `msg`.
    fn verify_sig(
        &self,
        public_key: &[u8],
        msg: &[u8],
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        let public_key = parse_ec_public_key(public_key, self.id.nid())?;
        self.verify_ecdsa(msg, signature, &public_key)
    }
    /// Verifies `signature` over a caller-computed `digest`.
    fn verify_digest_sig(
        &self,
        public_key: &[u8],
        digest: &digest::Digest,
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        let public_key = parse_ec_public_key(public_key, self.id.nid())?;
        self.verify_digest_ecdsa(digest, signature, &public_key)
    }
}
impl EcdsaVerificationAlgorithm {
    /// Verifies `signature` over `msg`; fixed-format (`r || s`) signatures are
    /// converted to ASN.1 DER before delegating to the ASN.1 verifier.
    fn verify_ecdsa(
        &self,
        msg: &[u8],
        signature: &[u8],
        public_key: &LcPtr<EVP_PKEY>,
    ) -> Result<(), Unspecified> {
        match self.sig_format {
            EcdsaSignatureFormat::ASN1 => {
                verify_asn1_signature(self.digest, public_key, msg, signature)
            }
            EcdsaSignatureFormat::Fixed => {
                let (out_bytes, out_bytes_len) = convert_fixed_signature(self.id, signature)?;
                // `out_bytes` holds exactly `out_bytes_len` valid bytes as
                // produced by `convert_fixed_signature`.
                verify_asn1_signature(self.digest, public_key, msg, unsafe {
                    out_bytes.as_slice(out_bytes_len)
                })
            }
        }
    }
    /// Verifies `signature` over a pre-computed `digest`; rejects a digest
    /// whose algorithm differs from this verification algorithm's.
    fn verify_digest_ecdsa(
        &self,
        digest: &Digest,
        signature: &[u8],
        public_key: &LcPtr<EVP_PKEY>,
    ) -> Result<(), Unspecified> {
        if self.digest != digest.algorithm() {
            return Err(Unspecified);
        }
        match self.sig_format {
            EcdsaSignatureFormat::ASN1 => {
                verify_asn1_digest_signature(digest, public_key, signature)
            }
            EcdsaSignatureFormat::Fixed => {
                let (out_bytes, out_bytes_len) = convert_fixed_signature(self.id, signature)?;
                verify_asn1_digest_signature(digest, public_key, unsafe {
                    out_bytes.as_slice(out_bytes_len)
                })
            }
        }
    }
}
impl ParsedVerificationAlgorithm for EcdsaVerificationAlgorithm {
    // Same behavior as `verify_sig`, but for an already-parsed public key.
    fn parsed_verify_sig(
        &self,
        public_key: &ParsedPublicKey,
        msg: &[u8],
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        self.verify_ecdsa(msg, signature, public_key.key())
    }
    // Same behavior as `verify_digest_sig`, but for an already-parsed key.
    fn parsed_verify_digest_sig(
        &self,
        public_key: &ParsedPublicKey,
        digest: &Digest,
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        self.verify_digest_ecdsa(digest, signature, public_key.key())
    }
}
/// Converts a fixed-width (`r || s`) ECDSA signature into its ASN.1 DER
/// encoding, returning an owned byte buffer plus its length.
fn convert_fixed_signature(
    alg: &'static AlgorithmID,
    signature: &[u8],
) -> Result<(LcPtr<u8>, usize), Unspecified> {
    let mut out_bytes = null_mut::<u8>();
    let mut out_bytes_len = MaybeUninit::<usize>::uninit();
    let sig = unsafe { ecdsa_sig_from_fixed(alg, signature)? };
    // On success (return value 1) ECDSA_SIG_to_bytes allocates `out_bytes`
    // and writes the DER length into `out_bytes_len`.
    if 1 != unsafe {
        ECDSA_SIG_to_bytes(
            &mut out_bytes,
            out_bytes_len.as_mut_ptr(),
            sig.as_const_ptr(),
        )
    } {
        return Err(Unspecified);
    }
    // Wrapping in LcPtr takes ownership of the allocation; the length is only
    // read after the successful call above initialized it.
    Ok((LcPtr::new(out_bytes)?, unsafe {
        out_bytes_len.assume_init()
    }))
}
/// Verifies an ASN.1 (DER) ECDSA `signature` over `msg`, hashing with
/// `digest_alg`.
fn verify_asn1_signature(
    digest_alg: &'static digest::Algorithm,
    public_key: &LcPtr<EVP_PKEY>,
    msg: &[u8],
    signature: &[u8],
) -> Result<(), Unspecified> {
    public_key.verify(msg, Some(digest_alg), No_EVP_PKEY_CTX_consumer, signature)
}
/// Verifies an ASN.1 (DER) ECDSA `signature` over a pre-computed `digest`.
fn verify_asn1_digest_signature(
    digest: &Digest,
    public_key: &LcPtr<EVP_PKEY>,
    signature: &[u8],
) -> Result<(), Unspecified> {
    public_key.verify_digest_sig(digest, No_EVP_PKEY_CTX_consumer, signature)
}
/// Builds an `ECDSA_SIG` from a fixed-width signature: the first half of
/// `signature` is `r`, the second half is `s`, each `private_key_size()`
/// bytes wide.
///
/// # Safety
/// Calls raw AWS-LC functions directly; the caller must uphold the usual FFI
/// invariants for this module.
#[inline]
unsafe fn ecdsa_sig_from_fixed(
    alg_id: &'static AlgorithmID,
    signature: &[u8],
) -> Result<LcPtr<ECDSA_SIG>, ()> {
    let num_size_bytes = alg_id.private_key_size();
    // A fixed signature must be exactly `r || s`.
    if signature.len() != 2 * num_size_bytes {
        return Err(());
    }
    let mut r_bn = DetachableLcPtr::<BIGNUM>::try_from(&signature[..num_size_bytes])?;
    let mut s_bn = DetachableLcPtr::<BIGNUM>::try_from(&signature[num_size_bytes..])?;
    let mut ecdsa_sig = LcPtr::new(ECDSA_SIG_new())?;
    if 1 != ECDSA_SIG_set0(ecdsa_sig.as_mut_ptr(), r_bn.as_mut_ptr(), s_bn.as_mut_ptr()) {
        return Err(());
    }
    // ECDSA_SIG_set0 took ownership of r and s on success; detach so the
    // DetachableLcPtrs do not free them a second time.
    r_bn.detach();
    s_bn.detach();
    Ok(ecdsa_sig)
}

622
vendor/aws-lc-rs/src/ed25519.rs vendored Normal file
View File

@@ -0,0 +1,622 @@
// Copyright 2015-2016 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use core::fmt;
use core::fmt::{Debug, Formatter};
use std::marker::PhantomData;
#[cfg(feature = "ring-sig-verify")]
use untrusted::Input;
use crate::aws_lc::{EVP_PKEY, EVP_PKEY_ED25519};
use crate::buffer::Buffer;
use crate::digest::Digest;
use crate::encoding::{
AsBigEndian, AsDer, Curve25519SeedBin, Pkcs8V1Der, Pkcs8V2Der, PublicKeyX509Der,
};
use crate::error::{KeyRejected, Unspecified};
use crate::evp_pkey::No_EVP_PKEY_CTX_consumer;
use crate::pkcs8::{Document, Version};
use crate::ptr::LcPtr;
use crate::rand::SecureRandom;
use crate::signature::{
KeyPair, ParsedPublicKey, ParsedVerificationAlgorithm, Signature, VerificationAlgorithm,
};
use crate::{constant_time, digest, hex, sealed};
/// The length of an Ed25519 public key.
pub const ED25519_PUBLIC_KEY_LEN: usize = crate::aws_lc::ED25519_PUBLIC_KEY_LEN as usize;
// Signature length in bytes, re-exported from the AWS-LC bindings.
const ED25519_SIGNATURE_LEN: usize = crate::aws_lc::ED25519_SIGNATURE_LEN as usize;
// Length in bytes of the private-key seed.
const ED25519_SEED_LEN: usize = 32;
/// Parameters for `EdDSA` signing and verification.
#[derive(Debug)]
pub struct EdDSAParameters;
impl sealed::Sealed for EdDSAParameters {}
impl ParsedVerificationAlgorithm for EdDSAParameters {
    /// Verifies `signature` over `msg` using an already-parsed Ed25519 key.
    fn parsed_verify_sig(
        &self,
        public_key: &ParsedPublicKey,
        msg: &[u8],
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        // Ed25519 signs the raw message, so no digest algorithm is supplied.
        public_key
            .key()
            .verify(msg, None, No_EVP_PKEY_CTX_consumer, signature)
    }
    /// Pre-hashed verification is not supported for Ed25519; always fails.
    fn parsed_verify_digest_sig(
        &self,
        _public_key: &ParsedPublicKey,
        _digest: &Digest,
        _signature: &[u8],
    ) -> Result<(), Unspecified> {
        Err(Unspecified)
    }
}
impl VerificationAlgorithm for EdDSAParameters {
    #[inline]
    #[cfg(feature = "ring-sig-verify")]
    // ring-compatible entry point: unwraps the `Input`s and delegates.
    fn verify(
        &self,
        public_key: Input<'_>,
        msg: Input<'_>,
        signature: Input<'_>,
    ) -> Result<(), Unspecified> {
        self.verify_sig(
            public_key.as_slice_less_safe(),
            msg.as_slice_less_safe(),
            signature.as_slice_less_safe(),
        )
    }
    /// Verify `signature` for `msg` using `public_key`.
    ///
    /// # Errors
    /// Returns `Unspecified` if the `msg` cannot be verified using `public_key`.
    fn verify_sig(
        &self,
        public_key: &[u8],
        msg: &[u8],
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        let evp_pkey = parse_ed25519_public_key(public_key)?;
        // Ed25519 signs the raw message; no digest algorithm is supplied.
        evp_pkey.verify(msg, None, No_EVP_PKEY_CTX_consumer, signature)
    }
    /// DO NOT USE. This function is required by `VerificationAlgorithm` but cannot be used w/ Ed25519.
    ///
    /// # Errors
    /// Always returns `Unspecified`.
    fn verify_digest_sig(
        &self,
        _public_key: &[u8],
        _digest: &digest::Digest,
        _signature: &[u8],
    ) -> Result<(), Unspecified> {
        Err(Unspecified)
    }
}
/// Parses an Ed25519 public key from either its raw 32-byte encoding or an
/// X.509 `SubjectPublicKeyInfo` (RFC 5280) document, chosen by length.
pub(crate) fn parse_ed25519_public_key(key_bytes: &[u8]) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
    match key_bytes.len() {
        // Exactly the raw public-key length: it has to be a raw key.
        ED25519_PUBLIC_KEY_LEN => {
            LcPtr::<EVP_PKEY>::parse_raw_public_key(key_bytes, EVP_PKEY_ED25519)
        }
        // Any other length: try the (inherently larger) X.509 form.
        _ => LcPtr::<EVP_PKEY>::parse_rfc5280_public_key(key_bytes, EVP_PKEY_ED25519),
    }
}
/// An Ed25519 key pair, for signing.
#[allow(clippy::module_name_repetitions)]
pub struct Ed25519KeyPair {
    evp_pkey: LcPtr<EVP_PKEY>,
    // Cached public half, derived from `evp_pkey` at construction.
    public_key: PublicKey,
}
impl Debug for Ed25519KeyPair {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        // Only the public key is printed; private material never appears here.
        // `write!` writes straight into the formatter instead of allocating
        // an intermediate String via `format!`.
        write!(
            f,
            "Ed25519KeyPair {{ public_key: PublicKey(\"{}\") }}",
            hex::encode(&self.public_key)
        )
    }
}
#[derive(Clone)]
#[allow(clippy::module_name_repetitions)]
/// The seed value for the `EdDSA` signature scheme using Curve25519
pub struct Seed<'a> {
    // Owned copy of the seed bytes.
    bytes: Box<[u8]>,
    // Ties the seed to a source lifetime without borrowing any data.
    phantom: PhantomData<&'a [u8]>,
}
impl AsBigEndian<Curve25519SeedBin<'static>> for Seed<'_> {
    /// Exposes the seed encoded as a big-endian fixed-length integer.
    ///
    /// For most use-cases, `Ed25519KeyPair::to_pkcs8()` should be preferred.
    ///
    /// # Errors
    /// `error::Unspecified` if serialization failed.
    fn as_be_bytes(&self) -> Result<Curve25519SeedBin<'static>, Unspecified> {
        Ok(Curve25519SeedBin::new(self.bytes.to_vec()))
    }
}
impl Debug for Seed<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Deliberately redacts the seed bytes from debug output.
        f.write_str("Ed25519Seed()")
    }
}
#[derive(Clone)]
#[allow(clippy::module_name_repetitions)]
/// Ed25519 Public Key
pub struct PublicKey {
    evp_pkey: LcPtr<EVP_PKEY>,
    // Raw public-key bytes, cached at construction time.
    public_key_bytes: [u8; ED25519_PUBLIC_KEY_LEN],
}
impl AsRef<[u8]> for PublicKey {
    #[inline]
    /// Returns the "raw" bytes of the ED25519 public key
    fn as_ref(&self) -> &[u8] {
        &self.public_key_bytes
    }
}
impl Debug for PublicKey {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Write directly into the formatter rather than allocating an
        // intermediate String with `format!`.
        write!(f, "PublicKey(\"{}\")", hex::encode(self.public_key_bytes))
    }
}
// SAFETY: NOTE(review) — these impls assert that sharing/sending the wrapped
// `EVP_PKEY` across threads is sound; confirm AWS-LC's key refcounting is
// atomic before relying on this.
unsafe impl Send for PublicKey {}
unsafe impl Sync for PublicKey {}
impl AsDer<PublicKeyX509Der<'static>> for PublicKey {
    /// Provides the public key as a DER-encoded (X.509) `SubjectPublicKeyInfo` structure.
    /// # Errors
    /// Returns an error if the public key fails to marshal to X.509.
    fn as_der(&self) -> Result<PublicKeyX509Der<'static>, crate::error::Unspecified> {
        // Initial size of 44 based on:
        // 0:d=0 hl=2 l= 42 cons: SEQUENCE
        // 2:d=1 hl=2 l= 5 cons: SEQUENCE
        // 4:d=2 hl=2 l= 3 prim: OBJECT :ED25519
        // 9:d=1 hl=2 l= 33 prim: BIT STRING
        // NOTE(review): the sizing comment above looks stale — sizing and
        // marshalling are delegated entirely to `marshal_rfc5280_public_key`.
        let der = self.evp_pkey.as_const().marshal_rfc5280_public_key()?;
        Ok(PublicKeyX509Der::from(Buffer::new(der)))
    }
}
impl KeyPair for Ed25519KeyPair {
    type PublicKey = PublicKey;
    #[inline]
    /// Provides the cached public key.
    fn public_key(&self) -> &Self::PublicKey {
        &self.public_key
    }
}
// SAFETY: NOTE(review) — asserts cross-thread use of the wrapped `EVP_PKEY`
// is sound; confirm AWS-LC refcounting is atomic.
unsafe impl Send for Ed25519KeyPair {}
unsafe impl Sync for Ed25519KeyPair {}
/// Generates a fresh Ed25519 `EVP_PKEY`.
pub(crate) fn generate_key() -> Result<LcPtr<EVP_PKEY>, Unspecified> {
    LcPtr::<EVP_PKEY>::generate(EVP_PKEY_ED25519, No_EVP_PKEY_CTX_consumer)
}
impl Ed25519KeyPair {
/// Generates a new key pair and returns the key pair.
///
/// # Errors
/// `error::Unspecified` if key generation fails.
pub fn generate() -> Result<Self, Unspecified> {
let evp_pkey = generate_key()?;
let mut public_key = [0u8; ED25519_PUBLIC_KEY_LEN];
let out_len: usize = evp_pkey
.as_const()
.marshal_raw_public_to_buffer(&mut public_key)?;
debug_assert_eq!(public_key.len(), out_len);
Ok(Self {
public_key: PublicKey {
public_key_bytes: public_key,
evp_pkey: evp_pkey.clone(),
},
evp_pkey,
})
}
/// Generates a new key pair and returns the key pair serialized as a
/// PKCS#8 document.
///
/// The PKCS#8 document will be a v2 `OneAsymmetricKey` with the public key,
/// as described in [RFC 5958 Section 2]; see [RFC 8410 Section 10.3] for an
/// example.
///
/// [RFC 5958 Section 2]: https://tools.ietf.org/html/rfc5958#section-2
/// [RFC 8410 Section 10.3]: https://tools.ietf.org/html/rfc8410#section-10.3
///
/// # *ring* Compatibility
/// The ring 0.16.x API did not produce encoded v2 documents that were compliant with RFC 5958.
/// The aws-lc-ring implementation produces PKCS#8 v2 encoded documents that are compliant per
/// the RFC specification.
///
/// Our implementation ignores the `SecureRandom` parameter.
///
// # FIPS
// This function must not be used.
//
/// # Errors
/// `error::Unspecified` if `rng` cannot provide enough bits or if there's an internal error.
pub fn generate_pkcs8(_rng: &dyn SecureRandom) -> Result<Document, Unspecified> {
let evp_pkey = generate_key()?;
Ok(Document::new(
evp_pkey
.as_const()
.marshal_rfc5208_private_key(Version::V2)?,
))
}
/// Serializes this `Ed25519KeyPair` into a PKCS#8 v2 document.
///
/// # Errors
/// `error::Unspecified` on internal error.
///
pub fn to_pkcs8(&self) -> Result<Document, Unspecified> {
Ok(Document::new(
self.evp_pkey
.as_const()
.marshal_rfc5208_private_key(Version::V2)?,
))
}
/// Generates a `Ed25519KeyPair` using the `rng` provided, then serializes that key as a
/// PKCS#8 document.
///
/// The PKCS#8 document will be a v1 `PrivateKeyInfo` structure (RFC5208). Use this method
/// when needing to produce documents that are compatible with the OpenSSL CLI.
///
/// # *ring* Compatibility
/// Our implementation ignores the `SecureRandom` parameter.
///
// # FIPS
// This function must not be used.
//
/// # Errors
/// `error::Unspecified` if `rng` cannot provide enough bits or if there's an internal error.
pub fn generate_pkcs8v1(_rng: &dyn SecureRandom) -> Result<Document, Unspecified> {
let evp_pkey = generate_key()?;
Ok(Document::new(
evp_pkey
.as_const()
.marshal_rfc5208_private_key(Version::V1)?,
))
}
/// Serializes this `Ed25519KeyPair` into a PKCS#8 v1 document.
///
/// # Errors
/// `error::Unspecified` on internal error.
///
pub fn to_pkcs8v1(&self) -> Result<Document, Unspecified> {
Ok(Document::new(
self.evp_pkey
.as_const()
.marshal_rfc5208_private_key(Version::V1)?,
))
}
/// Constructs an Ed25519 key pair from the private key seed `seed` and its
/// public key `public_key`.
///
/// It is recommended to use `Ed25519KeyPair::from_pkcs8()` instead.
///
/// The private and public keys will be verified to be consistent with each
/// other. This helps avoid misuse of the key (e.g. accidentally swapping
/// the private key and public key, or using the wrong private key for the
/// public key). This also detects any corruption of the public or private
/// key.
///
/// # Errors
/// `error::KeyRejected` if parse error, or if key is otherwise unacceptable.
pub fn from_seed_and_public_key(seed: &[u8], public_key: &[u8]) -> Result<Self, KeyRejected> {
let this = Self::from_seed_unchecked(seed)?;
constant_time::verify_slices_are_equal(public_key, &this.public_key.public_key_bytes)
.map_err(|_| KeyRejected::inconsistent_components())?;
Ok(this)
}
/// Constructs an Ed25519 key pair from the private key seed `seed`.
///
/// It is recommended to use `Ed25519KeyPair::from_pkcs8()` instead. If the public key is
/// available, prefer to use `Ed25519KeyPair::from_seed_and_public_key()` as it will verify
/// the validity of the key pair.
///
/// CAUTION: Both an Ed25519 seed and its public key are 32-bytes. If the bytes of a public key
/// are provided this function will create an (effectively) invalid `Ed25519KeyPair`. This
/// problem is undetectable by the API.
///
/// # Errors
/// `error::KeyRejected` if parse error, or if key is otherwise unacceptable.
pub fn from_seed_unchecked(seed: &[u8]) -> Result<Self, KeyRejected> {
    if seed.len() < ED25519_SEED_LEN {
        return Err(KeyRejected::inconsistent_components());
    }
    let evp_pkey = LcPtr::<EVP_PKEY>::parse_raw_private_key(seed, EVP_PKEY_ED25519)?;
    // Derive the public key from the parsed private key.
    let mut public_key_bytes = [0u8; ED25519_PUBLIC_KEY_LEN];
    let written = evp_pkey
        .as_const()
        .marshal_raw_public_to_buffer(&mut public_key_bytes)?;
    debug_assert_eq!(written, public_key_bytes.len());
    let public_key = PublicKey {
        public_key_bytes,
        evp_pkey: evp_pkey.clone(),
    };
    Ok(Self {
        public_key,
        evp_pkey,
    })
}
/// Constructs an Ed25519 key pair by parsing an unencrypted PKCS#8 v1 or v2
/// Ed25519 private key.
///
/// `openssl genpkey -algorithm ED25519` generates PKCS#8 v1 keys.
///
/// # Ring Compatibility
/// * This method accepts either v1 or v2 encoded keys, if a v2 encoded key is provided, with the
///   public key component present, it will be verified to match the one derived from the
///   encoded private key.
/// * The ring 0.16.x API did not produce encoded v2 documents that were compliant with RFC 5958.
///   The aws-lc-ring implementation produces PKCS#8 v2 encoded documents that are compliant per
///   the RFC specification.
///
/// # Errors
/// `error::KeyRejected` on parse error, or if key is otherwise unacceptable.
pub fn from_pkcs8(pkcs8: &[u8]) -> Result<Self, KeyRejected> {
    // Delegates to the shared parser; identical to `from_pkcs8_maybe_unchecked`.
    Self::parse_pkcs8(pkcs8)
}
/// Constructs an Ed25519 key pair by parsing an unencrypted PKCS#8 v1 or v2
/// Ed25519 private key.
///
/// `openssl genpkey -algorithm ED25519` generates PKCS#8 v1 keys.
///
/// # Ring Compatibility
/// * This method accepts either v1 or v2 encoded keys, if a v2 encoded key is provided, with the
///   public key component present, it will be verified to match the one derived from the
///   encoded private key.
/// * The ring 0.16.x API did not produce encoded v2 documents that were compliant with RFC 5958.
///   The aws-lc-ring implementation produces PKCS#8 v2 encoded documents that are compliant per
///   the RFC specification.
///
/// # Errors
/// `error::KeyRejected` on parse error, or if key is otherwise unacceptable.
pub fn from_pkcs8_maybe_unchecked(pkcs8: &[u8]) -> Result<Self, KeyRejected> {
    // Identical to `from_pkcs8`: the shared parser always verifies any embedded
    // public key against the one derived from the private key.
    Self::parse_pkcs8(pkcs8)
}
/// Shared PKCS#8 (v1 or v2) parser: validates the key type and size, then
/// derives the public key from the parsed private key.
fn parse_pkcs8(pkcs8: &[u8]) -> Result<Self, KeyRejected> {
    let evp_pkey = LcPtr::<EVP_PKEY>::parse_rfc5208_private_key(pkcs8, EVP_PKEY_ED25519)?;
    evp_pkey.as_const().validate_as_ed25519()?;
    // Derive the public key bytes from the private key.
    let mut public_key_bytes = [0u8; ED25519_PUBLIC_KEY_LEN];
    let written = evp_pkey
        .as_const()
        .marshal_raw_public_to_buffer(&mut public_key_bytes)?;
    debug_assert_eq!(written, public_key_bytes.len());
    Ok(Self {
        public_key: PublicKey {
            public_key_bytes,
            evp_pkey: evp_pkey.clone(),
        },
        evp_pkey,
    })
}
/// Returns the signature of the message msg.
///
// # FIPS
// This method must not be used.
//
/// # Panics
/// Panics if the message is unable to be signed
#[inline]
#[must_use]
pub fn sign(&self, msg: &[u8]) -> Signature {
Self::try_sign(self, msg).expect("ED25519 signing failed")
}
/// Returns the signature of the message `msg`.
///
// # FIPS
// This method must not be used.
//
/// # Errors
/// Returns `error::Unspecified` if the signing operation fails.
#[inline]
pub fn try_sign(&self, msg: &[u8]) -> Result<Signature, Unspecified> {
    // Ed25519 signs the message directly (no pre-hash, no ctx configuration).
    let sig_bytes = self.evp_pkey.sign(msg, None, No_EVP_PKEY_CTX_consumer)?;
    Ok(Signature::new(|out| {
        out[..ED25519_SIGNATURE_LEN].copy_from_slice(&sig_bytes);
        ED25519_SIGNATURE_LEN
    }))
}
/// Provides the private key "seed" for this `Ed25519` key pair.
///
/// For serialization of the key pair, `Ed25519KeyPair::to_pkcs8()` is preferred.
///
/// # Errors
/// Currently the function cannot fail, but it might in future implementations.
pub fn seed(&self) -> Result<Seed<'static>, Unspecified> {
    // The raw private key of an Ed25519 EVP_PKEY is exactly the 32-byte seed.
    let bytes = self
        .evp_pkey
        .as_const()
        .marshal_raw_private_key()?
        .into_boxed_slice();
    Ok(Seed {
        bytes,
        phantom: PhantomData,
    })
}
}
impl AsDer<Pkcs8V1Der<'static>> for Ed25519KeyPair {
    /// Serializes this `Ed25519KeyPair` into a PKCS#8 v1 document.
    ///
    /// # Errors
    /// `error::Unspecified` on internal error.
    fn as_der(&self) -> Result<Pkcs8V1Der<'static>, crate::error::Unspecified> {
        // Same serialization as `to_pkcs8v1`, wrapped in the typed buffer.
        let der = self
            .evp_pkey
            .as_const()
            .marshal_rfc5208_private_key(Version::V1)?;
        Ok(Pkcs8V1Der::new(der))
    }
}
impl AsDer<Pkcs8V2Der<'static>> for Ed25519KeyPair {
    /// Serializes this `Ed25519KeyPair` into a PKCS#8 v2 document (RFC 5958).
    ///
    /// # Errors
    /// `error::Unspecified` on internal error.
    fn as_der(&self) -> Result<Pkcs8V2Der<'static>, crate::error::Unspecified> {
        Ok(Pkcs8V2Der::new(
            self.evp_pkey
                .as_const()
                .marshal_rfc5208_private_key(Version::V2)?,
        ))
    }
}
#[cfg(test)]
mod tests {
    use crate::ed25519::Ed25519KeyPair;
    use crate::encoding::{AsBigEndian, AsDer, Pkcs8V1Der, Pkcs8V2Der, PublicKeyX509Der};
    use crate::rand::SystemRandom;
    use crate::signature::{KeyPair, UnparsedPublicKey, ED25519};
    use crate::{hex, test};

    // Round-trip: a freshly generated key must produce a signature that its own
    // public key verifies.
    #[test]
    fn test_generate() {
        const MESSAGE: &[u8] = b"test message";
        let key_pair = Ed25519KeyPair::generate().unwrap();
        let public_key = key_pair.public_key();
        let signature = key_pair.sign(MESSAGE);
        let unparsed_public_key = UnparsedPublicKey::new(&ED25519, public_key.as_ref());
        unparsed_public_key
            .verify(MESSAGE, signature.as_ref())
            .unwrap();
    }

    // PKCS#8 round-trips: `generate_pkcs8` emits v2 documents, `generate_pkcs8v1`
    // emits v1 documents. Both must re-parse (checked and unchecked variants) to
    // the same seed and public key, and re-serialize byte-identically.
    #[test]
    fn test_generate_pkcs8() {
        let rng = SystemRandom::new();
        let document = Ed25519KeyPair::generate_pkcs8(&rng).unwrap();
        let kp1: Ed25519KeyPair = Ed25519KeyPair::from_pkcs8(document.as_ref()).unwrap();
        assert_eq!(
            document.as_ref(),
            AsDer::<Pkcs8V2Der>::as_der(&kp1).unwrap().as_ref()
        );
        let kp2: Ed25519KeyPair =
            Ed25519KeyPair::from_pkcs8_maybe_unchecked(document.as_ref()).unwrap();
        assert_eq!(
            kp1.seed().unwrap().as_be_bytes().unwrap().as_ref(),
            kp2.seed().unwrap().as_be_bytes().unwrap().as_ref(),
        );
        assert_eq!(kp1.public_key.as_ref(), kp2.public_key.as_ref());
        let document = Ed25519KeyPair::generate_pkcs8v1(&rng).unwrap();
        let kp1: Ed25519KeyPair = Ed25519KeyPair::from_pkcs8(document.as_ref()).unwrap();
        assert_eq!(
            document.as_ref(),
            AsDer::<Pkcs8V1Der>::as_der(&kp1).unwrap().as_ref()
        );
        let kp2: Ed25519KeyPair =
            Ed25519KeyPair::from_pkcs8_maybe_unchecked(document.as_ref()).unwrap();
        assert_eq!(
            kp1.seed().unwrap().as_be_bytes().unwrap().as_ref(),
            kp2.seed().unwrap().as_be_bytes().unwrap().as_ref(),
        );
        assert_eq!(kp1.public_key.as_ref(), kp2.public_key.as_ref());
        let seed = kp1.seed().unwrap();
        // Seed bytes are secret: the Debug output must not reveal them.
        assert_eq!("Ed25519Seed()", format!("{seed:?}"));
    }

    // Known-answer parsing: v1 and v2 PKCS#8 documents (with and without
    // attributes / an embedded public key) must yield the expected public key.
    #[test]
    fn test_from_pkcs8() {
        struct TestCase {
            key: &'static str,
            expected_public: &'static str,
        }
        for case in [
            TestCase {
                key: "302e020100300506032b6570042204209d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60",
                expected_public: "d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a",
            },
            TestCase {
                key: "3051020101300506032b657004220420756434bd5b824753007a138d27abbc14b5cc786adb78fb62435e6419a2b2e72b8121000faccd81e57de15fa6343a7fbb43b2b93f28be6435100ae8bd633c6dfee3d198",
                expected_public: "0faccd81e57de15fa6343a7fbb43b2b93f28be6435100ae8bd633c6dfee3d198",
            },
            TestCase {
                key: "304f020100300506032b657004220420d4ee72dbf913584ad5b6d8f1f769f8ad3afe7c28cbf1d4fbe097a88f44755842a01f301d060a2a864886f70d01090914310f0c0d437572646c6520436861697273",
                expected_public: "19bf44096984cdfe8541bac167dc3b96c85086aa30b6b6cb0c5c38ad703166e1",
            },
            TestCase {
                key: "3072020101300506032b657004220420d4ee72dbf913584ad5b6d8f1f769f8ad3afe7c28cbf1d4fbe097a88f44755842a01f301d060a2a864886f70d01090914310f0c0d437572646c652043686169727381210019bf44096984cdfe8541bac167dc3b96c85086aa30b6b6cb0c5c38ad703166e1",
                expected_public: "19bf44096984cdfe8541bac167dc3b96c85086aa30b6b6cb0c5c38ad703166e1",
            }
        ] {
            let key_pair = Ed25519KeyPair::from_pkcs8(&test::from_dirty_hex(case.key)).unwrap();
            assert_eq!(
                format!(
                    r#"Ed25519KeyPair {{ public_key: PublicKey("{}") }}"#,
                    case.expected_public
                ),
                format!("{key_pair:?}")
            );
            let key_pair = Ed25519KeyPair::from_pkcs8_maybe_unchecked(&test::from_dirty_hex(case.key)).unwrap();
            assert_eq!(
                format!(
                    r#"Ed25519KeyPair {{ public_key: PublicKey("{}") }}"#,
                    case.expected_public
                ),
                format!("{key_pair:?}")
            );
        }
    }

    // Known-answer serialization of a public key as an X.509 SubjectPublicKeyInfo.
    #[test]
    fn test_public_key_as_der_x509() {
        let key_pair = Ed25519KeyPair::from_pkcs8(&hex::decode("302e020100300506032b6570042204209d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60").unwrap()).unwrap();
        let public_key = key_pair.public_key();
        let x509der = AsDer::<PublicKeyX509Der>::as_der(public_key).unwrap();
        assert_eq!(
            x509der.as_ref(),
            &[
                0x30, 0x2a, 0x30, 0x05, 0x06, 0x03, 0x2b, 0x65, 0x70, 0x03, 0x21, 0x00, 0xd7, 0x5a,
                0x98, 0x01, 0x82, 0xb1, 0x0a, 0xb7, 0xd5, 0x4b, 0xfe, 0xd3, 0xc9, 0x64, 0x07, 0x3a,
                0x0e, 0xe1, 0x72, 0xf3, 0xda, 0xa6, 0x23, 0x25, 0xaf, 0x02, 0x1a, 0x68, 0xf7, 0x07,
                0x51, 0x1a
            ]
        );
    }
}

95
vendor/aws-lc-rs/src/encoding.rs vendored Normal file
View File

@@ -0,0 +1,95 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Serialization formats
use crate::buffer::Buffer;
/// Generates one opaque, type-tagged buffer newtype per `(name, marker_type)`
/// pair. Each generated `$name<'a>` wraps a `Buffer` parameterized on a private
/// marker type so that differently-encoded byte buffers cannot be mixed up.
macro_rules! generated_encodings {
    ($(($name:ident, $name_type:ident)),*) => {
        use core::fmt::{Debug, Error, Formatter};
        use core::ops::Deref;
        // Private module: the marker types are uninstantiable outside this file.
        mod buffer_type {
            $(
                pub struct $name_type {
                    _priv: (),
                }
            )*
        }
        $(
            /// Serialized bytes
            pub struct $name<'a>(Buffer<'a, buffer_type::$name_type>);
            impl<'a> Deref for $name<'a> {
                type Target = Buffer<'a, buffer_type::$name_type>;
                fn deref(&self) -> &Self::Target {
                    &self.0
                }
            }
            impl $name<'static> {
                #[allow(dead_code)]
                pub(crate) fn new(owned: Vec<u8>) -> Self {
                    Self(Buffer::new(owned))
                }
                #[allow(dead_code)]
                pub(crate) fn take_from_slice(owned: &mut [u8]) -> Self {
                    Self(Buffer::take_from_slice(owned))
                }
            }
            // Debug intentionally prints only the type name, never the bytes,
            // since some of these buffers hold private key material.
            impl Debug for $name<'_> {
                fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
                    f.debug_struct(stringify!($name)).finish()
                }
            }
            impl<'a> From<Buffer<'a, buffer_type::$name_type>> for $name<'a> {
                fn from(value: Buffer<'a, buffer_type::$name_type>) -> Self {
                    Self(value)
                }
            }
        )*
    }
}
pub(crate) use generated_encodings;
// Instantiate one typed buffer per serialization format used across the crate
// (DER documents, raw binary keys, seeds, etc.).
generated_encodings!(
    (Curve25519SeedBin, Curve25519SeedBinType),
    (EcPrivateKeyBin, EcPrivateKeyBinType),
    (EcPrivateKeyRfc5915Der, EcPrivateKeyRfc5915DerType),
    (EcPublicKeyCompressedBin, EcPublicKeyCompressedBinType),
    (EcPublicKeyUncompressedBin, EcPublicKeyUncompressedBinType),
    (Pkcs8V1Der, Pkcs8V1DerType),
    (Pkcs8V2Der, Pkcs8V2DerType),
    (PqdsaPrivateKeyRaw, PqdsaPrivateKeyRawType),
    (PqdsaSeedRaw, PqdsaSeedRawType),
    (PublicKeyX509Der, PublicKeyX509DerType)
);
/// Trait for types that can be serialized into a DER format.
pub trait AsDer<T> {
    /// Serializes into a DER format.
    ///
    /// # Errors
    /// Returns `Unspecified` if serialization fails.
    fn as_der(&self) -> Result<T, crate::error::Unspecified>;
}
/// Trait for values that can be serialized into a big-endian format
pub trait AsBigEndian<T> {
    /// Serializes into a big-endian format.
    ///
    /// # Errors
    /// Returns `Unspecified` if serialization fails.
    fn as_be_bytes(&self) -> Result<T, crate::error::Unspecified>;
}
/// Trait for values that can be serialized into a raw format
pub trait AsRawBytes<T> {
    /// Serializes into a raw format.
    ///
    /// # Errors
    /// Returns `Unspecified` if serialization fails.
    fn as_raw_bytes(&self) -> Result<T, crate::error::Unspecified>;
}

133
vendor/aws-lc-rs/src/endian.rs vendored Normal file
View File

@@ -0,0 +1,133 @@
// Copyright 2015-2021 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
/// An `Encoding` of a type `T` can be converted to/from its byte
/// representation without any byte swapping or other computation.
///
/// The `Self: Copy` constraint addresses `clippy::declare_interior_mutable_const`.
pub trait Encoding<T>: From<T> + Into<T>
where
    Self: Copy,
{
    /// The all-zero value of this encoding, usable as an array initializer.
    const ZERO: Self;
}
use core::mem::size_of_val;
/// Views a slice of `Encoding` values as its raw bytes, without copying.
pub fn as_byte_slice<E: Encoding<T>, T>(x: &[E]) -> &[u8] {
    // SAFETY: `x` is a valid, initialized slice and `size_of_val(x)` is exactly
    // its length in bytes; `u8` has alignment 1, so reinterpreting the same
    // memory as `&[u8]` for the duration of the borrow is sound. This assumes
    // `E` has no padding bytes — true for the `repr(transparent)` integer
    // wrappers defined below, which are the only implementors in this crate.
    unsafe { core::slice::from_raw_parts(x.as_ptr().cast::<u8>(), size_of_val(x)) }
}
/// Work around the inability to implement `AsRef` for arrays of `Encoding`s
/// due to the coherence rules.
pub trait ArrayEncoding<T> {
    /// Returns a reference to this array's contents as a fixed-size byte array.
    fn as_byte_array(&self) -> &T;
}
/// Work around the inability to implement `from` for arrays of `Encoding`s
/// due to the coherence rules.
pub trait FromArray<const N: usize, T>
where
    Self: Sized,
{
    /// Converts an array of base values into an array of their encodings.
    fn from_array(a: &[T; N]) -> [Self; N];
}
/// Declares a transparent single-field wrapper type (e.g. `BigEndian<T>`)
/// holding a value already converted to the named byte order.
macro_rules! define_endian {
    ($endian:ident) => {
        #[derive(Copy, Clone)]
        #[repr(transparent)]
        pub struct $endian<T>(T);
    };
}
/// Implements `ArrayEncoding` for a fixed-length array of endian wrappers,
/// exposing it as the equivalently-sized byte array.
macro_rules! impl_array_encoding {
    // This may be converted to use const generics once generic_const_exprs is stable.
    // https://github.com/rust-lang/rust/issues/76560
    ($endian:ident, $base:ident, $elems:expr) => {
        impl ArrayEncoding<[u8; $elems * core::mem::size_of::<$base>()]>
            for [$endian<$base>; $elems]
        {
            fn as_byte_array(&self) -> &[u8; $elems * core::mem::size_of::<$base>()] {
                // Infallible: `as_byte_slice` yields exactly the array's size in bytes.
                as_byte_slice(self).try_into().unwrap()
            }
        }
    };
}
/// Implements `Encoding`, the `From` conversions, `FromArray`, and the
/// fixed-length `ArrayEncoding`s for one endian wrapper over one integer type.
///
/// NOTE: the trailing `$size` argument is not used by the expansion; it only
/// documents the byte width at the call sites below.
macro_rules! impl_endian {
    ($endian:ident, $base:ident, $to_endian:ident, $from_endian:ident, $size:expr) => {
        impl Encoding<$base> for $endian<$base> {
            const ZERO: Self = Self(0);
        }
        impl From<$base> for $endian<$base> {
            #[inline]
            fn from(value: $base) -> Self {
                // Convert from native byte order into the target byte order.
                Self($base::$to_endian(value))
            }
        }
        impl From<$endian<$base>> for $base {
            #[inline]
            fn from($endian(value): $endian<$base>) -> Self {
                // Convert from the target byte order back to native byte order.
                $base::$from_endian(value)
            }
        }
        impl<const N: usize> FromArray<N, $base> for $endian<$base> {
            fn from_array(value: &[$base; N]) -> [Self; N] {
                let mut result: [$endian<$base>; N] = [$endian::ZERO; N];
                for i in 0..N {
                    result[i] = $endian::from(value[i]);
                }
                return result;
            }
        }
        impl_array_encoding!($endian, $base, 1);
        impl_array_encoding!($endian, $base, 2);
        impl_array_encoding!($endian, $base, 3);
        impl_array_encoding!($endian, $base, 4);
        impl_array_encoding!($endian, $base, 8);
    };
}
define_endian!(BigEndian);
define_endian!(LittleEndian);
// Concrete encodings for the unsigned integer widths used in this crate.
impl_endian!(BigEndian, u32, to_be, from_be, 4);
impl_endian!(BigEndian, u64, to_be, from_be, 8);
impl_endian!(LittleEndian, u32, to_le, from_le, 4);
impl_endian!(LittleEndian, u64, to_le, from_le, 8);
#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip and Copy semantics of a BigEndian wrapper.
    #[test]
    fn test_big_endian() {
        let x = BigEndian::from(1u32);
        let x2 = x;
        assert_eq!(u32::from(x), 1);
        assert_eq!(u32::from(x2), 1);
    }

    // Byte-swapped inputs through opposite endian wrappers must produce the
    // same underlying byte representation.
    #[test]
    fn test_endian_from_array() {
        let be: [BigEndian<u32>; 2] =
            BigEndian::<u32>::from_array(&[0x_AABB_CCDD_u32, 0x_2233_4455_u32]);
        let le: [LittleEndian<u32>; 2] =
            LittleEndian::<u32>::from_array(&[0x_DDCC_BBAA_u32, 0x_5544_3322_u32]);
        assert_eq!(be.as_byte_array(), le.as_byte_array());
        let be: [BigEndian<u64>; 2] =
            BigEndian::<u64>::from_array(&[0x_AABB_CCDD_EEFF_0011_u64, 0x_2233_4455_6677_8899_u64]);
        let le: [LittleEndian<u64>; 2] = LittleEndian::<u64>::from_array(&[
            0x_1100_FFEE_DDCC_BBAA_u64,
            0x_9988_7766_5544_3322_u64,
        ]);
        assert_eq!(be.as_byte_array(), le.as_byte_array());
    }
}

270
vendor/aws-lc-rs/src/error.rs vendored Normal file
View File

@@ -0,0 +1,270 @@
// Copyright 2015-2021 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Error reporting.
extern crate std;
use core::num::TryFromIntError;
// The Error trait is not in core: https://github.com/rust-lang/rust/issues/103765
use std::error::Error;
/// An error with absolutely no details.
///
/// *aws-lc-rs* uses this unit type as the error type in most of its results
/// because (a) usually the specific reasons for a failure are obvious or are
/// not useful to know, and/or (b) providing more details about a failure might
/// provide a dangerous side channel, and/or (c) it greatly simplifies the
/// error handling logic.
///
/// `Result<T, aws_lc_rs::error::Unspecified>` is mostly equivalent to
/// `Result<T, ()>`. However, `aws_lc_rs::error::Unspecified` implements
/// [`std::error::Error`] and users can implement
/// `From<error::Unspecified>` to map this to their own error types, as
/// described in [“Error Handling” in the Rust Book](https://doc.rust-lang.org/book/ch09-00-error-handling.html):
///
/// ```
/// use aws_lc_rs::rand::{self, SecureRandom};
///
/// enum Error {
///     CryptoError,
///
///     IOError(std::io::Error),
///     // [...]
/// }
///
/// impl From<aws_lc_rs::error::Unspecified> for Error {
///     fn from(_: aws_lc_rs::error::Unspecified) -> Self {
///         Error::CryptoError
///     }
/// }
///
/// fn eight_random_bytes() -> Result<[u8; 8], Error> {
///     let rng = rand::SystemRandom::new();
///     let mut bytes = [0; 8];
///
///     // The `From<aws_lc_rs::error::Unspecified>` implementation above makes this
///     // equivalent to
///     // `rng.fill(&mut bytes).map_err(|_| Error::CryptoError)?`.
///     rng.fill(&mut bytes)?;
///
///     Ok(bytes)
/// }
///
/// assert!(eight_random_bytes().is_ok());
/// ```
///
/// Experience with using and implementing other crypto libraries has
/// shown that sophisticated error reporting facilities often cause significant
/// bugs themselves, both within the crypto library and within users of the
/// crypto library. This approach attempts to minimize complexity in the hopes
/// of avoiding such problems. In some cases, this approach may be too extreme,
/// and it may be important for an operation to provide some details about the
/// cause of a failure. Users of *aws-lc-rs* are encouraged to report such cases so
/// that they can be addressed individually.
///
/// [`std::error::Error`]: https://doc.rust-lang.org/std/error/trait.Error.html
/// [“Error Handling” in the Rust Book]:
///     https://doc.rust-lang.org/book/first-edition/error-handling.html#the-from-trait
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Unspecified;
// This is required for the implementation of `std::error::Error`.
impl core::fmt::Display for Unspecified {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        // Same fixed text as the Debug representation.
        write!(f, "Unspecified")
    }
}
// Allows `?` on slice-to-array conversions inside fallible crypto routines.
impl From<core::array::TryFromSliceError> for Unspecified {
    fn from(_: core::array::TryFromSliceError) -> Self {
        Self
    }
}
/// An error parsing or validating a key.
///
/// The `Display` implementation and `<KeyRejected as Error>::description()`
/// will return a string that will help you better understand why a key was
/// rejected. The exact strings are not stable: which errors are reported in
/// which situations may change, while minimizing the likelihood that any
/// applications will be broken.
///
/// Here is an incomplete list of reasons a key may be unsupported:
///
/// * Invalid or Inconsistent Components: A component of the key has an invalid
///   value, or the mathematical relationship between two (or more) components
///   required for a valid key does not hold.
///
/// * The encoding of the key is invalid. Perhaps the key isn't in the correct
///   format; e.g. it may be Base64 ("PEM") encoded, in which case the Base64
///   encoding needs to be undone first.
///
/// * The encoding includes a versioning mechanism and that mechanism indicates
///   that the key is encoded in a version of the encoding that isn't supported.
///   This might happen for multi-prime RSA keys (keys with more than two
///   private prime factors), which aren't supported, for example.
///
/// * Too small or too Large: One of the primary components of the key is too
///   small or too large. Too-small keys are rejected for security reasons. Some
///   unnecessarily large keys are rejected for performance reasons.
///
/// * Wrong algorithm: The key is not valid for the algorithm in which it was
///   being used.
///
/// * Unexpected errors: Report this as a bug.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct KeyRejected(&'static str);
impl KeyRejected {
    /// The value returned from `<Self as std::error::Error>::description()`
    #[must_use]
    pub fn description_(&self) -> &'static str {
        self.0
    }
    // The constructors below each produce a `KeyRejected` carrying a fixed
    // reason string; see the type-level docs for what each reason means.
    pub(crate) fn inconsistent_components() -> Self {
        KeyRejected("InconsistentComponents")
    }
    #[inline]
    pub(crate) fn invalid_encoding() -> Self {
        KeyRejected("InvalidEncoding")
    }
    pub(crate) fn too_small() -> Self {
        KeyRejected("TooSmall")
    }
    pub(crate) fn too_large() -> Self {
        KeyRejected("TooLarge")
    }
    pub(crate) fn wrong_algorithm() -> Self {
        KeyRejected("WrongAlgorithm")
    }
    pub(crate) fn unexpected_error() -> Self {
        KeyRejected("UnexpectedError")
    }
    pub(crate) fn unspecified() -> Self {
        KeyRejected("Unspecified")
    }
}
impl Error for KeyRejected {
    // Deprecated in std, but kept for ring API compatibility.
    fn description(&self) -> &str {
        self.description_()
    }
    fn cause(&self) -> Option<&dyn Error> {
        None
    }
}
impl Error for Unspecified {
    // Deprecated in std, but kept for ring API compatibility.
    #[allow(clippy::unnecessary_literal_bound)]
    fn description(&self) -> &str {
        "Unspecified"
    }
    #[inline]
    fn cause(&self) -> Option<&dyn Error> {
        None
    }
}
impl core::fmt::Display for KeyRejected {
    // Displays the same reason string as `description()`.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        f.write_str(self.description_())
    }
}
// Conversions between the crate's error types, `()`, and common std error
// types, enabling `?` across internal boundaries. A `KeyRejected` collapses to
// `Unspecified` (dropping its reason); an untyped `()` error maps to
// "UnexpectedError" / `Unspecified`.
impl From<KeyRejected> for Unspecified {
    fn from(_: KeyRejected) -> Self {
        Unspecified
    }
}
impl From<()> for Unspecified {
    fn from((): ()) -> Self {
        Unspecified
    }
}
impl From<Unspecified> for () {
    fn from(_: Unspecified) -> Self {}
}
impl From<()> for KeyRejected {
    fn from((): ()) -> Self {
        KeyRejected::unexpected_error()
    }
}
#[cfg(any(feature = "ring-sig-verify", feature = "ring-io"))]
impl From<untrusted::EndOfInput> for Unspecified {
    fn from(_: untrusted::EndOfInput) -> Self {
        Unspecified
    }
}
impl From<TryFromIntError> for Unspecified {
    fn from(_: TryFromIntError) -> Self {
        Unspecified
    }
}
impl From<TryFromIntError> for KeyRejected {
    fn from(_: TryFromIntError) -> Self {
        KeyRejected::unexpected_error()
    }
}
impl From<Unspecified> for KeyRejected {
    fn from(_: Unspecified) -> Self {
        Self::unspecified()
    }
}
#[allow(deprecated, unused_imports)]
#[cfg(test)]
mod tests {
    use crate::error::KeyRejected;
    use crate::test;
    use std::error::Error;

    #[test]
    fn display_unspecified() {
        let output = format!("{}", super::Unspecified);
        assert_eq!("Unspecified", output);
    }

    // `()` -> KeyRejected -> Unspecified conversion chain keeps sensible text.
    #[test]
    fn unexpected_error() {
        let key_rejected = super::KeyRejected::from(());
        assert_eq!("UnexpectedError", key_rejected.description());
        let unspecified = super::Unspecified::from(key_rejected);
        assert_eq!("Unspecified", unspecified.description());
        #[allow(clippy::redundant_locals)]
        let unspecified = unspecified;
        assert_eq!("Unspecified", unspecified.description());
    }

    // Both error types satisfy the std::error::Error contract.
    #[test]
    fn std_error() {
        let key_rejected = KeyRejected::wrong_algorithm();
        assert!(key_rejected.cause().is_none());
        assert_eq!("WrongAlgorithm", key_rejected.description());
        let unspecified = super::Unspecified;
        assert!(unspecified.cause().is_none());
        assert_eq!("Unspecified", unspecified.description());
        test::compile_time_assert_std_error_error::<KeyRejected>();
    }
}

574
vendor/aws-lc-rs/src/evp_pkey.rs vendored Normal file
View File

@@ -0,0 +1,574 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{
EVP_DigestSign, EVP_DigestSignInit, EVP_DigestVerify, EVP_DigestVerifyInit, EVP_PKEY_CTX_new,
EVP_PKEY_CTX_new_id, EVP_PKEY_bits, EVP_PKEY_cmp, EVP_PKEY_derive, EVP_PKEY_derive_init,
EVP_PKEY_derive_set_peer, EVP_PKEY_get0_EC_KEY, EVP_PKEY_get0_RSA,
EVP_PKEY_get_raw_private_key, EVP_PKEY_get_raw_public_key, EVP_PKEY_id, EVP_PKEY_keygen,
EVP_PKEY_keygen_init, EVP_PKEY_new_raw_private_key, EVP_PKEY_new_raw_public_key, EVP_PKEY_sign,
EVP_PKEY_sign_init, EVP_PKEY_size, EVP_PKEY_up_ref, EVP_PKEY_verify, EVP_PKEY_verify_init,
EVP_marshal_private_key, EVP_marshal_private_key_v2, EVP_marshal_public_key,
EVP_parse_private_key, EVP_parse_public_key, EC_KEY, EVP_PKEY, EVP_PKEY_CTX, EVP_PKEY_ED25519,
RSA,
};
#[cfg(all(feature = "unstable", not(feature = "fips")))]
use crate::aws_lc::{
EVP_PKEY_pqdsa_new_raw_private_key, EVP_PKEY_pqdsa_new_raw_public_key, EVP_PKEY_PQDSA,
NID_MLDSA44, NID_MLDSA65, NID_MLDSA87,
};
use crate::cbb::LcCBB;
use crate::digest::digest_ctx::DigestContext;
use crate::digest::Digest;
use crate::error::{KeyRejected, Unspecified};
use crate::fips::indicator_check;
use crate::pkcs8::Version;
use crate::ptr::{ConstPointer, LcPtr};
use crate::{cbs, digest};
use core::ffi::c_int;
use std::ptr::{null, null_mut};
impl PartialEq<Self> for LcPtr<EVP_PKEY> {
    /// Only compares params and public key
    fn eq(&self, other: &Self) -> bool {
        // EVP_PKEY_cmp only compares params and public key; it returns 1 on match.
        1 == unsafe { EVP_PKEY_cmp(self.as_const_ptr(), other.as_const_ptr()) }
    }
}
// Closure type used to apply extra configuration (e.g. RSA padding) to an
// `EVP_PKEY_CTX` before a sign/verify operation.
#[allow(non_camel_case_types)]
pub(crate) trait EVP_PKEY_CTX_consumer: Fn(*mut EVP_PKEY_CTX) -> Result<(), ()> {}
impl<T> EVP_PKEY_CTX_consumer for T where T: Fn(*mut EVP_PKEY_CTX) -> Result<(), ()> {}
// Explicitly-typed `None`, for call sites that need no ctx configuration.
#[allow(non_upper_case_globals, clippy::type_complexity)]
pub(crate) const No_EVP_PKEY_CTX_consumer: Option<fn(*mut EVP_PKEY_CTX) -> Result<(), ()>> = None;
impl ConstPointer<'_, EVP_PKEY> {
    /// Confirms that this key is an Ed25519 key of acceptable size.
    ///
    /// # Errors
    /// `wrong_algorithm` if the key type is not Ed25519; `too_small`/`too_large`
    /// if the bit size falls outside [253, 256].
    pub(crate) fn validate_as_ed25519(&self) -> Result<(), KeyRejected> {
        const ED25519_KEY_TYPE: c_int = EVP_PKEY_ED25519;
        const ED25519_MIN_BITS: c_int = 253;
        const ED25519_MAX_BITS: c_int = 256;
        let key_type = self.id();
        if key_type != ED25519_KEY_TYPE {
            return Err(KeyRejected::wrong_algorithm());
        }
        let bits: c_int = self.key_size_bits().try_into().unwrap();
        if bits < ED25519_MIN_BITS {
            return Err(KeyRejected::too_small());
        }
        if bits > ED25519_MAX_BITS {
            return Err(KeyRejected::too_large());
        }
        Ok(())
    }

    // Known EVP_PKEY type identifiers, for reference:
    // EVP_PKEY_NONE = 0;
    // EVP_PKEY_RSA = 6;
    // EVP_PKEY_RSA_PSS = 912;
    // EVP_PKEY_DSA = 116;
    // EVP_PKEY_EC = 408;
    // EVP_PKEY_ED25519 = 949;
    // EVP_PKEY_X25519 = 948;
    // EVP_PKEY_KYBER512 = 970;
    // EVP_PKEY_HKDF = 969;
    // EVP_PKEY_DH = 28;
    // EVP_PKEY_RSA2 = 19;
    // EVP_PKEY_X448 = 961;
    // EVP_PKEY_ED448 = 960;
    /// Returns the key's `EVP_PKEY` type identifier (see table above).
    pub(crate) fn id(&self) -> i32 {
        unsafe { EVP_PKEY_id(self.as_const_ptr()) }
    }
    /// Key size in bytes (bit size divided by 8, truncating).
    pub(crate) fn key_size_bytes(&self) -> usize {
        self.key_size_bits() / 8
    }
    /// Key size in bits, per `EVP_PKEY_bits`.
    pub(crate) fn key_size_bits(&self) -> usize {
        unsafe { EVP_PKEY_bits(self.as_const_ptr()) }
            .try_into()
            .unwrap()
    }
    /// Maximum signature size in bytes, per `EVP_PKEY_size`.
    pub(crate) fn signature_size_bytes(&self) -> usize {
        unsafe { EVP_PKEY_size(self.as_const_ptr()) }
            .try_into()
            .unwrap()
    }
    /// Borrows the underlying `EC_KEY`; fails if this is not an EC key.
    #[allow(dead_code)]
    pub(crate) fn get_ec_key(&self) -> Result<ConstPointer<'_, EC_KEY>, KeyRejected> {
        self.project_const_lifetime(unsafe {
            |evp_pkey| EVP_PKEY_get0_EC_KEY(evp_pkey.as_const_ptr())
        })
        .map_err(|()| KeyRejected::wrong_algorithm())
    }
    /// Borrows the underlying `RSA`; fails if this is not an RSA key.
    pub(crate) fn get_rsa(&self) -> Result<ConstPointer<'_, RSA>, KeyRejected> {
        self.project_const_lifetime(unsafe {
            |evp_pkey| EVP_PKEY_get0_RSA(evp_pkey.as_const_ptr())
        })
        .map_err(|()| KeyRejected::wrong_algorithm())
    }
    /// Serializes the public key as a DER `SubjectPublicKeyInfo` (RFC 5280).
    pub(crate) fn marshal_rfc5280_public_key(&self) -> Result<Vec<u8>, Unspecified> {
        // Data shows that the SubjectPublicKeyInfo is roughly 356% to 375% increase in size compared to the RSA key
        // size in bytes for keys ranging from 2048-bit to 4096-bit. So size the initial capacity to be roughly
        // 500% as a conservative estimate to avoid needing to reallocate for any key in that range.
        let mut cbb = LcCBB::new(self.key_size_bytes() * 5);
        if 1 != unsafe { EVP_marshal_public_key(cbb.as_mut_ptr(), self.as_const_ptr()) } {
            return Err(Unspecified);
        }
        cbb.into_vec()
    }
    /// Serializes the private key as a DER PKCS#8 document: RFC 5208
    /// `PrivateKeyInfo` for `Version::V1`, RFC 5958 `OneAsymmetricKey` for
    /// `Version::V2`.
    pub(crate) fn marshal_rfc5208_private_key(
        &self,
        version: Version,
    ) -> Result<Vec<u8>, Unspecified> {
        let key_size_bytes =
            TryInto::<usize>::try_into(unsafe { EVP_PKEY_bits(self.as_const_ptr()) })
                .expect("fit in usize")
                / 8;
        // 5x the key size: same conservative sizing rationale as
        // `marshal_rfc5280_public_key` above.
        let mut cbb = LcCBB::new(key_size_bytes * 5);
        match version {
            Version::V1 => {
                if 1 != unsafe { EVP_marshal_private_key(cbb.as_mut_ptr(), self.as_const_ptr()) } {
                    return Err(Unspecified);
                }
            }
            Version::V2 => {
                if 1 != unsafe { EVP_marshal_private_key_v2(cbb.as_mut_ptr(), self.as_const_ptr()) }
                {
                    return Err(Unspecified);
                }
            }
        }
        cbb.into_vec()
    }
    /// Returns the raw private key bytes (e.g. the 32-byte Ed25519 seed).
    pub(crate) fn marshal_raw_private_key(&self) -> Result<Vec<u8>, Unspecified> {
        // First call with a null buffer queries the required size.
        let mut size = 0;
        if 1 != unsafe { EVP_PKEY_get_raw_private_key(self.as_const_ptr(), null_mut(), &mut size) }
        {
            return Err(Unspecified);
        }
        let mut buffer = vec![0u8; size];
        let buffer_size = self.marshal_raw_private_to_buffer(&mut buffer)?;
        debug_assert_eq!(buffer_size, size);
        Ok(buffer)
    }
    /// Writes the raw private key into `buffer`, returning the bytes written.
    pub(crate) fn marshal_raw_private_to_buffer(
        &self,
        buffer: &mut [u8],
    ) -> Result<usize, Unspecified> {
        let mut key_len = buffer.len();
        if 1 == unsafe {
            EVP_PKEY_get_raw_private_key(self.as_const_ptr(), buffer.as_mut_ptr(), &mut key_len)
        } {
            Ok(key_len)
        } else {
            Err(Unspecified)
        }
    }
    /// Returns the raw public key bytes.
    #[allow(dead_code)]
    pub(crate) fn marshal_raw_public_key(&self) -> Result<Vec<u8>, Unspecified> {
        // First call with a null buffer queries the required size.
        let mut size = 0;
        if 1 != unsafe { EVP_PKEY_get_raw_public_key(self.as_const_ptr(), null_mut(), &mut size) } {
            return Err(Unspecified);
        }
        let mut buffer = vec![0u8; size];
        let buffer_size = self.marshal_raw_public_to_buffer(&mut buffer)?;
        debug_assert_eq!(buffer_size, size);
        Ok(buffer)
    }
    /// Writes the raw public key into `buffer`, returning the bytes written.
    pub(crate) fn marshal_raw_public_to_buffer(
        &self,
        buffer: &mut [u8],
    ) -> Result<usize, Unspecified> {
        let mut key_len = buffer.len();
        if 1 == unsafe {
            // `EVP_PKEY_get_raw_public_key` writes the total length
            // to `encapsulate_key_size` in the event that the buffer we provide is larger then
            // required.
            EVP_PKEY_get_raw_public_key(self.as_const_ptr(), buffer.as_mut_ptr(), &mut key_len)
        } {
            Ok(key_len)
        } else {
            Err(Unspecified)
        }
    }
}
impl LcPtr<EVP_PKEY> {
/// Returns the wrapped pointer as `*mut EVP_PKEY`.
///
/// # Safety
/// NOTE(review): callers appear to pass the result only to AWS-LC APIs that do
/// not logically mutate the key (see the comments at the call sites below) —
/// the caller must uphold whatever aliasing contract those APIs require.
#[inline]
pub unsafe fn as_mut_unsafe_ptr(&self) -> *mut EVP_PKEY {
    self.as_const_ptr().cast_mut()
}
/// Parses a DER `SubjectPublicKeyInfo` (RFC 5280) and verifies the resulting
/// key has the expected `EVP_PKEY` type.
///
/// # Errors
/// `invalid_encoding` on parse failure; `wrong_algorithm` on type mismatch.
pub(crate) fn parse_rfc5280_public_key(
    bytes: &[u8],
    evp_pkey_type: c_int,
) -> Result<Self, KeyRejected> {
    let mut cbs = cbs::build_CBS(bytes);
    // Also checks the validity of the key
    let evp_pkey = LcPtr::new(unsafe { EVP_parse_public_key(&mut cbs) })
        .map_err(|()| KeyRejected::invalid_encoding())?;
    evp_pkey
        .as_const()
        .id()
        .eq(&evp_pkey_type)
        .then_some(evp_pkey)
        .ok_or(KeyRejected::wrong_algorithm())
}
/// Parses a DER PKCS#8 private key (RFC 5208) and verifies the resulting key
/// has the expected `EVP_PKEY` type.
///
/// # Errors
/// `invalid_encoding` on parse failure; `wrong_algorithm` on type mismatch.
pub(crate) fn parse_rfc5208_private_key(
    bytes: &[u8],
    evp_pkey_type: c_int,
) -> Result<Self, KeyRejected> {
    let mut cbs = cbs::build_CBS(bytes);
    // Also checks the validity of the key
    let evp_pkey = LcPtr::new(unsafe { EVP_parse_private_key(&mut cbs) })
        .map_err(|()| KeyRejected::invalid_encoding())?;
    evp_pkey
        .as_const()
        .id()
        .eq(&evp_pkey_type)
        .then_some(evp_pkey)
        .ok_or(KeyRejected::wrong_algorithm())
}
/// Creates a new `EVP_PKEY_CTX` bound to this key (no engine).
#[allow(non_snake_case)]
pub(crate) fn create_EVP_PKEY_CTX(&self) -> Result<LcPtr<EVP_PKEY_CTX>, ()> {
    // The only modification made by EVP_PKEY_CTX_new to `priv_key` is to increment its
    // refcount. The modification is made while holding a global lock:
    // https://github.com/aws/aws-lc/blob/61503f7fe72457e12d3446853a5452d175560c49/crypto/refcount_lock.c#L29
    LcPtr::new(unsafe { EVP_PKEY_CTX_new(self.as_mut_unsafe_ptr(), null_mut()) })
}
/// Builds an `EVP_PKEY` from raw private key bytes of the given type.
///
/// For `EVP_PKEY_PQDSA` (unstable, non-FIPS builds) the ML-DSA parameter set
/// is inferred from the raw key length; all other types go through
/// `EVP_PKEY_new_raw_private_key`.
pub(crate) fn parse_raw_private_key(
    bytes: &[u8],
    evp_pkey_type: c_int,
) -> Result<Self, KeyRejected> {
    #[cfg(all(feature = "unstable", not(feature = "fips")))]
    if evp_pkey_type == EVP_PKEY_PQDSA {
        // Raw ML-DSA private key lengths identify the parameter set.
        return match bytes.len() {
            2560 => Self::new(unsafe {
                EVP_PKEY_pqdsa_new_raw_private_key(NID_MLDSA44, bytes.as_ptr(), bytes.len())
            }),
            4032 => Self::new(unsafe {
                EVP_PKEY_pqdsa_new_raw_private_key(NID_MLDSA65, bytes.as_ptr(), bytes.len())
            }),
            4896 => Self::new(unsafe {
                EVP_PKEY_pqdsa_new_raw_private_key(NID_MLDSA87, bytes.as_ptr(), bytes.len())
            }),
            _ => Err(()),
        }
        .map_err(|()| KeyRejected::invalid_encoding());
    }
    Self::new(unsafe {
        EVP_PKEY_new_raw_private_key(evp_pkey_type, null_mut(), bytes.as_ptr(), bytes.len())
    })
    .map_err(|()| KeyRejected::unspecified())
}
/// Builds an `EVP_PKEY` from raw public key bytes of the given type.
///
/// For `EVP_PKEY_PQDSA` (unstable, non-FIPS builds) the ML-DSA parameter set
/// is inferred from the raw key length; all other types go through
/// `EVP_PKEY_new_raw_public_key`.
pub(crate) fn parse_raw_public_key(
    bytes: &[u8],
    evp_pkey_type: c_int,
) -> Result<Self, KeyRejected> {
    #[cfg(all(feature = "unstable", not(feature = "fips")))]
    if evp_pkey_type == EVP_PKEY_PQDSA {
        // Raw ML-DSA public key lengths identify the parameter set.
        return match bytes.len() {
            1312 => Self::new(unsafe {
                EVP_PKEY_pqdsa_new_raw_public_key(NID_MLDSA44, bytes.as_ptr(), bytes.len())
            }),
            1952 => Self::new(unsafe {
                EVP_PKEY_pqdsa_new_raw_public_key(NID_MLDSA65, bytes.as_ptr(), bytes.len())
            }),
            2592 => Self::new(unsafe {
                EVP_PKEY_pqdsa_new_raw_public_key(NID_MLDSA87, bytes.as_ptr(), bytes.len())
            }),
            _ => Err(()),
        }
        .map_err(|()| KeyRejected::unspecified());
    }
    Self::new(unsafe {
        EVP_PKEY_new_raw_public_key(evp_pkey_type, null_mut(), bytes.as_ptr(), bytes.len())
    })
    .map_err(|()| KeyRejected::invalid_encoding())
}
/// Signs `message` with this key via `EVP_DigestSign`.
///
/// `digest` selects an optional hash algorithm (`None` for algorithms such as
/// Ed25519 that sign the message directly). `padding_fn`, when present, is
/// applied to the `EVP_PKEY_CTX` after init (e.g. for RSA padding setup).
///
/// # Errors
/// `Unspecified` if any step of the signing operation fails.
pub(crate) fn sign<F>(
    &self,
    message: &[u8],
    digest: Option<&'static digest::Algorithm>,
    padding_fn: Option<F>,
) -> Result<Box<[u8]>, Unspecified>
where
    F: EVP_PKEY_CTX_consumer,
{
    let mut md_ctx = DigestContext::new_uninit();
    let evp_md = if let Some(alg) = digest {
        digest::match_digest_type(&alg.id).as_const_ptr()
    } else {
        null()
    };
    // `pctx` is owned by `md_ctx`; it is only borrowed here for `padding_fn`.
    let mut pctx = null_mut::<EVP_PKEY_CTX>();
    if 1 != unsafe {
        // EVP_DigestSignInit does not mutate |pkey| for thread-safety purposes and may be
        // used concurrently with other non-mutating functions on |pkey|.
        // https://github.com/aws/aws-lc/blob/9b4b5a15a97618b5b826d742419ccd54c819fa42/include/openssl/evp.h#L297-L313
        EVP_DigestSignInit(
            md_ctx.as_mut_ptr(),
            &mut pctx,
            evp_md,
            null_mut(),
            self.as_mut_unsafe_ptr(),
        )
    } {
        return Err(Unspecified);
    }
    if let Some(pad_fn) = padding_fn {
        pad_fn(pctx)?;
    }
    // Determine the maximum length of the signature.
    let mut sig_len = 0;
    if 1 != unsafe {
        EVP_DigestSign(
            md_ctx.as_mut_ptr(),
            null_mut(),
            &mut sig_len,
            message.as_ptr(),
            message.len(),
        )
    } {
        return Err(Unspecified);
    }
    if sig_len == 0 {
        return Err(Unspecified);
    }
    let mut signature = vec![0u8; sig_len];
    // Second call performs the signing; wrapped for the FIPS approval indicator.
    if 1 != indicator_check!(unsafe {
        EVP_DigestSign(
            md_ctx.as_mut_ptr(),
            signature.as_mut_ptr(),
            &mut sig_len,
            message.as_ptr(),
            message.len(),
        )
    }) {
        return Err(Unspecified);
    }
    // `sig_len` now holds the actual signature length, which may be smaller
    // than the maximum queried above.
    signature.truncate(sig_len);
    Ok(signature.into_boxed_slice())
}
/// Signs a precomputed digest (rather than a message) via `EVP_PKEY_sign`.
///
/// `padding_fn`, when provided, configures the `EVP_PKEY_CTX` (e.g. RSA
/// padding mode) after `EVP_PKEY_sign_init` and before the size query.
pub(crate) fn sign_digest<F>(
    &self,
    digest: &Digest,
    padding_fn: Option<F>,
) -> Result<Box<[u8]>, Unspecified>
where
    F: EVP_PKEY_CTX_consumer,
{
    let mut pctx = self.create_EVP_PKEY_CTX()?;
    if 1 != unsafe { EVP_PKEY_sign_init(pctx.as_mut_ptr()) } {
        return Err(Unspecified);
    }
    if let Some(pad_fn) = padding_fn {
        pad_fn(pctx.as_mut_ptr())?;
    }
    let msg_digest = digest.as_ref();
    // First call with a null output buffer yields the maximum signature length.
    let mut sig_len = 0;
    if 1 != unsafe {
        EVP_PKEY_sign(
            pctx.as_mut_ptr(),
            null_mut(),
            &mut sig_len,
            msg_digest.as_ptr(),
            msg_digest.len(),
        )
    } {
        return Err(Unspecified);
    }
    let mut signature = vec![0u8; sig_len];
    // Second call writes the signature; `sig_len` is updated to the actual length.
    if 1 != indicator_check!(unsafe {
        EVP_PKEY_sign(
            pctx.as_mut_ptr(),
            signature.as_mut_ptr(),
            &mut sig_len,
            msg_digest.as_ptr(),
            msg_digest.len(),
        )
    }) {
        return Err(Unspecified);
    }
    signature.truncate(sig_len);
    Ok(signature.into_boxed_slice())
}
/// Verifies `signature` over `msg` via the one-shot `EVP_DigestVerify` API.
///
/// When `digest` is `None`, a null `EVP_MD` is passed so the key's native
/// verification operation is used. `padding_fn`, when provided, configures
/// the `EVP_PKEY_CTX` owned by the digest context.
pub(crate) fn verify<F>(
    &self,
    msg: &[u8],
    digest: Option<&'static digest::Algorithm>,
    padding_fn: Option<F>,
    signature: &[u8],
) -> Result<(), Unspecified>
where
    F: EVP_PKEY_CTX_consumer,
{
    let mut md_ctx = DigestContext::new_uninit();
    let evp_md = if let Some(alg) = digest {
        digest::match_digest_type(&alg.id).as_const_ptr()
    } else {
        null()
    };
    // `pctx` is borrowed from `md_ctx`; it is owned by the digest context and
    // must not be freed here.
    let mut pctx = null_mut::<EVP_PKEY_CTX>();
    if 1 != unsafe {
        EVP_DigestVerifyInit(
            md_ctx.as_mut_ptr(),
            &mut pctx,
            evp_md,
            null_mut(),
            self.as_mut_unsafe_ptr(),
        )
    } {
        return Err(Unspecified);
    }
    if let Some(pad_fn) = padding_fn {
        pad_fn(pctx)?;
    }
    // Any failure, including a signature mismatch, is reported as `Unspecified`.
    if 1 != indicator_check!(unsafe {
        EVP_DigestVerify(
            md_ctx.as_mut_ptr(),
            signature.as_ptr(),
            signature.len(),
            msg.as_ptr(),
            msg.len(),
        )
    }) {
        return Err(Unspecified);
    }
    Ok(())
}
/// Verifies `signature` over a precomputed digest via `EVP_PKEY_verify`.
///
/// `padding_fn`, when provided, configures the `EVP_PKEY_CTX` after
/// `EVP_PKEY_verify_init` and before verification.
pub(crate) fn verify_digest_sig<F>(
    &self,
    digest: &Digest,
    padding_fn: Option<F>,
    signature: &[u8],
) -> Result<(), Unspecified>
where
    F: EVP_PKEY_CTX_consumer,
{
    let mut pctx = self.create_EVP_PKEY_CTX()?;
    if 1 != unsafe { EVP_PKEY_verify_init(pctx.as_mut_ptr()) } {
        return Err(Unspecified);
    }
    if let Some(pad_fn) = padding_fn {
        pad_fn(pctx.as_mut_ptr())?;
    }
    let msg_digest = digest.as_ref();
    // Any failure, including a signature mismatch, is reported as `Unspecified`.
    if 1 == unsafe {
        indicator_check!(EVP_PKEY_verify(
            pctx.as_mut_ptr(),
            signature.as_ptr(),
            signature.len(),
            msg_digest.as_ptr(),
            msg_digest.len(),
        ))
    } {
        Ok(())
    } else {
        Err(Unspecified)
    }
}
/// Computes a shared secret between this (private) key and `peer_key` via
/// `EVP_PKEY_derive`.
// NOTE(review): `peer_key` is `&mut` only because `EVP_PKEY_derive_set_peer`
// requires a non-const pointer — confirm the peer key is not logically
// modified by the call.
pub(crate) fn agree(&self, peer_key: &mut Self) -> Result<Box<[u8]>, Unspecified> {
    let mut pctx = self.create_EVP_PKEY_CTX()?;
    if 1 != unsafe { EVP_PKEY_derive_init(pctx.as_mut_ptr()) } {
        return Err(Unspecified);
    }
    let mut secret_len = 0;
    if 1 != unsafe { EVP_PKEY_derive_set_peer(pctx.as_mut_ptr(), peer_key.as_mut_ptr()) } {
        return Err(Unspecified);
    }
    // First call with a null output buffer yields the maximum secret length.
    if 1 != unsafe { EVP_PKEY_derive(pctx.as_mut_ptr(), null_mut(), &mut secret_len) } {
        return Err(Unspecified);
    }
    let mut secret = vec![0u8; secret_len];
    // Second call writes the secret; `secret_len` is updated to the actual length.
    if 1 != indicator_check!(unsafe {
        EVP_PKEY_derive(pctx.as_mut_ptr(), secret.as_mut_ptr(), &mut secret_len)
    }) {
        return Err(Unspecified);
    }
    secret.truncate(secret_len);
    Ok(secret.into_boxed_slice())
}
/// Generates a fresh key of the given `EVP_PKEY` type.
///
/// `params_fn`, when provided, configures the keygen context (applied after
/// `EVP_PKEY_keygen_init`, before key generation).
pub(crate) fn generate<F>(pkey_type: c_int, params_fn: Option<F>) -> Result<Self, Unspecified>
where
    F: EVP_PKEY_CTX_consumer,
{
    let mut pkey_ctx = LcPtr::new(unsafe { EVP_PKEY_CTX_new_id(pkey_type, null_mut()) })?;
    if 1 != unsafe { EVP_PKEY_keygen_init(pkey_ctx.as_mut_ptr()) } {
        return Err(Unspecified);
    }
    if let Some(pad_fn) = params_fn {
        pad_fn(pkey_ctx.as_mut_ptr())?;
    }
    // On success the out-parameter holds a new `EVP_PKEY`; ownership transfers
    // to the returned `LcPtr`.
    let mut pkey = null_mut::<EVP_PKEY>();
    if 1 != indicator_check!(unsafe { EVP_PKEY_keygen(pkey_ctx.as_mut_ptr(), &mut pkey) }) {
        return Err(Unspecified);
    }
    Ok(LcPtr::new(pkey)?)
}
}
impl Clone for LcPtr<EVP_PKEY> {
    // Cloning is a refcount bump, not a deep copy: both `LcPtr`s refer to the
    // same underlying `EVP_PKEY`.
    fn clone(&self) -> Self {
        // EVP_PKEY_up_ref increments the refcount while holding a global lock:
        // https://github.com/aws/aws-lc/blob/61503f7fe72457e12d3446853a5452d175560c49/crypto/refcount_lock.c#L29
        assert_eq!(
            1,
            unsafe { EVP_PKEY_up_ref(self.as_mut_unsafe_ptr()) },
            "infallible AWS-LC function"
        );
        Self::new(unsafe { self.as_mut_unsafe_ptr() }).expect("non-null AWS-LC EVP_PKEY pointer")
    }
}

192
vendor/aws-lc-rs/src/fips.rs vendored Normal file
View File

@@ -0,0 +1,192 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
/// Retrieve the FIPS module service status.
///
/// Reading the status also resets the thread-local indicator (see
/// `indicator::get_status`), so consecutive calls return `Unset` until another
/// service runs.
#[allow(dead_code)] // appease clippy
#[cfg(all(feature = "fips", debug_assertions))]
pub(crate) fn get_fips_service_status() -> FipsServiceStatus<()> {
    match indicator::get_status() {
        Some(true) => FipsServiceStatus::Approved(()),
        Some(false) => FipsServiceStatus::NonApproved(()),
        None => FipsServiceStatus::Unset(()),
    }
}
// Marks the current thread as having used a non-approved service. Compiles to
// a no-op unless the FIPS indicator is enabled (fips feature + debug assertions).
#[inline]
pub(crate) fn set_fips_service_status_unapproved() {
    #[cfg(all(feature = "fips", debug_assertions))]
    indicator::set_unapproved();
}
// Resets the thread-local indicator to the unset state without reading it.
#[allow(dead_code)]
#[cfg(all(feature = "fips", debug_assertions))]
#[inline]
pub(crate) fn clear_fips_service_status() {
    indicator::clear();
}
#[cfg(all(feature = "fips", debug_assertions))]
pub(crate) mod indicator {
    use core::cell::Cell;

    // Thread-local FIPS approval indicator: `None` = unset, `Some(true)` =
    // an approved service ran, `Some(false)` = a non-approved service ran.
    thread_local! {
        static STATUS_INDICATOR: Cell<Option<bool>> = const { Cell::new(None) };
    }

    /// Retrieves and returns the current indicator status while resetting the
    /// indicator (to `None`) for future calls.
    pub fn get_status() -> Option<bool> {
        // `Cell::take` swaps in `Option::default()` (`None`) and returns the
        // previous value — the idiomatic form of the swap-with-temporary dance.
        STATUS_INDICATOR.with(Cell::take)
    }

    /// Marks the current thread as having used an approved service.
    pub fn set_approved() {
        STATUS_INDICATOR.with(|v| v.set(Some(true)));
    }

    /// Marks the current thread as having used a non-approved service.
    pub fn set_unapproved() {
        STATUS_INDICATOR.with(|v| v.set(Some(false)));
    }

    /// Resets the indicator without reading it.
    pub fn clear() {
        STATUS_INDICATOR.with(|v| v.set(None));
    }
}
// Snapshot of the *AWS-LC* FIPS service-indicator counter taken before an FFI
// call; compared against the "after" value by `indicator_check!`.
#[cfg(all(feature = "fips", debug_assertions))]
#[inline]
pub(crate) fn service_indicator_before_call() -> u64 {
    unsafe { aws_lc::FIPS_service_indicator_before_call() }
}
// Counterpart snapshot taken after the FFI call; per `indicator_check!`, a
// changed counter means an approved service ran.
#[cfg(all(feature = "fips", debug_assertions))]
#[inline]
pub(crate) fn service_indicator_after_call() -> u64 {
    unsafe { aws_lc::FIPS_service_indicator_after_call() }
}
/// The FIPS Module Service Status
#[allow(dead_code)] // appease clippy
#[cfg(all(feature = "fips", debug_assertions))]
#[allow(clippy::module_name_repetitions)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum FipsServiceStatus<R> {
    /// Indicates that the current thread is using approved FIPS cryptographic services.
    Approved(R),
    /// Indicates that the current thread has used non-approved FIPS cryptographic services.
    // NOTE(review): `reset_fips_service_status` mentioned below does not appear
    // in this file; this doc may be stale — confirm against the crate.
    /// The service indicator status can be reset using `reset_fips_service_status`.
    /// `reset_fips_service_status` will return `NonApprovedMode` if the service used a non-approved
    /// service, and automatically resets the service status for you.
    NonApproved(R),
    /// Indicates that the service indicator is not set
    Unset(R),
}
#[cfg(all(feature = "fips", debug_assertions))]
impl<R> FipsServiceStatus<R> {
    /// Maps a `ServiceStatus<R>` to a `ServiceStatus<S>` by applying a function to a contained value.
    /// The variant is preserved; only the payload is transformed.
    #[allow(dead_code)]
    pub fn map<S, F>(self, op: F) -> FipsServiceStatus<S>
    where
        F: FnOnce(R) -> S,
    {
        match self {
            FipsServiceStatus::Approved(v) => FipsServiceStatus::Approved(op(v)),
            FipsServiceStatus::NonApproved(v) => FipsServiceStatus::NonApproved(op(v)),
            FipsServiceStatus::Unset(v) => FipsServiceStatus::Unset(op(v)),
        }
    }
}
// Wraps an *AWS-LC* call, recording whether it used a FIPS-approved service.
// In FIPS debug builds the service-indicator counter is sampled before and
// after the call: an unchanged counter means no approved service ran, so the
// thread-local indicator is set to unapproved; otherwise approved. In all
// other builds this expands to the bare expression with no overhead.
macro_rules! indicator_check {
    ($function:expr) => {{
        #[cfg(all(feature = "fips", debug_assertions))]
        {
            use crate::fips::{service_indicator_after_call, service_indicator_before_call};
            let before = service_indicator_before_call();
            let result = $function;
            let after = service_indicator_after_call();
            if before == after {
                // Counter unchanged: no approved service ran during the call.
                crate::fips::indicator::set_unapproved();
                result
            } else {
                crate::fips::indicator::set_approved();
                result
            }
        }
        #[cfg(any(not(feature = "fips"), not(debug_assertions)))]
        {
            $function
        }
    }};
}
pub(crate) use indicator_check;
// Evaluates `$function` with a freshly cleared indicator and returns the
// resulting `FipsServiceStatus`, with the expression's value as its payload.
#[allow(unused_macros)]
#[cfg(all(feature = "fips", debug_assertions))]
macro_rules! check_fips_service_status {
    ($function:expr) => {{
        // Clear the current indicator status first by retrieving it
        use $crate::fips::{clear_fips_service_status, get_fips_service_status};
        clear_fips_service_status();
        // do the expression
        let result = $function;
        // Check indicator after expression
        get_fips_service_status().map(|()| result)
    }};
}
#[allow(unused_imports)]
#[cfg(all(feature = "fips", debug_assertions))]
pub(crate) use check_fips_service_status;
// Test helper: asserts that evaluating `$function` leaves the FIPS indicator
// in the `$expect` variant, returning the expression's value on success and
// panicking with `$message` otherwise.
#[allow(unused_macros)]
#[cfg(all(feature = "fips", debug_assertions))]
macro_rules! assert_fips_status_indicator {
    ($function:expr, $expect:path) => {
        assert_fips_status_indicator!($function, $expect, "unexpected service indicator")
    };
    ($function:expr, $expect:path, $message:literal) => {{
        match crate::fips::check_fips_service_status!($function) {
            $expect(v) => v,
            _ => panic!($message),
        }
    }};
}
#[allow(unused_imports)]
#[cfg(all(feature = "fips", debug_assertions))]
pub(crate) use assert_fips_status_indicator;
#[cfg(test)]
mod tests {
    #[cfg(all(feature = "fips", debug_assertions))]
    #[test]
    fn test_service_status() {
        use crate::fips::FipsServiceStatus;
        // `map` must transform the payload while preserving the variant.
        assert_eq!(
            FipsServiceStatus::Approved(true),
            FipsServiceStatus::Approved(()).map(|()| true)
        );
        assert_eq!(
            FipsServiceStatus::NonApproved(true),
            FipsServiceStatus::NonApproved(()).map(|()| true)
        );
        assert_eq!(
            FipsServiceStatus::Unset(true),
            FipsServiceStatus::Unset(()).map(|()| true)
        );
    }
}

67
vendor/aws-lc-rs/src/hex.rs vendored Normal file
View File

@@ -0,0 +1,67 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
/// Converts bytes to a lower-case hex string
#[allow(clippy::missing_panics_doc)]
pub fn encode<T: AsRef<[u8]>>(bytes: T) -> String {
    // Nibble-to-character lookup; indexing is always in range because a
    // nibble is at most 0x0f.
    const LOWER_HEX: &[u8; 16] = b"0123456789abcdef";
    let data = bytes.as_ref();
    let mut hex = String::with_capacity(data.len() * 2);
    for &octet in data {
        hex.push(LOWER_HEX[usize::from(octet >> 4)] as char);
        hex.push(LOWER_HEX[usize::from(octet & 0x0f)] as char);
    }
    hex
}
/// Converts bytes to an upper-case hex string
pub fn encode_upper<T: AsRef<[u8]>>(bytes: T) -> String {
    // Encode in lower case first, then raise the ASCII letters in place to
    // avoid a second allocation.
    let mut hex = encode(bytes);
    hex.make_ascii_uppercase();
    hex
}
/// Converts a hex string to a vector of bytes
/// # Errors
/// Returns an error if `hex_str` contains a non-hex digit.
#[allow(clippy::missing_panics_doc)]
pub fn decode(hex_str: &str) -> Result<Vec<u8>, String> {
    let mut decoded = Vec::<u8>::with_capacity(hex_str.len() / 2 + 1);
    // High nibble of the byte currently being assembled.
    let mut pending: u8 = 0;
    // Count of hex digits consumed so far; its parity selects which nibble is next.
    let mut digits_seen: u32 = 0;
    for ch in hex_str.chars() {
        // `char::to_digit(16)` succeeds exactly for ASCII hex digits.
        #[allow(clippy::cast_possible_truncation)]
        let nibble = match ch.to_digit(16) {
            Some(v) => v as u8,
            None => return Err("Invalid hex string".to_string()),
        };
        if digits_seen % 2 == 0 {
            pending = nibble << 4;
        } else {
            decoded.push(pending | nibble);
        }
        match digits_seen.checked_add(1) {
            Some(next) => digits_seen = next,
            // u32 overflow is unreachable in practice; stop consuming input
            // rather than wrap the counter.
            None => break,
        }
    }
    // An odd number of digits leaves a final half-byte in the high nibble.
    if digits_seen % 2 == 1 {
        decoded.push(pending);
    }
    Ok(decoded)
}
/// Converts a hex string to a vector of bytes.
/// It ignores any characters that are not valid hex digits.
#[must_use]
#[allow(clippy::missing_panics_doc)]
pub fn decode_dirty(hex_str: &str) -> Vec<u8> {
    let mut filtered = String::with_capacity(hex_str.len());
    for ch in hex_str.chars() {
        if ch.is_ascii_hexdigit() {
            filtered.push(ch);
        }
    }
    // DON'T PANIC: `filtered` contains only hex digits, so `decode` cannot fail.
    decode(&filtered).unwrap()
}

537
vendor/aws-lc-rs/src/hkdf.rs vendored Normal file
View File

@@ -0,0 +1,537 @@
// Copyright 2015 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! HMAC-based Extract-and-Expand Key Derivation Function.
//!
//! HKDF is specified in [RFC 5869].
//!
//! [RFC 5869]: https://tools.ietf.org/html/rfc5869
//!
//! # Example
//! ```
//! use aws_lc_rs::{aead, hkdf, hmac, rand};
//!
//! // Generate a (non-secret) salt value
//! let mut salt_bytes = [0u8; 32];
//! rand::fill(&mut salt_bytes).unwrap();
//!
//! // Extract pseudo-random key from secret keying materials
//! let salt = hkdf::Salt::new(hkdf::HKDF_SHA256, &salt_bytes);
//! let pseudo_random_key = salt.extract(b"secret input keying material");
//!
//! // Derive HMAC key
//! let hmac_key_material = pseudo_random_key
//! .expand(
//! &[b"hmac contextual info"],
//! hkdf::HKDF_SHA256.hmac_algorithm(),
//! )
//! .unwrap();
//! let hmac_key = hmac::Key::from(hmac_key_material);
//!
//! // Derive UnboundKey for AES-128-GCM
//! let aes_keying_material = pseudo_random_key
//! .expand(&[b"aes contextual info"], &aead::AES_128_GCM)
//! .unwrap();
//! let aead_unbound_key = aead::UnboundKey::from(aes_keying_material);
//! ```
use crate::aws_lc::{HKDF_expand, HKDF};
use crate::error::Unspecified;
use crate::fips::indicator_check;
use crate::{digest, hmac};
use alloc::sync::Arc;
use core::fmt;
use zeroize::Zeroize;
/// An HKDF algorithm.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct Algorithm(hmac::Algorithm);
impl Algorithm {
    /// The underlying HMAC algorithm.
    #[inline]
    #[must_use]
    pub fn hmac_algorithm(&self) -> hmac::Algorithm {
        self.0
    }
}
/// HKDF using HMAC-SHA-1. Obsolete.
pub const HKDF_SHA1_FOR_LEGACY_USE_ONLY: Algorithm = Algorithm(hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY);
/// HKDF using HMAC-SHA-256.
pub const HKDF_SHA256: Algorithm = Algorithm(hmac::HMAC_SHA256);
/// HKDF using HMAC-SHA-384.
pub const HKDF_SHA384: Algorithm = Algorithm(hmac::HMAC_SHA384);
/// HKDF using HMAC-SHA-512.
pub const HKDF_SHA512: Algorithm = Algorithm(hmac::HMAC_SHA512);
/// General Info length's for HKDF don't normally exceed 256 bits.
/// We set the default capacity to a value larger than should be needed
/// so that the value passed to |`HKDF_expand`| is only allocated once.
const HKDF_INFO_DEFAULT_CAPACITY_LEN: usize = 80;
/// The maximum output size of a PRK computed by |`HKDF_extract`| is the maximum digest
/// size that can be outputted by *AWS-LC*.
const MAX_HKDF_PRK_LEN: usize = digest::MAX_OUTPUT_LEN;
impl KeyType for Algorithm {
    // The natural OKM length of an HKDF algorithm is its digest's output length.
    fn len(&self) -> usize {
        self.0.digest_algorithm().output_len
    }
}
/// A salt for HKDF operations.
pub struct Salt {
    // HKDF algorithm (and thus digest) this salt will be used with.
    algorithm: Algorithm,
    // Salt bytes; shared via `Arc` so `extract` can retain them without copying.
    bytes: Arc<[u8]>,
}
#[allow(clippy::missing_fields_in_debug)]
impl fmt::Debug for Salt {
    // The salt bytes are intentionally omitted from the Debug output.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("hkdf::Salt")
            .field("algorithm", &self.algorithm.0)
            .finish()
    }
}
impl Salt {
    /// Constructs a new `Salt` with the given value based on the given digest
    /// algorithm.
    ///
    /// Constructing a `Salt` is relatively expensive so it is good to reuse a
    /// `Salt` object instead of re-constructing `Salt`s with the same value.
    ///
    // # FIPS
    // The following conditions must be met:
    // * Algorithm is one of the following:
    //   * `HKDF_SHA1_FOR_LEGACY_USE_ONLY`
    //   * `HKDF_SHA256`
    //   * `HKDF_SHA384`
    //   * `HKDF_SHA512`
    // * `value.len() > 0` is true
    //
    /// # Panics
    /// `new` panics if salt creation fails
    #[must_use]
    pub fn new(algorithm: Algorithm, value: &[u8]) -> Self {
        Self {
            algorithm,
            bytes: Arc::from(value),
        }
    }
    /// The [HKDF-Extract] operation.
    ///
    /// [HKDF-Extract]: https://tools.ietf.org/html/rfc5869#section-2.2
    ///
    /// # Panics
    /// Panics if the extract operation is unable to be performed
    #[inline]
    #[must_use]
    pub fn extract(&self, secret: &[u8]) -> Prk {
        // Extraction is deferred: the secret (in a zeroize-on-drop buffer) and
        // the salt are retained, and the actual one-shot HKDF runs in
        // `PrkMode::fill` when the OKM is produced.
        Prk {
            algorithm: self.algorithm,
            mode: PrkMode::ExtractExpand {
                secret: Arc::new(ZeroizeBoxSlice::from(secret)),
                salt: Arc::clone(&self.bytes),
            },
        }
    }
    /// The algorithm used to derive this salt.
    #[inline]
    #[must_use]
    pub fn algorithm(&self) -> Algorithm {
        // Rewraps the inner HMAC algorithm; equivalent to copying `self.algorithm`.
        Algorithm(self.algorithm.hmac_algorithm())
    }
}
impl From<Okm<'_, Algorithm>> for Salt {
    // Derives a new `Salt` whose bytes are the OKM output of an HKDF expansion.
    fn from(okm: Okm<'_, Algorithm>) -> Self {
        let algorithm = okm.prk.algorithm;
        let salt_len = okm.len().len();
        let mut salt_bytes = vec![0u8; salt_len];
        // `salt_bytes` matches the requested OKM length, so a failure here
        // indicates an internal *AWS-LC* error.
        okm.fill(&mut salt_bytes).unwrap();
        Self {
            algorithm,
            bytes: Arc::from(salt_bytes.as_slice()),
        }
    }
}
/// The length of the OKM (Output Keying Material) for a `Prk::expand()` call.
///
/// Implemented by [`Algorithm`] (see above), so the OKM length can be taken
/// directly from the target algorithm.
#[allow(clippy::len_without_is_empty)]
pub trait KeyType {
    /// The length that `Prk::expand()` should expand its input to.
    fn len(&self) -> usize;
}
// How the PRK was obtained, which determines how `fill` computes the OKM.
#[derive(Clone)]
enum PrkMode {
    // Caller-supplied PRK bytes (`Prk::new_less_safe`): only HKDF-Expand runs.
    Expand {
        // Fixed-size inline buffer; only the first `key_len` bytes are meaningful.
        key_bytes: [u8; MAX_HKDF_PRK_LEN],
        key_len: usize,
    },
    // Secret and salt retained by `Salt::extract`: the one-shot
    // extract-then-expand `HKDF` runs at fill time.
    ExtractExpand {
        secret: Arc<ZeroizeBoxSlice<u8>>,
        salt: Arc<[u8]>,
    },
}
impl PrkMode {
    // Computes the OKM for `info` into `out`, using either HKDF-Expand alone
    // (`Expand`) or the one-shot extract-and-expand (`ExtractExpand`).
    fn fill(&self, algorithm: Algorithm, out: &mut [u8], info: &[u8]) -> Result<(), Unspecified> {
        let digest = digest::match_digest_type(&algorithm.0.digest_algorithm().id).as_const_ptr();
        match &self {
            PrkMode::Expand { key_bytes, key_len } => unsafe {
                // Only the first `key_len` bytes of the fixed-size buffer are the key.
                if 1 != indicator_check!(HKDF_expand(
                    out.as_mut_ptr(),
                    out.len(),
                    digest,
                    key_bytes.as_ptr(),
                    *key_len,
                    info.as_ptr(),
                    info.len(),
                )) {
                    return Err(Unspecified);
                }
            },
            PrkMode::ExtractExpand { secret, salt } => {
                if 1 != indicator_check!(unsafe {
                    HKDF(
                        out.as_mut_ptr(),
                        out.len(),
                        digest,
                        secret.as_ptr(),
                        secret.len(),
                        salt.as_ptr(),
                        salt.len(),
                        info.as_ptr(),
                        info.len(),
                    )
                }) {
                    return Err(Unspecified);
                }
            }
        }
        Ok(())
    }
}
impl fmt::Debug for PrkMode {
    /// Shows only the variant name; key material and salt are never printed.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let variant = match self {
            Self::Expand { .. } => "Expand",
            Self::ExtractExpand { .. } => "ExtractExpand",
        };
        f.debug_struct(variant).finish_non_exhaustive()
    }
}
// A boxed slice that zeroizes its contents on drop; used to hold the secret
// input keying material while extraction is deferred.
struct ZeroizeBoxSlice<T: Zeroize>(Box<[T]>);
impl<T: Zeroize> core::ops::Deref for ZeroizeBoxSlice<T> {
    type Target = [T];
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<T: Clone + Zeroize> From<&[T]> for ZeroizeBoxSlice<T> {
    fn from(value: &[T]) -> Self {
        Self(Vec::from(value).into_boxed_slice())
    }
}
impl<T: Zeroize> Drop for ZeroizeBoxSlice<T> {
    // Scrubs the secret material before the memory is freed.
    fn drop(&mut self) {
        self.0.zeroize();
    }
}
/// A HKDF PRK (pseudorandom key).
#[derive(Clone)]
pub struct Prk {
    algorithm: Algorithm,
    mode: PrkMode,
}
impl Drop for Prk {
    fn drop(&mut self) {
        // Only `Expand` holds raw key bytes inline; scrub them on drop.
        // (`ExtractExpand`'s secret zeroizes itself via `ZeroizeBoxSlice`.)
        if let PrkMode::Expand {
            ref mut key_bytes, ..
        } = self.mode
        {
            key_bytes.zeroize();
        }
    }
}
#[allow(clippy::missing_fields_in_debug)]
impl fmt::Debug for Prk {
    // Key material is never printed; `PrkMode`'s Debug shows only the variant name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("hkdf::Prk")
            .field("algorithm", &self.algorithm.0)
            .field("mode", &self.mode)
            .finish()
    }
}
impl Prk {
    /// Construct a new `Prk` directly with the given value.
    ///
    /// Usually one can avoid using this. It is useful when the application
    /// intentionally wants to leak the PRK secret, e.g. to implement
    /// `SSLKEYLOGFILE` functionality.
    ///
    // # FIPS
    // The following conditions must be met:
    // * Algorithm is one of the following:
    //   * `HKDF_SHA1_FOR_LEGACY_USE_ONLY`
    //   * `HKDF_SHA256`
    //   * `HKDF_SHA384`
    //   * `HKDF_SHA512`
    // * The `info_len` from [`Prk::expand`] is non-zero.
    //
    /// # Panics
    /// Panics if the given Prk length exceeds the limit
    #[must_use]
    pub fn new_less_safe(algorithm: Algorithm, value: &[u8]) -> Self {
        Prk::try_new_less_safe(algorithm, value).expect("Prk length limit exceeded.")
    }
    // Fallible constructor: rejects PRKs longer than the largest digest output.
    fn try_new_less_safe(algorithm: Algorithm, value: &[u8]) -> Result<Prk, Unspecified> {
        let key_len = value.len();
        if key_len > MAX_HKDF_PRK_LEN {
            return Err(Unspecified);
        }
        // Copy into a fixed-size buffer; only the first `key_len` bytes are used.
        let mut key_bytes = [0u8; MAX_HKDF_PRK_LEN];
        key_bytes[0..key_len].copy_from_slice(value);
        Ok(Self {
            algorithm,
            mode: PrkMode::Expand { key_bytes, key_len },
        })
    }
    /// The [HKDF-Expand] operation.
    ///
    /// [HKDF-Expand]: https://tools.ietf.org/html/rfc5869#section-2.3
    ///
    /// # Errors
    /// Returns `error::Unspecified` if:
    /// * `len` is more than 255 times the digest algorithm's output length.
    // # FIPS
    // The following conditions must be met:
    // * `Prk` must be constructed using `Salt::extract` prior to calling
    //   this method.
    // * After concatenation of the `info` slices the resulting `[u8].len() > 0` is true.
    #[inline]
    pub fn expand<'a, L: KeyType>(
        &'a self,
        info: &'a [&'a [u8]],
        len: L,
    ) -> Result<Okm<'a, L>, Unspecified> {
        let len_cached = len.len();
        // RFC 5869 limits the expand output to 255 blocks of digest output.
        if len_cached > 255 * self.algorithm.0.digest_algorithm().output_len {
            return Err(Unspecified);
        }
        // Expansion itself is deferred to `Okm::fill`.
        Ok(Okm {
            prk: self,
            info,
            len,
        })
    }
}
impl From<Okm<'_, Algorithm>> for Prk {
    // Derives a new PRK whose key bytes are the OKM of an HKDF expansion; the
    // OKM's algorithm also becomes the new PRK's algorithm and length.
    fn from(okm: Okm<Algorithm>) -> Self {
        let algorithm = okm.len;
        let key_len = okm.len.len();
        let mut key_bytes = [0u8; MAX_HKDF_PRK_LEN];
        // The output slice length matches the OKM's key type, so a failure
        // here indicates an internal *AWS-LC* error.
        okm.fill(&mut key_bytes[0..key_len]).unwrap();
        Self {
            algorithm,
            mode: PrkMode::Expand { key_bytes, key_len },
        }
    }
}
/// An HKDF OKM (Output Keying Material)
///
/// Intentionally not `Clone` or `Copy` as an OKM is generally only safe to
/// use once.
pub struct Okm<'a, L: KeyType> {
    prk: &'a Prk,
    info: &'a [&'a [u8]],
    len: L,
}
impl<L: KeyType> fmt::Debug for Okm<'_, L> {
    // `info` and `len` are omitted; only the (already redacted) PRK is shown.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("hkdf::Okm").field("prk", &self.prk).finish()
    }
}
/// Concatenates info slices into a contiguous buffer for HKDF operations.
/// Uses stack allocation for typical cases, heap allocation for large info.
/// Info is public context data per RFC 5869, so no zeroization is needed.
#[inline]
fn concatenate_info<F, R>(info: &[&[u8]], f: F) -> R
where
    F: FnOnce(&[u8]) -> R,
{
    let total: usize = info.iter().map(|part| part.len()).sum();
    if total > HKDF_INFO_DEFAULT_CAPACITY_LEN {
        // Rare case: info too large for the stack buffer, so join on the heap.
        let mut joined = Vec::with_capacity(total);
        for part in info {
            joined.extend_from_slice(part);
        }
        return f(&joined);
    }
    // Typical case: pack the slices into a fixed stack buffer, avoiding any
    // heap allocation. Info is public data, so the buffer is not zeroized.
    let mut packed = [0u8; HKDF_INFO_DEFAULT_CAPACITY_LEN];
    let mut written = 0;
    for part in info {
        packed[written..written + part.len()].copy_from_slice(part);
        written += part.len();
    }
    f(&packed[..total])
}
impl<L: KeyType> Okm<'_, L> {
    /// The `OkmLength` given to `Prk::expand()`.
    #[inline]
    pub fn len(&self) -> &L {
        &self.len
    }
    /// Fills `out` with the output of the HKDF-Expand operation for the given
    /// inputs.
    ///
    // # FIPS
    // The following conditions must be met:
    // * Algorithm is one of the following:
    //   * `HKDF_SHA1_FOR_LEGACY_USE_ONLY`
    //   * `HKDF_SHA256`
    //   * `HKDF_SHA384`
    //   * `HKDF_SHA512`
    // * The [`Okm`] was constructed from a [`Prk`] created with [`Salt::extract`] and:
    //   * The `value.len()` passed to [`Salt::new`] was non-zero.
    //   * The `info_len` from [`Prk::expand`] was non-zero.
    //
    /// # Errors
    /// `error::Unspecified` if the requested output length differs from the length specified by
    /// `L: KeyType`.
    #[inline]
    pub fn fill(self, out: &mut [u8]) -> Result<(), Unspecified> {
        // The caller's buffer must match the length this OKM was created for.
        if out.len() != self.len.len() {
            return Err(Unspecified);
        }
        // Join the info slices, then run the appropriate HKDF operation.
        concatenate_info(self.info, |info_bytes| {
            self.prk.mode.fill(self.prk.algorithm, out, info_bytes)
        })
    }
}
#[cfg(test)]
mod tests {
    use crate::hkdf::{Salt, HKDF_SHA256, HKDF_SHA384};
    #[cfg(feature = "fips")]
    mod fips;
    #[test]
    fn hkdf_coverage() {
        // Something would have gone horribly wrong for this to not pass, but we test this so our
        // coverage reports will look better.
        assert_ne!(HKDF_SHA256, HKDF_SHA384);
        assert_eq!("Algorithm(Algorithm(SHA256))", format!("{HKDF_SHA256:?}"));
    }
    #[test]
    fn test_debug() {
        // Fixed salt/secret/info values so the Debug assertions below are deterministic.
        const SALT: &[u8; 32] = &[
            29, 113, 120, 243, 11, 202, 39, 222, 206, 81, 163, 184, 122, 153, 52, 192, 98, 195,
            240, 32, 34, 19, 160, 128, 178, 111, 97, 232, 113, 101, 221, 143,
        ];
        const SECRET1: &[u8; 32] = &[
            157, 191, 36, 107, 110, 131, 193, 6, 175, 226, 193, 3, 168, 133, 165, 181, 65, 120,
            194, 152, 31, 92, 37, 191, 73, 222, 41, 112, 207, 236, 196, 174,
        ];
        const INFO1: &[&[u8]] = &[
            &[
                2, 130, 61, 83, 192, 248, 63, 60, 211, 73, 169, 66, 101, 160, 196, 212, 250, 113,
            ],
            &[
                80, 46, 248, 123, 78, 204, 171, 178, 67, 204, 96, 27, 131, 24,
            ],
        ];
        let alg = HKDF_SHA256;
        let salt = Salt::new(alg, SALT);
        let prk = salt.extract(SECRET1);
        let okm = prk.expand(INFO1, alg).unwrap();
        // Debug output must show the algorithm and mode only — never key material.
        assert_eq!(
            "hkdf::Salt { algorithm: Algorithm(SHA256) }",
            format!("{salt:?}")
        );
        assert_eq!(
            "hkdf::Prk { algorithm: Algorithm(SHA256), mode: ExtractExpand { .. } }",
            format!("{prk:?}")
        );
        assert_eq!(
            "hkdf::Okm { prk: hkdf::Prk { algorithm: Algorithm(SHA256), mode: ExtractExpand { .. } } }",
            format!("{okm:?}")
        );
    }
    #[test]
    fn test_long_salt() {
        // Test with a salt longer than the previous 80-byte limit
        let long_salt = vec![0x42u8; 100];
        // This should work now that we removed the MAX_HKDF_SALT_LEN restriction
        let salt = Salt::new(HKDF_SHA256, &long_salt);
        // Test the extract operation still works
        let secret = b"test secret key material";
        let prk = salt.extract(secret);
        // Test expand operation
        let info_data = b"test context info";
        let info = [info_data.as_slice()];
        let okm = prk.expand(&info, HKDF_SHA256).unwrap();
        // Fill output buffer
        let mut output = [0u8; 32];
        okm.fill(&mut output).unwrap();
        // Test with an even longer salt to demonstrate flexibility
        let very_long_salt = vec![0x55u8; 500];
        let very_long_salt_obj = Salt::new(HKDF_SHA256, &very_long_salt);
        let prk2 = very_long_salt_obj.extract(secret);
        let okm2 = prk2.expand(&info, HKDF_SHA256).unwrap();
        let mut output2 = [0u8; 32];
        okm2.fill(&mut output2).unwrap();
        // Verify outputs are different (they should be due to different salts)
        assert_ne!(output, output2);
    }
}

157
vendor/aws-lc-rs/src/hkdf/tests/fips.rs vendored Normal file
View File

@@ -0,0 +1,157 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![cfg(debug_assertions)]
use crate::fips::{assert_fips_status_indicator, FipsServiceStatus};
use crate::hkdf::{
KeyType, Prk, Salt, HKDF_SHA1_FOR_LEGACY_USE_ONLY, HKDF_SHA256, HKDF_SHA384, HKDF_SHA512,
};
// 128-bit key used as the raw PRK input for the `Prk::new_less_safe` tests below.
const TEST_KEY_128_BIT: [u8; 16] = [
    0x9f, 0xd9, 0x41, 0xc3, 0xa6, 0xfe, 0xb9, 0x26, 0x2a, 0x35, 0xa7, 0x44, 0xbb, 0xc0, 0x3a, 0x6a,
];
// Generates a test covering the full extract-then-expand flow: only the final
// `Okm::fill` call should touch the FIPS service indicator, and it must leave
// the indicator in the `$expect` state.
macro_rules! hkdf_extract_expand_api {
    ($name:ident, $alg:expr, $expect:path, $salt_len:literal, $info_len:literal) => {
        #[test]
        fn $name() {
            let salt = [42u8; $salt_len];
            // Will not set indicator function
            let salt =
                assert_fips_status_indicator!(Salt::new($alg, &salt[..]), FipsServiceStatus::Unset);
            // Will not set the indicator function
            let prk = assert_fips_status_indicator!(
                salt.extract(&[5, 6, 7, 8]),
                FipsServiceStatus::Unset
            );
            let info: Vec<u8> = vec![42u8; $info_len];
            let info_slices: Vec<&[u8]> = vec![info.as_ref()];
            // Will not set the indicator function
            let okm = assert_fips_status_indicator!(
                prk.expand(info_slices.as_ref(), $alg),
                FipsServiceStatus::Unset
            )
            .unwrap();
            let mut out = vec![0u8; $alg.len()];
            // Will set the indicator function
            assert_fips_status_indicator!(okm.fill(&mut out), $expect).unwrap();
        }
    };
}
// Extract-and-expand with a non-empty (16-byte) salt: FIPS-approved for every
// supported digest.
hkdf_extract_expand_api!(
    test_sha1_hkdf_extract_expand_api,
    HKDF_SHA1_FOR_LEGACY_USE_ONLY,
    FipsServiceStatus::Approved,
    16,
    16
);
// NOTE(review): the duplicated "_api_api" suffix below looks like a typo, but
// the test name is otherwise harmless, so it is left unchanged.
hkdf_extract_expand_api!(
    test_sha256_hkdf_extract_expand_api_api,
    HKDF_SHA256,
    FipsServiceStatus::Approved,
    16,
    16
);
hkdf_extract_expand_api!(
    test_sha384_hkdf_extract_expand_api,
    HKDF_SHA384,
    FipsServiceStatus::Approved,
    16,
    16
);
hkdf_extract_expand_api!(
    test_sha512_hkdf_extract_expand_api,
    HKDF_SHA512,
    FipsServiceStatus::Approved,
    16,
    16
);
// Extract-and-expand with an empty (zero-length) salt: expected to register as
// non-approved.
hkdf_extract_expand_api!(
    test_sha1_hkdf_extract_expand_api_invalid_nonce,
    HKDF_SHA1_FOR_LEGACY_USE_ONLY,
    FipsServiceStatus::NonApproved,
    0,
    16
);
hkdf_extract_expand_api!(
    test_sha256_hkdf_extract_expand_api_invalid_nonce,
    HKDF_SHA256,
    FipsServiceStatus::NonApproved,
    0,
    16
);
hkdf_extract_expand_api!(
    test_sha384_hkdf_extract_expand_api_invalid_nonce,
    HKDF_SHA384,
    FipsServiceStatus::NonApproved,
    0,
    16
);
hkdf_extract_expand_api!(
    test_sha512_hkdf_extract_expand_api_invalid_nonce,
    HKDF_SHA512,
    FipsServiceStatus::NonApproved,
    0,
    16
);
// Generates a test covering expand-only usage via `Prk::new_less_safe`: only
// the final `Okm::fill` call should touch the FIPS service indicator.
macro_rules! hkdf_expand_api {
    ($name:ident, $alg:expr, $key:expr, $expect:path, $info_len:literal) => {
        #[test]
        fn $name() {
            let prk = Prk::new_less_safe($alg, $key);
            let info: Vec<u8> = vec![42u8; $info_len];
            let info_slices: Vec<&[u8]> = vec![info.as_ref()];
            // Will not set the indicator function
            let okm = assert_fips_status_indicator!(
                prk.expand(info_slices.as_ref(), $alg),
                FipsServiceStatus::Unset
            )
            .unwrap();
            let mut out = vec![0u8; $alg.len()];
            // Will set the indicator function
            assert_fips_status_indicator!(okm.fill(&mut out), $expect).unwrap();
        }
    };
}
// Expand-only flow with a caller-supplied 128-bit PRK: FIPS-approved for every
// supported digest.
hkdf_expand_api!(
    sha1,
    HKDF_SHA1_FOR_LEGACY_USE_ONLY,
    &TEST_KEY_128_BIT[..],
    FipsServiceStatus::Approved,
    16
);
hkdf_expand_api!(
    sha256,
    HKDF_SHA256,
    &TEST_KEY_128_BIT[..],
    FipsServiceStatus::Approved,
    16
);
hkdf_expand_api!(
    sha384,
    HKDF_SHA384,
    &TEST_KEY_128_BIT[..],
    FipsServiceStatus::Approved,
    16
);
hkdf_expand_api!(
    sha512,
    HKDF_SHA512,
    &TEST_KEY_128_BIT[..],
    FipsServiceStatus::Approved,
    16
);

697
vendor/aws-lc-rs/src/hmac.rs vendored Normal file
View File

@@ -0,0 +1,697 @@
// Copyright 2015-2022 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! HMAC is specified in [RFC 2104].
//!
//! After a `Key` is constructed, it can be used for multiple signing or
//! verification operations. Separating the construction of the key from the
//! rest of the HMAC operation allows the per-key precomputation to be done
//! only once, instead of it being done in every HMAC operation.
//!
//! Frequently all the data to be signed in a message is available in a single
//! contiguous piece. In that case, the module-level `sign` function can be
//! used. Otherwise, if the input is in multiple parts, `Context` should be
//! used.
//!
//! # Examples:
//!
//! ## Signing a value and verifying it wasn't tampered with
//!
//! ```
//! use aws_lc_rs::{hmac, rand};
//!
//! let rng = rand::SystemRandom::new();
//! let key = hmac::Key::generate(hmac::HMAC_SHA256, &rng)?;
//!
//! let msg = "hello, world";
//!
//! let tag = hmac::sign(&key, msg.as_bytes());
//!
//! // [We give access to the message to an untrusted party, and they give it
//! // back to us. We need to verify they didn't tamper with it.]
//!
//! hmac::verify(&key, msg.as_bytes(), tag.as_ref())?;
//!
//! # Ok::<(), aws_lc_rs::error::Unspecified>(())
//! ```
//!
//! ## Using the one-shot API:
//!
//! ```
//! use aws_lc_rs::rand::SecureRandom;
//! use aws_lc_rs::{digest, hmac, rand};
//!
//! let msg = "hello, world";
//!
//! // The sender generates a secure key value and signs the message with it.
//! // Note that in a real protocol, a key agreement protocol would be used to
//! // derive `key_value`.
//! let rng = rand::SystemRandom::new();
//! let key_value: [u8; digest::SHA256_OUTPUT_LEN] = rand::generate(&rng)?.expose();
//!
//! let s_key = hmac::Key::new(hmac::HMAC_SHA256, key_value.as_ref());
//! let tag = hmac::sign(&s_key, msg.as_bytes());
//!
//! // The receiver (somehow!) knows the key value, and uses it to verify the
//! // integrity of the message.
//! let v_key = hmac::Key::new(hmac::HMAC_SHA256, key_value.as_ref());
//! hmac::verify(&v_key, msg.as_bytes(), tag.as_ref())?;
//!
//! # Ok::<(), aws_lc_rs::error::Unspecified>(())
//! ```
//!
//! ## Using the multi-part API:
//! ```
//! use aws_lc_rs::rand::SecureRandom;
//! use aws_lc_rs::{digest, hmac, rand};
//!
//! let parts = ["hello", ", ", "world"];
//!
//! // The sender generates a secure key value and signs the message with it.
//! // Note that in a real protocol, a key agreement protocol would be used to
//! // derive `key_value`.
//! let rng = rand::SystemRandom::new();
//! let mut key_value: [u8; digest::SHA384_OUTPUT_LEN] = rand::generate(&rng)?.expose();
//!
//! let s_key = hmac::Key::new(hmac::HMAC_SHA384, key_value.as_ref());
//! let mut s_ctx = hmac::Context::with_key(&s_key);
//! for part in &parts {
//! s_ctx.update(part.as_bytes());
//! }
//! let tag = s_ctx.sign();
//!
//! // The receiver (somehow!) knows the key value, and uses it to verify the
//! // integrity of the message.
//! let v_key = hmac::Key::new(hmac::HMAC_SHA384, key_value.as_ref());
//! let mut msg = Vec::<u8>::new();
//! for part in &parts {
//! msg.extend(part.as_bytes());
//! }
//! hmac::verify(&v_key, msg.as_ref(), tag.as_ref())?;
//!
//! # Ok::<(), aws_lc_rs::error::Unspecified>(())
//! ```
//! [RFC 2104]: https://tools.ietf.org/html/rfc2104
use crate::aws_lc::{
HMAC_CTX_cleanup, HMAC_CTX_copy_ex, HMAC_CTX_init, HMAC_Final, HMAC_Init_ex, HMAC_Update,
HMAC_CTX,
};
use crate::error::Unspecified;
use crate::fips::indicator_check;
use crate::{constant_time, digest, hkdf};
use core::ffi::c_uint;
use core::mem::MaybeUninit;
use core::ptr::null_mut;
/// A deprecated alias for `Tag`.
// Kept for source compatibility with older releases of this API — TODO confirm
// whether these aliases can be removed in the next major version.
#[deprecated]
pub type Signature = Tag;
/// Renamed to `Context`.
#[deprecated]
pub type SigningContext = Context;
/// Renamed to `Key`.
#[deprecated]
pub type SigningKey = Key;
/// Merged into `Key`.
///
/// `Key` is now used for both signing and verification.
#[deprecated]
pub type VerificationKey = Key;
/// An HMAC algorithm.
///
/// Wraps a reference to the underlying digest algorithm; two `Algorithm`
/// values compare equal when they wrap the same digest.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Algorithm(&'static digest::Algorithm);
impl Algorithm {
    /// The digest algorithm this HMAC algorithm is based on.
    #[inline]
    #[must_use]
    pub fn digest_algorithm(&self) -> &'static digest::Algorithm {
        self.0
    }
    /// The tag length for this HMAC algorithm.
    ///
    /// Equal to the output length of the underlying digest (e.g. 32 for
    /// HMAC-SHA256).
    #[inline]
    #[must_use]
    pub fn tag_len(&self) -> usize {
        self.digest_algorithm().output_len
    }
}
/// HMAC using SHA-1. Obsolete.
///
/// SHA-1 is considered weak; this is provided only for interoperability with
/// legacy protocols that still require it.
pub const HMAC_SHA1_FOR_LEGACY_USE_ONLY: Algorithm = Algorithm(&digest::SHA1_FOR_LEGACY_USE_ONLY);
/// HMAC using SHA-224.
pub const HMAC_SHA224: Algorithm = Algorithm(&digest::SHA224);
/// HMAC using SHA-256.
pub const HMAC_SHA256: Algorithm = Algorithm(&digest::SHA256);
/// HMAC using SHA-384.
pub const HMAC_SHA384: Algorithm = Algorithm(&digest::SHA384);
/// HMAC using SHA-512.
pub const HMAC_SHA512: Algorithm = Algorithm(&digest::SHA512);
/// An HMAC tag.
///
/// For a given tag `t`, use `t.as_ref()` to get the tag value as a byte slice.
#[derive(Clone, Copy, Debug)]
pub struct Tag {
    /// Fixed-size backing storage sized for the largest supported digest;
    /// only the first `msg_len` bytes are meaningful.
    msg: [u8; digest::MAX_OUTPUT_LEN],
    /// Number of valid bytes in `msg` (the algorithm's tag length).
    msg_len: usize,
}
impl AsRef<[u8]> for Tag {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        // Expose only the initialized prefix, not the whole backing array.
        &self.msg[..self.msg_len]
    }
}
/// RAII wrapper around the AWS-LC `HMAC_CTX`, providing cleanup on drop and a
/// fallible deep-copy (`try_clone`) via `HMAC_CTX_copy_ex`.
struct LcHmacCtx(HMAC_CTX);
impl LcHmacCtx {
    /// Mutable pointer to the underlying context for FFI calls.
    fn as_mut_ptr(&mut self) -> *mut HMAC_CTX {
        &mut self.0
    }
    /// Const pointer to the underlying context for FFI calls.
    fn as_ptr(&self) -> *const HMAC_CTX {
        &self.0
    }
    /// Deep-copies the context state so the clone can be finalized
    /// independently of the original.
    fn try_clone(&self) -> Result<Self, Unspecified> {
        unsafe {
            let mut hmac_ctx = MaybeUninit::<HMAC_CTX>::uninit();
            // HMAC_CTX_init zero-initializes the struct, making assume_init sound.
            HMAC_CTX_init(hmac_ctx.as_mut_ptr());
            let mut hmac_ctx = hmac_ctx.assume_init();
            if 1 != HMAC_CTX_copy_ex(&mut hmac_ctx, self.as_ptr()) {
                // NOTE(review): on failure the partially-copied `hmac_ctx` is
                // returned without HMAC_CTX_cleanup — confirm HMAC_CTX_copy_ex
                // cannot leave allocated state behind on error.
                return Err(Unspecified);
            }
            Ok(LcHmacCtx(hmac_ctx))
        }
    }
}
// The context owns its state and holds no thread-affine resources visible here.
unsafe impl Send for LcHmacCtx {}
impl Drop for LcHmacCtx {
    fn drop(&mut self) {
        // Releases any heap state AWS-LC allocated inside the HMAC_CTX.
        unsafe { HMAC_CTX_cleanup(self.as_mut_ptr()) }
    }
}
impl Clone for LcHmacCtx {
    /// Panics if the underlying FFI copy fails; use `try_clone` for a
    /// fallible variant.
    fn clone(&self) -> Self {
        self.try_clone().expect("Unable to clone LcHmacCtx")
    }
}
/// A key to use for HMAC signing.
//
// # FIPS
// Use this type with one of the following algorithms:
// * `HMAC_SHA1_FOR_LEGACY_USE_ONLY`
// * `HMAC_SHA224`
// * `HMAC_SHA256`
// * `HMAC_SHA384`
// * `HMAC_SHA512`
#[derive(Clone)]
pub struct Key {
    // The HMAC algorithm this key was constructed for.
    pub(crate) algorithm: Algorithm,
    // Pre-initialized AWS-LC context holding the keyed state; cloned for each
    // signing operation.
    ctx: LcHmacCtx,
}
unsafe impl Send for Key {}
// All uses of *mut HMAC_CTX require the creation of a Context, which will clone the Key.
unsafe impl Sync for Key {}
#[allow(clippy::missing_fields_in_debug)]
impl core::fmt::Debug for Key {
    // Deliberately omits `ctx` so no key material can leak through Debug output.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> Result<(), core::fmt::Error> {
        f.debug_struct("Key")
            .field("algorithm", &self.algorithm.digest_algorithm())
            .finish()
    }
}
impl Key {
    /// Generate an HMAC signing key using the given digest algorithm with a
    /// random value generated from `rng`.
    ///
    /// The key will be `digest_alg.output_len` bytes long, based on the
    /// recommendation in [RFC 2104 Section 3].
    ///
    /// [RFC 2104 Section 3]: https://tools.ietf.org/html/rfc2104#section-3
    ///
    //
    // # FIPS
    // Use this function with one of the following algorithms:
    // * `HMAC_SHA1_FOR_LEGACY_USE_ONLY`
    // * `HMAC_SHA224`
    // * `HMAC_SHA256`
    // * `HMAC_SHA384`
    // * `HMAC_SHA512`
    //
    /// # Errors
    /// `error::Unspecified` if the `rng` fails.
    pub fn generate(
        algorithm: Algorithm,
        rng: &dyn crate::rand::SecureRandom,
    ) -> Result<Self, Unspecified> {
        Self::construct(algorithm, |buf| rng.fill(buf))
    }
    // Shared constructor: fills a tag-length buffer using `fill` and then
    // builds the key from those bytes. Used by both `generate` and the
    // hkdf::Okm conversion.
    fn construct<F>(algorithm: Algorithm, fill: F) -> Result<Self, Unspecified>
    where
        F: FnOnce(&mut [u8]) -> Result<(), Unspecified>,
    {
        let mut key_bytes = [0; digest::MAX_OUTPUT_LEN];
        let key_bytes = &mut key_bytes[..algorithm.tag_len()];
        fill(key_bytes)?;
        Ok(Self::new(algorithm, key_bytes))
    }
    /// Construct an HMAC signing key using the given digest algorithm and key
    /// value.
    ///
    /// `key_value` should be a value generated using a secure random number
    /// generator (e.g. the `key_value` output by
    /// `SealingKey::generate_serializable()`) or derived from a random key by
    /// a key derivation function (e.g. `aws_lc_rs::hkdf`). In particular,
    /// `key_value` shouldn't be a password.
    ///
    /// As specified in RFC 2104, if `key_value` is shorter than the digest
    /// algorithm's block length (as returned by `digest::Algorithm::block_len`,
    /// not the digest length returned by `digest::Algorithm::output_len`) then
    /// it will be padded with zeros. Similarly, if it is longer than the block
    /// length then it will be compressed using the digest algorithm.
    ///
    /// You should not use keys larger than the `digest_alg.block_len` because
    /// the truncation described above reduces their strength to only
    /// `digest_alg.output_len * 8` bits.
    ///
    /// # Panics
    /// Panics if the HMAC context cannot be constructed
    #[inline]
    #[must_use]
    pub fn new(algorithm: Algorithm, key_value: &[u8]) -> Self {
        Key::try_new(algorithm, key_value).expect("Unable to create HmacContext")
    }
    // Fallible constructor: initializes an HMAC_CTX keyed with `key_value`
    // for the requested digest.
    fn try_new(algorithm: Algorithm, key_value: &[u8]) -> Result<Self, Unspecified> {
        unsafe {
            let mut ctx = MaybeUninit::<HMAC_CTX>::uninit();
            // HMAC_CTX_init zero-initializes the struct, making assume_init sound.
            HMAC_CTX_init(ctx.as_mut_ptr());
            let evp_md_type = digest::match_digest_type(&algorithm.digest_algorithm().id);
            if 1 != HMAC_Init_ex(
                ctx.as_mut_ptr(),
                key_value.as_ptr().cast(),
                key_value.len(),
                evp_md_type.as_const_ptr(),
                null_mut(),
            ) {
                return Err(Unspecified);
            }
            let result = Self {
                algorithm,
                ctx: LcHmacCtx(ctx.assume_init()),
            };
            Ok(result)
        }
    }
    // Raw context pointer for FFI calls; callers must not keep the pointer
    // beyond the borrow of `self`.
    unsafe fn get_hmac_ctx_ptr(&mut self) -> *mut HMAC_CTX {
        self.ctx.as_mut_ptr()
    }
    /// The digest algorithm for the key.
    #[inline]
    #[must_use]
    pub fn algorithm(&self) -> Algorithm {
        Algorithm(self.algorithm.digest_algorithm())
    }
}
impl hkdf::KeyType for Algorithm {
    /// An HKDF-derived HMAC key is one tag-length long (RFC 2104's
    /// recommended key size).
    #[inline]
    fn len(&self) -> usize {
        self.tag_len()
    }
}
impl From<hkdf::Okm<'_, Algorithm>> for Key {
    /// Derives an HMAC key directly from HKDF output keying material.
    fn from(okm: hkdf::Okm<Algorithm>) -> Self {
        // `construct` only fails if `okm.fill` fails, which for correctly
        // sized output should not occur; a failure here indicates a bug.
        Self::construct(*okm.len(), |buf| okm.fill(buf)).unwrap()
    }
}
/// A context for multi-step (Init-Update-Finish) HMAC signing.
///
/// Use `sign` for single-step HMAC signing.
pub struct Context {
    // Owns a clone of the caller's Key so updates don't mutate shared state.
    key: Key,
}
impl Clone for Context {
    /// Clones the context, deep-copying the underlying HMAC state so both
    /// contexts can be finalized independently.
    fn clone(&self) -> Self {
        Self {
            key: self.key.clone(),
        }
    }
}
unsafe impl Send for Context {}
impl core::fmt::Debug for Context {
    // Only the algorithm is shown; no key material or intermediate state leaks.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> Result<(), core::fmt::Error> {
        f.debug_struct("Context")
            .field("algorithm", &self.key.algorithm.digest_algorithm())
            .finish()
    }
}
impl Context {
    /// Constructs a new HMAC signing context using the given digest algorithm
    /// and key.
    #[inline]
    #[must_use]
    pub fn with_key(signing_key: &Key) -> Self {
        // Clone the key so this context's updates never affect the caller's Key.
        Self {
            key: signing_key.clone(),
        }
    }
    /// Updates the HMAC with all the data in `data`. `update` may be called
    /// zero or more times until `finish` is called.
    ///
    /// # Panics
    /// Panics if the HMAC cannot be updated
    #[inline]
    pub fn update(&mut self, data: &[u8]) {
        Self::try_update(self, data).expect("HMAC_Update failed");
    }
    // Fallible update backing `update`.
    #[inline]
    fn try_update(&mut self, data: &[u8]) -> Result<(), Unspecified> {
        unsafe {
            if 1 != HMAC_Update(self.key.get_hmac_ctx_ptr(), data.as_ptr(), data.len()) {
                return Err(Unspecified);
            }
        }
        Ok(())
    }
    /// Finalizes the HMAC calculation and returns the HMAC value. `sign`
    /// consumes the context so it cannot be (mis-)used after `sign` has been
    /// called.
    ///
    /// It is generally not safe to implement HMAC verification by comparing
    /// the return value of `sign` to a tag. Use `verify` for verification
    /// instead.
    ///
    // # FIPS
    // Use this method with one of the following algorithms:
    // * `HMAC_SHA1_FOR_LEGACY_USE_ONLY`
    // * `HMAC_SHA224`
    // * `HMAC_SHA256`
    // * `HMAC_SHA384`
    // * `HMAC_SHA512`
    //
    /// # Panics
    /// Panics if the HMAC calculation cannot be finalized
    #[inline]
    #[must_use]
    pub fn sign(self) -> Tag {
        Self::try_sign(self).expect("HMAC_Final failed")
    }
    // Fallible finalization backing `sign`: writes the tag into a
    // MAX_OUTPUT_LEN buffer and records the actual tag length.
    #[inline]
    fn try_sign(mut self) -> Result<Tag, Unspecified> {
        let mut output = [0u8; digest::MAX_OUTPUT_LEN];
        let msg_len = {
            let result = internal_sign(&mut self, &mut output)?;
            result.len()
        };
        Ok(Tag {
            msg: output,
            msg_len,
        })
    }
}
/// Finalizes `ctx` and writes the tag into the front of `output`, returning
/// the sub-slice containing exactly the tag bytes.
///
/// Returns `Unspecified` if `output` is shorter than the algorithm's tag
/// length, or if `HMAC_Final` fails.
#[inline]
pub(crate) fn internal_sign<'in_out>(
    ctx: &mut Context,
    output: &'in_out mut [u8],
) -> Result<&'in_out mut [u8], Unspecified> {
    let tag_len = ctx.key.algorithm().tag_len();
    // Checked up front so HMAC_Final can never write past `output`.
    if output.len() < tag_len {
        return Err(Unspecified);
    }
    let mut out_len = MaybeUninit::<c_uint>::uninit();
    if 1 != indicator_check!(unsafe {
        HMAC_Final(
            ctx.key.get_hmac_ctx_ptr(),
            output.as_mut_ptr(),
            out_len.as_mut_ptr(),
        )
    }) {
        return Err(Unspecified);
    }
    // HMAC_Final reports how many bytes it wrote; it should always equal the
    // digest's output length. Checked only in debug builds.
    let actual_len = unsafe { out_len.assume_init() } as usize;
    debug_assert!(
        actual_len == tag_len,
        "HMAC tag length {actual_len} does not match expected length {tag_len}"
    );
    Ok(&mut output[0..tag_len])
}
/// Calculates the HMAC of `data` using the key `key` in one step.
///
/// Use `Context` to calculate HMACs where the input is in multiple parts.
///
/// It is generally not safe to implement HMAC verification by comparing the
/// return value of `sign` to a tag. Use `verify` for verification instead.
//
// # FIPS
// Use this function with one of the following algorithms:
// * `HMAC_SHA1_FOR_LEGACY_USE_ONLY`
// * `HMAC_SHA224`
// * `HMAC_SHA256`
// * `HMAC_SHA384`
// * `HMAC_SHA512`
#[inline]
#[must_use]
pub fn sign(key: &Key, data: &[u8]) -> Tag {
    // One-shot signing is just the multi-part API with a single update.
    let mut hmac_ctx = Context::with_key(key);
    hmac_ctx.update(data);
    hmac_ctx.sign()
}
/// Calculates the HMAC of `data` using the key `key` in one step,
/// writing the result into the provided `output` buffer.
///
/// The `output` buffer must be at least as large as the algorithm's
/// tag length (i.e., `key.algorithm().tag_len()`). The returned slice will be a
/// sub-slice of `output` containing exactly the tag bytes.
///
/// It is generally not safe to implement HMAC verification by comparing the
/// return value of `sign_to_buffer` to a tag. Use `verify` for verification instead.
//
// # FIPS
// Use this function with one of the following algorithms:
// * `HMAC_SHA1_FOR_LEGACY_USE_ONLY`
// * `HMAC_SHA224`
// * `HMAC_SHA256`
// * `HMAC_SHA384`
// * `HMAC_SHA512`
//
/// # Errors
/// `error::Unspecified` if `output` is too small or if the HMAC operation fails.
#[inline]
pub fn sign_to_buffer<'out>(
    key: &Key,
    data: &[u8],
    output: &'out mut [u8],
) -> Result<&'out mut [u8], Unspecified> {
    // Feed the whole message in one update, then finalize directly into the
    // caller-supplied buffer (internal_sign performs the length check).
    let mut hmac_ctx = Context::with_key(key);
    hmac_ctx.update(data);
    internal_sign(&mut hmac_ctx, output)
}
/// Calculates the HMAC of `data` using the signing key `key`, and verifies
/// whether the resultant value equals `tag`, in one step.
///
/// This is logically equivalent to, but more efficient than, constructing a
/// `Key` with the same value as `key` and then using `verify`.
///
/// The verification will be done in constant time to prevent timing attacks.
///
/// # Errors
/// `error::Unspecified` if the inputs are not verified.
//
// # FIPS
// Use this function with one of the following algorithms:
// * `HMAC_SHA1_FOR_LEGACY_USE_ONLY`
// * `HMAC_SHA224`
// * `HMAC_SHA256`
// * `HMAC_SHA384`
// * `HMAC_SHA512`
#[inline]
pub fn verify(key: &Key, data: &[u8], tag: &[u8]) -> Result<(), Unspecified> {
    // Recompute the tag and compare in constant time.
    let computed = sign(key, data);
    constant_time::verify_slices_are_equal(computed.as_ref(), tag)
}
#[cfg(test)]
mod tests {
    use crate::{hmac, rand};
    #[cfg(feature = "fips")]
    mod fips;
    // Sanity-check that each algorithm reports its digest's output length.
    #[test]
    fn hmac_algorithm_properties() {
        assert_eq!(hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY.tag_len(), 20);
        assert_eq!(hmac::HMAC_SHA224.tag_len(), 28);
        assert_eq!(hmac::HMAC_SHA256.tag_len(), 32);
        assert_eq!(hmac::HMAC_SHA384.tag_len(), 48);
        assert_eq!(hmac::HMAC_SHA512.tag_len(), 64);
    }
    // Make sure that internal_sign properly rejects too small buffers
    // (and does not corrupt memory by buffer overflow)
    #[test]
    fn hmac_internal_sign_too_small_buffer() {
        let rng = rand::SystemRandom::new();
        for algorithm in &[
            hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY,
            hmac::HMAC_SHA224,
            hmac::HMAC_SHA256,
            hmac::HMAC_SHA384,
            hmac::HMAC_SHA512,
        ] {
            let key = hmac::Key::generate(*algorithm, &rng).unwrap();
            let data = b"hello, world";
            // Buffer one byte too small should fail
            let mut small_buf = vec![0u8; algorithm.tag_len() - 1];
            let mut ctx = hmac::Context::with_key(&key);
            ctx.update(data);
            assert!(super::internal_sign(&mut ctx, &mut small_buf).is_err());
            // Empty buffer should fail
            let mut empty_buf = vec![];
            let mut ctx = hmac::Context::with_key(&key);
            ctx.update(data);
            assert!(super::internal_sign(&mut ctx, &mut empty_buf).is_err());
        }
    }
    // Make sure that `Key::generate` and `verify_with_own_key` aren't
    // completely wacky.
    #[test]
    pub fn hmac_signing_key_coverage() {
        const HELLO_WORLD_GOOD: &[u8] = b"hello, world";
        const HELLO_WORLD_BAD: &[u8] = b"hello, worle";
        let rng = rand::SystemRandom::new();
        for algorithm in &[
            hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY,
            hmac::HMAC_SHA224,
            hmac::HMAC_SHA256,
            hmac::HMAC_SHA384,
            hmac::HMAC_SHA512,
        ] {
            let key = hmac::Key::generate(*algorithm, &rng).unwrap();
            let tag = hmac::sign(&key, HELLO_WORLD_GOOD);
            println!("{key:?}");
            // A matching message must verify; a one-byte change must not.
            assert!(hmac::verify(&key, HELLO_WORLD_GOOD, tag.as_ref()).is_ok());
            assert!(hmac::verify(&key, HELLO_WORLD_BAD, tag.as_ref()).is_err());
        }
    }
    #[test]
    fn hmac_coverage() {
        // Something would have gone horribly wrong for this to not pass, but we test this so our
        // coverage reports will look better.
        assert_ne!(hmac::HMAC_SHA256, hmac::HMAC_SHA384);
        for &alg in &[
            hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY,
            hmac::HMAC_SHA224,
            hmac::HMAC_SHA256,
            hmac::HMAC_SHA384,
            hmac::HMAC_SHA512,
        ] {
            // Clone after updating context with message, then check if the final Tag is the same.
            let key = hmac::Key::new(alg, &[0; 32]);
            let mut ctx = hmac::Context::with_key(&key);
            ctx.update(b"hello, world");
            let ctx_clone = ctx.clone();
            let orig_tag = ctx.sign();
            let clone_tag = ctx_clone.sign();
            assert_eq!(orig_tag.as_ref(), clone_tag.as_ref());
            assert_eq!(orig_tag.clone().as_ref(), clone_tag.as_ref());
        }
    }
    // Exercise sign_to_buffer with exact-size and oversized buffers and check
    // the result agrees with the one-shot sign().
    #[test]
    fn hmac_sign_to_buffer_test() {
        let rng = rand::SystemRandom::new();
        for &algorithm in &[
            hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY,
            hmac::HMAC_SHA224,
            hmac::HMAC_SHA256,
            hmac::HMAC_SHA384,
            hmac::HMAC_SHA512,
        ] {
            let key = hmac::Key::generate(algorithm, &rng).unwrap();
            let data = b"hello, world";
            let tag_len = algorithm.tag_len();
            // Test with exact size buffer
            let mut output = vec![0u8; tag_len];
            let result = hmac::sign_to_buffer(&key, data, &mut output).unwrap();
            assert_eq!(result.len(), tag_len);
            // Verify the returned tag matches sign() and passes verify()
            let tag = hmac::sign(&key, data);
            assert_eq!(result, tag.as_ref());
            assert!(hmac::verify(&key, data, result).is_ok());
            // Verify the output buffer also matches sign() and passes verify()
            assert_eq!(output.as_slice(), tag.as_ref());
            assert!(hmac::verify(&key, data, output.as_slice()).is_ok());
            // Test with larger buffer
            let mut large_output = vec![0u8; tag_len + 10];
            let result2 = hmac::sign_to_buffer(&key, data, &mut large_output).unwrap();
            assert_eq!(result2.len(), tag_len);
            assert_eq!(result2, tag.as_ref());
            assert!(hmac::verify(&key, data, result2).is_ok());
            assert_eq!(&large_output[0..tag_len], tag.as_ref());
        }
    }
    #[test]
    fn hmac_sign_to_buffer_too_small_test() {
        let key = hmac::Key::new(hmac::HMAC_SHA256, &[0; 32]);
        let data = b"hello";
        // Buffer too small should fail
        let mut small_buffer = vec![0u8; hmac::HMAC_SHA256.tag_len() - 1];
        assert!(hmac::sign_to_buffer(&key, data, &mut small_buffer).is_err());
        // Empty buffer should fail
        let mut empty_buffer = vec![];
        assert!(hmac::sign_to_buffer(&key, data, &mut empty_buffer).is_err());
    }
}

45
vendor/aws-lc-rs/src/hmac/tests/fips.rs vendored Normal file
View File

@@ -0,0 +1,45 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
// FIPS indicator checks rely on debug assertions being enabled.
#![cfg(debug_assertions)]
use crate::digest::{SHA1_OUTPUT_LEN, SHA224_OUTPUT_LEN, SHA256_OUTPUT_LEN, SHA512_OUTPUT_LEN};
use crate::fips::{assert_fips_status_indicator, FipsServiceStatus};
use crate::hmac::{
    sign, verify, Key, HMAC_SHA1_FOR_LEGACY_USE_ONLY, HMAC_SHA224, HMAC_SHA256, HMAC_SHA384,
    HMAC_SHA512,
};
use crate::rand::{self, SystemRandom};
const TEST_MESSAGE: &str = "test message";
// Generates a test for one HMAC algorithm: sign and verify with a random key
// of `$out_len` bytes, asserting the FIPS indicator reports Approved for both.
macro_rules! hmac_api {
    ($name:ident, $alg:expr, $out_len:expr) => {
        #[test]
        fn $name() {
            let rng = SystemRandom::new();
            let key_value: [u8; $out_len] = rand::generate(&rng).unwrap().expose();
            let s_key = Key::new($alg, key_value.as_ref());
            let tag = assert_fips_status_indicator!(
                sign(&s_key, TEST_MESSAGE.as_bytes()),
                FipsServiceStatus::Approved
            );
            let v_key = Key::new($alg, key_value.as_ref());
            assert_fips_status_indicator!(
                verify(&v_key, TEST_MESSAGE.as_bytes(), tag.as_ref()).unwrap(),
                FipsServiceStatus::Approved
            );
        }
    };
}
hmac_api!(sha1, HMAC_SHA1_FOR_LEGACY_USE_ONLY, SHA1_OUTPUT_LEN);
hmac_api!(sha224, HMAC_SHA224, SHA224_OUTPUT_LEN);
hmac_api!(sha256, HMAC_SHA256, SHA256_OUTPUT_LEN);
// NOTE(review): the sha384 case uses SHA256_OUTPUT_LEN (32-byte key) rather
// than SHA384_OUTPUT_LEN. HMAC accepts any key length, so the test is valid,
// but confirm whether a 48-byte key was intended here.
hmac_api!(sha384, HMAC_SHA384, SHA256_OUTPUT_LEN);
hmac_api!(sha512, HMAC_SHA512, SHA512_OUTPUT_LEN);

13
vendor/aws-lc-rs/src/io.rs vendored Normal file
View File

@@ -0,0 +1,13 @@
// Copyright 2018 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Serialization and deserialization.
// Internal DER parsing building blocks; hidden from the public docs.
#[doc(hidden)]
pub mod der;
pub(crate) mod positive;
// `Positive` is the only type re-exported publicly from this module tree.
pub use self::positive::Positive;

357
vendor/aws-lc-rs/src/io/der.rs vendored Normal file
View File

@@ -0,0 +1,357 @@
// Copyright 2015 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Building blocks for parsing DER-encoded ASN.1 structures.
//!
//! This module contains the foundational parts of an ASN.1 DER parser.
use super::Positive;
use crate::error;
// Bit 6 of a DER identifier octet: set for constructed (non-primitive) encodings.
pub const CONSTRUCTED: u8 = 1 << 5;
// Bits 8-7 of a DER identifier octet: `10` selects the context-specific class.
pub const CONTEXT_SPECIFIC: u8 = 2 << 6;
/// The subset of ASN.1 DER tags this parser recognizes, each with its
/// identifier-octet value.
#[non_exhaustive]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[repr(u8)]
pub enum Tag {
    Boolean = 0x01,
    Integer = 0x02,
    BitString = 0x03,
    OctetString = 0x04,
    Null = 0x05,
    OID = 0x06,
    Sequence = CONSTRUCTED | 0x10, // 0x30
    UTCTime = 0x17,
    GeneralizedTime = 0x18,
    ContextSpecificConstructed0 = CONTEXT_SPECIFIC | CONSTRUCTED,
    ContextSpecificConstructed1 = CONTEXT_SPECIFIC | CONSTRUCTED | 1,
    ContextSpecificConstructed3 = CONTEXT_SPECIFIC | CONSTRUCTED | 3,
}
impl From<Tag> for usize {
    fn from(tag: Tag) -> Self {
        tag as Self
    }
}
impl From<Tag> for u8 {
    // Safe because Tag is #[repr(u8)], so each variant fits in a byte.
    fn from(tag: Tag) -> Self {
        tag as Self
    } // XXX: narrowing conversion.
}
/// Reads one TLV element from `input` and returns its value, failing unless
/// the element's tag is exactly `tag`.
pub fn expect_tag_and_get_value<'a>(
    input: &mut untrusted::Reader<'a>,
    tag: Tag,
) -> Result<untrusted::Input<'a>, error::Unspecified> {
    let (actual_tag, inner) = read_tag_and_get_value(input)?;
    // Compare as usize so the u8 wire tag and the Tag enum share a type.
    if usize::from(tag) != usize::from(actual_tag) {
        return Err(error::Unspecified);
    }
    Ok(inner)
}
/// Reads one TLV element from `input`, returning its raw tag byte and value.
///
/// Only definite, canonical (shortest-form) lengths up to two length bytes
/// (values < 65536) are accepted, per DER's minimal-encoding rule.
pub fn read_tag_and_get_value<'a>(
    input: &mut untrusted::Reader<'a>,
) -> Result<(u8, untrusted::Input<'a>), error::Unspecified> {
    let tag = input.read_byte()?;
    if (tag & 0x1F) == 0x1F {
        return Err(error::Unspecified); // High tag number form is not allowed.
    }
    // If the high order bit of the first byte is set to zero then the length
    // is encoded in the seven remaining bits of that byte. Otherwise, those
    // seven bits represent the number of bytes used to encode the length.
    let length = match input.read_byte()? {
        n if (n & 0x80) == 0 => usize::from(n),
        0x81 => {
            // One length byte: only valid for lengths >= 128; shorter lengths
            // must use the single-byte short form.
            let second_byte = input.read_byte()?;
            if second_byte < 128 {
                return Err(error::Unspecified); // Not the canonical encoding.
            }
            usize::from(second_byte)
        }
        0x82 => {
            // Two length bytes: only valid for lengths >= 256.
            let second_byte = usize::from(input.read_byte()?);
            let third_byte = usize::from(input.read_byte()?);
            let combined = (second_byte << 8) | third_byte;
            if combined < 256 {
                return Err(error::Unspecified); // Not the canonical encoding.
            }
            combined
        }
        _ => {
            return Err(error::Unspecified); // We don't support longer lengths.
        }
    };
    let inner = input.read_bytes(length)?;
    Ok((tag, inner))
}
/// Reads a DER BIT STRING whose leading "unused bits" octet is zero and
/// returns the remaining content bytes.
pub fn bit_string_with_no_unused_bits<'a>(
    input: &mut untrusted::Reader<'a>,
) -> Result<untrusted::Input<'a>, error::Unspecified> {
    nested(input, Tag::BitString, error::Unspecified, |value| {
        // The first content octet counts unused trailing bits; we only accept
        // bit strings that are a whole number of bytes.
        let unused_bits_at_end = value.read_byte()?;
        if unused_bits_at_end != 0 {
            return Err(error::Unspecified);
        }
        Ok(value.read_bytes_to_end())
    })
}
// TODO: investigate taking decoder as a reference to reduce generated code
// size.
/// Reads a TLV element with tag `tag` and parses its entire value with
/// `decoder`, failing with `error` if the tag mismatches or any value bytes
/// are left unconsumed.
pub fn nested<'a, F, R, E: Copy>(
    input: &mut untrusted::Reader<'a>,
    tag: Tag,
    error: E,
    decoder: F,
) -> Result<R, E>
where
    F: FnOnce(&mut untrusted::Reader<'a>) -> Result<R, E>,
{
    let inner = expect_tag_and_get_value(input, tag).map_err(|_| error)?;
    // read_all enforces that `decoder` consumes every byte of the value.
    inner.read_all(error, decoder)
}
/// Parses a DER INTEGER that must be non-negative and at least `min_value`,
/// returning the encoded value bytes (with one leading zero retained only
/// when required to disambiguate from a negative value).
fn nonnegative_integer<'a>(
    input: &mut untrusted::Reader<'a>,
    min_value: u8,
) -> Result<untrusted::Input<'a>, error::Unspecified> {
    // Verify that |input|, which has had any leading zero stripped off, is the
    // encoding of a value of at least |min_value|.
    fn check_minimum(input: untrusted::Input, min_value: u8) -> Result<(), error::Unspecified> {
        input.read_all(error::Unspecified, |input| {
            let first_byte = input.read_byte()?;
            // Only single-byte values can be below `min_value` (u8 range).
            if input.at_end() && first_byte < min_value {
                return Err(error::Unspecified);
            }
            let _: untrusted::Input = input.read_bytes_to_end();
            Ok(())
        })
    }
    let value = expect_tag_and_get_value(input, Tag::Integer)?;
    value.read_all(error::Unspecified, |input| {
        // Empty encodings are not allowed.
        let first_byte = input.read_byte()?;
        if first_byte == 0 {
            if input.at_end() {
                // |value| is the legal encoding of zero.
                if min_value > 0 {
                    return Err(error::Unspecified);
                }
                return Ok(value);
            }
            let r = input.read_bytes_to_end();
            r.read_all(error::Unspecified, |input| {
                let second_byte = input.read_byte()?;
                if (second_byte & 0x80) == 0 {
                    // A leading zero is only allowed when the value's high bit
                    // is set.
                    return Err(error::Unspecified);
                }
                let _: untrusted::Input = input.read_bytes_to_end();
                Ok(())
            })?;
            check_minimum(r, min_value)?;
            // Return the value with the disambiguating zero stripped.
            return Ok(r);
        }
        // Negative values are not allowed.
        if (first_byte & 0x80) != 0 {
            return Err(error::Unspecified);
        }
        let _: untrusted::Input = input.read_bytes_to_end();
        check_minimum(value, min_value)?;
        Ok(value)
    })
}
/// Parse as integer with a value in the in the range [0, 255], returning its
/// numeric value. This is typically used for parsing version numbers.
///
/// Fails if the integer's canonical encoding is longer than one byte.
#[inline]
pub fn small_nonnegative_integer(input: &mut untrusted::Reader) -> Result<u8, error::Unspecified> {
    let value = nonnegative_integer(input, 0)?;
    // read_all rejects any value longer than the single byte read here.
    value.read_all(error::Unspecified, |input| {
        let r = input.read_byte()?;
        Ok(r)
    })
}
/// Parses a positive DER integer, returning the big-endian-encoded value,
/// sans any leading zero byte.
///
/// `min_value` of 1 makes `nonnegative_integer` reject a zero encoding.
pub fn positive_integer<'a>(
    input: &mut untrusted::Reader<'a>,
) -> Result<Positive<'a>, error::Unspecified> {
    Ok(Positive::new_non_empty_without_leading_zeros(
        nonnegative_integer(input, 1)?,
    ))
}
#[cfg(test)]
mod tests {
    use super::*;
    use untrusted::Input;
    // Runs `f` over `value` and asserts parsing succeeds with all input consumed.
    fn with_good_i<F, R>(value: &[u8], f: F)
    where
        F: FnOnce(&mut untrusted::Reader) -> Result<R, error::Unspecified>,
    {
        let r = Input::from(value).read_all(error::Unspecified, f);
        assert!(r.is_ok());
    }
    // Runs `f` over `value` and asserts parsing fails (or leaves input unconsumed).
    fn with_bad_i<F, R>(value: &[u8], f: F)
    where
        F: FnOnce(&mut untrusted::Reader) -> Result<R, error::Unspecified>,
    {
        let r = Input::from(value).read_all(error::Unspecified, f);
        assert!(r.is_err());
    }
    // DER encoding of INTEGER 0.
    static ZERO_INTEGER: &[u8] = &[0x02, 0x01, 0x00];
    // (encoding, expected value) pairs for small positive integers.
    static GOOD_POSITIVE_INTEGERS: &[(&[u8], u8)] = &[
        (&[0x02, 0x01, 0x01], 0x01),
        (&[0x02, 0x01, 0x02], 0x02),
        (&[0x02, 0x01, 0x7e], 0x7e),
        (&[0x02, 0x01, 0x7f], 0x7f),
        // Values that need to have an 0x00 prefix to disambiguate them from
        // them from negative values.
        (&[0x02, 0x02, 0x00, 0x80], 0x80),
        (&[0x02, 0x02, 0x00, 0x81], 0x81),
        (&[0x02, 0x02, 0x00, 0xfe], 0xfe),
        (&[0x02, 0x02, 0x00, 0xff], 0xff),
    ];
    // Multi-byte-length integers: ((header, value-bytes), (expected-prefix, expected-rest)).
    #[allow(clippy::type_complexity)]
    static GOOD_BIG_POSITIVE_INTEGERS: &[((&[u8], &[u8]), (&[u8], &[u8]))] = &[
        ((&[0x02, 0x81, 129u8, 1], &[0; 128]), (&[1], &[0; 128])),
        ((&[0x02, 0x82, 0x01, 0x00, 1], &[0; 255]), (&[1], &[0; 255])),
    ];
    static BAD_NONNEGATIVE_INTEGERS: &[&[u8]] = &[
        &[], // At end of input
        &[0x02], // Tag only
        &[0x02, 0x00], // Empty value
        // Length mismatch
        &[0x02, 0x00, 0x01],
        &[0x02, 0x01],
        &[0x02, 0x01, 0x00, 0x01],
        &[0x02, 0x01, 0x01, 0x00], // Would be valid if last byte is ignored.
        &[0x02, 0x02, 0x01],
        // Negative values
        &[0x02, 0x01, 0x80],
        &[0x02, 0x01, 0xfe],
        &[0x02, 0x01, 0xff],
        // Values that have an unnecessary leading 0x00
        &[0x02, 0x02, 0x00, 0x00],
        &[0x02, 0x02, 0x00, 0x01],
        &[0x02, 0x02, 0x00, 0x02],
        &[0x02, 0x02, 0x00, 0x7e],
        &[0x02, 0x02, 0x00, 0x7f],
    ];
    #[test]
    fn test_small_nonnegative_integer() {
        with_good_i(ZERO_INTEGER, |input| {
            assert_eq!(small_nonnegative_integer(input)?, 0x00);
            Ok(())
        });
        for &(test_in, test_out) in GOOD_POSITIVE_INTEGERS {
            with_good_i(test_in, |input| {
                assert_eq!(small_nonnegative_integer(input)?, test_out);
                Ok(())
            });
        }
        for &test_in in BAD_NONNEGATIVE_INTEGERS {
            with_bad_i(test_in, |input| {
                let _: u8 = small_nonnegative_integer(input)?;
                Ok(())
            });
        }
    }
    #[test]
    fn test_positive_integer() {
        // Zero is a valid nonnegative integer but not a valid positive one.
        with_bad_i(ZERO_INTEGER, |input| {
            let _: Positive<'_> = positive_integer(input)?;
            Ok(())
        });
        for &(test_in, test_out) in GOOD_POSITIVE_INTEGERS {
            with_good_i(test_in, |input| {
                let test_out = [test_out];
                assert_eq!(
                    positive_integer(input)?
                        .big_endian_without_leading_zero_as_input()
                        .as_slice_less_safe(),
                    Input::from(&test_out[..]).as_slice_less_safe()
                );
                Ok(())
            });
        }
        for &test_in in BAD_NONNEGATIVE_INTEGERS {
            with_bad_i(test_in, |input| {
                let _: Positive<'_> = positive_integer(input)?;
                Ok(())
            });
        }
    }
    // Exercise both From<Tag> conversions plus Tag's PartialEq.
    #[test]
    fn test_tag() {
        let tgt = usize::from(Tag::GeneralizedTime);
        assert_eq!(0x18usize, tgt);
        let tgt = u8::from(Tag::GeneralizedTime);
        assert_eq!(0x18u8, tgt);
        let tgt = Tag::GeneralizedTime;
        assert_eq!(tgt, Tag::GeneralizedTime);
    }
    // Integers with 0x81/0x82 long-form lengths parse to the expected bytes.
    #[test]
    fn test_big() {
        for &((bytes_in_a, bytes_in_b), (bytes_out_a, bytes_out_b)) in GOOD_BIG_POSITIVE_INTEGERS {
            let mut bytes_in = Vec::new();
            bytes_in.extend(bytes_in_a);
            bytes_in.extend(bytes_in_b);
            let mut bytes_out: Vec<u8> = Vec::new();
            bytes_out.extend(bytes_out_a);
            bytes_out.extend(bytes_out_b);
            with_good_i(&bytes_in, |input| {
                let positive = positive_integer(input)?;
                let expected_bytes = positive.big_endian_without_leading_zero();
                assert_eq!(expected_bytes, &bytes_out);
                Ok(())
            });
        }
    }
    #[test]
    fn test_bit_string_with_no_unused_bits() {
        // Not a BitString
        let mut reader_bad = untrusted::Reader::new(Input::from(&[0x02, 0x01]));
        assert!(bit_string_with_no_unused_bits(&mut reader_bad).is_err());
        // Unused bits at end
        let mut reader_bad2 = untrusted::Reader::new(Input::from(&[0x03, 0x01, 0x01]));
        assert!(bit_string_with_no_unused_bits(&mut reader_bad2).is_err());
        let mut reader_good = untrusted::Reader::new(Input::from(&[0x03, 0x01, 0x00]));
        let input = bit_string_with_no_unused_bits(&mut reader_good).unwrap();
        let expected_result: &[u8] = &[];
        assert_eq!(expected_result, input.as_slice_less_safe());
    }
}

45
vendor/aws-lc-rs/src/io/positive.rs vendored Normal file
View File

@@ -0,0 +1,45 @@
// Copyright 2018 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Serialization and deserialization.
/// A serialized positive integer.
///
/// Invariant: the wrapped input is non-empty and carries no leading zero byte
/// (enforced in debug builds by the constructor).
#[derive(Copy, Clone)]
pub struct Positive<'a>(untrusted::Input<'a>);
impl<'a> Positive<'a> {
    /// Wraps `input`, which the caller guarantees is non-empty and has no
    /// unnecessary leading zero. Checked only via debug assertions.
    pub(crate) fn new_non_empty_without_leading_zeros(input: untrusted::Input<'a>) -> Self {
        debug_assert!(!input.is_empty());
        debug_assert!(input.len() == 1 || input.as_slice_less_safe()[0] != 0);
        Self(input)
    }
    /// Returns the value, ordered from significant byte to least significant
    /// byte, without any leading zeros. The result is guaranteed to be
    /// non-empty.
    #[inline]
    #[must_use]
    pub fn big_endian_without_leading_zero(&self) -> &'a [u8] {
        self.big_endian_without_leading_zero_as_input()
            .as_slice_less_safe()
    }
    #[inline]
    pub(crate) fn big_endian_without_leading_zero_as_input(&self) -> untrusted::Input<'a> {
        self.0
    }
}
impl Positive<'_> {
    /// Returns the first byte.
    ///
    /// Will not panic because the value is guaranteed to have at least one
    /// byte.
    #[must_use]
    pub fn first_byte(&self) -> u8 {
        // This won't panic because the constructor guarantees the wrapped
        // input is non-empty.
        self.0.as_slice_less_safe()[0]
    }
}

100
vendor/aws-lc-rs/src/iv.rs vendored Normal file
View File

@@ -0,0 +1,100 @@
// Copyright 2018 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![allow(dead_code)]
//! Initialization Vector (IV) cryptographic primitives
use crate::error::Unspecified;
use crate::rand;
use zeroize::Zeroize;
/// Length of a 128-bit IV in bytes.
pub const IV_LEN_128_BIT: usize = 16;
/// An initialization vector that must be unique for the lifetime of the associated key
/// it is used with.
///
/// The bytes are zeroized on drop.
pub struct FixedLength<const L: usize>([u8; L]);
impl<const L: usize> FixedLength<L> {
    /// Returns the size of the iv in bytes.
    #[allow(clippy::must_use_candidate)]
    pub fn size(&self) -> usize {
        L
    }
    /// Constructs a new [`FixedLength`] from pseudo-random bytes.
    ///
    /// # Errors
    ///
    /// * [`Unspecified`]: Returned if there is a failure generating `L` bytes.
    pub fn new() -> Result<Self, Unspecified> {
        let mut iv_bytes = [0u8; L];
        rand::fill(&mut iv_bytes)?;
        Ok(Self(iv_bytes))
    }
}
impl<const L: usize> Drop for FixedLength<L> {
    // Scrub the IV bytes from memory when the value goes out of scope.
    fn drop(&mut self) {
        self.0.zeroize();
    }
}
impl<const L: usize> AsRef<[u8; L]> for FixedLength<L> {
    #[inline]
    fn as_ref(&self) -> &[u8; L] {
        &self.0
    }
}
impl<const L: usize> From<&[u8; L]> for FixedLength<L> {
    #[inline]
    fn from(bytes: &[u8; L]) -> Self {
        FixedLength(bytes.to_owned())
    }
}
impl<const L: usize> From<[u8; L]> for FixedLength<L> {
    #[inline]
    fn from(bytes: [u8; L]) -> Self {
        FixedLength(bytes)
    }
}
impl<const L: usize> TryFrom<&[u8]> for FixedLength<L> {
    type Error = Unspecified;
    // Fails when `value.len() != L`.
    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
        let value: &[u8; L] = value.try_into()?;
        Ok(Self::from(*value))
    }
}
impl<const L: usize> TryFrom<FixedLength<L>> for [u8; L] {
    type Error = Unspecified;
    // Note: `value.0` is copied out (arrays of u8 are Copy), which is why
    // this compiles even though FixedLength implements Drop. The original is
    // still zeroized when `value` drops; the caller now owns an unscrubbed copy.
    fn try_from(value: FixedLength<L>) -> Result<Self, Self::Error> {
        Ok(value.0)
    }
}
#[cfg(test)]
mod tests {
    use crate::iv::FixedLength;
    // Exercise size(), the slice TryFrom (both matching and mismatched
    // lengths), and conversion back to an array.
    #[test]
    fn test_size() {
        let fixed = FixedLength::from([0u8; 16]);
        assert_eq!(16, fixed.size());
        let array = [0u8; 12];
        let fixed = FixedLength::<12>::try_from(array.as_slice()).unwrap();
        assert_eq!(12, fixed.size());
        // A 12-byte slice must not convert into a 16-byte IV.
        assert!(FixedLength::<16>::try_from(array.as_slice()).is_err());
        assert!(TryInto::<[u8; 12]>::try_into(fixed).is_ok());
    }
}

443
vendor/aws-lc-rs/src/kdf.rs vendored Normal file
View File

@@ -0,0 +1,443 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! # Key-based Key Derivation Function (KBKDF) in Counter Mode
//!
//! [`kbkdf_ctr_hmac`] provides an implementation of KDF in Counter Mode using HMAC PRF specified in
//! [NIST SP 800-108r1-upd1](https://doi.org/10.6028/NIST.SP.800-108r1-upd1) section 4.1. Further details
//! regarding the implementation can be found on the accompanying function documentation.
//!
//! Key-based key derivation functions are used to derive additional keys from an existing cryptographic key.
//!
//! ## Example: Usage with HMAC-SHA256 PRF
//!
//! ```rust
//! # use std::error::Error;
//! use aws_lc_rs::kdf::{
//! get_kbkdf_ctr_hmac_algorithm, kbkdf_ctr_hmac, KbkdfCtrHmacAlgorithm,
//! KbkdfCtrHmacAlgorithmId,
//! };
//! #
//! # fn main() -> Result<(), Box<dyn Error>> {
//! use aws_lc_rs::error::Unspecified;
//!
//! const OUTPUT_KEY_LEN: usize = 16;
//!
//! let key: &[u8] = &[
//! 0x01, 0x85, 0xfb, 0x76, 0x61, 0xf6, 0xdd, 0x40, 0x8d, 0x98, 0x2f, 0x81, 0x0f, 0xcd, 0x50,
//! 0x04,
//! ];
//!
//! let info: &[u8] = &[
//! 0xc3, 0xf1, 0x71, 0x2a, 0x82, 0x61, 0x36, 0x43, 0xe0, 0xf7, 0x63, 0xa7, 0xa0, 0xa3, 0x15,
//! 0x88, 0xb6, 0xae, 0xd9, 0x50, 0x56, 0xdf, 0xc5, 0x12, 0x55, 0x0c, 0xf2, 0xd0, 0x0d, 0x68,
//! 0xa3, 0x2d,
//! ];
//!
//! let mut output_key = [0u8; OUTPUT_KEY_LEN];
//!
//! let kbkdf_ctr_hmac_sha256: &KbkdfCtrHmacAlgorithm =
//! get_kbkdf_ctr_hmac_algorithm(KbkdfCtrHmacAlgorithmId::Sha256).ok_or(Unspecified)?;
//!
//! kbkdf_ctr_hmac(kbkdf_ctr_hmac_sha256, key, info, &mut output_key)?;
//!
//! assert_eq!(
//! output_key,
//! [
//! 0xc6, 0x3f, 0x74, 0x7b, 0x67, 0xbe, 0x71, 0xf5, 0x7b, 0xa4, 0x56, 0x21, 0x17, 0xdd,
//! 0x29, 0x4
//! ]
//! );
//!
//! # Ok(())
//! # }
//! ```
//!
//! ## Example: Usage with HMAC-SHA256 PRF using NIST FixedInfo Construction
//!
//! ```rust
//! # use std::error::Error;
//! use aws_lc_rs::kdf::{
//! get_kbkdf_ctr_hmac_algorithm, kbkdf_ctr_hmac, KbkdfCtrHmacAlgorithm,
//! KbkdfCtrHmacAlgorithmId,
//! };
//!
//!
//! # fn main() -> Result<(), Box<dyn Error>> {
//! use aws_lc_rs::error::Unspecified;
//! const OUTPUT_KEY_LEN: usize = 16;
//!
//! let key: &[u8] = &[
//! 0x01, 0x85, 0xfb, 0x76, 0x61, 0xf6, 0xdd, 0x40, 0x8d, 0x98, 0x2f, 0x81, 0x0f, 0xcd, 0x50,
//! 0x04,
//! ];
//!
//! let label: &[u8] = b"KBKDF HMAC Counter Label";
//! let context: &[u8] = b"KBKDF HMAC Counter Context";
//!
//! let output_len_bits_be: [u8; 4] = {
//! // Multiply `output_len` by eight to convert from bytes to bits
//! // Convert value to a 32-bit big-endian representation
//! let len: u32 = (OUTPUT_KEY_LEN * 8).try_into()?;
//! len.to_be_bytes()
//! };
//!
//! // FixedInfo String: Label || 0x00 || Context || [L]
//! let mut info = Vec::<u8>::new();
//! info.extend_from_slice(label);
//! info.push(0x0);
//! info.extend_from_slice(context);
//! info.extend_from_slice(&output_len_bits_be);
//!
//! let mut output_key = [0u8; OUTPUT_KEY_LEN];
//!
//! let kbkdf_ctr_hmac_sha256: &KbkdfCtrHmacAlgorithm =
//! get_kbkdf_ctr_hmac_algorithm(KbkdfCtrHmacAlgorithmId::Sha256).ok_or(Unspecified)?;
//!
//! kbkdf_ctr_hmac(kbkdf_ctr_hmac_sha256, key, &info, &mut output_key)?;
//!
//! assert_eq!(
//! output_key,
//! [
//! 0xcd, 0xe0, 0x92, 0xc8, 0xfe, 0x96, 0x21, 0x51, 0x88, 0xd4, 0x3d, 0xe4, 0x6c, 0xf6,
//! 0x37, 0xcb
//! ]
//! );
//!
//! # Ok(())
//! # }
//! ```
//! # Single-step Key Derivation Function (SSKDF)
//!
//! [`sskdf_digest`] and [`sskdf_hmac`] provided implementations of a one-step key derivation function defined in
//! section 4 of [NIST SP 800-56Cr2](https://doi.org/10.6028/NIST.SP.800-56Cr2).
//!
//! These functions are used to derive keying material from a shared secret during a key establishment scheme.
//!
//! ## SSKDF using digest
//!
//! ```rust
//! # use std::error::Error;
//! use aws_lc_rs::kdf::{
//! get_sskdf_digest_algorithm, sskdf_digest, SskdfDigestAlgorithm, SskdfDigestAlgorithmId,
//! };
//!
//! # fn main() -> Result<(), Box<dyn Error>> {
//! use aws_lc_rs::error::Unspecified;
//!
//! const OUTPUT_KEY_LEN: usize = 16;
//!
//! let shared_secret: &[u8] = &[
//! 0x59, 0x09, 0x6b, 0x7b, 0xb7, 0x2b, 0x94, 0xc5, 0x55, 0x5c, 0x36, 0xc9, 0x76, 0x8f, 0xd8,
//! 0xe4, 0xed, 0x8f, 0x39, 0x5e, 0x78, 0x48, 0x5e, 0xb9, 0xf9, 0xdd, 0x43, 0x65, 0x55, 0x00,
//! 0xed, 0x7a,
//! ];
//!
//! let info: &[u8] = &[
//! 0x9b, 0xca, 0xd7, 0xe8, 0xee, 0xf7, 0xb2, 0x1a, 0x98, 0xff, 0x18, 0x60, 0x5c, 0x68, 0x16,
//! 0xbd,
//! ];
//!
//! let mut output_key = [0u8; OUTPUT_KEY_LEN];
//!
//! let sskdf_digest_sha256: &SskdfDigestAlgorithm =
//! get_sskdf_digest_algorithm(SskdfDigestAlgorithmId::Sha256).ok_or(Unspecified)?;
//!
//! sskdf_digest(sskdf_digest_sha256, shared_secret, info, &mut output_key)?;
//!
//! assert_eq!(
//! output_key,
//! [
//! 0x21, 0x79, 0x35, 0x6c, 0xdc, 0x30, 0x1, 0xe6, 0x3f, 0x91, 0xb3, 0xc8, 0x10, 0x7, 0xba,
//! 0x31
//! ]
//! );
//! # Ok(())
//! # }
//! ```
//!
//! ## SSKDF using HMAC
//!
//! ```rust
//! # use std::error::Error;
//! use aws_lc_rs::kdf::{
//! get_sskdf_hmac_algorithm, sskdf_hmac, SskdfHmacAlgorithm, SskdfHmacAlgorithmId,
//! };
//!
//!
//! # fn main() -> Result<(), Box<dyn Error>> {
//! use aws_lc_rs::error::Unspecified;
//!
//!
//! const OUTPUT_KEY_LEN: usize = 16;
//!
//! let shared_secret: &[u8] = &[
//! 0x59, 0x09, 0x6b, 0x7b, 0xb7, 0x2b, 0x94, 0xc5, 0x55, 0x5c, 0x36, 0xc9, 0x76, 0x8f, 0xd8,
//! 0xe4, 0xed, 0x8f, 0x39, 0x5e, 0x78, 0x48, 0x5e, 0xb9, 0xf9, 0xdd, 0x43, 0x65, 0x55, 0x00,
//! 0xed, 0x7a,
//! ];
//!
//! let info: &[u8] = &[
//! 0x9b, 0xca, 0xd7, 0xe8, 0xee, 0xf7, 0xb2, 0x1a, 0x98, 0xff, 0x18, 0x60, 0x5c, 0x68, 0x16,
//! 0xbd,
//! ];
//!
//! let salt: &[u8] = &[
//! 0x2b, 0xc5, 0xf1, 0x6c, 0x48, 0x34, 0x72, 0xd8, 0xda, 0x53, 0xf6, 0xc3, 0x0f, 0x0a, 0xf4,
//! 0x02,
//! ];
//!
//! let mut output_key = [0u8; OUTPUT_KEY_LEN];
//!
//! let sskdf_hmac_sha256: &SskdfHmacAlgorithm =
//! get_sskdf_hmac_algorithm(SskdfHmacAlgorithmId::Sha256).ok_or(Unspecified)?;
//!
//! sskdf_hmac(
//! sskdf_hmac_sha256,
//! shared_secret,
//! info,
//! salt,
//! &mut output_key,
//! )?;
//!
//! assert_eq!(
//! output_key,
//! [
//! 0x4c, 0x36, 0x80, 0x2d, 0xf5, 0xd8, 0xd6, 0x1b, 0xd5, 0xc2, 0x4, 0x7e, 0x5, 0x5a, 0x6d,
//! 0xcb
//! ]
//! );
//! # Ok(())
//! # }
//! ```
mod kbkdf;
mod sskdf;
pub use kbkdf::{
get_kbkdf_ctr_hmac_algorithm, kbkdf_ctr_hmac, KbkdfCtrHmacAlgorithm, KbkdfCtrHmacAlgorithmId,
};
pub use sskdf::{
get_sskdf_digest_algorithm, get_sskdf_hmac_algorithm, sskdf_digest, sskdf_hmac,
SskdfDigestAlgorithm, SskdfDigestAlgorithmId, SskdfHmacAlgorithm, SskdfHmacAlgorithmId,
};
#[cfg(test)]
mod tests {
    use crate::kdf::sskdf::SskdfHmacAlgorithmId;
    use crate::kdf::{
        get_kbkdf_ctr_hmac_algorithm, get_sskdf_digest_algorithm, get_sskdf_hmac_algorithm,
        kbkdf_ctr_hmac, sskdf_digest, sskdf_hmac, KbkdfCtrHmacAlgorithmId, SskdfDigestAlgorithmId,
    };
    // Every KDF must reject a request for zero bytes of output.
    #[test]
    fn zero_length_output() {
        let mut output = vec![0u8; 0];
        assert!(sskdf_hmac(
            get_sskdf_hmac_algorithm(SskdfHmacAlgorithmId::Sha256).expect("algorithm supported"),
            &[0u8; 16],
            &[],
            &[],
            &mut output
        )
        .is_err());
        assert!(sskdf_digest(
            get_sskdf_digest_algorithm(SskdfDigestAlgorithmId::Sha256)
                .expect("algorithm supported"),
            &[0u8; 16],
            &[],
            &mut output
        )
        .is_err());
        assert!(kbkdf_ctr_hmac(
            get_kbkdf_ctr_hmac_algorithm(KbkdfCtrHmacAlgorithmId::Sha256)
                .expect("algorithm supported"),
            &[0u8; 16],
            &[],
            &mut output
        )
        .is_err());
    }
    // Every KDF must reject an empty input secret.
    #[test]
    fn zero_length_secret() {
        let mut output = vec![0u8; 16];
        assert!(sskdf_hmac(
            get_sskdf_hmac_algorithm(SskdfHmacAlgorithmId::Sha256).expect("algorithm supported"),
            &[],
            &[],
            &[],
            &mut output
        )
        .is_err());
        assert!(sskdf_digest(
            get_sskdf_digest_algorithm(SskdfDigestAlgorithmId::Sha256)
                .expect("algorithm supported"),
            &[],
            &[],
            &mut output
        )
        .is_err());
        assert!(kbkdf_ctr_hmac(
            get_kbkdf_ctr_hmac_algorithm(KbkdfCtrHmacAlgorithmId::Sha256)
                .expect("algorithm supported"),
            &[],
            &[],
            &mut output
        )
        .is_err());
    }
    // For each digest variant: getter returns the algorithm, `id()` round-trips,
    // `Debug` output matches the identifier's, and a derivation succeeds.
    #[test]
    fn sskdf_digest_test() {
        for id in [
            SskdfDigestAlgorithmId::Sha224,
            SskdfDigestAlgorithmId::Sha256,
            SskdfDigestAlgorithmId::Sha384,
            SskdfDigestAlgorithmId::Sha512,
        ] {
            let alg = get_sskdf_digest_algorithm(id).expect("supported");
            assert_eq!(id, alg.id());
            assert_eq!(format!("{id:?}"), format!("{alg:?}"));
            assert_eq!(format!("{id:?}"), format!("{:?}", alg.id()));
            let mut output = vec![0u8; 32];
            sskdf_digest(alg, &[1u8; 32], &[2u8; 32], &mut output).expect("success");
        }
    }
    // Same coverage for the HMAC-based SSKDF variants.
    #[test]
    fn sskdf_hmac_test() {
        for id in [
            SskdfHmacAlgorithmId::Sha224,
            SskdfHmacAlgorithmId::Sha256,
            SskdfHmacAlgorithmId::Sha384,
            SskdfHmacAlgorithmId::Sha512,
        ] {
            let alg = get_sskdf_hmac_algorithm(id).expect("supported");
            assert_eq!(id, alg.id());
            assert_eq!(format!("{id:?}"), format!("{alg:?}"));
            assert_eq!(format!("{id:?}"), format!("{:?}", alg.id()));
            let mut output = vec![0u8; 32];
            sskdf_hmac(alg, &[1u8; 32], &[2u8; 32], &[3u8; 32], &mut output).expect("success");
        }
    }
    // Same coverage for the KBKDF counter-mode HMAC variants.
    #[test]
    fn kbkdf_ctr_hmac_test() {
        for id in [
            KbkdfCtrHmacAlgorithmId::Sha224,
            KbkdfCtrHmacAlgorithmId::Sha256,
            KbkdfCtrHmacAlgorithmId::Sha384,
            KbkdfCtrHmacAlgorithmId::Sha512,
        ] {
            let alg = get_kbkdf_ctr_hmac_algorithm(id).expect("supported");
            assert_eq!(id, alg.id());
            assert_eq!(format!("{id:?}"), format!("{alg:?}"));
            assert_eq!(format!("{id:?}"), format!("{:?}", alg.id()));
            let mut output = vec![0u8; 32];
            kbkdf_ctr_hmac(alg, &[1u8; 32], &[2u8; 32], &mut output).expect("success");
        }
    }
    // Algorithm handles compare equal exactly when their identifiers match.
    #[test]
    fn algorithm_equality() {
        let alg1 = get_kbkdf_ctr_hmac_algorithm(KbkdfCtrHmacAlgorithmId::Sha256).unwrap();
        let alg2 = get_kbkdf_ctr_hmac_algorithm(KbkdfCtrHmacAlgorithmId::Sha256).unwrap();
        assert_eq!(alg1, alg2);
        let alg2 = get_kbkdf_ctr_hmac_algorithm(KbkdfCtrHmacAlgorithmId::Sha512).unwrap();
        assert_ne!(alg1, alg2);
        let alg1 = get_sskdf_digest_algorithm(SskdfDigestAlgorithmId::Sha256).unwrap();
        let alg2 = get_sskdf_digest_algorithm(SskdfDigestAlgorithmId::Sha256).unwrap();
        assert_eq!(alg1, alg2);
        let alg2 = get_sskdf_digest_algorithm(SskdfDigestAlgorithmId::Sha512).unwrap();
        assert_ne!(alg1, alg2);
        let alg1 = get_sskdf_hmac_algorithm(SskdfHmacAlgorithmId::Sha256).unwrap();
        let alg2 = get_sskdf_hmac_algorithm(SskdfHmacAlgorithmId::Sha256).unwrap();
        assert_eq!(alg1, alg2);
        let alg2 = get_sskdf_hmac_algorithm(SskdfHmacAlgorithmId::Sha512).unwrap();
        assert_ne!(alg1, alg2);
    }
}
#[cfg(test)]
mod more_tests {
    use crate::kdf::{
        get_kbkdf_ctr_hmac_algorithm, get_sskdf_digest_algorithm, get_sskdf_hmac_algorithm,
        KbkdfCtrHmacAlgorithmId, SskdfDigestAlgorithmId, SskdfHmacAlgorithmId,
    };
    // Expands to a #[test] named `$name` that asserts the getter `$getter`
    // returns `Some` for the algorithm identifier `$alg`.
    macro_rules! assert_get_algorithm {
        ($name:ident, $getter:path, $alg:expr) => {
            #[test]
            fn $name() {
                assert!($getter($alg).is_some());
            }
        };
    }
    // One test per (getter, identifier) pair so a missing variant fails alone.
    assert_get_algorithm!(
        get_sskdf_hmac_algorithm_hmac_sha224,
        get_sskdf_hmac_algorithm,
        SskdfHmacAlgorithmId::Sha224
    );
    assert_get_algorithm!(
        get_sskdf_hmac_algorithm_hmac_sha256,
        get_sskdf_hmac_algorithm,
        SskdfHmacAlgorithmId::Sha256
    );
    assert_get_algorithm!(
        get_sskdf_hmac_algorithm_hmac_sha384,
        get_sskdf_hmac_algorithm,
        SskdfHmacAlgorithmId::Sha384
    );
    assert_get_algorithm!(
        get_sskdf_hmac_algorithm_hmac_sha512,
        get_sskdf_hmac_algorithm,
        SskdfHmacAlgorithmId::Sha512
    );
    assert_get_algorithm!(
        get_sskdf_digest_algorithm_sha224,
        get_sskdf_digest_algorithm,
        SskdfDigestAlgorithmId::Sha224
    );
    assert_get_algorithm!(
        get_sskdf_digest_algorithm_sha256,
        get_sskdf_digest_algorithm,
        SskdfDigestAlgorithmId::Sha256
    );
    assert_get_algorithm!(
        get_sskdf_digest_algorithm_sha384,
        get_sskdf_digest_algorithm,
        SskdfDigestAlgorithmId::Sha384
    );
    assert_get_algorithm!(
        get_sskdf_digest_algorithm_sha512,
        get_sskdf_digest_algorithm,
        SskdfDigestAlgorithmId::Sha512
    );
    assert_get_algorithm!(
        get_kbkdf_ctr_hmac_algorithm_sha224,
        get_kbkdf_ctr_hmac_algorithm,
        KbkdfCtrHmacAlgorithmId::Sha224
    );
    assert_get_algorithm!(
        get_kbkdf_ctr_hmac_algorithm_sha256,
        get_kbkdf_ctr_hmac_algorithm,
        KbkdfCtrHmacAlgorithmId::Sha256
    );
    assert_get_algorithm!(
        get_kbkdf_ctr_hmac_algorithm_sha384,
        get_kbkdf_ctr_hmac_algorithm,
        KbkdfCtrHmacAlgorithmId::Sha384
    );
    assert_get_algorithm!(
        get_kbkdf_ctr_hmac_algorithm_sha512,
        get_kbkdf_ctr_hmac_algorithm,
        KbkdfCtrHmacAlgorithmId::Sha512
    );
}

146
vendor/aws-lc-rs/src/kdf/kbkdf.rs vendored Normal file
View File

@@ -0,0 +1,146 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![allow(clippy::module_name_repetitions)]
use crate::aws_lc::{KBKDF_ctr_hmac, EVP_MD};
use crate::digest::{match_digest_type, AlgorithmID};
use crate::error::Unspecified;
use crate::ptr::ConstPointer;
// Static algorithm descriptions returned (by reference) from
// `get_kbkdf_ctr_hmac_algorithm`. `dead_code` is allowed on each —
// presumably because some build configurations do not reference every
// variant directly (TODO confirm against feature flags).
/// KBKDF in Counter Mode with HMAC-SHA224
#[allow(dead_code)]
const KBKDF_CTR_HMAC_SHA224: KbkdfCtrHmacAlgorithm = KbkdfCtrHmacAlgorithm {
    id: KbkdfCtrHmacAlgorithmId::Sha224,
};
/// KBKDF in Counter Mode with HMAC-SHA256
#[allow(dead_code)]
const KBKDF_CTR_HMAC_SHA256: KbkdfCtrHmacAlgorithm = KbkdfCtrHmacAlgorithm {
    id: KbkdfCtrHmacAlgorithmId::Sha256,
};
/// KBKDF in Counter Mode with HMAC-SHA384
#[allow(dead_code)]
const KBKDF_CTR_HMAC_SHA384: KbkdfCtrHmacAlgorithm = KbkdfCtrHmacAlgorithm {
    id: KbkdfCtrHmacAlgorithmId::Sha384,
};
/// KBKDF in Counter Mode with HMAC-SHA512
#[allow(dead_code)]
const KBKDF_CTR_HMAC_SHA512: KbkdfCtrHmacAlgorithm = KbkdfCtrHmacAlgorithm {
    id: KbkdfCtrHmacAlgorithmId::Sha512,
};
/// Retrieve [`KbkdfCtrHmacAlgorithm`] using the [`KbkdfCtrHmacAlgorithmId`] specified by `id`.
#[must_use]
pub const fn get_kbkdf_ctr_hmac_algorithm(
    id: KbkdfCtrHmacAlgorithmId,
) -> Option<&'static KbkdfCtrHmacAlgorithm> {
    // The mapping is total: every identifier resolves to a statically
    // allocated algorithm description, so each arm yields `Some`.
    match id {
        KbkdfCtrHmacAlgorithmId::Sha224 => Some(&KBKDF_CTR_HMAC_SHA224),
        KbkdfCtrHmacAlgorithmId::Sha256 => Some(&KBKDF_CTR_HMAC_SHA256),
        KbkdfCtrHmacAlgorithmId::Sha384 => Some(&KBKDF_CTR_HMAC_SHA384),
        KbkdfCtrHmacAlgorithmId::Sha512 => Some(&KBKDF_CTR_HMAC_SHA512),
    }
}
/// KBKDF in Counter Mode with HMAC Algorithm
pub struct KbkdfCtrHmacAlgorithm {
    // Identifies which HMAC digest the PRF uses.
    id: KbkdfCtrHmacAlgorithmId,
}
impl KbkdfCtrHmacAlgorithm {
    /// Returns the KBKDF Counter HMAC Algorithm Identifier
    #[must_use]
    pub fn id(&self) -> KbkdfCtrHmacAlgorithmId {
        self.id
    }
    // Maps this algorithm's identifier to the AWS-LC `EVP_MD` digest used
    // as the HMAC PRF.
    #[must_use]
    fn get_evp_md(&self) -> ConstPointer<'_, EVP_MD> {
        match_digest_type(match self.id {
            KbkdfCtrHmacAlgorithmId::Sha224 => &AlgorithmID::SHA224,
            KbkdfCtrHmacAlgorithmId::Sha256 => &AlgorithmID::SHA256,
            KbkdfCtrHmacAlgorithmId::Sha384 => &AlgorithmID::SHA384,
            KbkdfCtrHmacAlgorithmId::Sha512 => &AlgorithmID::SHA512,
        })
    }
}
// Two algorithm handles are equal exactly when their identifiers match.
impl PartialEq for KbkdfCtrHmacAlgorithm {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for KbkdfCtrHmacAlgorithm {}
// Debug output delegates to the identifier's, e.g. `Sha256`.
impl core::fmt::Debug for KbkdfCtrHmacAlgorithm {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        core::fmt::Debug::fmt(&self.id, f)
    }
}
/// Key-based Derivation Function Algorithm Identifier
// `non_exhaustive` leaves room to add digests without a breaking change.
#[non_exhaustive]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum KbkdfCtrHmacAlgorithmId {
    /// KBKDF in Counter Mode with HMAC-SHA224
    Sha224,
    /// KBKDF in Counter Mode with HMAC-SHA256
    Sha256,
    /// KBKDF in Counter Mode with HMAC-SHA384
    Sha384,
    /// KBKDF in Counter Mode with HMAC-SHA512
    Sha512,
}
/// # Key-based Key Derivation Function (KBKDF) in Counter Mode with HMAC PRF
///
/// ## Input Validation and Defaults
/// * `output.len() > 0` and `secret.len() > 0`
/// * `output.len() <= usize::MAX - DIGEST_LENGTH`
/// * The requested `output.len()` must not result in overflowing the counter.
///
/// ## Implementation Notes
///
/// This implementation adheres to the algorithm specified in Section 4.1 of the
/// NIST Special Publication 800-108 Revision 1 Update 1 published on August
/// 2022. Using HMAC as the PRF function. In this implementation:
/// * The counter is 32-bits and is represented in big-endian format
/// * The counter is placed before the fixed info string
///
/// Specification available at <https://doi.org/10.6028/NIST.SP.800-108r1-upd1>
///
/// # Errors
/// `Unspecified` is returned if input validation fails or an unexpected error occurs.
pub fn kbkdf_ctr_hmac(
    algorithm: &'static KbkdfCtrHmacAlgorithm,
    secret: &[u8],
    info: &[u8],
    output: &mut [u8],
) -> Result<(), Unspecified> {
    let evp_md = algorithm.get_evp_md();
    let out_len = output.len();
    // SAFETY: every pointer/length pair is derived from a live Rust slice,
    // and `evp_md` is obtained from the algorithm's digest table. The call
    // reports success by returning 1; anything else maps to `Unspecified`.
    if 1 != unsafe {
        KBKDF_ctr_hmac(
            output.as_mut_ptr(),
            out_len,
            evp_md.as_const_ptr(),
            secret.as_ptr(),
            secret.len(),
            info.as_ptr(),
            info.len(),
        )
    } {
        return Err(Unspecified);
    }
    Ok(())
}

287
vendor/aws-lc-rs/src/kdf/sskdf.rs vendored Normal file
View File

@@ -0,0 +1,287 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![allow(clippy::module_name_repetitions)]
use crate::aws_lc::{SSKDF_digest, SSKDF_hmac, EVP_MD};
use crate::digest::{match_digest_type, AlgorithmID};
use crate::error::Unspecified;
use crate::ptr::ConstPointer;
// Static algorithm descriptions returned (by reference) from the
// `get_sskdf_*_algorithm` getters. `dead_code` is allowed on each —
// presumably because some build configurations do not reference every
// variant directly (TODO confirm against feature flags).
/// SSKDF with HMAC-SHA224
#[allow(dead_code)]
const SSKDF_HMAC_SHA224: SskdfHmacAlgorithm = SskdfHmacAlgorithm {
    id: SskdfHmacAlgorithmId::Sha224,
};
/// SSKDF with HMAC-SHA256
#[allow(dead_code)]
const SSKDF_HMAC_SHA256: SskdfHmacAlgorithm = SskdfHmacAlgorithm {
    id: SskdfHmacAlgorithmId::Sha256,
};
/// SSKDF with HMAC-SHA384
#[allow(dead_code)]
const SSKDF_HMAC_SHA384: SskdfHmacAlgorithm = SskdfHmacAlgorithm {
    id: SskdfHmacAlgorithmId::Sha384,
};
/// SSKDF with HMAC-SHA512
#[allow(dead_code)]
const SSKDF_HMAC_SHA512: SskdfHmacAlgorithm = SskdfHmacAlgorithm {
    id: SskdfHmacAlgorithmId::Sha512,
};
/// SSKDF with SHA224
#[allow(dead_code)]
const SSKDF_DIGEST_SHA224: SskdfDigestAlgorithm = SskdfDigestAlgorithm {
    id: SskdfDigestAlgorithmId::Sha224,
};
/// SSKDF with SHA256
#[allow(dead_code)]
const SSKDF_DIGEST_SHA256: SskdfDigestAlgorithm = SskdfDigestAlgorithm {
    id: SskdfDigestAlgorithmId::Sha256,
};
/// SSKDF with SHA384
#[allow(dead_code)]
const SSKDF_DIGEST_SHA384: SskdfDigestAlgorithm = SskdfDigestAlgorithm {
    id: SskdfDigestAlgorithmId::Sha384,
};
/// SSKDF with SHA512
#[allow(dead_code)]
const SSKDF_DIGEST_SHA512: SskdfDigestAlgorithm = SskdfDigestAlgorithm {
    id: SskdfDigestAlgorithmId::Sha512,
};
/// Retrieve [`SskdfHmacAlgorithm`] using the [`SskdfHmacAlgorithmId`] specified by `id`.
#[must_use]
pub const fn get_sskdf_hmac_algorithm(
    id: SskdfHmacAlgorithmId,
) -> Option<&'static SskdfHmacAlgorithm> {
    // The mapping is total: every identifier resolves to a statically
    // allocated algorithm description, so the result is always `Some`.
    Some(match id {
        SskdfHmacAlgorithmId::Sha224 => &SSKDF_HMAC_SHA224,
        SskdfHmacAlgorithmId::Sha256 => &SSKDF_HMAC_SHA256,
        SskdfHmacAlgorithmId::Sha384 => &SSKDF_HMAC_SHA384,
        SskdfHmacAlgorithmId::Sha512 => &SSKDF_HMAC_SHA512,
    })
}
/// Retrieve [`SskdfDigestAlgorithm`] using the [`SskdfDigestAlgorithmId`] specified by `id`.
#[must_use]
pub const fn get_sskdf_digest_algorithm(
    id: SskdfDigestAlgorithmId,
) -> Option<&'static SskdfDigestAlgorithm> {
    // The mapping is total: every identifier resolves to a statically
    // allocated algorithm description, so the result is always `Some`.
    Some(match id {
        SskdfDigestAlgorithmId::Sha224 => &SSKDF_DIGEST_SHA224,
        SskdfDigestAlgorithmId::Sha256 => &SSKDF_DIGEST_SHA256,
        SskdfDigestAlgorithmId::Sha384 => &SSKDF_DIGEST_SHA384,
        SskdfDigestAlgorithmId::Sha512 => &SSKDF_DIGEST_SHA512,
    })
}
/// SSKDF algorithm using HMAC
pub struct SskdfHmacAlgorithm {
    // Identifies which digest backs the HMAC auxiliary function.
    id: SskdfHmacAlgorithmId,
}
impl SskdfHmacAlgorithm {
    /// Returns the SSKDF HMAC Algorithm Identifier
    #[must_use]
    pub fn id(&self) -> SskdfHmacAlgorithmId {
        self.id
    }
    // Maps this algorithm's identifier to the AWS-LC `EVP_MD` digest used
    // by the HMAC auxiliary function.
    #[must_use]
    fn get_evp_md(&self) -> ConstPointer<'_, EVP_MD> {
        match_digest_type(match self.id {
            SskdfHmacAlgorithmId::Sha224 => &AlgorithmID::SHA224,
            SskdfHmacAlgorithmId::Sha256 => &AlgorithmID::SHA256,
            SskdfHmacAlgorithmId::Sha384 => &AlgorithmID::SHA384,
            SskdfHmacAlgorithmId::Sha512 => &AlgorithmID::SHA512,
        })
    }
}
// Two algorithm handles are equal exactly when their identifiers match.
impl PartialEq for SskdfHmacAlgorithm {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for SskdfHmacAlgorithm {}
// Debug output delegates to the identifier's, e.g. `Sha256`.
impl core::fmt::Debug for SskdfHmacAlgorithm {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        core::fmt::Debug::fmt(&self.id, f)
    }
}
/// SSKDF algorithm using digest
pub struct SskdfDigestAlgorithm {
    // Identifies which digest is used as the auxiliary function.
    id: SskdfDigestAlgorithmId,
}
impl SskdfDigestAlgorithm {
    /// Returns the SSKDF Algorithm Identifier
    #[must_use]
    pub fn id(&self) -> SskdfDigestAlgorithmId {
        self.id
    }
    // Maps this algorithm's identifier to the AWS-LC `EVP_MD` digest used
    // as the auxiliary function.
    #[must_use]
    fn get_evp_md(&self) -> ConstPointer<'_, EVP_MD> {
        match_digest_type(match self.id {
            SskdfDigestAlgorithmId::Sha224 => &AlgorithmID::SHA224,
            SskdfDigestAlgorithmId::Sha256 => &AlgorithmID::SHA256,
            SskdfDigestAlgorithmId::Sha384 => &AlgorithmID::SHA384,
            SskdfDigestAlgorithmId::Sha512 => &AlgorithmID::SHA512,
        })
    }
}
// Two algorithm handles are equal exactly when their identifiers match.
impl PartialEq for SskdfDigestAlgorithm {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for SskdfDigestAlgorithm {}
// Debug output delegates to the identifier's, e.g. `Sha256`.
impl core::fmt::Debug for SskdfDigestAlgorithm {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        core::fmt::Debug::fmt(&self.id, f)
    }
}
/// Single-step (One-step) Key Derivation Function Digest Algorithm Identifier
// `non_exhaustive` leaves room to add digests without a breaking change.
#[non_exhaustive]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SskdfDigestAlgorithmId {
    /// SSKDF with SHA224
    Sha224,
    /// SSKDF with SHA256
    Sha256,
    /// SSKDF with SHA384
    Sha384,
    /// SSKDF with SHA512
    Sha512,
}
/// Single-step (One-step) Key Derivation Function HMAC Algorithm Identifier
// `non_exhaustive` leaves room to add digests without a breaking change.
#[non_exhaustive]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SskdfHmacAlgorithmId {
    /// SSKDF with HMAC-SHA224
    Sha224,
    /// SSKDF with HMAC-SHA256
    Sha256,
    /// SSKDF with HMAC-SHA384
    Sha384,
    /// SSKDF with HMAC-SHA512
    Sha512,
}
/// # Single-step Key Derivation Function using HMAC
///
/// This algorithm may be referred to as "Single-Step KDF" or "NIST Concatenation KDF" by other
/// implementors.
///
/// ## Input Validation and Defaults
/// * `output.len()`, `secret.len()`, `info.len()` each must be <= 2^30.
/// * The default salt, an all zero byte string with length equal to the digest block length, is used
///   if `salt.len() == 0`.
/// * `output.len() > 0` and `secret.len() > 0`
///
/// ## Implementation Notes
///
/// This implementation adheres to the algorithm specified in Section 4 of the
/// NIST Special Publication 800-56C Revision 2 published on August 2020.
/// Using Option 2 for the auxiliary function H.
///
/// Specification is available at <https://doi.org/10.6028/NIST.SP.800-56Cr2>
///
/// # Errors
/// `Unspecified` is returned if input validation fails or an unexpected error occurs.
pub fn sskdf_hmac(
    algorithm: &'static SskdfHmacAlgorithm,
    secret: &[u8],
    info: &[u8],
    salt: &[u8],
    output: &mut [u8],
) -> Result<(), Unspecified> {
    let evp_md = algorithm.get_evp_md();
    let out_len = output.len();
    // SAFETY: every pointer/length pair is derived from a live Rust slice,
    // and `evp_md` is obtained from the algorithm's digest table. The call
    // reports success by returning 1; anything else maps to `Unspecified`.
    if 1 != unsafe {
        SSKDF_hmac(
            output.as_mut_ptr(),
            out_len,
            evp_md.as_const_ptr(),
            secret.as_ptr(),
            secret.len(),
            info.as_ptr(),
            info.len(),
            salt.as_ptr(),
            salt.len(),
        )
    } {
        return Err(Unspecified);
    }
    Ok(())
}
/// # Single-step Key Derivation Function using digest
///
/// This algorithm may be referred to as "Single-Step KDF" or "NIST Concatenation KDF" by other
/// implementors.
///
/// ## Input Validation and Defaults
/// * `output.len()`, `secret.len()`, `info.len()` each must be <= 2^30.
/// * `output.len() > 0` and `secret.len() > 0`
///
/// ## Implementation Notes
///
/// This implementation adheres to the algorithm specified in Section 4 of the
/// NIST Special Publication 800-56C Revision 2 published on August 2020.
/// Using Option 1 for the auxiliary function H.
///
/// Specification is available at <https://doi.org/10.6028/NIST.SP.800-56Cr2>
///
/// # Errors
/// `Unspecified` is returned if input validation fails or an unexpected error occurs.
pub fn sskdf_digest(
    algorithm: &'static SskdfDigestAlgorithm,
    secret: &[u8],
    info: &[u8],
    output: &mut [u8],
) -> Result<(), Unspecified> {
    let evp_md = algorithm.get_evp_md();
    let out_len = output.len();
    // SAFETY: every pointer/length pair is derived from a live Rust slice,
    // and `evp_md` is obtained from the algorithm's digest table. The call
    // reports success by returning 1; anything else maps to `Unspecified`.
    if 1 != unsafe {
        SSKDF_digest(
            output.as_mut_ptr(),
            out_len,
            evp_md.as_const_ptr(),
            secret.as_ptr(),
            secret.len(),
            info.as_ptr(),
            info.len(),
        )
    } {
        return Err(Unspecified);
    }
    Ok(())
}

906
vendor/aws-lc-rs/src/kem.rs vendored Normal file
View File

@@ -0,0 +1,906 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Key-Encapsulation Mechanisms (KEMs), including support for Kyber Round 3 Submission.
//!
//! # Example
//!
//! Note that this example uses the ML-KEM-512 algorithm, but other algorithms can be used
//! in the exact same way by substituting
//! `kem::<desired_algorithm_here>` for `kem::ML_KEM_512`.
//!
//! ```rust
//! use aws_lc_rs::{
//! kem::{Ciphertext, DecapsulationKey, EncapsulationKey},
//! kem::{ML_KEM_512}
//! };
//!
//! // Alice generates their (private) decapsulation key.
//! let decapsulation_key = DecapsulationKey::generate(&ML_KEM_512)?;
//!
//! // Alice computes the (public) encapsulation key.
//! let encapsulation_key = decapsulation_key.encapsulation_key()?;
//!
//! let encapsulation_key_bytes = encapsulation_key.key_bytes()?;
//!
//! // Alice sends the encapsulation key bytes to bob through some
//! // protocol message.
//! let encapsulation_key_bytes = encapsulation_key_bytes.as_ref();
//!
//! // Bob constructs the (public) encapsulation key from the key bytes provided by Alice.
//! let retrieved_encapsulation_key = EncapsulationKey::new(&ML_KEM_512, encapsulation_key_bytes)?;
//!
//! // Bob executes the encapsulation algorithm to produce their copy of the secret, and associated ciphertext.
//! let (ciphertext, bob_secret) = retrieved_encapsulation_key.encapsulate()?;
//!
//! // Alice receives ciphertext bytes from Bob
//! let ciphertext_bytes = ciphertext.as_ref();
//!
//! // Bob sends Alice the ciphertext computed from the encapsulation algorithm, Alice runs decapsulation to derive their
//! // copy of the secret.
//! let alice_secret = decapsulation_key.decapsulate(Ciphertext::from(ciphertext_bytes))?;
//!
//! // Alice and Bob have now arrived to the same secret
//! assert_eq!(alice_secret.as_ref(), bob_secret.as_ref());
//!
//! # Ok::<(), aws_lc_rs::error::Unspecified>(())
//! ```
use crate::aws_lc::{
EVP_PKEY_CTX_kem_set_params, EVP_PKEY_decapsulate, EVP_PKEY_encapsulate,
EVP_PKEY_kem_new_raw_public_key, EVP_PKEY_kem_new_raw_secret_key, EVP_PKEY, EVP_PKEY_KEM,
};
use crate::buffer::Buffer;
use crate::encoding::generated_encodings;
use crate::error::{KeyRejected, Unspecified};
use crate::ptr::LcPtr;
use alloc::borrow::Cow;
use core::cmp::Ordering;
use zeroize::Zeroize;
// Key, ciphertext, and shared-secret sizes (in bytes) for each ML-KEM
// parameter set, per NIST FIPS 203.
const ML_KEM_512_SHARED_SECRET_LENGTH: usize = 32;
const ML_KEM_512_PUBLIC_KEY_LENGTH: usize = 800;
const ML_KEM_512_SECRET_KEY_LENGTH: usize = 1632;
const ML_KEM_512_CIPHERTEXT_LENGTH: usize = 768;
const ML_KEM_768_SHARED_SECRET_LENGTH: usize = 32;
const ML_KEM_768_PUBLIC_KEY_LENGTH: usize = 1184;
const ML_KEM_768_SECRET_KEY_LENGTH: usize = 2400;
const ML_KEM_768_CIPHERTEXT_LENGTH: usize = 1088;
const ML_KEM_1024_SHARED_SECRET_LENGTH: usize = 32;
const ML_KEM_1024_PUBLIC_KEY_LENGTH: usize = 1568;
const ML_KEM_1024_SECRET_KEY_LENGTH: usize = 3168;
const ML_KEM_1024_CIPHERTEXT_LENGTH: usize = 1568;
// Each algorithm bundles its identifier with the byte sizes declared above.
/// NIST FIPS 203 ML-KEM-512 algorithm.
pub const ML_KEM_512: Algorithm<AlgorithmId> = Algorithm {
    id: AlgorithmId::MlKem512,
    decapsulate_key_size: ML_KEM_512_SECRET_KEY_LENGTH,
    encapsulate_key_size: ML_KEM_512_PUBLIC_KEY_LENGTH,
    ciphertext_size: ML_KEM_512_CIPHERTEXT_LENGTH,
    shared_secret_size: ML_KEM_512_SHARED_SECRET_LENGTH,
};
/// NIST FIPS 203 ML-KEM-768 algorithm.
pub const ML_KEM_768: Algorithm<AlgorithmId> = Algorithm {
    id: AlgorithmId::MlKem768,
    decapsulate_key_size: ML_KEM_768_SECRET_KEY_LENGTH,
    encapsulate_key_size: ML_KEM_768_PUBLIC_KEY_LENGTH,
    ciphertext_size: ML_KEM_768_CIPHERTEXT_LENGTH,
    shared_secret_size: ML_KEM_768_SHARED_SECRET_LENGTH,
};
/// NIST FIPS 203 ML-KEM-1024 algorithm.
pub const ML_KEM_1024: Algorithm<AlgorithmId> = Algorithm {
    id: AlgorithmId::MlKem1024,
    decapsulate_key_size: ML_KEM_1024_SECRET_KEY_LENGTH,
    encapsulate_key_size: ML_KEM_1024_PUBLIC_KEY_LENGTH,
    ciphertext_size: ML_KEM_1024_CIPHERTEXT_LENGTH,
    shared_secret_size: ML_KEM_1024_SHARED_SECRET_LENGTH,
};
use crate::aws_lc::{NID_MLKEM1024, NID_MLKEM512, NID_MLKEM768};
/// An identifier for a KEM algorithm.
// The `Sealed` bound restricts implementations to types inside this crate.
pub trait AlgorithmIdentifier:
    Copy + Clone + Debug + PartialEq + crate::sealed::Sealed + 'static
{
    /// Returns the algorithm's associated AWS-LC nid.
    fn nid(self) -> i32;
}
/// A KEM algorithm
#[derive(PartialEq)]
pub struct Algorithm<Id = AlgorithmId>
where
    Id: AlgorithmIdentifier,
{
    pub(crate) id: Id,
    // All sizes below are in bytes.
    pub(crate) decapsulate_key_size: usize,
    pub(crate) encapsulate_key_size: usize,
    pub(crate) ciphertext_size: usize,
    pub(crate) shared_secret_size: usize,
}
impl<Id> Algorithm<Id>
where
    Id: AlgorithmIdentifier,
{
    /// Returns the identifier for this algorithm.
    #[must_use]
    pub fn id(&self) -> Id {
        self.id
    }
    // Size of the raw decapsulation (secret) key, in bytes.
    #[inline]
    #[allow(dead_code)]
    pub(crate) fn decapsulate_key_size(&self) -> usize {
        self.decapsulate_key_size
    }
    // Size of the raw encapsulation (public) key, in bytes.
    #[inline]
    pub(crate) fn encapsulate_key_size(&self) -> usize {
        self.encapsulate_key_size
    }
    // Size of the ciphertext produced by encapsulation, in bytes.
    #[inline]
    pub(crate) fn ciphertext_size(&self) -> usize {
        self.ciphertext_size
    }
    // Size of the derived shared secret, in bytes.
    #[inline]
    pub(crate) fn shared_secret_size(&self) -> usize {
        self.shared_secret_size
    }
}
// Debug output delegates to the identifier's, e.g. `MlKem512`.
impl<Id> Debug for Algorithm<Id>
where
    Id: AlgorithmIdentifier,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        Debug::fmt(&self.id, f)
    }
}
/// A serializable decapsulation key usable with KEMs. This can be randomly generated with `DecapsulationKey::generate`.
pub struct DecapsulationKey<Id = AlgorithmId>
where
    Id: AlgorithmIdentifier,
{
    // Parameter-set description (sizes and nid) for this key.
    algorithm: &'static Algorithm<Id>,
    // Owned AWS-LC key handle holding the secret key material.
    evp_pkey: LcPtr<EVP_PKEY>,
}
/// Identifier for a KEM algorithm.
// `non_exhaustive` leaves room to add parameter sets without a breaking change.
#[non_exhaustive]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum AlgorithmId {
    /// NIST FIPS 203 ML-KEM-512 algorithm.
    MlKem512,
    /// NIST FIPS 203 ML-KEM-768 algorithm.
    MlKem768,
    /// NIST FIPS 203 ML-KEM-1024 algorithm.
    MlKem1024,
}
impl AlgorithmIdentifier for AlgorithmId {
    // Maps each ML-KEM parameter set to its AWS-LC nid constant.
    fn nid(self) -> i32 {
        match self {
            AlgorithmId::MlKem512 => NID_MLKEM512,
            AlgorithmId::MlKem768 => NID_MLKEM768,
            AlgorithmId::MlKem1024 => NID_MLKEM1024,
        }
    }
}
// Seal the trait so only this crate can provide identifiers.
impl crate::sealed::Sealed for AlgorithmId {}
impl<Id> DecapsulationKey<Id>
where
Id: AlgorithmIdentifier,
{
/// Creates a new KEM decapsulation key from raw bytes. This method MUST NOT be used to generate
/// a new decapsulation key, rather it MUST be used to construct `DecapsulationKey` previously serialized
/// to raw bytes.
///
/// `alg` is the [`Algorithm`] to be associated with the generated `DecapsulationKey`.
///
/// `bytes` is a slice of raw bytes representing a `DecapsulationKey`.
///
/// # Security Considerations
///
/// This function performs size validation but does not fully validate key material integrity.
/// Invalid key bytes (e.g., corrupted or tampered data) may be accepted by this function but
/// will cause [`Self::decapsulate`] to fail. Only use bytes that were previously obtained from
/// [`Self::key_bytes`] on a validly generated key.
///
/// # Limitations
///
/// The `DecapsulationKey` returned by this function will NOT provide the associated
/// `EncapsulationKey` via [`Self::encapsulation_key`]. The `EncapsulationKey` must be
/// serialized and restored separately using [`EncapsulationKey::key_bytes`] and
/// [`EncapsulationKey::new`].
///
/// # Errors
///
/// Returns `KeyRejected::too_small()` if `bytes.len() < alg.decapsulate_key_size()`.
///
/// Returns `KeyRejected::too_large()` if `bytes.len() > alg.decapsulate_key_size()`.
///
/// Returns `KeyRejected::unexpected_error()` if the underlying cryptographic operation fails.
pub fn new(alg: &'static Algorithm<Id>, bytes: &[u8]) -> Result<Self, KeyRejected> {
    // Only the key length is validated here; cryptographic validity is
    // not checked (see "Security Considerations" above).
    match bytes.len().cmp(&alg.decapsulate_key_size()) {
        Ordering::Less => Err(KeyRejected::too_small()),
        Ordering::Greater => Err(KeyRejected::too_large()),
        Ordering::Equal => Ok(()),
    }?;
    let evp_pkey = LcPtr::new(unsafe {
        EVP_PKEY_kem_new_raw_secret_key(alg.id.nid(), bytes.as_ptr(), bytes.len())
    })?;
    Ok(DecapsulationKey {
        algorithm: alg,
        evp_pkey,
    })
}
/// Generate a new KEM decapsulation key for the given algorithm.
///
/// # Errors
/// `error::Unspecified` when operation fails due to internal error.
pub fn generate(alg: &'static Algorithm<Id>) -> Result<Self, Unspecified> {
let kyber_key = kem_key_generate(alg.id.nid())?;
Ok(DecapsulationKey {
algorithm: alg,
evp_pkey: kyber_key,
})
}
/// Return the algorithm associated with the given KEM decapsulation key.
#[must_use]
pub fn algorithm(&self) -> &'static Algorithm<Id> {
    self.algorithm
}
/// Returns the raw bytes of the `DecapsulationKey`.
///
/// The returned bytes can be used with [`Self::new`] to reconstruct the `DecapsulationKey`.
///
/// # Errors
///
/// Returns [`Unspecified`] if the key bytes cannot be retrieved from the underlying
/// cryptographic implementation.
pub fn key_bytes(&self) -> Result<DecapsulationKeyBytes<'static>, Unspecified> {
    let decapsulation_key_bytes = self.evp_pkey.as_const().marshal_raw_private_key()?;
    // The marshalled length must match the algorithm's declared key size.
    debug_assert_eq!(
        decapsulation_key_bytes.len(),
        self.algorithm.decapsulate_key_size()
    );
    Ok(DecapsulationKeyBytes::new(decapsulation_key_bytes))
}
/// Returns the `EncapsulationKey` associated with this `DecapsulationKey`.
///
/// # Errors
///
/// Returns [`Unspecified`] in the following cases:
/// * The `DecapsulationKey` was constructed from raw bytes using [`Self::new`],
///   as the underlying key representation does not include the public key component.
///   In this case, the `EncapsulationKey` must be serialized and restored separately.
/// * An internal error occurs while extracting the public key.
#[allow(clippy::missing_panics_doc)]
pub fn encapsulation_key(&self) -> Result<EncapsulationKey<Id>, Unspecified> {
    let candidate = EncapsulationKey {
        algorithm: self.algorithm,
        evp_pkey: self.evp_pkey.clone(),
    };
    // A key built from raw secret bytes carries no valid public component;
    // probing `key_bytes` detects that case so we fail here rather than later.
    candidate.key_bytes()?;
    Ok(candidate)
}
/// Performs the decapsulate operation using this `DecapsulationKey` on the given ciphertext.
///
/// `ciphertext` is the ciphertext generated by the encapsulate operation using the `EncapsulationKey`
/// associated with this `DecapsulationKey`.
///
/// # Errors
///
/// Returns [`Unspecified`] in the following cases:
/// * The `ciphertext` is malformed or was not generated for this key's algorithm.
/// * The `DecapsulationKey` was constructed from invalid bytes (e.g., corrupted or tampered
///   key material passed to [`Self::new`]). Note that [`Self::new`] only validates the size
///   of the key bytes, not their cryptographic validity.
/// * An internal cryptographic error occurs.
#[allow(clippy::needless_pass_by_value)]
pub fn decapsulate(&self, ciphertext: Ciphertext<'_>) -> Result<SharedSecret, Unspecified> {
    // Allocate an output buffer of the algorithm's declared shared-secret
    // size; AWS-LC writes the secret into it and reports the length back.
    let mut shared_secret_len = self.algorithm.shared_secret_size();
    let mut shared_secret: Vec<u8> = vec![0u8; shared_secret_len];
    let mut ctx = self.evp_pkey.create_EVP_PKEY_CTX()?;
    let ciphertext = ciphertext.as_ref();
    if 1 != unsafe {
        EVP_PKEY_decapsulate(
            ctx.as_mut_ptr(),
            shared_secret.as_mut_ptr(),
            &mut shared_secret_len,
            // AWS-LC incorrectly has this as an unqualified `uint8_t *`, it should be qualified with const
            ciphertext.as_ptr().cast_mut(),
            ciphertext.len(),
        )
    } {
        return Err(Unspecified);
    }
    // This is currently pedantic but done for safety in-case the shared_secret buffer
    // size changes in the future. `EVP_PKEY_decapsulate` updates `shared_secret_len` with
    // the length of the shared secret in the event the buffer provided was larger than the secret.
    // This truncates the buffer to the proper length to match the shared secret written.
    debug_assert_eq!(shared_secret_len, shared_secret.len());
    shared_secret.truncate(shared_secret_len);
    Ok(SharedSecret(shared_secret.into_boxed_slice()))
}
}
// SAFETY(review): assumes `LcPtr<EVP_PKEY>` is an exclusively-owned handle and
// that the underlying AWS-LC key object is safe to move between and share
// across threads (no `&self` method mutates it unsynchronized) — TODO confirm
// against `LcPtr`'s contract.
unsafe impl<Id> Send for DecapsulationKey<Id> where Id: AlgorithmIdentifier {}
// SAFETY(review): same assumption as the `Send` impl directly above.
unsafe impl<Id> Sync for DecapsulationKey<Id> where Id: AlgorithmIdentifier {}
impl<Id> Debug for DecapsulationKey<Id>
where
    Id: AlgorithmIdentifier,
{
    /// Formats the key for debugging. The `evp_pkey` handle (and thus any
    /// secret material) is deliberately omitted via `finish_non_exhaustive`.
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("DecapsulationKey")
            .field("algorithm", &self.algorithm)
            .finish_non_exhaustive()
    }
}
// Emits the `EncapsulationKeyBytes` and `DecapsulationKeyBytes` buffer types
// returned by the `key_bytes` methods in this module.
generated_encodings!(
    (EncapsulationKeyBytes, EncapsulationKeyBytesType),
    (DecapsulationKeyBytes, DecapsulationKeyBytesType)
);
/// A serializable encapsulation key usable with KEM algorithms. Constructed
/// from either a `DecapsulationKey` or raw bytes.
pub struct EncapsulationKey<Id = AlgorithmId>
where
    Id: AlgorithmIdentifier,
{
    // KEM algorithm this public key belongs to.
    algorithm: &'static Algorithm<Id>,
    // Owned handle to the underlying AWS-LC key object.
    evp_pkey: LcPtr<EVP_PKEY>,
}
impl<Id> EncapsulationKey<Id>
where
    Id: AlgorithmIdentifier,
{
    /// Return the algorithm associated with the given KEM encapsulation key.
    #[must_use]
    pub fn algorithm(&self) -> &'static Algorithm<Id> {
        self.algorithm
    }

    /// Performs the encapsulate operation using this KEM encapsulation key, generating a ciphertext
    /// and associated shared secret.
    ///
    /// # Errors
    /// `error::Unspecified` when operation fails due to internal error.
    pub fn encapsulate(&self) -> Result<(Ciphertext<'static>, SharedSecret), Unspecified> {
        // Allocate output buffers at the algorithm's declared sizes; AWS-LC
        // writes into them and reports the actual lengths back.
        let mut ciphertext_len = self.algorithm.ciphertext_size();
        let mut shared_secret_len = self.algorithm.shared_secret_size();
        let mut ciphertext: Vec<u8> = vec![0u8; ciphertext_len];
        let mut shared_secret: Vec<u8> = vec![0u8; shared_secret_len];
        let mut ctx = self.evp_pkey.create_EVP_PKEY_CTX()?;
        if 1 != unsafe {
            EVP_PKEY_encapsulate(
                ctx.as_mut_ptr(),
                ciphertext.as_mut_ptr(),
                &mut ciphertext_len,
                shared_secret.as_mut_ptr(),
                &mut shared_secret_len,
            )
        } {
            return Err(Unspecified);
        }
        // The following two steps are currently pedantic but done for safety in-case the buffer allocation
        // sizes change in the future. `EVP_PKEY_encapsulate` updates `ciphertext_len` and `shared_secret_len` with
        // the length of the ciphertext and shared secret respectively in the event the buffer provided for each was
        // larger than the actual values. Thus these two steps truncate the buffers to the proper length to match the
        // value lengths written.
        debug_assert_eq!(ciphertext_len, ciphertext.len());
        ciphertext.truncate(ciphertext_len);
        debug_assert_eq!(shared_secret_len, shared_secret.len());
        shared_secret.truncate(shared_secret_len);
        Ok((
            Ciphertext::new(ciphertext),
            SharedSecret::new(shared_secret.into_boxed_slice()),
        ))
    }

    /// Returns the `EncapsulationKey` bytes.
    ///
    /// # Errors
    /// * `Unspecified`: Any failure to retrieve the `EncapsulationKey` bytes.
    pub fn key_bytes(&self) -> Result<EncapsulationKeyBytes<'static>, Unspecified> {
        let mut encapsulate_bytes = vec![0u8; self.algorithm.encapsulate_key_size()];
        let encapsulate_key_size = self
            .evp_pkey
            .as_const()
            .marshal_raw_public_to_buffer(&mut encapsulate_bytes)?;
        // The marshalled encoding is fixed-size per algorithm.
        debug_assert_eq!(encapsulate_key_size, encapsulate_bytes.len());
        encapsulate_bytes.truncate(encapsulate_key_size);
        Ok(EncapsulationKeyBytes::new(encapsulate_bytes))
    }

    /// Creates a new KEM encapsulation key from raw bytes. This method MUST NOT be used to generate
    /// a new encapsulation key, rather it MUST be used to construct `EncapsulationKey` previously serialized
    /// to raw bytes.
    ///
    /// `alg` is the [`Algorithm`] to be associated with the generated `EncapsulationKey`.
    ///
    /// `bytes` is a slice of raw bytes representing a `EncapsulationKey`.
    ///
    /// # Errors
    /// `error::KeyRejected` when operation fails during key creation.
    pub fn new(alg: &'static Algorithm<Id>, bytes: &[u8]) -> Result<Self, KeyRejected> {
        // Reject any input that is not exactly the algorithm's public-key size.
        match bytes.len().cmp(&alg.encapsulate_key_size()) {
            Ordering::Less => Err(KeyRejected::too_small()),
            Ordering::Greater => Err(KeyRejected::too_large()),
            Ordering::Equal => Ok(()),
        }?;
        let pubkey = LcPtr::new(unsafe {
            EVP_PKEY_kem_new_raw_public_key(alg.id.nid(), bytes.as_ptr(), bytes.len())
        })?;
        Ok(EncapsulationKey {
            algorithm: alg,
            evp_pkey: pubkey,
        })
    }
}
// SAFETY(review): assumes `LcPtr<EVP_PKEY>` is an exclusively-owned handle and
// the underlying AWS-LC public-key object is safe to move between and share
// across threads — TODO confirm against `LcPtr`'s contract.
unsafe impl<Id> Send for EncapsulationKey<Id> where Id: AlgorithmIdentifier {}
// SAFETY(review): same assumption as the `Send` impl directly above.
unsafe impl<Id> Sync for EncapsulationKey<Id> where Id: AlgorithmIdentifier {}
impl<Id> Debug for EncapsulationKey<Id>
where
    Id: AlgorithmIdentifier,
{
    /// Formats the key for debugging. The `evp_pkey` handle is deliberately
    /// omitted via `finish_non_exhaustive`.
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("EncapsulationKey")
            .field("algorithm", &self.algorithm)
            .finish_non_exhaustive()
    }
}
/// A set of encrypted bytes produced by [`EncapsulationKey::encapsulate`],
/// and used as an input to [`DecapsulationKey::decapsulate`].
// `Cow` lets this hold either an owned buffer (produced by `encapsulate`,
// zeroized on drop) or a caller-borrowed slice (via `From<&[u8]>`, left intact).
pub struct Ciphertext<'a>(Cow<'a, [u8]>);
impl<'a> Ciphertext<'a> {
fn new(value: Vec<u8>) -> Ciphertext<'a> {
Self(Cow::Owned(value))
}
}
impl Drop for Ciphertext<'_> {
    fn drop(&mut self) {
        // Only an owned buffer is ours to scrub; a borrowed slice belongs to
        // the caller and is left untouched.
        match self.0 {
            Cow::Owned(ref mut buffer) => buffer.zeroize(),
            Cow::Borrowed(_) => {}
        }
    }
}
impl AsRef<[u8]> for Ciphertext<'_> {
fn as_ref(&self) -> &[u8] {
match self.0 {
Cow::Borrowed(v) => v,
Cow::Owned(ref v) => v.as_ref(),
}
}
}
impl<'a> From<&'a [u8]> for Ciphertext<'a> {
fn from(value: &'a [u8]) -> Self {
Self(Cow::Borrowed(value))
}
}
/// The cryptographic shared secret output from the KEM encapsulate / decapsulate process.
// The boxed bytes are zeroized when the value is dropped (see `Drop` below... no —
// see this type's `Drop` impl).
pub struct SharedSecret(Box<[u8]>);
impl SharedSecret {
fn new(value: Box<[u8]>) -> Self {
Self(value)
}
}
impl Drop for SharedSecret {
    fn drop(&mut self) {
        // Scrub the secret from memory before the allocation is released.
        self.0.zeroize();
    }
}
impl AsRef<[u8]> for SharedSecret {
    /// Exposes the raw shared-secret bytes.
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}
// Generates a fresh KEM key pair for `nid`, returning an LcPtr to an EVP_PKEY.
#[inline]
fn kem_key_generate(nid: i32) -> Result<LcPtr<EVP_PKEY>, Unspecified> {
    LcPtr::<EVP_PKEY>::generate(
        EVP_PKEY_KEM,
        Some(|ctx| match unsafe { EVP_PKEY_CTX_kem_set_params(ctx, nid) } {
            1 => Ok(()),
            _ => Err(()),
        }),
    )
}
#[cfg(test)]
mod tests {
    use super::{Ciphertext, DecapsulationKey, EncapsulationKey, SharedSecret};
    use crate::error::KeyRejected;
    use crate::kem::{ML_KEM_1024, ML_KEM_512, ML_KEM_768};

    #[test]
    fn ciphertext() {
        let ciphertext_bytes = vec![42u8; 4];
        let ciphertext = Ciphertext::from(ciphertext_bytes.as_ref());
        assert_eq!(ciphertext.as_ref(), &[42, 42, 42, 42]);
        drop(ciphertext);
        let ciphertext_bytes = vec![42u8; 4];
        let ciphertext = Ciphertext::<'static>::new(ciphertext_bytes);
        assert_eq!(ciphertext.as_ref(), &[42, 42, 42, 42]);
    }

    #[test]
    fn shared_secret() {
        let secret_bytes = vec![42u8; 4];
        let shared_secret = SharedSecret::new(secret_bytes.into_boxed_slice());
        assert_eq!(shared_secret.as_ref(), &[42, 42, 42, 42]);
    }

    #[test]
    fn test_kem_serialize() {
        for algorithm in [&ML_KEM_512, &ML_KEM_768, &ML_KEM_1024] {
            let priv_key = DecapsulationKey::generate(algorithm).unwrap();
            assert_eq!(priv_key.algorithm(), algorithm);
            // Test DecapsulationKey serialization
            let priv_key_raw_bytes = priv_key.key_bytes().unwrap();
            assert_eq!(
                priv_key_raw_bytes.as_ref().len(),
                algorithm.decapsulate_key_size()
            );
            let priv_key_from_bytes =
                DecapsulationKey::new(algorithm, priv_key_raw_bytes.as_ref()).unwrap();
            assert_eq!(
                priv_key.key_bytes().unwrap().as_ref(),
                priv_key_from_bytes.key_bytes().unwrap().as_ref()
            );
            assert_eq!(priv_key.algorithm(), priv_key_from_bytes.algorithm());
            // Test EncapsulationKey serialization
            let pub_key = priv_key.encapsulation_key().unwrap();
            let pubkey_raw_bytes = pub_key.key_bytes().unwrap();
            let pub_key_from_bytes =
                EncapsulationKey::new(algorithm, pubkey_raw_bytes.as_ref()).unwrap();
            assert_eq!(
                pub_key.key_bytes().unwrap().as_ref(),
                pub_key_from_bytes.key_bytes().unwrap().as_ref()
            );
            assert_eq!(pub_key.algorithm(), pub_key_from_bytes.algorithm());
        }
    }

    #[test]
    fn test_kem_wrong_sizes() {
        for algorithm in [&ML_KEM_512, &ML_KEM_768, &ML_KEM_1024] {
            // Test EncapsulationKey size validation
            let too_long_bytes = vec![0u8; algorithm.encapsulate_key_size() + 1];
            let long_pub_key_from_bytes = EncapsulationKey::new(algorithm, &too_long_bytes);
            assert_eq!(
                long_pub_key_from_bytes.err(),
                Some(KeyRejected::too_large())
            );
            let too_short_bytes = vec![0u8; algorithm.encapsulate_key_size() - 1];
            let short_pub_key_from_bytes = EncapsulationKey::new(algorithm, &too_short_bytes);
            assert_eq!(
                short_pub_key_from_bytes.err(),
                Some(KeyRejected::too_small())
            );
            // Test DecapsulationKey size validation
            let too_long_bytes = vec![0u8; algorithm.decapsulate_key_size() + 1];
            let long_priv_key_from_bytes = DecapsulationKey::new(algorithm, &too_long_bytes);
            assert_eq!(
                long_priv_key_from_bytes.err(),
                Some(KeyRejected::too_large())
            );
            let too_short_bytes = vec![0u8; algorithm.decapsulate_key_size() - 1];
            let short_priv_key_from_bytes = DecapsulationKey::new(algorithm, &too_short_bytes);
            assert_eq!(
                short_priv_key_from_bytes.err(),
                Some(KeyRejected::too_small())
            );
        }
    }

    #[test]
    fn test_kem_e2e() {
        for algorithm in [&ML_KEM_512, &ML_KEM_768, &ML_KEM_1024] {
            let priv_key = DecapsulationKey::generate(algorithm).unwrap();
            assert_eq!(priv_key.algorithm(), algorithm);
            // Serialize and reconstruct the decapsulation key
            let priv_key_bytes = priv_key.key_bytes().unwrap();
            let priv_key_from_bytes =
                DecapsulationKey::new(algorithm, priv_key_bytes.as_ref()).unwrap();
            // Keys reconstructed from bytes cannot provide encapsulation_key()
            assert!(priv_key_from_bytes.encapsulation_key().is_err());
            let pub_key = priv_key.encapsulation_key().unwrap();
            let (alice_ciphertext, alice_secret) =
                pub_key.encapsulate().expect("encapsulate successful");
            // Decapsulate using the reconstructed key
            let bob_secret = priv_key_from_bytes
                .decapsulate(alice_ciphertext)
                .expect("decapsulate successful");
            assert_eq!(alice_secret.as_ref(), bob_secret.as_ref());
        }
    }

    #[test]
    fn test_serialized_kem_e2e() {
        for algorithm in [&ML_KEM_512, &ML_KEM_768, &ML_KEM_1024] {
            let priv_key = DecapsulationKey::generate(algorithm).unwrap();
            assert_eq!(priv_key.algorithm(), algorithm);
            let pub_key = priv_key.encapsulation_key().unwrap();
            // Generate public key bytes to send to bob
            let pub_key_bytes = pub_key.key_bytes().unwrap();
            // Generate private key bytes for alice to store securely
            let priv_key_bytes = priv_key.key_bytes().unwrap();
            // Test that priv_key's EVP_PKEY isn't entirely freed since we remove this pub_key's reference.
            drop(pub_key);
            drop(priv_key);
            let retrieved_pub_key =
                EncapsulationKey::new(algorithm, pub_key_bytes.as_ref()).unwrap();
            let (ciphertext, bob_secret) = retrieved_pub_key
                .encapsulate()
                .expect("encapsulate successful");
            // Alice reconstructs her private key from stored bytes
            let retrieved_priv_key =
                DecapsulationKey::new(algorithm, priv_key_bytes.as_ref()).unwrap();
            let alice_secret = retrieved_priv_key
                .decapsulate(ciphertext)
                .expect("decapsulate successful");
            assert_eq!(alice_secret.as_ref(), bob_secret.as_ref());
        }
    }

    #[test]
    fn test_decapsulation_key_serialization_roundtrip() {
        for algorithm in [&ML_KEM_512, &ML_KEM_768, &ML_KEM_1024] {
            // Generate original key
            let original_key = DecapsulationKey::generate(algorithm).unwrap();
            // Test key_bytes() returns correct size
            let key_bytes = original_key.key_bytes().unwrap();
            assert_eq!(key_bytes.as_ref().len(), algorithm.decapsulate_key_size());
            // Test round-trip serialization/deserialization
            let reconstructed_key = DecapsulationKey::new(algorithm, key_bytes.as_ref()).unwrap();
            // Verify algorithm consistency
            assert_eq!(original_key.algorithm(), reconstructed_key.algorithm());
            assert_eq!(original_key.algorithm(), algorithm);
            // Test serialization produces identical bytes (stability check)
            let key_bytes_2 = reconstructed_key.key_bytes().unwrap();
            assert_eq!(key_bytes.as_ref(), key_bytes_2.as_ref());
            // Test functional equivalence: both keys decrypt the same ciphertext identically
            let pub_key = original_key.encapsulation_key().unwrap();
            let (ciphertext, expected_secret) =
                pub_key.encapsulate().expect("encapsulate successful");
            let secret_from_original = original_key
                .decapsulate(Ciphertext::from(ciphertext.as_ref()))
                .expect("decapsulate with original key");
            let secret_from_reconstructed = reconstructed_key
                .decapsulate(Ciphertext::from(ciphertext.as_ref()))
                .expect("decapsulate with reconstructed key");
            // Verify both keys produce identical secrets
            assert_eq!(expected_secret.as_ref(), secret_from_original.as_ref());
            assert_eq!(expected_secret.as_ref(), secret_from_reconstructed.as_ref());
            // Verify secret length matches algorithm specification.
            // Consistency: use the `shared_secret_size()` accessor like the rest
            // of this module rather than reaching into the private field.
            assert_eq!(
                expected_secret.as_ref().len(),
                algorithm.shared_secret_size()
            );
        }
    }

    #[test]
    fn test_decapsulation_key_zeroed_bytes() {
        // Test behavior when constructing DecapsulationKey from zeroed bytes of correct size.
        // ML-KEM accepts any bytes of the correct size as a valid secret key (seed-based).
        // This test documents the expected behavior.
        for algorithm in [&ML_KEM_512, &ML_KEM_768, &ML_KEM_1024] {
            let zeroed_bytes = vec![0u8; algorithm.decapsulate_key_size()];
            // Constructing a key from zeroed bytes should succeed (ML-KEM treats any
            // correctly-sized byte sequence as a valid seed)
            let key_from_zeroed = DecapsulationKey::new(algorithm, &zeroed_bytes);
            assert!(
                key_from_zeroed.is_ok(),
                "DecapsulationKey::new should accept zeroed bytes of correct size for {:?}",
                algorithm.id()
            );
            let key = key_from_zeroed.unwrap();
            // The key should be able to serialize back to bytes
            let key_bytes = key.key_bytes();
            assert!(
                key_bytes.is_ok(),
                "key_bytes() should succeed for key constructed from zeroed bytes"
            );
            assert_eq!(key_bytes.unwrap().as_ref(), zeroed_bytes.as_slice());
            // encapsulation_key() should fail since key was constructed from raw bytes
            assert!(
                key.encapsulation_key().is_err(),
                "encapsulation_key() should fail for key constructed from raw bytes"
            );
            // Test decapsulation behavior with zeroed-seed key.
            // Generate a valid ciphertext from a properly generated key pair
            let valid_key = DecapsulationKey::generate(algorithm).unwrap();
            let valid_pub_key = valid_key.encapsulation_key().unwrap();
            let (ciphertext, _) = valid_pub_key.encapsulate().unwrap();
            // Decapsulating with a zeroed-seed key fails because the key material
            // doesn't represent a valid ML-KEM private key structure.
            // This documents that ML-KEM validates key integrity during decapsulation.
            let decapsulate_result = key.decapsulate(Ciphertext::from(ciphertext.as_ref()));
            assert!(
                decapsulate_result.is_err(),
                "decapsulate should fail with invalid (zeroed) key material for {:?}",
                algorithm.id()
            );
        }
    }

    #[test]
    fn test_cross_algorithm_key_rejection() {
        // Test that keys from one algorithm are rejected when used with a different algorithm
        // due to size mismatches.
        let algorithms = [&ML_KEM_512, &ML_KEM_768, &ML_KEM_1024];
        for source_alg in &algorithms {
            let key = DecapsulationKey::generate(source_alg).unwrap();
            let key_bytes = key.key_bytes().unwrap();
            for target_alg in &algorithms {
                if source_alg.id() == target_alg.id() {
                    // Same algorithm should succeed
                    let result = DecapsulationKey::new(target_alg, key_bytes.as_ref());
                    assert!(
                        result.is_ok(),
                        "Same algorithm should accept its own key bytes"
                    );
                } else {
                    // Different algorithm should fail due to size mismatch
                    let result = DecapsulationKey::new(target_alg, key_bytes.as_ref());
                    assert!(
                        result.is_err(),
                        "Algorithm {:?} should reject key bytes from {:?}",
                        target_alg.id(),
                        source_alg.id()
                    );
                    // Verify the error is size-related
                    let err = result.err().unwrap();
                    let source_size = source_alg.decapsulate_key_size();
                    let target_size = target_alg.decapsulate_key_size();
                    if source_size < target_size {
                        assert_eq!(
                            err,
                            KeyRejected::too_small(),
                            "Smaller key should be rejected as too_small"
                        );
                    } else {
                        assert_eq!(
                            err,
                            KeyRejected::too_large(),
                            "Larger key should be rejected as too_large"
                        );
                    }
                }
            }
        }
        // Also test EncapsulationKey cross-algorithm rejection for completeness
        for source_alg in &algorithms {
            let decap_key = DecapsulationKey::generate(source_alg).unwrap();
            let encap_key = decap_key.encapsulation_key().unwrap();
            let key_bytes = encap_key.key_bytes().unwrap();
            for target_alg in &algorithms {
                if source_alg.id() == target_alg.id() {
                    let result = EncapsulationKey::new(target_alg, key_bytes.as_ref());
                    assert!(
                        result.is_ok(),
                        "Same algorithm should accept its own encapsulation key bytes"
                    );
                } else {
                    let result = EncapsulationKey::new(target_alg, key_bytes.as_ref());
                    assert!(
                        result.is_err(),
                        "Algorithm {:?} should reject encapsulation key bytes from {:?}",
                        target_alg.id(),
                        source_alg.id()
                    );
                }
            }
        }
    }

    #[test]
    fn test_debug_fmt() {
        let private = DecapsulationKey::generate(&ML_KEM_512).expect("successful generation");
        assert_eq!(
            format!("{private:?}"),
            "DecapsulationKey { algorithm: MlKem512, .. }"
        );
        assert_eq!(
            format!(
                "{:?}",
                private.encapsulation_key().expect("public key retrievable")
            ),
            "EncapsulationKey { algorithm: MlKem512, .. }"
        );
    }
}

417
vendor/aws-lc-rs/src/key_wrap.rs vendored Normal file
View File

@@ -0,0 +1,417 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Key Wrap Algorithms.
//!
//! # Examples
//! ```rust
//! # use std::error::Error;
//! # fn main() -> Result<(), Box<dyn Error>> {
//! use aws_lc_rs::key_wrap::{AesKek, KeyWrapPadded, AES_128};
//!
//! const KEY: &[u8] = &[
//! 0xa8, 0xe0, 0x6d, 0xa6, 0x25, 0xa6, 0x5b, 0x25, 0xcf, 0x50, 0x30, 0x82, 0x68, 0x30, 0xb6,
//! 0x61,
//! ];
//! const PLAINTEXT: &[u8] = &[0x43, 0xac, 0xff, 0x29, 0x31, 0x20, 0xdd, 0x5d];
//!
//! let kek = AesKek::new(&AES_128, KEY)?;
//!
//! let mut output = vec![0u8; PLAINTEXT.len() + 15];
//!
//! let ciphertext = kek.wrap_with_padding(PLAINTEXT, &mut output)?;
//!
//! let kek = AesKek::new(&AES_128, KEY)?;
//!
//! let mut output = vec![0u8; ciphertext.len()];
//!
//! let plaintext = kek.unwrap_with_padding(&*ciphertext, &mut output)?;
//!
//! assert_eq!(PLAINTEXT, plaintext);
//! # Ok(())
//! # }
//! ```
use crate::aws_lc::{
AES_set_decrypt_key, AES_set_encrypt_key, AES_unwrap_key, AES_unwrap_key_padded, AES_wrap_key,
AES_wrap_key_padded, AES_KEY,
};
use crate::error::Unspecified;
use crate::fips::indicator_check;
use crate::sealed::Sealed;
use core::fmt::Debug;
use core::mem::MaybeUninit;
use core::ptr::null;
mod tests;
/// The Key Wrapping Algorithm Identifier
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
#[non_exhaustive] // additional block ciphers may be added without a breaking change
pub enum BlockCipherId {
    /// AES Block Cipher with 128-bit key.
    Aes128,
    /// AES Block Cipher with 256-bit key.
    Aes256,
}
/// A key wrap block cipher.
///
/// The `Sealed` supertrait restricts implementations to those provided by this crate.
pub trait BlockCipher: 'static + Debug + Sealed {
    /// The block cipher identifier.
    fn id(&self) -> BlockCipherId;
    /// The key size in bytes to be used with the block cipher.
    fn key_len(&self) -> usize;
}
/// An AES Block Cipher
pub struct AesBlockCipher {
    // Identifier reported by `BlockCipher::id`.
    id: BlockCipherId,
    // Key size in bytes reported by `BlockCipher::key_len`.
    key_len: usize,
}
impl BlockCipher for AesBlockCipher {
    /// Identifier of this AES variant.
    #[inline]
    fn id(&self) -> BlockCipherId {
        self.id
    }

    /// Key size in bytes required by this AES variant.
    #[inline]
    fn key_len(&self) -> usize {
        self.key_len
    }
}
// Marker impl: permits `AesBlockCipher` to implement this crate's sealed traits.
impl Sealed for AesBlockCipher {}
impl Debug for AesBlockCipher {
    /// Formats as the cipher's identifier (e.g. `Aes128`), omitting `key_len`.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.id.fmt(f)
    }
}
/// AES Block Cipher with 128-bit key.
pub const AES_128: AesBlockCipher = AesBlockCipher {
    id: BlockCipherId::Aes128,
    key_len: 16, // 128 bits
};

/// AES Block Cipher with 256-bit key.
pub const AES_256: AesBlockCipher = AesBlockCipher {
    id: BlockCipherId::Aes256,
    key_len: 32, // 256 bits
};
/// A Key Wrap (KW) algorithm implementation.
#[allow(clippy::module_name_repetitions)]
pub trait KeyWrap: Sealed {
    /// Performs the key wrap encryption algorithm using a block cipher.
    /// It wraps `plaintext` and writes the corresponding ciphertext to `output`.
    ///
    /// # Errors
    /// * [`Unspecified`]: Any error that has occurred performing the operation.
    fn wrap<'output>(
        self,
        plaintext: &[u8],
        output: &'output mut [u8],
    ) -> Result<&'output mut [u8], Unspecified>;

    /// Performs the key wrap decryption algorithm using a block cipher.
    /// It unwraps `ciphertext` and writes the corresponding plaintext to `output`.
    ///
    /// # Errors
    /// * [`Unspecified`]: Any error that has occurred performing the operation.
    fn unwrap<'output>(
        self,
        ciphertext: &[u8],
        output: &'output mut [u8],
    ) -> Result<&'output mut [u8], Unspecified>;
}
/// A Key Wrap with Padding (KWP) algorithm implementation.
#[allow(clippy::module_name_repetitions)]
pub trait KeyWrapPadded: Sealed {
    /// Performs the key wrap padding encryption algorithm using a block cipher.
    /// It wraps and pads `plaintext`, writing the corresponding ciphertext to `output`.
    ///
    /// # Errors
    /// * [`Unspecified`]: Any error that has occurred performing the operation.
    fn wrap_with_padding<'output>(
        self,
        plaintext: &[u8],
        output: &'output mut [u8],
    ) -> Result<&'output mut [u8], Unspecified>;

    /// Performs the key wrap padding decryption algorithm using a block cipher.
    /// It unwraps the padded `ciphertext` and writes the corresponding plaintext to `output`.
    ///
    /// # Errors
    /// * [`Unspecified`]: Any error that has occurred performing the operation.
    fn unwrap_with_padding<'output>(
        self,
        ciphertext: &[u8],
        output: &'output mut [u8],
    ) -> Result<&'output mut [u8], Unspecified>;
}
/// AES Key Encryption Key.
// Convenience alias: AES is the only block cipher family provided in this module.
pub type AesKek = KeyEncryptionKey<AesBlockCipher>;
/// The key-encryption key used with the selected cipher algorithm to wrap or unwrap a key.
///
/// Implements the NIST SP 800-38F key wrapping algorithm.
///
/// The NIST specification is similar to that of RFC 3394 but with the following caveats:
/// * Specifies a maximum plaintext length that can be accepted.
/// * Allows implementations to specify a subset of valid lengths accepted.
/// * Allows for the usage of other 128-bit block ciphers other than AES.
pub struct KeyEncryptionKey<Cipher: BlockCipher> {
    // Static descriptor of the block cipher in use.
    cipher: &'static Cipher,
    // Raw key material; `new` guarantees its length equals `cipher.key_len()`.
    key: Box<[u8]>,
}
impl<Cipher: BlockCipher> KeyEncryptionKey<Cipher> {
    /// Construct a new Key Encryption Key.
    ///
    /// # Errors
    /// * [`Unspecified`]: Any error that occurs constructing the key encryption key.
    pub fn new(cipher: &'static Cipher, key: &[u8]) -> Result<Self, Unspecified> {
        // A key of any length other than the cipher's exact key size is invalid.
        if key.len() == cipher.key_len() {
            Ok(Self {
                cipher,
                key: key.to_vec().into_boxed_slice(),
            })
        } else {
            Err(Unspecified)
        }
    }

    /// Returns the block cipher algorithm identifier configured for the key.
    #[must_use]
    pub fn block_cipher_id(&self) -> BlockCipherId {
        self.cipher.id()
    }
}
// Marker impl: permits `KeyEncryptionKey` to implement this crate's sealed traits.
impl<Cipher: BlockCipher> Sealed for KeyEncryptionKey<Cipher> {}
impl KeyWrap for KeyEncryptionKey<AesBlockCipher> {
    /// Performs the key wrap encryption algorithm using `KeyEncryptionKey`'s configured block cipher.
    /// It wraps `plaintext` and writes the corresponding ciphertext to `output`.
    ///
    /// # Validation
    /// * `plaintext.len()` must be a multiple of eight
    /// * `output.len() >= (input.len() + 8)`
    ///
    /// # Errors
    /// * [`Unspecified`]: An error occurred either due to `output` being insufficiently sized, `input` exceeding
    ///   the allowed input size, or for other unspecified reasons.
    fn wrap<'output>(
        self,
        plaintext: &[u8],
        output: &'output mut [u8],
    ) -> Result<&'output mut [u8], Unspecified> {
        // KW output is always input + 8 bytes; `checked_add` avoids a
        // (theoretical) overflow panic in debug builds for enormous inputs.
        let required_len = plaintext.len().checked_add(8).ok_or(Unspecified)?;
        if output.len() < required_len {
            return Err(Unspecified);
        }
        let mut aes_key = MaybeUninit::<AES_KEY>::uninit();
        let key_bits: u32 = (self.key.len() * 8).try_into().map_err(|_| Unspecified)?;
        if 0 != unsafe { AES_set_encrypt_key(self.key.as_ptr(), key_bits, aes_key.as_mut_ptr()) } {
            return Err(Unspecified);
        }
        // SAFETY: `AES_set_encrypt_key` returned success, so `aes_key` was initialized.
        let aes_key = unsafe { aes_key.assume_init() };
        // AWS-LC validates the following:
        // * in_len <= INT_MAX - 8
        // * in_len >= 16
        // * in_len % 8 == 0
        let out_len = indicator_check!(unsafe {
            AES_wrap_key(
                &aes_key,
                null(),
                output.as_mut_ptr(),
                plaintext.as_ptr(),
                plaintext.len(),
            )
        });
        if out_len == -1 {
            return Err(Unspecified);
        }
        let out_len: usize = out_len.try_into().map_err(|_| Unspecified)?;
        debug_assert_eq!(out_len, plaintext.len() + 8);
        Ok(&mut output[..out_len])
    }

    /// Performs the key wrap decryption algorithm using `KeyEncryptionKey`'s configured block cipher.
    /// It unwraps `ciphertext` and writes the corresponding plaintext to `output`.
    ///
    /// # Validation
    /// * `ciphertext.len()` must be a multiple of 8
    /// * `output.len() >= (input.len() - 8)`
    ///
    /// # Errors
    /// * [`Unspecified`]: An error occurred either due to `output` being insufficiently sized, `input` exceeding
    ///   the allowed input size, or for other unspecified reasons.
    fn unwrap<'output>(
        self,
        ciphertext: &[u8],
        output: &'output mut [u8],
    ) -> Result<&'output mut [u8], Unspecified> {
        // Fix: `ciphertext.len() - 8` underflowed (panicking in debug builds)
        // when the ciphertext was shorter than 8 bytes. `checked_sub` rejects
        // such inputs cleanly; AWS-LC would refuse them anyway (in_len > 24).
        let min_output_len = ciphertext.len().checked_sub(8).ok_or(Unspecified)?;
        if output.len() < min_output_len {
            return Err(Unspecified);
        }
        let mut aes_key = MaybeUninit::<AES_KEY>::uninit();
        if 0 != unsafe {
            AES_set_decrypt_key(
                self.key.as_ptr(),
                (self.key.len() * 8).try_into().map_err(|_| Unspecified)?,
                aes_key.as_mut_ptr(),
            )
        } {
            return Err(Unspecified);
        }
        // SAFETY: `AES_set_decrypt_key` returned success, so `aes_key` was initialized.
        let aes_key = unsafe { aes_key.assume_init() };
        // AWS-LC validates the following:
        // * in_len < INT_MAX
        // * in_len > 24
        // * in_len % 8 == 0
        let out_len = indicator_check!(unsafe {
            AES_unwrap_key(
                &aes_key,
                null(),
                output.as_mut_ptr(),
                ciphertext.as_ptr(),
                ciphertext.len(),
            )
        });
        if out_len == -1 {
            return Err(Unspecified);
        }
        let out_len: usize = out_len.try_into().map_err(|_| Unspecified)?;
        debug_assert_eq!(out_len, ciphertext.len() - 8);
        Ok(&mut output[..out_len])
    }
}
impl KeyWrapPadded for KeyEncryptionKey<AesBlockCipher> {
    /// Performs the key wrap padding encryption algorithm using `KeyEncryptionKey`'s configured block cipher.
    /// It wraps and pads `plaintext`, writing the corresponding ciphertext to `output`.
    ///
    /// # Validation
    /// * `output.len() >= (input.len() + 15)`
    ///
    /// # Errors
    /// * [`Unspecified`]: An error occurred either due to `output` being insufficiently sized, `input` exceeding
    ///   the allowed input size, or for other unspecified reasons.
    fn wrap_with_padding<'output>(
        self,
        plaintext: &[u8],
        output: &'output mut [u8],
    ) -> Result<&'output mut [u8], Unspecified> {
        let mut aes_key = MaybeUninit::<AES_KEY>::uninit();
        let key_bits: u32 = (self.key.len() * 8).try_into().map_err(|_| Unspecified)?;
        if 0 != unsafe { AES_set_encrypt_key(self.key.as_ptr(), key_bits, aes_key.as_mut_ptr()) } {
            return Err(Unspecified);
        }
        // SAFETY: `AES_set_encrypt_key` returned success, so `aes_key` was initialized.
        let aes_key = unsafe { aes_key.assume_init() };
        let mut out_len: usize = 0;
        // AWS-LC validates the following:
        // * in_len != 0
        // * in_len <= INT_MAX
        // * max_out >= required_padding + 8
        if 1 != indicator_check!(unsafe {
            AES_wrap_key_padded(
                &aes_key,
                output.as_mut_ptr(),
                &mut out_len,
                output.len(),
                plaintext.as_ptr(),
                plaintext.len(),
            )
        }) {
            return Err(Unspecified);
        }
        // `out_len` was set by AWS-LC to the number of ciphertext bytes written.
        Ok(&mut output[..out_len])
    }

    /// Performs the key wrap padding decryption algorithm using `KeyEncryptionKey`'s configured block cipher.
    /// It unwraps the padded `ciphertext` and writes the corresponding plaintext to `output`.
    ///
    /// # Sizing `output`
    /// `output.len() >= input.len()`.
    ///
    /// # Errors
    /// * [`Unspecified`]: An error occurred either due to `output` being insufficiently sized, `input` exceeding
    ///   the allowed input size, or for other unspecified reasons.
    fn unwrap_with_padding<'output>(
        self,
        ciphertext: &[u8],
        output: &'output mut [u8],
    ) -> Result<&'output mut [u8], Unspecified> {
        let mut aes_key = MaybeUninit::<AES_KEY>::uninit();
        if 0 != unsafe {
            AES_set_decrypt_key(
                self.key.as_ptr(),
                (self.key.len() * 8).try_into().map_err(|_| Unspecified)?,
                aes_key.as_mut_ptr(),
            )
        } {
            return Err(Unspecified);
        }
        // SAFETY: `AES_set_decrypt_key` returned success, so `aes_key` was initialized.
        let aes_key = unsafe { aes_key.assume_init() };
        let mut out_len: usize = 0;
        // AWS-LC validates the following:
        // * in_len >= AES_BLOCK_SIZE
        // * max_out >= in_len - 8
        if 1 != indicator_check!(unsafe {
            AES_unwrap_key_padded(
                &aes_key,
                output.as_mut_ptr(),
                &mut out_len,
                output.len(),
                ciphertext.as_ptr(),
                ciphertext.len(),
            )
        }) {
            return Err(Unspecified);
        }
        // `out_len` was set by AWS-LC to the number of plaintext bytes written.
        Ok(&mut output[..out_len])
    }
}
impl<Cipher: BlockCipher> Debug for KeyEncryptionKey<Cipher> {
    /// Formats the key for debugging. The `key` field is deliberately omitted
    /// via `finish_non_exhaustive` so secret material never reaches logs.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("KeyEncryptionKey")
            .field("cipher", &self.cipher)
            .finish_non_exhaustive()
    }
}

684
vendor/aws-lc-rs/src/key_wrap/tests.rs vendored Normal file
View File

@@ -0,0 +1,684 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![cfg(test)]
#[cfg(feature = "fips")]
mod fips;
use crate::key_wrap::AesKek;
use super::{BlockCipher, BlockCipherId, KeyWrap, KeyWrapPadded, AES_128, AES_256};
// Checks that a block cipher exposes the expected id and key length, both
// directly and through a `&dyn BlockCipher` trait object.
macro_rules! block_cipher_test {
    ($name:ident, $alg:expr, $id:expr, $key_len:literal) => {
        #[test]
        fn $name() {
            let x: &dyn BlockCipher = $alg;
            assert_eq!($id, x.id());
            assert_eq!($alg.key_len(), $key_len);
        }
    };
}

block_cipher_test!(aes_128_cipher, &AES_128, BlockCipherId::Aes128, 16);
block_cipher_test!(aes_256_cipher, &AES_256, BlockCipherId::Aes256, 32);
#[test]
fn key_encryption_key_debug_impl() {
    // The Debug output must name only the cipher and omit the key material.
    let kek = AesKek::new(&AES_128, &[42u8; 16]).expect("key created");
    assert_eq!(
        "KeyEncryptionKey { cipher: Aes128, .. }",
        format!("{kek:?}")
    );
}
// NIST AES-KW vector test: wraps `$plaintext` under `$key`, checks the
// ciphertext against `$expect`, then unwraps it and checks the round trip.
macro_rules! nist_aes_key_wrap_test {
    ($name:ident, $alg:expr, $key:expr, $plaintext:expr, $expect:expr) => {
        #[test]
        fn $name() {
            const K: &[u8] = $key;
            const P: &[u8] = $plaintext;
            const C: &[u8] = $expect;
            let kek = AesKek::new($alg, K).expect("key creation successful");
            assert_eq!($alg.id(), kek.block_cipher_id());
            let mut output = vec![0u8; C.len()];
            let wrapped = Vec::from(kek.wrap(P, &mut output).expect("wrap successful"));
            assert_eq!(wrapped, C);
            let kek = AesKek::new($alg, K).expect("key creation successful");
            let mut output = vec![0u8; C.len()];
            let unwrapped = kek.unwrap(&wrapped, &mut output).expect("unwrap successful");
            assert_eq!(unwrapped, P);
        }
    };
}
/// Generates a NIST AES Key Wrap with Padding (KWP) round-trip test: wraps
/// `$plaintext` under key `$key`, checks the ciphertext against `$expect`,
/// then unwraps and checks the original plaintext is recovered.
macro_rules! nist_aes_key_wrap_with_padding_test {
    ($name:ident, $alg:expr, $key:expr, $plaintext:expr, $expect:expr) => {
        #[test]
        fn $name() {
            const K: &[u8] = $key;
            const P: &[u8] = $plaintext;
            const C: &[u8] = $expect;
            let kek = AesKek::new($alg, K).expect("key creation successful");
            assert_eq!($alg.id(), kek.block_cipher_id());
            let mut output = vec![0u8; C.len()];
            let wrapped = Vec::from(
                kek.wrap_with_padding(P, &mut output)
                    .expect("wrap successful"),
            );
            assert_eq!(wrapped, C);
            // Fresh key for the reverse direction so unwrap cannot depend on
            // any state left over from the wrap call.
            let kek = AesKek::new($alg, K).expect("key creation successful");
            let mut output = vec![0u8; C.len()];
            // Fixed: the failure message previously said "wrap successful"
            // even though this is the unwrap step.
            let unwrapped = kek
                .unwrap_with_padding(&wrapped, &mut output)
                .expect("unwrap successful");
            assert_eq!(unwrapped, P);
        }
    };
}
// Generates NIST AES Key Wrap (KW) unwrap tests.
// 4-argument arm: the ciphertext is invalid and `unwrap` must fail.
// 5-argument arm: `unwrap` must recover `$expect`, and re-wrapping the
// recovered plaintext must reproduce `$ciphertext`.
macro_rules! nist_aes_key_unwrap_test {
    ($name:ident, $alg:expr, $key:expr, $ciphertext:expr) => {
        #[test]
        fn $name() {
            const K: &[u8] = $key;
            const C: &[u8] = $ciphertext;
            let kek = AesKek::new($alg, K).expect("key creation successful");
            let mut output = vec![0u8; C.len()];
            // Negative case: an invalid ciphertext must be rejected.
            kek.unwrap(C, &mut output).expect_err("unwrap to fail");
        }
    };
    ($name:ident, $alg:expr, $key:expr, $ciphertext:expr, $expect:expr) => {
        #[test]
        fn $name() {
            const K: &[u8] = $key;
            const C: &[u8] = $ciphertext;
            const P: &[u8] = $expect;
            let kek = AesKek::new($alg, K).expect("key creation successful");
            let mut output = vec![0u8; C.len()];
            let unwrapped = Vec::from(kek.unwrap(C, &mut output).expect("unwrap successful"));
            assert_eq!(unwrapped, P);
            // Round-trip: wrapping the recovered plaintext with a fresh key
            // must reproduce the original ciphertext.
            let kek = AesKek::new($alg, K).expect("key creation successful");
            let mut output = vec![0u8; C.len()];
            let wrapped = kek.wrap(&unwrapped, &mut output).expect("wrap successful");
            assert_eq!(wrapped, C);
        }
    };
}
// Generates NIST AES Key Wrap with Padding (KWP) unwrap tests.
// 4-argument arm: the ciphertext is invalid and `unwrap_with_padding` must
// fail. 5-argument arm: unwrap must recover `$expect`, and re-wrapping the
// recovered plaintext must reproduce `$ciphertext`.
macro_rules! nist_aes_key_unwrap_with_padding_test {
    ($name:ident, $alg:expr, $key:expr, $ciphertext:expr) => {
        #[test]
        fn $name() {
            const K: &[u8] = $key;
            const C: &[u8] = $ciphertext;
            let kek = AesKek::new($alg, K).expect("key creation successful");
            let mut output = vec![0u8; C.len()];
            // Negative case: an invalid ciphertext must be rejected.
            kek.unwrap_with_padding(C, &mut output)
                .expect_err("unwrap to fail");
        }
    };
    ($name:ident, $alg:expr, $key:expr, $ciphertext:expr, $expect:expr) => {
        #[test]
        fn $name() {
            const K: &[u8] = $key;
            const C: &[u8] = $ciphertext;
            const P: &[u8] = $expect;
            let kek = AesKek::new($alg, K).expect("key creation successful");
            let mut output = vec![0u8; C.len()];
            let unwrapped = Vec::from(
                kek.unwrap_with_padding(C, &mut output)
                    .expect("unwrap successful"),
            );
            assert_eq!(unwrapped, P);
            // Round-trip: wrapping the recovered plaintext with a fresh key
            // must reproduce the original ciphertext.
            let kek = AesKek::new($alg, K).expect("key creation successful");
            let mut output = vec![0u8; C.len()];
            let wrapped = kek
                .wrap_with_padding(&unwrapped, &mut output)
                .expect("wrap successful");
            assert_eq!(wrapped, C);
        }
    };
}
// KWP (AES Key Wrap with Padding) test vectors for AES-128. `kwp_ae_*` names
// are wrap (authenticated-encryption) cases, `kwp_ad_*` are unwrap
// (authenticated-decryption) cases; `*_fail` vectors must be rejected.
nist_aes_key_wrap_with_padding_test!(
    kwp_ae_aes128_8bit_len,
    &AES_128,
    &[
        0x6d, 0xec, 0xf1, 0x0a, 0x1c, 0xaf, 0x8e, 0x3b, 0x80, 0xc7, 0xa4, 0xbe, 0x8c, 0x9c, 0x84,
        0xe8,
    ],
    &[0x49],
    &[
        0x01, 0xa7, 0xd6, 0x57, 0xfc, 0x4a, 0x5b, 0x21, 0x6f, 0x26, 0x1c, 0xca, 0x4d, 0x05, 0x2c,
        0x2b,
    ]
);
nist_aes_key_wrap_with_padding_test!(
    kwp_ae_aes128_248bit_len,
    &AES_128,
    &[
        0xbe, 0x96, 0xdc, 0x19, 0x5e, 0xc0, 0x34, 0xd6, 0x16, 0x48, 0x6e, 0xd7, 0x0e, 0x97, 0xfe,
        0x83
    ],
    &[
        0x85, 0xb5, 0x43, 0x7b, 0x63, 0x35, 0xeb, 0xba, 0x76, 0x35, 0x90, 0x3a, 0x44, 0x93, 0xd1,
        0x2a, 0x77, 0xd9, 0x35, 0x7a, 0x9e, 0x0d, 0xbc, 0x01, 0x34, 0x56, 0xd8, 0x5f, 0x1d, 0x32,
        0x01
    ],
    &[
        0x97, 0x47, 0x69, 0xb3, 0xa7, 0xb4, 0xd5, 0xd3, 0x29, 0x85, 0xf8, 0x7f, 0xdd, 0xf9, 0x99,
        0x06, 0x31, 0xe5, 0x61, 0x0f, 0xbf, 0xb2, 0x78, 0x38, 0x7b, 0x58, 0xb1, 0xf4, 0x8e, 0x05,
        0xc7, 0x7d, 0x2f, 0xb7, 0x57, 0x5c, 0x51, 0x69, 0xeb, 0x0e
    ]
);
nist_aes_key_unwrap_with_padding_test!(
    kwp_ad_aes128_8bit_len,
    &AES_128,
    &[
        0x49, 0x31, 0x9c, 0x33, 0x12, 0x31, 0xcd, 0x6b, 0xf7, 0x4c, 0x2f, 0x70, 0xb0, 0x7f, 0xcc,
        0x5c
    ],
    &[
        0x9c, 0x21, 0x1f, 0x32, 0xf8, 0xb3, 0x41, 0xf3, 0x2b, 0x05, 0x2f, 0xed, 0x5f, 0x31, 0xa3,
        0x87
    ],
    &[0xe4]
);
nist_aes_key_unwrap_with_padding_test!(
    kwp_ad_aes128_8bit_len_fail,
    &AES_128,
    &[
        0x7a, 0x3f, 0x4d, 0x97, 0x05, 0x01, 0xbf, 0x86, 0x14, 0x7e, 0x91, 0x5f, 0xe1, 0xb9, 0x03,
        0x18
    ],
    &[
        0xad, 0xd7, 0x0b, 0xaf, 0xaf, 0xb1, 0x5e, 0x79, 0xc3, 0xa8, 0x5c, 0xe1, 0xde, 0x55, 0x82,
        0x72
    ]
);
nist_aes_key_unwrap_with_padding_test!(
    kwp_ad_aes128_248bit_len,
    &AES_128,
    &[
        0x28, 0x90, 0x23, 0x37, 0x90, 0x78, 0xb8, 0x21, 0xfc, 0x24, 0xf7, 0x18, 0xbd, 0xc9, 0x43,
        0x31
    ],
    &[
        0xff, 0x51, 0xb7, 0xae, 0x52, 0x46, 0x23, 0x44, 0xfc, 0x45, 0x5f, 0x72, 0xbe, 0x05, 0x9b,
        0x56, 0xa9, 0x8c, 0xc8, 0x33, 0xa1, 0xcf, 0x3b, 0x20, 0xb6, 0x88, 0x71, 0x12, 0xf5, 0xa4,
        0x3f, 0xd4, 0x5e, 0x9c, 0x5f, 0x51, 0xe7, 0xc6, 0x62, 0xf4
    ],
    &[
        0xbe, 0xd5, 0x24, 0xc6, 0x40, 0x2e, 0xeb, 0x77, 0x38, 0x69, 0x6f, 0x31, 0x06, 0x99, 0x9f,
        0xc9, 0x31, 0xbe, 0xd6, 0x76, 0x88, 0x38, 0x34, 0x5d, 0x18, 0xba, 0x44, 0xe1, 0xb0, 0x32,
        0xb8
    ]
);
nist_aes_key_unwrap_with_padding_test!(
    kwp_ad_aes128_248bit_len_fail,
    &AES_128,
    &[
        0x69, 0x29, 0x11, 0x7e, 0x6c, 0xb1, 0x8e, 0xa4, 0xa2, 0x98, 0x58, 0x86, 0xf0, 0x8c, 0x0a,
        0xe1
    ],
    &[
        0x5f, 0xd9, 0xe7, 0x7c, 0x37, 0x04, 0x1c, 0x2e, 0xbd, 0x4c, 0x34, 0x6d, 0x5b, 0x6c, 0x78,
        0xf7, 0xb4, 0x85, 0xca, 0x58, 0x9d, 0x6b, 0x0b, 0x54, 0x16, 0xd0, 0x28, 0x7a, 0x6d, 0xb3,
        0x6b, 0x39, 0xbd, 0xc9, 0x61, 0xb4, 0xdc, 0x2f, 0xec, 0xbc
    ]
);
// KW (AES Key Wrap) test vectors for AES-128. `kw_ae_*` names are wrap cases,
// `kw_ad_*` are unwrap cases; `*_fail` vectors must be rejected.
nist_aes_key_wrap_test!(
    kw_ae_aes128_128bit_len,
    &AES_128,
    &[
        0x75, 0x75, 0xda, 0x3a, 0x93, 0x60, 0x7c, 0xc2, 0xbf, 0xd8, 0xce, 0xc7, 0xaa, 0xdf, 0xd9,
        0xa6
    ],
    &[
        0x42, 0x13, 0x6d, 0x3c, 0x38, 0x4a, 0x3e, 0xea, 0xc9, 0x5a, 0x06, 0x6f, 0xd2, 0x8f, 0xed,
        0x3f
    ],
    &[
        0x03, 0x1f, 0x6b, 0xd7, 0xe6, 0x1e, 0x64, 0x3d, 0xf6, 0x85, 0x94, 0x81, 0x6f, 0x64, 0xca,
        0xa3, 0xf5, 0x6f, 0xab, 0xea, 0x25, 0x48, 0xf5, 0xfb
    ]
);
nist_aes_key_wrap_test!(
    kw_ae_aes128_256bit_len,
    &AES_128,
    &[
        0xe5, 0xd0, 0x58, 0xe7, 0xf1, 0xc2, 0x2c, 0x01, 0x6c, 0x4e, 0x1c, 0xc9, 0xb2, 0x6b, 0x9f,
        0x8f
    ],
    &[
        0x7f, 0x60, 0x4e, 0x9b, 0x8d, 0x39, 0xd3, 0xc9, 0x1e, 0x19, 0x3f, 0xe6, 0xf1, 0x96, 0xc1,
        0xe3, 0xda, 0x62, 0x11, 0xa7, 0xc9, 0xa3, 0x3b, 0x88, 0x73, 0xb6, 0x4b, 0x13, 0x8d, 0x18,
        0x03, 0xe4
    ],
    &[
        0x60, 0xb9, 0xf8, 0xac, 0x79, 0x7c, 0x56, 0xe0, 0x1e, 0x9b, 0x5f, 0x84, 0xd6, 0x58, 0x16,
        0xa9, 0x80, 0x77, 0x78, 0x69, 0xf6, 0x79, 0x91, 0xa0, 0xe6, 0xdc, 0x19, 0xb8, 0xcd, 0x75,
        0xc9, 0xb5, 0x4d, 0xb4, 0xa3, 0x84, 0x56, 0xbb, 0xd6, 0xf3
    ]
);
nist_aes_key_unwrap_test!(
    kw_ad_aes128_128bit_len,
    &AES_128,
    &[
        0x1c, 0xbd, 0x2f, 0x79, 0x07, 0x8b, 0x95, 0x00, 0xfa, 0xe2, 0x36, 0x96, 0x31, 0x19, 0x53,
        0xeb
    ],
    &[
        0xec, 0xbd, 0x7a, 0x17, 0xc5, 0xda, 0x3c, 0xfd, 0xfe, 0x22, 0x25, 0xd2, 0xbf, 0x9a, 0xc7,
        0xab, 0xce, 0x78, 0xc2, 0xb2, 0xae, 0xfa, 0x6e, 0xac
    ],
    &[
        0x9c, 0x4e, 0x67, 0x52, 0x77, 0xa3, 0xbd, 0xc3, 0xa0, 0x71, 0x04, 0x8b, 0x32, 0x7a, 0x01,
        0x1e
    ]
);
nist_aes_key_unwrap_test!(
    kw_ad_aes128_128bit_len_fail,
    &AES_128,
    &[
        0x5e, 0xa3, 0x0c, 0x21, 0xdb, 0x36, 0xc0, 0x57, 0x72, 0x94, 0xcc, 0x70, 0xd3, 0xb8, 0x69,
        0x70
    ],
    &[
        0x37, 0xe4, 0x81, 0x3d, 0x9c, 0x40, 0xc9, 0x16, 0x5b, 0x7f, 0x12, 0x0c, 0xec, 0x34, 0xa8,
        0x5d, 0x3b, 0xf5, 0x6a, 0xe0, 0x7f, 0xad, 0x8f, 0x40
    ]
);
nist_aes_key_unwrap_test!(
    kw_ad_aes128_256bit_len,
    &AES_128,
    &[
        0x83, 0xda, 0x6e, 0x02, 0x40, 0x4d, 0x5a, 0xbf, 0xd4, 0x7d, 0x15, 0xda, 0x59, 0x18, 0x40,
        0xe2
    ],
    &[
        0x3f, 0x4c, 0xbf, 0x3a, 0x98, 0x02, 0x92, 0x43, 0xda, 0x87, 0xa7, 0x56, 0xb3, 0xc5, 0x25,
        0x53, 0xf9, 0x13, 0x66, 0xf4, 0xff, 0x4b, 0x10, 0x3b, 0x2c, 0x73, 0xe6, 0x8a, 0xa8, 0xca,
        0x81, 0xf0, 0x1e, 0xbd, 0xa3, 0x5d, 0x71, 0x87, 0x41, 0xac
    ],
    &[
        0x67, 0xdf, 0xd6, 0x27, 0x34, 0x6e, 0xbd, 0x21, 0x78, 0x49, 0xa5, 0xba, 0x5b, 0xca, 0x6e,
        0x9c, 0xe0, 0x7a, 0x77, 0x47, 0xbe, 0xd1, 0xba, 0x11, 0x9e, 0xc0, 0x15, 0x03, 0x20, 0x2a,
        0x07, 0x5a
    ]
);
nist_aes_key_unwrap_test!(
    kw_ad_aes128_256bit_len_fail,
    &AES_128,
    &[
        0x84, 0xbc, 0x6c, 0xe7, 0xee, 0x4f, 0xd9, 0xdb, 0x51, 0x25, 0x36, 0x66, 0x9d, 0x06, 0x86,
        0xda
    ],
    &[
        0xc3, 0x83, 0xdb, 0x93, 0x0f, 0xfd, 0x02, 0xc0, 0x07, 0x3a, 0xc2, 0xcc, 0x79, 0xec, 0x28,
        0x9e, 0x68, 0x66, 0xbd, 0xcc, 0x6a, 0x13, 0x5a, 0x3b, 0x77, 0x6a, 0xa4, 0x2f, 0x14, 0xee,
        0x04, 0xf9, 0xcc, 0xa0, 0x6e, 0xd6, 0xc0, 0xb2, 0x29, 0x01
    ]
);
// KWP (AES Key Wrap with Padding) test vectors for AES-256. `kwp_ae_*` names
// are wrap cases, `kwp_ad_*` are unwrap cases; `*_fail` vectors must be
// rejected.
nist_aes_key_wrap_with_padding_test!(
    kwp_ae_aes256_8bit_len,
    &AES_256,
    &[
        0x95, 0xda, 0x27, 0x00, 0xca, 0x6f, 0xd9, 0xa5, 0x25, 0x54, 0xee, 0x2a, 0x8d, 0xf1, 0x38,
        0x6f, 0x5b, 0x94, 0xa1, 0xa6, 0x0e, 0xd8, 0xa4, 0xae, 0xf6, 0x0a, 0x8d, 0x61, 0xab, 0x5f,
        0x22, 0x5a
    ],
    &[0xd1],
    &[
        0x06, 0xba, 0x7a, 0xe6, 0xf3, 0x24, 0x8c, 0xfd, 0xcf, 0x26, 0x75, 0x07, 0xfa, 0x00, 0x1b,
        0xc4
    ]
);
nist_aes_key_wrap_with_padding_test!(
    kwp_ae_aes256_248bit_len,
    &AES_256,
    &[
        0xe9, 0xbb, 0x7f, 0x44, 0xc7, 0xba, 0xaf, 0xbf, 0x39, 0x2a, 0xb9, 0x12, 0x58, 0x9a, 0x2f,
        0x8d, 0xb5, 0x32, 0x68, 0x10, 0x6e, 0xaf, 0xb7, 0x46, 0x89, 0xbb, 0x18, 0x33, 0x13, 0x6e,
        0x61, 0x13
    ],
    &[
        0xff, 0xe9, 0x52, 0x60, 0x48, 0x34, 0xbf, 0xf8, 0x99, 0xe6, 0x36, 0x58, 0xf3, 0x42, 0x46,
        0x81, 0x5c, 0x91, 0x59, 0x7e, 0xb4, 0x0a, 0x21, 0x72, 0x9e, 0x0a, 0x8a, 0x95, 0x9b, 0x61,
        0xf2
    ],
    &[
        0x15, 0xb9, 0xf0, 0x6f, 0xbc, 0x76, 0x5e, 0x5e, 0x3d, 0x55, 0xd6, 0xb8, 0x24, 0x61, 0x6f,
        0x21, 0x92, 0x1d, 0x2a, 0x69, 0x18, 0xee, 0x7b, 0xf1, 0x40, 0x6b, 0x52, 0x42, 0x74, 0xe1,
        0x70, 0xb4, 0xa7, 0x83, 0x33, 0xca, 0x5e, 0xe9, 0x2a, 0xf5
    ]
);
nist_aes_key_unwrap_with_padding_test!(
    kwp_ad_aes256_8bit_len,
    &AES_256,
    &[
        0x20, 0xe4, 0xff, 0x6a, 0x88, 0xff, 0xa9, 0xa2, 0x81, 0x8b, 0x81, 0x70, 0x27, 0x93, 0xd8,
        0xa0, 0x16, 0x72, 0x2c, 0x2f, 0xa1, 0xff, 0x44, 0x5f, 0x24, 0xb9, 0xdb, 0x29, 0x3c, 0xb1,
        0x20, 0x69
    ],
    &[
        0x85, 0x01, 0x1d, 0xc9, 0x27, 0xb1, 0x67, 0xf4, 0x11, 0xb0, 0xb8, 0xe2, 0x1b, 0x11, 0xd8,
        0x19
    ],
    &[0xd2]
);
nist_aes_key_unwrap_with_padding_test!(
    kwp_ad_aes256_8bit_len_fail,
    &AES_256,
    &[
        0xc3, 0x2c, 0xb3, 0xe1, 0xe4, 0x1a, 0x4b, 0x9f, 0x4d, 0xe7, 0x99, 0x89, 0x95, 0x78, 0x66,
        0xf5, 0xdd, 0x48, 0xdb, 0xa3, 0x8c, 0x22, 0xa6, 0xeb, 0xb8, 0x0e, 0x14, 0xc8, 0x4b, 0xdd,
        0x95, 0x34
    ],
    &[
        0xc2, 0x9b, 0x05, 0xc2, 0x61, 0x9a, 0x58, 0xec, 0xc1, 0xd2, 0x39, 0xe7, 0xa3, 0x42, 0x73,
        0xcd
    ]
);
nist_aes_key_unwrap_with_padding_test!(
    kwp_ad_aes256_248bit_len,
    &AES_256,
    &[
        0x09, 0xab, 0x42, 0x86, 0xa8, 0x45, 0xc1, 0x8b, 0xb4, 0x81, 0xda, 0x91, 0xc3, 0x9a, 0x58,
        0xfd, 0x52, 0xed, 0x78, 0xd5, 0x49, 0x73, 0xfc, 0x41, 0xf2, 0x51, 0x63, 0xa0, 0xc3, 0x3f,
        0x47, 0x27
    ],
    &[
        0x0a, 0x18, 0x0a, 0x84, 0xb0, 0x1f, 0xc1, 0xe4, 0x4b, 0x9f, 0x93, 0x01, 0xcc, 0x89, 0xaf,
        0x95, 0xde, 0x75, 0x82, 0x19, 0x01, 0x5a, 0xbc, 0x86, 0xc3, 0xe4, 0x8e, 0x76, 0x4e, 0x73,
        0x79, 0x24, 0x6a, 0xe7, 0x20, 0x9a, 0xaa, 0x4f, 0x88, 0x9d
    ],
    &[
        0x4c, 0x1b, 0x6a, 0xcc, 0xb4, 0x92, 0xc8, 0x8b, 0x10, 0xa5, 0x6a, 0x56, 0xeb, 0x9b, 0x6d,
        0x6e, 0xd9, 0x79, 0x70, 0x56, 0xa5, 0x59, 0xfe, 0x3f, 0x0c, 0x7c, 0x04, 0x29, 0xa2, 0x00,
        0xaf
    ]
);
nist_aes_key_unwrap_with_padding_test!(
    kwp_ad_aes256_248bit_len_fail,
    &AES_256,
    &[
        0x8c, 0x35, 0xfb, 0x77, 0x76, 0x6d, 0x04, 0xf4, 0x8d, 0x5b, 0x52, 0x27, 0x5c, 0x5c, 0x5f,
        0x31, 0xf5, 0x68, 0x07, 0x84, 0x19, 0xe5, 0xc2, 0x33, 0x59, 0x18, 0x96, 0x5f, 0xbe, 0x53,
        0xce, 0xdd
    ],
    &[
        0xba, 0xcc, 0xcb, 0x17, 0x14, 0xdb, 0xaa, 0x49, 0x08, 0xc2, 0x65, 0x4a, 0xa8, 0xdb, 0xb1,
        0xdd, 0xbd, 0xdd, 0x8a, 0xb8, 0x19, 0x42, 0x9b, 0x02, 0x66, 0x19, 0xfb, 0x1c, 0x0f, 0xa7,
        0x5a, 0x82, 0x47, 0x37, 0x2b, 0x2f, 0xee, 0xab, 0x1e, 0x1d
    ]
);
// KW (AES Key Wrap) test vectors for AES-256. `kw_ae_*` names are wrap cases,
// `kw_ad_*` are unwrap cases; `*_fail` vectors must be rejected.
nist_aes_key_wrap_test!(
    kw_ae_aes256_128bit_len,
    &AES_256,
    &[
        0xf5, 0x97, 0x82, 0xf1, 0xdc, 0xeb, 0x05, 0x44, 0xa8, 0xda, 0x06, 0xb3, 0x49, 0x69, 0xb9,
        0x21, 0x2b, 0x55, 0xce, 0x6d, 0xcb, 0xdd, 0x09, 0x75, 0xa3, 0x3f, 0x4b, 0x3f, 0x88, 0xb5,
        0x38, 0xda
    ],
    &[
        0x73, 0xd3, 0x30, 0x60, 0xb5, 0xf9, 0xf2, 0xeb, 0x57, 0x85, 0xc0, 0x70, 0x3d, 0xdf, 0xa7,
        0x04
    ],
    &[
        0x2e, 0x63, 0x94, 0x6e, 0xa3, 0xc0, 0x90, 0x90, 0x2f, 0xa1, 0x55, 0x83, 0x75, 0xfd, 0xb2,
        0x90, 0x77, 0x42, 0xac, 0x74, 0xe3, 0x94, 0x03, 0xfc
    ]
);
nist_aes_key_wrap_test!(
    kw_ae_aes256_256bit_len,
    &AES_256,
    &[
        0x8b, 0x54, 0xe6, 0xbc, 0x3d, 0x20, 0xe8, 0x23, 0xd9, 0x63, 0x43, 0xdc, 0x77, 0x6c, 0x0d,
        0xb1, 0x0c, 0x51, 0x70, 0x8c, 0xee, 0xcc, 0x9a, 0x38, 0xa1, 0x4b, 0xeb, 0x4c, 0xa5, 0xb8,
        0xb2, 0x21
    ],
    &[
        0xd6, 0x19, 0x26, 0x35, 0xc6, 0x20, 0xde, 0xe3, 0x05, 0x4e, 0x09, 0x63, 0x39, 0x6b, 0x26,
        0x0a, 0xf5, 0xc6, 0xf0, 0x26, 0x95, 0xa5, 0x20, 0x5f, 0x15, 0x95, 0x41, 0xb4, 0xbc, 0x58,
        0x4b, 0xac
    ],
    &[
        0xb1, 0x3e, 0xeb, 0x76, 0x19, 0xfa, 0xb8, 0x18, 0xf1, 0x51, 0x92, 0x66, 0x51, 0x6c, 0xeb,
        0x82, 0xab, 0xc0, 0xe6, 0x99, 0xa7, 0x15, 0x3c, 0xf2, 0x6e, 0xdc, 0xb8, 0xae, 0xb8, 0x79,
        0xf4, 0xc0, 0x11, 0xda, 0x90, 0x68, 0x41, 0xfc, 0x59, 0x56
    ]
);
nist_aes_key_unwrap_test!(
    kw_ad_aes256_128bit_len,
    &AES_256,
    &[
        0x80, 0xaa, 0x99, 0x73, 0x27, 0xa4, 0x80, 0x6b, 0x6a, 0x7a, 0x41, 0xa5, 0x2b, 0x86, 0xc3,
        0x71, 0x03, 0x86, 0xf9, 0x32, 0x78, 0x6e, 0xf7, 0x96, 0x76, 0xfa, 0xfb, 0x90, 0xb8, 0x26,
        0x3c, 0x5f
    ],
    &[
        0x42, 0x3c, 0x96, 0x0d, 0x8a, 0x2a, 0xc4, 0xc1, 0xd3, 0x3d, 0x3d, 0x97, 0x7b, 0xf0, 0xa9,
        0x15, 0x59, 0xf9, 0x9c, 0x8a, 0xcd, 0x29, 0x3d, 0x43
    ],
    &[
        0x0a, 0x25, 0x6b, 0xa7, 0x5c, 0xfa, 0x03, 0xaa, 0xa0, 0x2b, 0xa9, 0x42, 0x03, 0xf1, 0x5b,
        0xaa
    ]
);
nist_aes_key_unwrap_test!(
    kw_ad_aes256_128bit_len_fail,
    &AES_256,
    &[
        0x08, 0xc9, 0x36, 0xb2, 0x5b, 0x56, 0x7a, 0x0a, 0xa6, 0x79, 0xc2, 0x9f, 0x20, 0x1b, 0xf8,
        0xb1, 0x90, 0x32, 0x7d, 0xf0, 0xc2, 0x56, 0x3e, 0x39, 0xce, 0xe0, 0x61, 0xf1, 0x49, 0xf4,
        0xd9, 0x1b
    ],
    &[
        0xe2, 0x27, 0xeb, 0x8a, 0xe9, 0xd2, 0x39, 0xcc, 0xd8, 0x92, 0x8a, 0xde, 0xc3, 0x9c, 0x28,
        0x81, 0x0c, 0xa9, 0xb3, 0xdc, 0x1f, 0x36, 0x64, 0x44
    ]
);
nist_aes_key_unwrap_test!(
    kw_ad_aes256_256bit_len,
    &AES_256,
    &[
        0x04, 0x9c, 0x7b, 0xcb, 0xa0, 0x3e, 0x04, 0x39, 0x5c, 0x2a, 0x22, 0xe6, 0xa9, 0x21, 0x5c,
        0xda, 0xe0, 0xf7, 0x62, 0xb0, 0x77, 0xb1, 0x24, 0x4b, 0x44, 0x31, 0x47, 0xf5, 0x69, 0x57,
        0x99, 0xfa
    ],
    &[
        0x77, 0x6b, 0x1e, 0x91, 0xe9, 0x35, 0xd1, 0xf8, 0x0a, 0x53, 0x79, 0x02, 0x18, 0x6d, 0x6b,
        0x00, 0xdf, 0xc6, 0xaf, 0xc1, 0x20, 0x00, 0xf1, 0xbd, 0xe9, 0x13, 0xdf, 0x5d, 0x67, 0x40,
        0x70, 0x61, 0xdb, 0x82, 0x27, 0xfc, 0xd0, 0x89, 0x53, 0xd4
    ],
    &[
        0xe6, 0x17, 0x83, 0x1c, 0x7d, 0xb8, 0x03, 0x8f, 0xda, 0x4c, 0x59, 0x40, 0x37, 0x75, 0xc3,
        0xd4, 0x35, 0x13, 0x6a, 0x56, 0x6f, 0x35, 0x09, 0xc2, 0x73, 0xe1, 0xda, 0x1e, 0xf9, 0xf5,
        0x0a, 0xea
    ]
);
nist_aes_key_unwrap_test!(
    kw_ad_aes256_256bit_len_fail,
    &AES_256,
    &[
        0x3c, 0x7c, 0x55, 0x9f, 0xb9, 0x9d, 0x2e, 0x3f, 0x82, 0x80, 0xc9, 0xbe, 0x14, 0xb0, 0xf7,
        0xb6, 0x76, 0xa3, 0x20, 0x53, 0xeb, 0xa8, 0xf7, 0xaf, 0xbb, 0x43, 0x04, 0xc1, 0x17, 0xa6,
        0x50, 0x69
    ],
    &[
        0x86, 0x1b, 0x0a, 0x15, 0xbf, 0x59, 0x07, 0xb5, 0x51, 0xbc, 0x94, 0x82, 0xbc, 0x4d, 0xe3,
        0x61, 0xde, 0x64, 0x5f, 0x18, 0xf9, 0x7f, 0xd8, 0x0f, 0xff, 0xa5, 0xb9, 0x68, 0x79, 0x23,
        0x82, 0x59, 0xc6, 0x67, 0x7e, 0xb5, 0x05, 0x96, 0x20, 0x5b
    ]
);
/// Generates a test asserting that `wrap` rejects an invalid combination of
/// input and output buffer lengths.
macro_rules! wrap_input_output_invalid_test {
    ($name:ident, $input_len:expr, $output_len:expr) => {
        #[test]
        fn $name() {
            let kek = AesKek::new(&AES_128, &[16u8; 16]).expect("key creation successful");
            let in_len: usize = $input_len.try_into().unwrap();
            let out_len: usize = $output_len.try_into().unwrap();
            let plaintext = vec![42u8; in_len];
            let mut ciphertext = vec![0u8; out_len];
            kek.wrap(plaintext.as_slice(), ciphertext.as_mut_slice())
                .expect_err("failure");
        }
    };
}
// Input length < 16
wrap_input_output_invalid_test!(wrap_input_len_less_than_min, 15, 23);
// Input length % 8 != 0
wrap_input_output_invalid_test!(wrap_input_len_not_multiple_of_eight, 17, 25);
// Output length < Input length + 8 (wrapping appends an 8-byte block)
wrap_input_output_invalid_test!(wrap_output_len_too_small, 16, 8);
/// Generates a test asserting that `unwrap` rejects an invalid combination of
/// input and output buffer lengths.
macro_rules! unwrap_input_output_invalid_test {
    ($name:ident, $input_len:expr, $output_len:expr) => {
        #[test]
        fn $name() {
            let kek = AesKek::new(&AES_128, &[16u8; 16]).expect("key creation successful");
            let in_len: usize = $input_len.try_into().unwrap();
            let out_len: usize = $output_len.try_into().unwrap();
            let ciphertext = vec![42u8; in_len];
            let mut plaintext = vec![0u8; out_len];
            kek.unwrap(ciphertext.as_slice(), plaintext.as_mut_slice())
                .expect_err("failure");
        }
    };
}
// Input length < 24
unwrap_input_output_invalid_test!(unwrap_input_len_smaller_than_min, 16, 16);
// Input length % 8 != 0
unwrap_input_output_invalid_test!(unwrap_input_len_not_multiple_of_eight, 31, 31);
// Output length < Input length - 8
unwrap_input_output_invalid_test!(unwrap_output_len_too_small, 24, 15);
/// Generates a test asserting that `wrap_with_padding` rejects an invalid
/// combination of input and output buffer lengths.
macro_rules! wrap_with_padding_input_output_invalid_test {
    ($name:ident, $input_len:expr, $output_len:expr) => {
        #[test]
        fn $name() {
            let kek = AesKek::new(&AES_128, &[16u8; 16]).expect("key creation successful");
            let in_len: usize = $input_len.try_into().unwrap();
            let out_len: usize = $output_len.try_into().unwrap();
            let plaintext = vec![42u8; in_len];
            let mut ciphertext = vec![0u8; out_len];
            kek.wrap_with_padding(plaintext.as_slice(), ciphertext.as_mut_slice())
                .expect_err("failure");
        }
    };
}
// Input length == 0
wrap_with_padding_input_output_invalid_test!(wrap_with_padding_input_len_zero, 0, 16);
// Output length is not sufficient for the required padding.
// In this example an input length of 6 would require 2 additional bytes of padding, plus the
// additional 8 bytes from the wrapping algorithm (so a minimum of 16 bytes).
wrap_with_padding_input_output_invalid_test!(wrap_with_padding_output_len_too_small, 6, 15);
/// Generates a test asserting that `unwrap_with_padding` rejects an invalid
/// combination of input and output buffer lengths.
macro_rules! unwrap_with_padding_input_output_invalid_test {
    ($name:ident, $input_len:expr, $output_len:expr) => {
        #[test]
        fn $name() {
            let kek = AesKek::new(&AES_128, &[16u8; 16]).expect("key creation successful");
            let in_len: usize = $input_len.try_into().unwrap();
            let out_len: usize = $output_len.try_into().unwrap();
            let ciphertext = vec![42u8; in_len];
            let mut plaintext = vec![0u8; out_len];
            kek.unwrap_with_padding(ciphertext.as_slice(), plaintext.as_mut_slice())
                .expect_err("failure");
        }
    };
}
// Input length < 16 (AES Block Length)
unwrap_with_padding_input_output_invalid_test!(unwrap_padded_input_len_smaller_than_min, 15, 15);
// Output length < Input length - 8
unwrap_with_padding_input_output_invalid_test!(unwrap_padded_output_len_too_small, 24, 15);

View File

@@ -0,0 +1,97 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![cfg(debug_assertions)]
use crate::fips::{assert_fips_status_indicator, FipsServiceStatus};
use crate::key_wrap::{AesKek, KeyWrap, KeyWrapPadded, AES_128, AES_256};
/// 128-bit key-encryption key used by the AES-128 FIPS indicator tests.
const K_128: &[u8] = &[
    0x60, 0x43, 0xb2, 0x73, 0xe9, 0x71, 0x26, 0x5e, 0x53, 0x8a, 0x6c, 0xcd, 0x5d, 0x5a, 0x11, 0xe4,
];
/// 256-bit key-encryption key used by the AES-256 FIPS indicator tests.
const K_256: &[u8] = &[
    0x15, 0x52, 0x45, 0x0c, 0x60, 0xf3, 0x10, 0xfb, 0xc8, 0x41, 0x98, 0xe5, 0xfd, 0x70, 0x7d, 0x04,
    0x8f, 0x81, 0xbf, 0x9a, 0xdc, 0x63, 0x90, 0xed, 0xe5, 0xb0, 0x4b, 0x3c, 0xe4, 0x06, 0x54, 0xba,
];
/// 16-byte plaintext wrapped and unwrapped by the tests in this module.
const P: &[u8] = &[
    0xf2, 0x64, 0x5b, 0xa4, 0xba, 0xed, 0xa7, 0xec, 0xbc, 0x12, 0xa6, 0xad, 0x46, 0x76, 0x95, 0xa0,
];
/// Generates a FIPS service-indicator test for AES-KW: both `wrap` and
/// `unwrap` must report `FipsServiceStatus::Approved`.
macro_rules! nist_aes_key_wrap_test {
    ($name:ident, $alg:expr, $key:expr, $plaintext:expr) => {
        #[test]
        fn $name() {
            let k = $key;
            let p = $plaintext;
            let kek = AesKek::new($alg, k).expect("key creation successful");
            // Large enough for the input plus the appended 8-byte block.
            let mut output = vec![0u8; p.len() + 15];
            // Fixed: wrap the macro-supplied plaintext `p`; the original body
            // ignored the `$plaintext` parameter and wrapped the module-level
            // const `P` instead (identical only because all call sites pass P).
            let wrapped = Vec::from(assert_fips_status_indicator!(
                kek.wrap(p, &mut output).expect("wrap successful"),
                FipsServiceStatus::Approved
            ));
            let kek = AesKek::new($alg, k).expect("key creation successful");
            // Unwrap output holds the plaintext rounded up to a whole number
            // of 8-byte semiblocks.
            let mut output = vec![
                0u8;
                if p.len() % 8 != 0 {
                    p.len() + (8 - (p.len() % 8))
                } else {
                    p.len()
                }
            ];
            // Fixed: the failure message previously said "wrap successful"
            // even though this is the unwrap step.
            let _unwrapped = assert_fips_status_indicator!(
                kek.unwrap(&wrapped, &mut output).expect("unwrap successful"),
                FipsServiceStatus::Approved
            );
        }
    };
}
/// Generates a FIPS service-indicator test for AES-KWP: both
/// `wrap_with_padding` and `unwrap_with_padding` must report
/// `FipsServiceStatus::Approved`.
macro_rules! nist_aes_key_wrap_with_padding_test {
    ($name:ident, $alg:expr, $key:expr, $plaintext:expr) => {
        #[test]
        fn $name() {
            let k = $key;
            let p = $plaintext;
            let kek = AesKek::new($alg, k).expect("key creation successful");
            // Large enough for the padded input plus the appended 8-byte block.
            let mut output = vec![0u8; p.len() + 15];
            // Fixed: wrap the macro-supplied plaintext `p`; the original body
            // ignored the `$plaintext` parameter and wrapped the module-level
            // const `P` instead (identical only because all call sites pass P).
            let wrapped = Vec::from(assert_fips_status_indicator!(
                kek.wrap_with_padding(p, &mut output)
                    .expect("wrap successful"),
                FipsServiceStatus::Approved
            ));
            let kek = AesKek::new($alg, k).expect("key creation successful");
            // Unwrap output holds the plaintext rounded up to a whole number
            // of 8-byte semiblocks.
            let mut output = vec![
                0u8;
                if p.len() % 8 != 0 {
                    p.len() + (8 - (p.len() % 8))
                } else {
                    p.len()
                }
            ];
            // Fixed: the failure message previously said "wrap successful"
            // even though this is the unwrap step.
            let _unwrapped = assert_fips_status_indicator!(
                kek.unwrap_with_padding(&wrapped, &mut output)
                    .expect("unwrap successful"),
                FipsServiceStatus::Approved
            );
        }
    };
}
// FIPS service-indicator coverage for KW and KWP with both supported key sizes.
nist_aes_key_wrap_with_padding_test!(kwp_aes128, &AES_128, K_128, P);
nist_aes_key_wrap_test!(kw_aes128, &AES_128, K_128, P);
nist_aes_key_wrap_with_padding_test!(kwp_aes256, &AES_256, K_256, P);
nist_aes_key_wrap_test!(kw_aes256, &AES_256, K_256, P);

371
vendor/aws-lc-rs/src/lib.rs vendored Normal file
View File

@@ -0,0 +1,371 @@
// Copyright 2015-2016 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![cfg_attr(not(clippy), allow(unexpected_cfgs))]
#![cfg_attr(not(clippy), allow(unknown_lints))]
#![allow(clippy::doc_markdown)]
//! A [*ring*](https://github.com/briansmith/ring)-compatible crypto library using the cryptographic
//! operations provided by [*AWS-LC*](https://github.com/aws/aws-lc). It uses either the
//! auto-generated [*aws-lc-sys*](https://crates.io/crates/aws-lc-sys) or
//! [*aws-lc-fips-sys*](https://crates.io/crates/aws-lc-fips-sys)
//! Foreign Function Interface (FFI) crates found in this repository for invoking *AWS-LC*.
//!
//! # Build
//!
//! `aws-lc-rs` is available through [crates.io](https://crates.io/crates/aws-lc-rs). It can
//! be added to your project in the [standard way](https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html)
//! using `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! aws-lc-rs = "1"
//! ```
//! Consuming projects will need a C/C++ compiler to build.
//!
//! **Non-FIPS builds (default):**
//! * CMake is **never** required
//! * Bindgen is **never** required (pre-generated bindings are provided)
//! * Go is **never** required
//!
//! **FIPS builds:** Require **CMake**, **Go**, and potentially **bindgen** depending on the target platform.
//!
//! See our [User Guide](https://aws.github.io/aws-lc-rs/) for guidance on installing build requirements.
//!
//! # Feature Flags
//!
//! #### alloc (default)
//!
//! Allows implementation to allocate values of arbitrary size. (The meaning of this feature differs
//! from the "alloc" feature of *ring*.) Currently, this is required by the `io::writer` module.
//!
//! #### ring-io (default)
//!
//! Enable feature to access the `io` module.
//!
//! #### ring-sig-verify (default)
//!
//! Enable feature to preserve compatibility with ring's `signature::VerificationAlgorithm::verify`
//! function. This adds a requirement on `untrusted = "0.7.1"`.
//!
//! #### fips
//!
//! Enable this feature to have aws-lc-rs use the [*aws-lc-fips-sys*](https://crates.io/crates/aws-lc-fips-sys)
//! crate for the cryptographic implementations. The aws-lc-fips-sys crate provides bindings to the
//! latest version of the AWS-LC-FIPS module that has completed FIPS validation testing by an
//! accredited lab and has been submitted to NIST for certification. This will continue to be the
//! case as we periodically submit new versions of the AWS-LC-FIPS module to NIST for certification.
//! Currently, aws-lc-fips-sys binds to
//! [AWS-LC-FIPS 3.0.x](https://github.com/aws/aws-lc/tree/fips-2024-09-27).
//!
//! Consult with your local FIPS compliance team to determine the version of AWS-LC-FIPS module that you require. Consumers
//! needing to remain on a previous version of the AWS-LC-FIPS module should pin to specific versions of aws-lc-rs to avoid
//! automatically being upgraded to a newer module version.
//! (See [cargo's documentation](https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html)
//! on how to specify dependency versions.)
//!
//! | AWS-LC-FIPS module | aws-lc-rs |
//! |--------------------|-----------|
//! | 2.0.x | \<1.12.0 |
//! | 3.0.x | *latest* |
//!
//! Refer to the
//! [NIST Cryptographic Module Validation Program's Modules In Progress List](https://csrc.nist.gov/Projects/cryptographic-module-validation-program/modules-in-process/Modules-In-Process-List)
//! for the latest status of the static or dynamic AWS-LC Cryptographic Module. Please see the
//! [FIPS.md in the aws-lc repository](https://github.com/aws/aws-lc/blob/main/crypto/fipsmodule/FIPS.md)
//! for relevant security policies and information on supported operating environments.
//! We will also update our release notes and documentation to reflect any changes in FIPS certification status.
//!
//! #### non-fips
//!
//! Enable this feature to guarantee that the non-FIPS [*aws-lc-sys*](https://crates.io/crates/aws-lc-sys)
//! crate is used for cryptographic implementations. This feature is mutually exclusive with the `fips`
//! feature - enabling both will result in a compile-time error. Use this feature when you need a
//! compile-time guarantee that your build is using the non-FIPS cryptographic module.
//!
//! #### asan
//!
//! Performs an "address sanitizer" build. This can be used to help detect memory leaks. See the
//! ["Address Sanitizer" section](https://doc.rust-lang.org/beta/unstable-book/compiler-flags/sanitizer.html#addresssanitizer)
//! of the [Rust Unstable Book](https://doc.rust-lang.org/beta/unstable-book/).
//!
//! #### bindgen
//!
//! Causes `aws-lc-sys` or `aws-lc-fips-sys` to generate fresh bindings for AWS-LC instead of using
//! the pre-generated bindings. This feature requires `libclang` to be installed. See the
//! [requirements](https://rust-lang.github.io/rust-bindgen/requirements.html)
//! for [rust-bindgen](https://github.com/rust-lang/rust-bindgen)
//!
//! #### prebuilt-nasm
//!
//! Enables the use of crate provided prebuilt NASM objects under certain conditions. This only affects builds for
//! Windows x86-64 platforms. This feature is ignored if the "fips" feature is also enabled.
//!
//! Use of prebuilt NASM objects is prevented if either of the following conditions are true:
//! * The NASM assembler is detected in the build environment
//! * `AWS_LC_SYS_PREBUILT_NASM` environment variable is set with a value of `0`
//!
//! Be aware that [features are additive](https://doc.rust-lang.org/cargo/reference/features.html#feature-unification);
//! by enabling this feature, it is enabled for all crates within the same build.
//!
//! #### dev-tests-only
//!
//! Enables the `rand::unsealed` module, which re-exports the normally sealed `SecureRandom` trait.
//! This allows consumers to provide their own implementations of `SecureRandom` (e.g., a
//! deterministic RNG) for testing purposes. When enabled, a `mut_fill` method is also available on
//! `SecureRandom`.
//!
//! This feature is restricted to **dev/debug profile builds only** — attempting to use it in a
//! release build will result in a compile-time error.
//!
//! It can be enabled in two ways:
//! * **Feature flag:** `cargo test --features dev-tests-only`
//! * **Environment variable:** `AWS_LC_RS_DEV_TESTS_ONLY=1 cargo test`
//!
//! **⚠️ Warning:** This feature is intended **only** for development and testing. It must not be
//! used in production builds. The `rand::unsealed` module and `mut_fill` method are not part of the
//! stable public API and may change without notice.
//!
//! # Use of prebuilt NASM objects
//!
//! Prebuilt NASM objects are **only** applicable to Windows x86-64 platforms. They are **never** used on any other platform (Linux, macOS, etc.).
//!
//! For Windows x86 and x86-64, NASM is required for assembly code compilation. On these platforms,
//! we recommend that you install [the NASM assembler](https://www.nasm.us/). **If NASM is
//! detected in the build environment, it is always used** to compile the assembly files. Prebuilt NASM objects are only used as a fallback.
//!
//! If a NASM assembler is not available, and the "fips" feature is not enabled, then the build fails unless one of the following conditions are true:
//!
//! * You are building for `x86-64` and either:
//! * The `AWS_LC_SYS_PREBUILT_NASM` environment variable is found and has a value of "1"; OR
//! * `AWS_LC_SYS_PREBUILT_NASM` is *not found* in the environment AND the "prebuilt-nasm" feature has been enabled.
//!
//! If the above cases apply, then the crate provided prebuilt NASM objects will be used for the build. To prevent usage of prebuilt NASM
//! objects, install NASM in the build environment and/or set the variable `AWS_LC_SYS_PREBUILT_NASM` to `0` in the build environment to prevent their use.
//!
//! ## About prebuilt NASM objects
//!
//! Prebuilt NASM objects are generated using automation similar to the crate provided pregenerated bindings. See the repository's
//! [GitHub workflow configuration](https://github.com/aws/aws-lc-rs/blob/main/.github/workflows/sys-bindings-generator.yml) for more information.
//! The prebuilt NASM objects are checked into the repository
//! and are [available for inspection](https://github.com/aws/aws-lc-rs/tree/main/aws-lc-sys/builder/prebuilt-nasm).
//! For each PR submitted,
//! [CI verifies](https://github.com/aws/aws-lc-rs/blob/main/.github/workflows/tests.yml)
//! that the NASM objects newly built from source match the NASM objects currently in the repository.
//!
//! # *ring*-compatibility
//!
//! Although this library attempts to be fully compatible with *ring* (v0.16.x), there are a few places where our
//! behavior is observably different.
//!
//! * Our implementation requires the `std` library. We currently do not support a
//! [`#![no_std]`](https://docs.rust-embedded.org/book/intro/no-std.html) build.
//! * `aws-lc-rs` supports the platforms supported by `aws-lc-sys` and AWS-LC. See the
//! [Platform Support](https://aws.github.io/aws-lc-rs/platform_support.html) page in our User Guide.
//! * `Ed25519KeyPair::from_pkcs8` and `Ed25519KeyPair::from_pkcs8_maybe_unchecked` both support
//! parsing of v1 or v2 PKCS#8 documents. If a v2 encoded key is provided to either function,
//! the public key component, if present, will be verified to match the one derived from the
//! encoded private key.
//!
//! # Post-Quantum Cryptography
//!
//! Details on the post-quantum algorithms supported by aws-lc-rs can be found at
//! [PQREADME](https://github.com/aws/aws-lc/tree/main/crypto/fipsmodule/PQREADME.md).
//!
//! # Motivation
//!
//! Rust developers increasingly need to deploy applications that meet US and Canadian government
//! cryptographic requirements. We evaluated how to deliver FIPS validated cryptography in idiomatic
//! and performant Rust, built around our AWS-LC offering. We found that the popular ring (v0.16)
//! library fulfilled much of the cryptographic needs in the Rust community, but it did not meet the
//! needs of developers with FIPS requirements. Our intention is to contribute a drop-in replacement
//! for ring that provides FIPS support and is compatible with the ring API. Rust developers with
//! prescribed cryptographic requirements can seamlessly integrate aws-lc-rs into their applications
//! and deploy them into AWS Regions.
#![warn(missing_docs)]
#![warn(clippy::exhaustive_enums)]
#![cfg_attr(aws_lc_rs_docsrs, feature(doc_cfg))]
extern crate alloc;
#[cfg(feature = "fips")]
extern crate aws_lc_fips_sys as aws_lc;
#[cfg(not(feature = "fips"))]
extern crate aws_lc_sys as aws_lc;
pub mod aead;
pub mod agreement;
pub mod cmac;
pub mod constant_time;
pub mod digest;
pub mod error;
pub mod hkdf;
pub mod hmac;
#[cfg(feature = "ring-io")]
pub mod io;
pub mod key_wrap;
pub mod pbkdf2;
pub mod pkcs8;
pub mod rand;
pub mod signature;
pub mod test;
mod bn;
mod buffer;
mod cbb;
mod cbs;
pub mod cipher;
mod debug;
mod ec;
mod ed25519;
pub mod encoding;
mod endian;
mod evp_pkey;
mod fips;
mod hex;
pub mod iv;
pub mod kdf;
#[allow(clippy::module_name_repetitions)]
pub mod kem;
#[cfg(all(feature = "unstable", not(feature = "fips")))]
mod pqdsa;
mod ptr;
pub mod rsa;
pub mod tls_prf;
pub mod unstable;
pub(crate) use debug::derive_debug_via_id;
// TODO: Uncomment when MSRV >= 1.64
// use core::ffi::CStr;
use std::ffi::CStr;
use crate::aws_lc::{
CRYPTO_library_init, ERR_error_string, ERR_get_error, FIPS_mode, ERR_GET_FUNC, ERR_GET_LIB,
ERR_GET_REASON,
};
use std::sync::Once;
/// Guards the one-time initialization of the underlying *AWS-LC* library.
static START: Once = Once::new();
/// Initialize the *AWS-LC* library. (This should generally not be needed.)
#[inline]
pub fn init() {
    START.call_once(|| {
        // One-time FFI initialization; `Once` ensures the call happens at
        // most once even with concurrent callers.
        unsafe {
            CRYPTO_library_init();
        }
    });
}
#[cfg(feature = "fips")]
/// Panics if the underlying implementation is not FIPS, otherwise it returns.
///
/// # Panics
/// Panics if the underlying implementation is not FIPS.
pub fn fips_mode() {
    // Delegates to the fallible check; `unwrap` panics on the `Err` case.
    try_fips_mode().unwrap();
}
/// Indicates whether the underlying implementation is FIPS.
///
/// # Errors
/// Return an error if the underlying implementation is not FIPS, otherwise Ok.
pub fn try_fips_mode() -> Result<(), &'static str> {
    // Ensure the library is initialized before querying its state.
    init();
    // `FIPS_mode()` returns 1 when the FIPS module is active.
    match unsafe { FIPS_mode() } {
        1 => Ok(()),
        _ => Err("FIPS mode not enabled!"),
    }
}
#[cfg(feature = "fips")]
/// Panics if the underlying implementation is not using CPU jitter entropy, otherwise it returns.
///
/// # Panics
/// Panics if the underlying implementation is not using CPU jitter entropy.
pub fn fips_cpu_jitter_entropy() {
    // Delegates to the fallible check; `unwrap` panics on the `Err` case.
    try_fips_cpu_jitter_entropy().unwrap();
}
/// Indicates whether the underlying implementation is using CPU jitter entropy.
///
/// # Errors
/// Return an error if the underlying implementation is not using CPU jitter entropy, otherwise Ok.
pub fn try_fips_cpu_jitter_entropy() -> Result<(), &'static str> {
    init();
    // TODO: Delete once FIPS_is_entropy_cpu_jitter() available on FIPS branch
    // https://github.com/aws/aws-lc/pull/2088
    // Exactly one of the two branches below is compiled, depending on the
    // "fips" feature; the surviving branch is the function's tail expression.
    #[cfg(feature = "fips")]
    if aws_lc::CFG_CPU_JITTER_ENTROPY() {
        Ok(())
    } else {
        Err("FIPS CPU Jitter Entropy not enabled!")
    }
    #[cfg(not(feature = "fips"))]
    match unsafe { aws_lc::FIPS_is_entropy_cpu_jitter() } {
        1 => Ok(()),
        _ => Err("FIPS CPU Jitter Entropy not enabled!"),
    }
}
#[allow(dead_code)]
/// Prints the most recent AWS-LC error (code, library, reason, function) to
/// stderr. Diagnostic helper only; used from tests.
unsafe fn dump_error() {
    let err = ERR_get_error();
    let lib = ERR_GET_LIB(err);
    let reason = ERR_GET_REASON(err);
    let func = ERR_GET_FUNC(err);
    let mut buffer = [0u8; 256];
    ERR_error_string(err, buffer.as_mut_ptr().cast());
    // `ERR_error_string` nul-terminates its output. Parse from the pointer so
    // the CStr ends at the first nul. The previous
    // `CStr::from_bytes_with_nul_unchecked(&buffer)` passed the entire
    // 256-byte array, which contains trailing zero bytes after the message and
    // therefore violated that function's safety contract (the slice must
    // contain exactly one nul, as its final byte).
    let error_msg = CStr::from_ptr(buffer.as_ptr().cast());
    eprintln!("Raw Error -- {error_msg:?}\nErr: {err}, Lib: {lib}, Reason: {reason}, Func: {func}");
}
// Private module implementing the "sealed trait" pattern: downstream crates
// can name public traits that require `Sealed`, but cannot implement them.
mod sealed {
    /// Traits that are designed to only be implemented internally in *aws-lc-rs*.
    //
    // Usage:
    // ```
    // use crate::sealed;
    //
    // pub trait MyType: sealed::Sealed {
    //     // [...]
    // }
    //
    // impl sealed::Sealed for MyType {}
    // ```
    pub trait Sealed {}
}
#[cfg(test)]
mod tests {
    use crate::{dump_error, init};
    // Smoke test: initialization must not panic and must be idempotent.
    #[test]
    fn test_init() {
        init();
    }
    // Smoke test: dumping the error state must not crash, even when no error
    // is pending.
    #[test]
    fn test_dump() {
        unsafe {
            dump_error();
        }
    }
    #[cfg(not(feature = "fips"))]
    #[test]
    fn test_fips() {
        // Without the "fips" feature, FIPS mode must report as disabled.
        assert!({ crate::try_fips_mode().is_err() });
        // Re-enable with fixed test after upstream has merged RAGDOLL
        //assert!({ crate::try_fips_cpu_jitter_entropy().is_ok() });
    }
    #[test]
    // FIPS mode is disabled for an ASAN build
    #[cfg(feature = "fips")]
    fn test_fips() {
        #[cfg(not(feature = "asan"))]
        crate::fips_mode();
        if aws_lc::CFG_CPU_JITTER_ENTROPY() {
            crate::fips_cpu_jitter_entropy();
        }
    }
}

311
vendor/aws-lc-rs/src/pbkdf2.rs vendored Normal file
View File

@@ -0,0 +1,311 @@
// Copyright 2015-2022 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! PBKDF2 derivation and verification.
//!
//! Use `derive` to derive PBKDF2 outputs. Use `verify` to verify secret
//! against previously-derived outputs.
//!
//! PBKDF2 is specified in [RFC 2898 Section 5.2] with test vectors given in
//! [RFC 6070]. See also [NIST Special Publication 800-132].
//!
//! [RFC 2898 Section 5.2]: https://tools.ietf.org/html/rfc2898#section-5.2
//! [RFC 6070]: https://tools.ietf.org/html/rfc6070
//! [NIST Special Publication 800-132]:
//! http://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf
//!
//! # Examples
//!
//! ## Password Database Example
//!
//! ```
//! use aws_lc_rs::{digest, pbkdf2};
//! use std::{collections::HashMap, num::NonZeroU32};
//!
//! static PBKDF2_ALG: pbkdf2::Algorithm = pbkdf2::PBKDF2_HMAC_SHA256;
//! const CREDENTIAL_LEN: usize = digest::SHA256_OUTPUT_LEN;
//! pub type Credential = [u8; CREDENTIAL_LEN];
//!
//! enum Error {
//! WrongUsernameOrPassword
//! }
//!
//! struct PasswordDatabase {
//! pbkdf2_iterations: NonZeroU32,
//! db_salt_component: [u8; 16],
//!
//! // Normally this would be a persistent database.
//! storage: HashMap<String, Credential>,
//! }
//!
//! impl PasswordDatabase {
//! pub fn store_password(&mut self, username: &str, password: &str) {
//! let salt = self.salt(username);
//! let mut to_store: Credential = [0u8; CREDENTIAL_LEN];
//! pbkdf2::derive(PBKDF2_ALG, self.pbkdf2_iterations, &salt,
//! password.as_bytes(), &mut to_store);
//! self.storage.insert(String::from(username), to_store);
//! }
//!
//! pub fn verify_password(&self, username: &str, attempted_password: &str)
//! -> Result<(), Error> {
//! match self.storage.get(username) {
//! Some(actual_password) => {
//! let salt = self.salt(username);
//! pbkdf2::verify(PBKDF2_ALG, self.pbkdf2_iterations, &salt,
//! attempted_password.as_bytes(),
//! actual_password)
//! .map_err(|_| Error::WrongUsernameOrPassword)
//! },
//!
//! None => Err(Error::WrongUsernameOrPassword)
//! }
//! }
//!
//! // The salt should have a user-specific component so that an attacker
//! // cannot crack one password for multiple users in the database. It
//! // should have a database-unique component so that an attacker cannot
//! // crack the same user's password across databases in the unfortunate
//! // but common case that the user has used the same password for
//! // multiple systems.
//! fn salt(&self, username: &str) -> Vec<u8> {
//! let mut salt = Vec::with_capacity(self.db_salt_component.len() +
//! username.as_bytes().len());
//! salt.extend(self.db_salt_component.as_ref());
//! salt.extend(username.as_bytes());
//! salt
//! }
//! }
//!
//! fn main() {
//! // Normally these parameters would be loaded from a configuration file.
//! let mut db = PasswordDatabase {
//! pbkdf2_iterations: NonZeroU32::new(100_000).unwrap(),
//! db_salt_component: [
//! // This value was generated from a secure PRNG.
//! 0xd6, 0x26, 0x98, 0xda, 0xf4, 0xdc, 0x50, 0x52,
//! 0x24, 0xf2, 0x27, 0xd1, 0xfe, 0x39, 0x01, 0x8a
//! ],
//! storage: HashMap::new(),
//! };
//!
//! db.store_password("alice", "@74d7]404j|W}6u");
//!
//! // An attempt to log in with the wrong password fails.
//! assert!(db.verify_password("alice", "wrong password").is_err());
//!
//! // Normally there should be an exponentially-increasing delay between
//! // attempts to further protect against online attacks.
//!
//! // An attempt to log in with the right password succeeds.
//! assert!(db.verify_password("alice", "@74d7]404j|W}6u").is_ok());
//! }
//! ```
use crate::aws_lc::PKCS5_PBKDF2_HMAC;
use crate::error::Unspecified;
use crate::fips::indicator_check;
use crate::{constant_time, digest, hmac};
use core::num::NonZeroU32;
use zeroize::Zeroize;
/// A PBKDF2 algorithm.
///
/// `max_output_len` is computed as u64 instead of usize to prevent overflowing on 32-bit machines.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Algorithm {
    // The HMAC algorithm used as the PRF.
    algorithm: hmac::Algorithm,
    // Maximum derived-key length: (2^32 - 1) * digest output length, per RFC 2898.
    max_output_len: u64,
}
/// PBKDF2 using HMAC-SHA1.
pub const PBKDF2_HMAC_SHA1: Algorithm = Algorithm {
    algorithm: hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY,
    max_output_len: MAX_USIZE32 * digest::SHA1_OUTPUT_LEN as u64,
};
/// PBKDF2 using HMAC-SHA256.
pub const PBKDF2_HMAC_SHA256: Algorithm = Algorithm {
    algorithm: hmac::HMAC_SHA256,
    max_output_len: MAX_USIZE32 * digest::SHA256_OUTPUT_LEN as u64,
};
/// PBKDF2 using HMAC-SHA384.
pub const PBKDF2_HMAC_SHA384: Algorithm = Algorithm {
    algorithm: hmac::HMAC_SHA384,
    max_output_len: MAX_USIZE32 * digest::SHA384_OUTPUT_LEN as u64,
};
/// PBKDF2 using HMAC-SHA512.
pub const PBKDF2_HMAC_SHA512: Algorithm = Algorithm {
    algorithm: hmac::HMAC_SHA512,
    max_output_len: MAX_USIZE32 * digest::SHA512_OUTPUT_LEN as u64,
};
// (2^32 - 1): the per-PRF block-count bound from the PBKDF2 spec (RFC 2898).
const MAX_USIZE32: u64 = u32::MAX as u64;
/// Fills `out` with the key derived using PBKDF2 with the given inputs.
///
/// Do not use `derive` as part of verifying a secret; use `verify` instead, to
/// minimize the effectiveness of timing attacks.
///
/// `out.len()` must be no larger than the digest length * (2**32 - 1), per the
/// PBKDF2 specification.
///
/// | Parameter | RFC 2898 Section 5.2 Term
/// |-------------|-------------------------------------------
/// | `digest_alg` | PRF (HMAC with the given digest algorithm)
/// | `iterations` | c (iteration count)
/// | `salt` | S (salt)
/// | `secret` | P (password)
/// | `out` | dk (derived key)
/// | `out.len()` | dkLen (derived key length)
///
/// # Panics
///
/// `derive` panics if `out.len()` is larger than (2**32 - 1) * the digest
/// algorithm's output length, per the PBKDF2 specification.
//
// # FIPS
// The following conditions must be met:
// * Algorithm is one of the following:
// * `PBKDF2_HMAC_SHA1`
// * `PBKDF2_HMAC_SHA256`
// * `PBKDF2_HMAC_SHA384`
// * `PBKDF2_HMAC_SHA512`
// * `salt.len()` >= 16
// * `secret.len()` >= 14
// * `iterations` >= 1000
#[inline]
pub fn derive(
    algorithm: Algorithm,
    iterations: NonZeroU32,
    salt: &[u8],
    secret: &[u8],
    out: &mut [u8],
) {
    // Delegates to the fallible helper; any failure from the underlying
    // implementation surfaces as this `expect` panic.
    try_derive(algorithm, iterations, salt, secret, out).expect("pbkdf2 derive failed");
}
#[inline]
// Fallible core of `derive`/`verify`: runs PBKDF2 via AWS-LC's
// `PKCS5_PBKDF2_HMAC`, writing `out.len()` bytes of derived key into `out`.
fn try_derive(
    algorithm: Algorithm,
    iterations: NonZeroU32,
    salt: &[u8],
    secret: &[u8],
    out: &mut [u8],
) -> Result<(), Unspecified> {
    // Enforce the PBKDF2 limit: dkLen <= (2^32 - 1) * hLen.
    assert!(
        out.len() as u64 <= algorithm.max_output_len,
        "derived key too long"
    );
    // `PKCS5_PBKDF2_HMAC` returns 1 on success; `indicator_check!` records the
    // FIPS service-indicator status around the call.
    if 1 != indicator_check!(unsafe {
        PKCS5_PBKDF2_HMAC(
            secret.as_ptr().cast(),
            secret.len(),
            salt.as_ptr(),
            salt.len(),
            iterations.get(),
            digest::match_digest_type(&algorithm.algorithm.digest_algorithm().id).as_const_ptr(),
            out.len(),
            out.as_mut_ptr(),
        )
    }) {
        return Err(Unspecified);
    }
    Ok(())
}
/// Verifies that a previously-derived (e.g., using `derive`) PBKDF2 value
/// matches the PBKDF2 value derived from the other inputs.
///
/// The comparison is done in constant time to prevent timing attacks. The
/// comparison will fail if `previously_derived` is empty (has a length of
/// zero).
///
/// | Parameter | RFC 2898 Section 5.2 Term
/// |----------------------------|--------------------------------------------
/// | `digest_alg` | PRF (HMAC with the given digest algorithm).
/// | `iterations` | c (iteration count)
/// | `salt` | S (salt)
/// | `secret` | P (password)
/// | `previously_derived` | dk (derived key)
/// | `previously_derived.len()` | dkLen (derived key length)
///
/// # Errors
/// `error::Unspecified` if the inputs were not verified.
///
/// # Panics
///
/// `verify` panics if `previously_derived.len()` is larger than (2**32 - 1) * the digest
/// algorithm's output length, per the PBKDF2 specification.
//
// # FIPS
// The following conditions must be met:
// * Algorithm is one of the following:
// * `PBKDF2_HMAC_SHA1`
// * `PBKDF2_HMAC_SHA256`
// * `PBKDF2_HMAC_SHA384`
// * `PBKDF2_HMAC_SHA512`
// * `salt.len()` >= 16
// * `secret.len()` >= 14
// * `iterations` >= 1000
#[inline]
pub fn verify(
    algorithm: Algorithm,
    iterations: NonZeroU32,
    salt: &[u8],
    secret: &[u8],
    previously_derived: &[u8],
) -> Result<(), Unspecified> {
    // An empty reference value can never verify; reject up front.
    if previously_derived.is_empty() {
        return Err(Unspecified);
    }
    assert!(
        previously_derived.len() as u64 <= algorithm.max_output_len,
        "derived key too long"
    );
    // Create a vector with the expected output length.
    let mut derived_buf = vec![0u8; previously_derived.len()];
    try_derive(algorithm, iterations, salt, secret, &mut derived_buf)?;
    // Constant-time comparison, then scrub the freshly derived key material
    // before returning the comparison result.
    let result = constant_time::verify_slices_are_equal(&derived_buf, previously_derived);
    derived_buf.zeroize();
    result
}
#[cfg(test)]
mod tests {
    use crate::pbkdf2;
    use core::num::NonZeroU32;
    #[cfg(feature = "fips")]
    mod fips;
    #[test]
    fn pbkdf2_coverage() {
        // Coverage sanity check.
        assert!(pbkdf2::PBKDF2_HMAC_SHA256 == pbkdf2::PBKDF2_HMAC_SHA256);
        assert!(pbkdf2::PBKDF2_HMAC_SHA256 != pbkdf2::PBKDF2_HMAC_SHA384);
        let iterations = NonZeroU32::new(100_u32).unwrap();
        for &alg in &[
            pbkdf2::PBKDF2_HMAC_SHA1,
            pbkdf2::PBKDF2_HMAC_SHA256,
            pbkdf2::PBKDF2_HMAC_SHA384,
            pbkdf2::PBKDF2_HMAC_SHA512,
        ] {
            let mut out = vec![0u8; 64];
            pbkdf2::derive(alg, iterations, b"salt", b"password", &mut out);
            // `Algorithm` is `Copy`: a by-value copy must derive identically.
            let alg_clone = alg;
            let mut out2 = vec![0u8; 64];
            pbkdf2::derive(alg_clone, iterations, b"salt", b"password", &mut out2);
            assert_eq!(out, out2);
        }
    }
}

View File

@@ -0,0 +1,134 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![cfg(debug_assertions)]
use core::num::NonZeroU32;
use crate::fips::{assert_fips_status_indicator, FipsServiceStatus};
use crate::pbkdf2::{
derive, verify, PBKDF2_HMAC_SHA1, PBKDF2_HMAC_SHA256, PBKDF2_HMAC_SHA384, PBKDF2_HMAC_SHA512,
};
// Generates a #[test] that runs `derive` and `verify` with the given
// algorithm, secret length, salt length, and iteration count, asserting the
// expected FIPS service-indicator status for both calls.
macro_rules! pbkdf2_api {
    ($name:ident, $alg:expr, $secret_len:literal, $salt_len:literal, $iterations:literal, $expect:path) => {
        #[test]
        fn $name() {
            // secret len >= 14 for fips indicator
            let secret = vec![42u8; $secret_len];
            // salt len >= 16 for fips indicator
            let salt = vec![42u8; $salt_len];
            let mut out = vec![0u8; 1024];
            // iterations >= 1000
            let iterations = NonZeroU32::new($iterations).unwrap();
            assert_fips_status_indicator!(
                derive($alg, iterations, &salt, &secret, &mut out),
                $expect
            );
            assert_fips_status_indicator!(verify($alg, iterations, &salt, &secret, &out), $expect)
                .unwrap();
        }
    };
}
// Boundary cases around the FIPS indicator thresholds (secret >= 14,
// salt >= 16, iterations >= 1000): one test just below all thresholds
// (NonApproved), one exactly at them, and one just above (both Approved).
//
// Fix: the `*_15secret_17_salt_1001iter` cases previously passed a salt
// length of 16 while their names claim 17; the value now matches the name
// (17 >= 16, so the Approved expectation is unchanged).
pbkdf2_api!(
    sha1_13secret_15_salt_999iter,
    PBKDF2_HMAC_SHA1,
    13,
    15,
    999,
    FipsServiceStatus::NonApproved
);
pbkdf2_api!(
    sha1_14secret_16_salt_1000iter,
    PBKDF2_HMAC_SHA1,
    14,
    16,
    1000,
    FipsServiceStatus::Approved
);
pbkdf2_api!(
    sha1_15secret_17_salt_1001iter,
    PBKDF2_HMAC_SHA1,
    15,
    17,
    1001,
    FipsServiceStatus::Approved
);
pbkdf2_api!(
    sha256_13secret_15_salt_999iter,
    PBKDF2_HMAC_SHA256,
    13,
    15,
    999,
    FipsServiceStatus::NonApproved
);
pbkdf2_api!(
    sha256_14secret_16_salt_1000iter,
    PBKDF2_HMAC_SHA256,
    14,
    16,
    1000,
    FipsServiceStatus::Approved
);
pbkdf2_api!(
    sha256_15secret_17_salt_1001iter,
    PBKDF2_HMAC_SHA256,
    15,
    17,
    1001,
    FipsServiceStatus::Approved
);
pbkdf2_api!(
    sha384_13secret_15_salt_999iter,
    PBKDF2_HMAC_SHA384,
    13,
    15,
    999,
    FipsServiceStatus::NonApproved
);
pbkdf2_api!(
    sha384_14secret_16_salt_1000iter,
    PBKDF2_HMAC_SHA384,
    14,
    16,
    1000,
    FipsServiceStatus::Approved
);
pbkdf2_api!(
    sha384_15secret_17_salt_1001iter,
    PBKDF2_HMAC_SHA384,
    15,
    17,
    1001,
    FipsServiceStatus::Approved
);
pbkdf2_api!(
    sha512_13secret_15_salt_999iter,
    PBKDF2_HMAC_SHA512,
    13,
    15,
    999,
    FipsServiceStatus::NonApproved
);
pbkdf2_api!(
    sha512_14secret_16_salt_1000iter,
    PBKDF2_HMAC_SHA512,
    14,
    16,
    1000,
    FipsServiceStatus::Approved
);
pbkdf2_api!(
    sha512_15secret_17_salt_1001iter,
    PBKDF2_HMAC_SHA512,
    15,
    17,
    1001,
    FipsServiceStatus::Approved
);

40
vendor/aws-lc-rs/src/pkcs8.rs vendored Normal file
View File

@@ -0,0 +1,40 @@
// Copyright 2017 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! PKCS#8 is specified in [RFC 5208].
//!
//! [RFC 5208]: https://tools.ietf.org/html/rfc5208.
use zeroize::Zeroize;
/// A generated PKCS#8 document.
pub struct Document {
    // DER-encoded PKCS#8 bytes; zeroized on drop since they hold key material.
    bytes: Vec<u8>,
}
impl Document {
    // Wraps already-serialized PKCS#8 DER bytes.
    pub(crate) fn new(bytes: Vec<u8>) -> Self {
        Self { bytes }
    }
}
impl AsRef<[u8]> for Document {
    /// Returns the DER-encoded PKCS#8 bytes.
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.bytes
    }
}
impl Drop for Document {
    // Best-effort scrubbing of private-key material from memory on drop.
    fn drop(&mut self) {
        self.bytes.zeroize();
    }
}
// Selects the PKCS#8 encoding version (v1 per RFC 5208, v2 per RFC 5958).
#[derive(Copy, Clone)]
pub(crate) enum Version {
    V1,
    V2,
}

165
vendor/aws-lc-rs/src/pqdsa.rs vendored Normal file
View File

@@ -0,0 +1,165 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
pub(crate) mod key_pair;
pub(crate) mod signature;
use crate::aws_lc::{EVP_PKEY, EVP_PKEY_PQDSA, NID_MLDSA44, NID_MLDSA65, NID_MLDSA87};
use crate::error::{KeyRejected, Unspecified};
use crate::ptr::LcPtr;
use core::ffi::c_int;
// Identifies the ML-DSA parameter set backing a PQDSA key (see FIPS 204).
#[derive(Debug, Eq, PartialEq)]
#[allow(non_camel_case_types)]
pub(crate) enum AlgorithmID {
    ML_DSA_44,
    ML_DSA_65,
    ML_DSA_87,
}
impl AlgorithmID {
    /// Maps an AWS-LC NID to the corresponding ML-DSA parameter set.
    #[allow(dead_code)]
    pub(crate) const fn from_nid(nid: c_int) -> Result<Self, Unspecified> {
        match nid {
            NID_MLDSA44 => Ok(Self::ML_DSA_44),
            NID_MLDSA65 => Ok(Self::ML_DSA_65),
            NID_MLDSA87 => Ok(Self::ML_DSA_87),
            _ => Err(Unspecified),
        }
    }
    /// The AWS-LC NID for this parameter set.
    pub(crate) const fn nid(&self) -> c_int {
        match self {
            Self::ML_DSA_44 => NID_MLDSA44,
            Self::ML_DSA_65 => NID_MLDSA65,
            Self::ML_DSA_87 => NID_MLDSA87,
        }
    }
    /// Expanded private-key size in bytes for each parameter set.
    #[allow(dead_code)]
    pub(crate) const fn priv_key_size_bytes(&self) -> usize {
        match self {
            Self::ML_DSA_44 => 2560,
            Self::ML_DSA_65 => 4032,
            Self::ML_DSA_87 => 4896,
        }
    }
    /// Public-key size in bytes for each parameter set.
    pub(crate) const fn pub_key_size_bytes(&self) -> usize {
        match self {
            Self::ML_DSA_44 => 1312,
            Self::ML_DSA_65 => 1952,
            Self::ML_DSA_87 => 2592,
        }
    }
    /// Key-generation seed size in bytes.
    pub(crate) const fn seed_size_bytes(&self) -> usize {
        // All ML-DSA variants use 32-byte seeds per FIPS 204
        match self {
            Self::ML_DSA_44 | Self::ML_DSA_65 | Self::ML_DSA_87 => 32,
        }
    }
    /// Signature size in bytes for each parameter set.
    pub(crate) const fn signature_size_bytes(&self) -> usize {
        match self {
            Self::ML_DSA_44 => 2420,
            Self::ML_DSA_65 => 3309,
            Self::ML_DSA_87 => 4627,
        }
    }
}
// Checks that the EVP_PKEY's reported key size matches the public-key size
// required by the declared ML-DSA parameter set; rejects the key otherwise.
pub(crate) fn validate_pqdsa_evp_key(
    evp_pkey: &LcPtr<EVP_PKEY>,
    id: &'static AlgorithmID,
) -> Result<(), KeyRejected> {
    let actual = evp_pkey.as_const().key_size_bytes();
    let expected = id.pub_key_size_bytes();
    if actual == expected {
        return Ok(());
    }
    Err(KeyRejected::unspecified())
}
// Parses a PQDSA public key, first as an RFC 5280 SubjectPublicKeyInfo, then
// falling back to the raw encoding, and validates it against the expected
// parameter set.
//
// Fix: `Result::or(..)` evaluates its argument eagerly, so the raw-key parse
// previously ran even when the RFC 5280 parse succeeded; `or_else` makes the
// fallback lazy.
pub(crate) fn parse_pqdsa_public_key(
    key_bytes: &[u8],
    id: &'static AlgorithmID,
) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
    LcPtr::<EVP_PKEY>::parse_rfc5280_public_key(key_bytes, EVP_PKEY_PQDSA)
        .or_else(|_| LcPtr::<EVP_PKEY>::parse_raw_public_key(key_bytes, EVP_PKEY_PQDSA))
        .and_then(|key| validate_pqdsa_evp_key(&key, id).map(|()| key))
}
#[cfg(test)]
mod tests {
    use crate::aws_lc::{
        EVP_PKEY_cmp, EVP_PKEY, EVP_PKEY_PQDSA, NID_MLDSA44, NID_MLDSA65, NID_MLDSA87,
    };
    use crate::evp_pkey::*;
    use crate::pkcs8::Version;
    use crate::pqdsa::key_pair::evp_key_pqdsa_generate;
    use crate::pqdsa::AlgorithmID;
    use crate::ptr::LcPtr;
    // Generates a key for each ML-DSA NID and exercises serialization and
    // signing round-trips.
    #[test]
    fn test_keygen() {
        for nid in [NID_MLDSA44, NID_MLDSA65, NID_MLDSA87] {
            let key = evp_key_pqdsa_generate(nid).unwrap();
            println!("key size: {:?}", key.as_const().key_size_bytes());
            test_serialization_for(&key, &AlgorithmID::from_nid(nid).unwrap());
            test_signing_for(&key, &AlgorithmID::from_nid(nid).unwrap());
        }
    }
    // Round-trips public/private keys through RFC 5280/5208 and raw encodings
    // and checks equality via EVP_PKEY_cmp (returns 1 when keys match).
    fn test_serialization_for(evp_pkey: &LcPtr<EVP_PKEY>, id: &AlgorithmID) {
        let public_buffer = evp_pkey.as_const().marshal_rfc5280_public_key().unwrap();
        println!("public marshall: {public_buffer:?}");
        let key_public =
            LcPtr::<EVP_PKEY>::parse_rfc5280_public_key(&public_buffer, EVP_PKEY_PQDSA).unwrap();
        let private_buffer = evp_pkey
            .as_const()
            .marshal_rfc5208_private_key(Version::V1)
            .unwrap();
        println!("private marshall: {private_buffer:?}");
        let key_private =
            LcPtr::<EVP_PKEY>::parse_rfc5208_private_key(&private_buffer, EVP_PKEY_PQDSA).unwrap();
        let raw_public_buffer = key_public.as_const().marshal_raw_public_key().unwrap();
        assert_eq!(raw_public_buffer.len(), id.pub_key_size_bytes());
        println!("raw public size: {}", raw_public_buffer.len());
        let key_public2 =
            LcPtr::<EVP_PKEY>::parse_raw_public_key(&raw_public_buffer, EVP_PKEY_PQDSA).unwrap();
        assert_eq!(1, unsafe {
            EVP_PKEY_cmp(key_public.as_const_ptr(), key_public2.as_const_ptr())
        });
        let raw_private_buffer = key_private.as_const().marshal_raw_private_key().unwrap();
        assert_eq!(raw_private_buffer.len(), id.priv_key_size_bytes());
        println!("raw private size: {}", raw_private_buffer.len());
        let key_private2 =
            LcPtr::<EVP_PKEY>::parse_raw_private_key(&raw_private_buffer, EVP_PKEY_PQDSA).unwrap();
        assert_eq!(1, unsafe {
            EVP_PKEY_cmp(key_private.as_const_ptr(), key_private2.as_const_ptr())
        });
    }
    // Signs and verifies a message, checking the signature length against both
    // the key's and the parameter set's declared sizes.
    fn test_signing_for(evp_pkey: &LcPtr<EVP_PKEY>, id: &AlgorithmID) {
        let message = b"hello world";
        let signature = evp_pkey
            .sign(message, None, No_EVP_PKEY_CTX_consumer)
            .unwrap();
        println!("signature size: {}", signature.len());
        assert_eq!(signature.len(), evp_pkey.as_const().signature_size_bytes());
        assert_eq!(signature.len(), id.signature_size_bytes());
        evp_pkey
            .verify(message, None, No_EVP_PKEY_CTX_consumer, &signature)
            .unwrap();
        println!("verified: {signature:?}");
    }
}

439
vendor/aws-lc-rs/src/pqdsa/key_pair.rs vendored Normal file
View File

@@ -0,0 +1,439 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{
EVP_PKEY_CTX_pqdsa_set_params, EVP_PKEY_pqdsa_new_raw_private_key, EVP_PKEY, EVP_PKEY_PQDSA,
};
use crate::encoding::{AsDer, AsRawBytes, Pkcs8V1Der, PqdsaPrivateKeyRaw};
use crate::error::{KeyRejected, Unspecified};
use crate::evp_pkey::No_EVP_PKEY_CTX_consumer;
use crate::pkcs8;
use crate::pkcs8::{Document, Version};
use crate::pqdsa::signature::{PqdsaSigningAlgorithm, PublicKey};
use crate::pqdsa::validate_pqdsa_evp_key;
use crate::ptr::LcPtr;
use crate::signature::KeyPair;
use core::fmt::{Debug, Formatter};
use std::ffi::c_int;
/// A PQDSA (Post-Quantum Digital Signature Algorithm) key pair, used for signing and verification.
#[allow(clippy::module_name_repetitions)]
pub struct PqdsaKeyPair {
    // Signing algorithm (and thus the ML-DSA parameter set) for this key.
    algorithm: &'static PqdsaSigningAlgorithm,
    // Owned handle to the underlying AWS-LC EVP_PKEY.
    evp_pkey: LcPtr<EVP_PKEY>,
    // Public half, precomputed at construction.
    pubkey: PublicKey,
}
#[allow(clippy::missing_fields_in_debug)]
impl Debug for PqdsaKeyPair {
    // Only the algorithm is shown; key material is deliberately omitted.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("PqdsaKeyPair")
            .field("algorithm", &self.algorithm)
            .finish()
    }
}
impl KeyPair for PqdsaKeyPair {
    type PublicKey = PublicKey;
    /// Returns the public key computed when this pair was constructed.
    fn public_key(&self) -> &Self::PublicKey {
        &self.pubkey
    }
}
/// A PQDSA private key.
//
// Borrows the whole key pair so serialization can reach the EVP_PKEY.
pub struct PqdsaPrivateKey<'a>(pub(crate) &'a PqdsaKeyPair);
impl AsDer<Pkcs8V1Der<'static>> for PqdsaPrivateKey<'_> {
    /// Serializes the key to PKCS#8 v1 DER.
    ///
    /// # Errors
    /// Returns `Unspecified` if serialization fails.
    fn as_der(&self) -> Result<Pkcs8V1Der<'static>, Unspecified> {
        // RFC 5208 (PKCS#8 v1) encoding of the underlying EVP_PKEY.
        Ok(Pkcs8V1Der::new(
            self.0
                .evp_pkey
                .as_const()
                .marshal_rfc5208_private_key(pkcs8::Version::V1)?,
        ))
    }
}
impl AsRawBytes<PqdsaPrivateKeyRaw<'static>> for PqdsaPrivateKey<'_> {
    /// Serializes the private key to its raw byte encoding.
    ///
    /// # Errors
    /// Returns `Unspecified` if serialization fails.
    fn as_raw_bytes(&self) -> Result<PqdsaPrivateKeyRaw<'static>, Unspecified> {
        Ok(PqdsaPrivateKeyRaw::new(
            self.0.evp_pkey.as_const().marshal_raw_private_key()?,
        ))
    }
}
impl PqdsaKeyPair {
    /// Generates a new PQDSA key pair for the specified algorithm.
    ///
    /// # Errors
    /// Returns `Unspecified` if the key generation fails.
    pub fn generate(algorithm: &'static PqdsaSigningAlgorithm) -> Result<Self, Unspecified> {
        let evp_pkey = evp_key_pqdsa_generate(algorithm.0.id.nid())?;
        // Precompute the public half so `public_key()` is infallible.
        let pubkey = PublicKey::from_private_evp_pkey(&evp_pkey)?;
        Ok(Self {
            algorithm,
            evp_pkey,
            pubkey,
        })
    }
    /// Constructs a key pair from the parsing of PKCS#8.
    ///
    /// # Errors
    /// Returns `Unspecified` if the key is not valid for the specified signing algorithm.
    pub fn from_pkcs8(
        algorithm: &'static PqdsaSigningAlgorithm,
        pkcs8: &[u8],
    ) -> Result<Self, KeyRejected> {
        let evp_pkey = LcPtr::<EVP_PKEY>::parse_rfc5208_private_key(pkcs8, EVP_PKEY_PQDSA)?;
        // Reject keys whose size does not match the declared parameter set.
        validate_pqdsa_evp_key(&evp_pkey, algorithm.0.id)?;
        let pubkey = PublicKey::from_private_evp_pkey(&evp_pkey)?;
        Ok(Self {
            algorithm,
            evp_pkey,
            pubkey,
        })
    }
    /// Constructs a key pair from raw private key bytes.
    ///
    /// # Errors
    /// Returns `Unspecified` if the key is not valid for the specified signing algorithm.
    pub fn from_raw_private_key(
        algorithm: &'static PqdsaSigningAlgorithm,
        raw_private_key: &[u8],
    ) -> Result<Self, KeyRejected> {
        let evp_pkey = LcPtr::<EVP_PKEY>::parse_raw_private_key(raw_private_key, EVP_PKEY_PQDSA)?;
        validate_pqdsa_evp_key(&evp_pkey, algorithm.0.id)?;
        let pubkey = PublicKey::from_private_evp_pkey(&evp_pkey)?;
        Ok(Self {
            algorithm,
            evp_pkey,
            pubkey,
        })
    }
    /// Constructs a key pair deterministically from a 32-byte seed.
    ///
    /// Per FIPS 204, the same seed always produces the same key pair. This enables
    /// reproducible key generation for testing, ACVP validation, and interoperability
    /// with implementations that store seeds rather than expanded private keys.
    ///
    /// `algorithm` is the [`PqdsaSigningAlgorithm`] to be associated with the key pair.
    ///
    /// `seed` is the 32-byte seed from which the key pair is deterministically derived.
    /// All ML-DSA variants (ML-DSA-44, ML-DSA-65, ML-DSA-87) use 32-byte seeds.
    ///
    /// # Security Considerations
    ///
    /// The seed is the root secret. Compromise of the seed is equivalent to compromise
    /// of the private key. Callers are responsible for generating seeds from a
    /// cryptographically secure random source and protecting them accordingly.
    ///
    /// This method expands the seed into the full private key internally. The seed
    /// itself is not retained in the returned [`PqdsaKeyPair`]; the expanded key material
    /// is stored instead. The expanded private key can be retrieved via
    /// [`Self::private_key`] and serialized via [`Self::to_pkcs8`] or
    /// [`PqdsaPrivateKey::as_raw_bytes`].
    ///
    /// # Errors
    ///
    /// Returns `KeyRejected::too_small()` if `seed.len() < 32`.
    ///
    /// Returns `KeyRejected::too_large()` if `seed.len() > 32`.
    ///
    /// Returns `KeyRejected::unspecified()` if the underlying cryptographic operation fails.
    pub fn from_seed(
        algorithm: &'static PqdsaSigningAlgorithm,
        seed: &[u8],
    ) -> Result<Self, KeyRejected> {
        // Enforce exactly the parameter set's seed size before touching FFI.
        let expected_seed_len = algorithm.0.id.seed_size_bytes();
        match seed.len().cmp(&expected_seed_len) {
            core::cmp::Ordering::Less => return Err(KeyRejected::too_small()),
            core::cmp::Ordering::Greater => return Err(KeyRejected::too_large()),
            core::cmp::Ordering::Equal => {}
        }
        let nid = algorithm.0.id.nid();
        // Expands the seed into a full private key inside AWS-LC.
        let evp_pkey = LcPtr::new(unsafe {
            EVP_PKEY_pqdsa_new_raw_private_key(nid, seed.as_ptr(), seed.len())
        })
        .map_err(|()| KeyRejected::unspecified())?;
        validate_pqdsa_evp_key(&evp_pkey, algorithm.0.id)?;
        let pubkey =
            PublicKey::from_private_evp_pkey(&evp_pkey).map_err(|_| KeyRejected::unspecified())?;
        Ok(Self {
            algorithm,
            evp_pkey,
            pubkey,
        })
    }
    /// Serializes the private key to PKCS#8 v1 DER.
    ///
    /// # Errors
    /// Returns `Unspecified` if serialization fails.
    pub fn to_pkcs8(&self) -> Result<Document, Unspecified> {
        Ok(Document::new(
            self.evp_pkey
                .as_const()
                .marshal_rfc5208_private_key(Version::V1)?,
        ))
    }
    /// Uses this key to sign the message provided. The signature is written to the `signature`
    /// slice provided. It returns the length of the signature on success.
    ///
    /// # Errors
    /// Returns `Unspecified` if signing fails.
    pub fn sign(&self, msg: &[u8], signature: &mut [u8]) -> Result<usize, Unspecified> {
        let sig_length = self.algorithm.signature_len();
        // The caller's buffer must hold the full fixed-size signature.
        if signature.len() < sig_length {
            return Err(Unspecified);
        }
        let sig_bytes = self.evp_pkey.sign(msg, None, No_EVP_PKEY_CTX_consumer)?;
        // NOTE(review): `copy_from_slice` panics unless `sig_bytes.len() ==
        // sig_length`; this assumes the key always produces exactly the
        // algorithm's declared signature length — confirm against AWS-LC.
        signature[0..sig_length].copy_from_slice(&sig_bytes);
        Ok(sig_length)
    }
    /// Returns the signing algorithm associated with this key pair.
    #[must_use]
    pub fn algorithm(&self) -> &'static PqdsaSigningAlgorithm {
        self.algorithm
    }
    /// Returns the private key associated with this key pair.
    #[must_use]
    pub fn private_key(&self) -> PqdsaPrivateKey<'_> {
        PqdsaPrivateKey(self)
    }
}
// SAFETY(review): presumes the wrapped EVP_PKEY is not mutated through shared
// references after construction and that AWS-LC permits cross-thread use of
// these key operations — confirm against AWS-LC's EVP_PKEY threading
// guarantees.
unsafe impl Send for PqdsaKeyPair {}
unsafe impl Sync for PqdsaKeyPair {}
// Generates a fresh PQDSA EVP_PKEY, configuring the keygen context with the
// requested ML-DSA parameter set (AWS-LC reports success as 1).
pub(crate) fn evp_key_pqdsa_generate(nid: c_int) -> Result<LcPtr<EVP_PKEY>, Unspecified> {
    LcPtr::<EVP_PKEY>::generate(
        EVP_PKEY_PQDSA,
        Some(|ctx| match unsafe { EVP_PKEY_CTX_pqdsa_set_params(ctx, nid) } {
            1 => Ok(()),
            _ => Err(()),
        }),
    )
}
#[cfg(all(test, feature = "unstable"))]
mod tests {
use super::*;
use crate::signature::UnparsedPublicKey;
use crate::unstable::signature::{ML_DSA_44_SIGNING, ML_DSA_65_SIGNING, ML_DSA_87_SIGNING};
const TEST_ALGORITHMS: &[&PqdsaSigningAlgorithm] =
&[&ML_DSA_44_SIGNING, &ML_DSA_65_SIGNING, &ML_DSA_87_SIGNING];
#[test]
fn test_public_key_serialization() {
for &alg in TEST_ALGORITHMS {
// Generate a new key pair
let keypair = PqdsaKeyPair::generate(alg).unwrap();
let message = b"Test message";
let different_message = b"Different message";
let mut signature = vec![0; alg.signature_len()];
assert!(keypair
.sign(message, &mut signature[0..(alg.signature_len() - 1)])
.is_err());
let sig_len = keypair.sign(message, &mut signature).unwrap();
assert_eq!(sig_len, alg.signature_len());
let invalid_signature = vec![0u8; alg.signature_len()];
let original_public_key = keypair.public_key();
let x509_der = original_public_key.as_der().unwrap();
let x509_public_key = UnparsedPublicKey::new(alg.0, x509_der.as_ref());
assert!(x509_public_key.verify(message, signature.as_ref()).is_ok());
assert!(x509_public_key
.verify(different_message, signature.as_ref())
.is_err());
assert!(x509_public_key.verify(message, &invalid_signature).is_err());
let raw = original_public_key.as_ref();
let raw_public_key = UnparsedPublicKey::new(alg.0, raw);
assert!(raw_public_key.verify(message, signature.as_ref()).is_ok());
assert!(raw_public_key
.verify(different_message, signature.as_ref())
.is_err());
assert!(raw_public_key
.verify(different_message, &invalid_signature)
.is_err());
#[cfg(feature = "ring-sig-verify")]
#[allow(deprecated)]
{
use crate::signature::VerificationAlgorithm;
assert!(alg
.0
.verify(
raw.into(),
message.as_ref().into(),
signature.as_slice().into()
)
.is_ok());
}
}
}
#[test]
fn test_private_key_serialization() {
for &alg in TEST_ALGORITHMS {
// Generate a new key pair
let keypair = PqdsaKeyPair::generate(alg).unwrap();
let message = b"Test message";
let mut original_signature = vec![0; alg.signature_len()];
let sig_len = keypair.sign(message, &mut original_signature).unwrap();
assert_eq!(sig_len, alg.signature_len());
let public_key = keypair.public_key();
let unparsed_public_key = UnparsedPublicKey::new(alg.0, public_key.as_ref());
unparsed_public_key
.verify(message, original_signature.as_ref())
.unwrap();
let pkcs8_1 = keypair.to_pkcs8().unwrap();
let pkcs8_2 = keypair.private_key().as_der().unwrap();
let raw = keypair.private_key().as_raw_bytes().unwrap();
assert_eq!(pkcs8_1.as_ref(), pkcs8_2.as_ref());
let pkcs8_keypair = PqdsaKeyPair::from_pkcs8(alg, pkcs8_1.as_ref()).unwrap();
let raw_keypair = PqdsaKeyPair::from_raw_private_key(alg, raw.as_ref()).unwrap();
assert_eq!(pkcs8_keypair.evp_pkey, raw_keypair.evp_pkey);
}
}
#[test]
fn test_from_seed() {
for &alg in TEST_ALGORITHMS {
let seed = [1u8; 32];
let kp = PqdsaKeyPair::from_seed(alg, &seed).unwrap();
assert_eq!(kp.algorithm(), alg);
// Verify key works for signing
let msg = b"seed test";
let mut sig = vec![0; alg.signature_len()];
let sig_len = kp.sign(msg, &mut sig).unwrap();
assert_eq!(sig_len, alg.signature_len());
}
}
#[test]
fn test_from_seed_deterministic() {
for &alg in TEST_ALGORITHMS {
let seed = [42u8; 32];
let kp1 = PqdsaKeyPair::from_seed(alg, &seed).unwrap();
let kp2 = PqdsaKeyPair::from_seed(alg, &seed).unwrap();
assert_eq!(kp1.public_key().as_ref(), kp2.public_key().as_ref());
}
}
#[test]
fn test_from_seed_wrong_size() {
use crate::error::KeyRejected;
for &alg in TEST_ALGORITHMS {
assert_eq!(
PqdsaKeyPair::from_seed(alg, &[0u8; 31]).err(),
Some(KeyRejected::too_small())
);
assert_eq!(
PqdsaKeyPair::from_seed(alg, &[0u8; 33]).err(),
Some(KeyRejected::too_large())
);
assert_eq!(
PqdsaKeyPair::from_seed(alg, &[]).err(),
Some(KeyRejected::too_small())
);
}
}
#[test]
fn test_from_seed_different_seeds_different_keys() {
for &alg in TEST_ALGORITHMS {
let kp1 = PqdsaKeyPair::from_seed(alg, &[1u8; 32]).unwrap();
let kp2 = PqdsaKeyPair::from_seed(alg, &[2u8; 32]).unwrap();
assert_ne!(kp1.public_key().as_ref(), kp2.public_key().as_ref());
}
}
#[test]
fn test_from_seed_raw_private_key_roundtrip() {
use crate::encoding::AsRawBytes;
for &alg in TEST_ALGORITHMS {
let seed = [55u8; 32];
let kp = PqdsaKeyPair::from_seed(alg, &seed).unwrap();
let raw_bytes = kp.private_key().as_raw_bytes().unwrap();
let kp2 = PqdsaKeyPair::from_raw_private_key(alg, raw_bytes.as_ref()).unwrap();
assert_eq!(kp.public_key().as_ref(), kp2.public_key().as_ref());
}
}
#[test]
fn test_from_seed_pkcs8_roundtrip() {
for &alg in TEST_ALGORITHMS {
let seed = [77u8; 32];
let kp = PqdsaKeyPair::from_seed(alg, &seed).unwrap();
let pkcs8 = kp.to_pkcs8().unwrap();
let kp2 = PqdsaKeyPair::from_pkcs8(alg, pkcs8.as_ref()).unwrap();
assert_eq!(kp.public_key().as_ref(), kp2.public_key().as_ref());
}
}
/// The same seed fed to different ML-DSA parameter sets must produce different keys.
#[test]
fn test_from_seed_same_seed_different_algorithms() {
    let seed = [42u8; 32];
    let kp_44 = PqdsaKeyPair::from_seed(&ML_DSA_44_SIGNING, &seed).unwrap();
    let kp_65 = PqdsaKeyPair::from_seed(&ML_DSA_65_SIGNING, &seed).unwrap();
    let kp_87 = PqdsaKeyPair::from_seed(&ML_DSA_87_SIGNING, &seed).unwrap();
    // The parameter sets use distinct public-key sizes, so comparing lengths
    // alone is sufficient to show the keys differ.
    let len_44 = kp_44.public_key().as_ref().len();
    let len_65 = kp_65.public_key().as_ref().len();
    let len_87 = kp_87.public_key().as_ref().len();
    assert_ne!(len_44, len_65);
    assert_ne!(len_65, len_87);
}
/// `PqdsaKeyPair::algorithm` must echo back the algorithm used at generation.
#[test]
fn test_algorithm_getter() {
    for &alg in TEST_ALGORITHMS {
        let keypair = PqdsaKeyPair::generate(alg).unwrap();
        assert_eq!(keypair.algorithm(), alg);
    }
}
// Verifies the Debug output prefixes for the key pair and its public key.
// (Comment fixed: this is not "the algorithm getter" test — copy-paste error.)
#[test]
fn test_debug() {
    for &alg in TEST_ALGORITHMS {
        let keypair = PqdsaKeyPair::generate(alg).unwrap();
        assert!(
            format!("{keypair:?}").starts_with("PqdsaKeyPair { algorithm: PqdsaSigningAlgorithm(PqdsaVerificationAlgorithm { id:"),
            "{keypair:?}"
        );
        let pubkey = keypair.public_key();
        assert!(
            format!("{pubkey:?}").starts_with("PqdsaPublicKey("),
            "{pubkey:?}"
        );
    }
}
}

153
vendor/aws-lc-rs/src/pqdsa/signature.rs vendored Normal file
View File

@@ -0,0 +1,153 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::EVP_PKEY;
use crate::buffer::Buffer;
use crate::digest::Digest;
use crate::encoding::{AsDer, PublicKeyX509Der};
use crate::error::Unspecified;
use crate::evp_pkey::No_EVP_PKEY_CTX_consumer;
use crate::pqdsa::{parse_pqdsa_public_key, AlgorithmID};
use crate::ptr::LcPtr;
use crate::signature::{ParsedPublicKey, ParsedVerificationAlgorithm, VerificationAlgorithm};
use crate::{digest, sealed};
use core::fmt;
use core::fmt::{Debug, Formatter};
#[cfg(feature = "ring-sig-verify")]
use untrusted::Input;
/// A PQDSA verification algorithm.
#[derive(Debug, Eq, PartialEq)]
pub struct PqdsaVerificationAlgorithm {
    // Static identifier binding this instance to one PQDSA parameter set.
    pub(crate) id: &'static AlgorithmID,
}
impl sealed::Sealed for PqdsaVerificationAlgorithm {}
/// A PQDSA signing algorithm, wrapping the matching verification algorithm.
#[derive(Debug, Eq, PartialEq)]
pub struct PqdsaSigningAlgorithm(pub(crate) &'static PqdsaVerificationAlgorithm);
impl PqdsaSigningAlgorithm {
    /// Returns the size of the signature in bytes.
    #[must_use]
    pub fn signature_len(&self) -> usize {
        self.0.id.signature_size_bytes()
    }
}
/// A PQDSA public key.
#[derive(Clone)]
pub struct PublicKey {
    // Handle to the underlying AWS-LC EVP_PKEY.
    evp_pkey: LcPtr<EVP_PKEY>,
    // Raw (non-DER) public-key encoding, cached at construction.
    pub(crate) octets: Box<[u8]>,
}
// SAFETY(review): these impls assume the wrapped EVP_PKEY is safe to move and
// share across threads once constructed — confirm against AWS-LC's EVP_PKEY
// threading guarantees.
unsafe impl Send for PublicKey {}
unsafe impl Sync for PublicKey {}
impl PublicKey {
    /// Builds a `PublicKey` from a private key's `EVP_PKEY`, cloning the
    /// handle and caching the marshalled raw public-key bytes.
    ///
    /// # Errors
    /// `Unspecified` if marshalling the raw public key fails.
    pub(crate) fn from_private_evp_pkey(evp_pkey: &LcPtr<EVP_PKEY>) -> Result<Self, Unspecified> {
        let octets = evp_pkey.as_const().marshal_raw_public_key()?;
        Ok(Self {
            evp_pkey: evp_pkey.clone(),
            octets: octets.into_boxed_slice(),
        })
    }
}
impl ParsedVerificationAlgorithm for PqdsaVerificationAlgorithm {
    /// Verifies `signature` over `msg` with an already-parsed public key.
    fn parsed_verify_sig(
        &self,
        public_key: &ParsedPublicKey,
        msg: &[u8],
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        let evp_pkey = public_key.key();
        // The raw message is verified directly; no digest algorithm is passed.
        evp_pkey.verify(msg, None, No_EVP_PKEY_CTX_consumer, signature)
    }
    /// Verifies `signature` over a precomputed `digest` with an already-parsed
    /// public key.
    fn parsed_verify_digest_sig(
        &self,
        public_key: &ParsedPublicKey,
        digest: &Digest,
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        let evp_pkey = public_key.key();
        evp_pkey.verify_digest_sig(digest, No_EVP_PKEY_CTX_consumer, signature)
    }
}
impl VerificationAlgorithm for PqdsaVerificationAlgorithm {
    /// Verifies the signature of `msg` using the public key `public_key`.
    ///
    /// # Errors
    /// `error::Unspecified` if the signature is invalid.
    #[cfg(feature = "ring-sig-verify")]
    fn verify(
        &self,
        public_key: Input<'_>,
        msg: Input<'_>,
        signature: Input<'_>,
    ) -> Result<(), Unspecified> {
        self.verify_sig(
            public_key.as_slice_less_safe(),
            msg.as_slice_less_safe(),
            signature.as_slice_less_safe(),
        )
    }
    /// Verifies the signature for `msg` using the `public_key`.
    ///
    /// # Errors
    /// `error::Unspecified` if the signature is invalid.
    fn verify_sig(
        &self,
        public_key: &[u8],
        msg: &[u8],
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        // Parses the raw public key against this algorithm's parameter set
        // before delegating to AWS-LC's verification.
        let evp_pkey = parse_pqdsa_public_key(public_key, self.id)?;
        evp_pkey.verify(msg, None, No_EVP_PKEY_CTX_consumer, signature)
    }
    /// DO NOT USE. This function is required by `VerificationAlgorithm` but is
    /// not supported here: this entry point always fails for PQDSA.
    /// (Original comment referenced Ed25519 — a copy-paste from that module.)
    ///
    /// # Errors
    /// Always returns `Unspecified`.
    fn verify_digest_sig(
        &self,
        _public_key: &[u8],
        _digest: &digest::Digest,
        _signature: &[u8],
    ) -> Result<(), Unspecified> {
        Err(Unspecified)
    }
}
impl AsRef<[u8]> for PublicKey {
    /// Serializes the public key as a raw byte string.
    // Returns the bytes cached at construction; no marshalling happens here.
    fn as_ref(&self) -> &[u8] {
        self.octets.as_ref()
    }
}
impl AsDer<PublicKeyX509Der<'static>> for PublicKey {
    /// Provides the public key as a DER-encoded (X.509) `SubjectPublicKeyInfo` structure.
    /// # Errors
    /// Returns an error if the public key fails to marshal to X.509.
    // Marshals on every call; the DER bytes are not cached.
    fn as_der(&self) -> Result<PublicKeyX509Der<'static>, crate::error::Unspecified> {
        let der = self.evp_pkey.as_const().marshal_rfc5280_public_key()?;
        Ok(PublicKeyX509Der::from(Buffer::new(der)))
    }
}
impl Debug for PublicKey {
    /// Formats as `PqdsaPublicKey("<hex>")` with the raw key bytes hex-encoded.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        // Write directly into the formatter instead of building an
        // intermediate String with `format!` + `write_str`; output unchanged.
        write!(
            f,
            "PqdsaPublicKey(\"{}\")",
            crate::hex::encode(self.octets.as_ref())
        )
    }
}

255
vendor/aws-lc-rs/src/ptr.rs vendored Normal file
View File

@@ -0,0 +1,255 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use crate::aws_lc::{
BN_free, CMAC_CTX_free, ECDSA_SIG_free, EC_GROUP_free, EC_KEY_free, EC_POINT_free,
EVP_AEAD_CTX_free, EVP_CIPHER_CTX_free, EVP_PKEY_CTX_free, EVP_PKEY_free, OPENSSL_free,
RSA_free, BIGNUM, CMAC_CTX, ECDSA_SIG, EC_GROUP, EC_KEY, EC_POINT, EVP_AEAD_CTX,
EVP_CIPHER_CTX, EVP_PKEY, EVP_PKEY_CTX, RSA,
};
use std::marker::PhantomData;
// Convenience aliases for owned AWS-LC pointers: `LcPtr` frees on drop;
// `DetachableLcPtr` can additionally release ownership early via `detach()`.
pub(crate) type LcPtr<T> = ManagedPointer<*mut T>;
pub(crate) type DetachableLcPtr<T> = DetachablePointer<*mut T>;
/// An owning smart pointer over a raw AWS-LC pointer; frees it on drop.
#[derive(Debug)]
pub(crate) struct ManagedPointer<P: Pointer> {
    pointer: P,
}
impl<P: Pointer> ManagedPointer<P> {
    /// Takes ownership of `value`; returns `Err(())` if it converts to null.
    #[inline]
    pub fn new<T: IntoPointer<P>>(value: T) -> Result<Self, ()> {
        if let Some(pointer) = value.into_pointer() {
            Ok(Self { pointer })
        } else {
            Err(())
        }
    }
    /// Views the pointee as a slice of `len` elements.
    ///
    /// # Safety
    /// The caller must guarantee the allocation behind this pointer holds at
    /// least `len` initialized elements and is not mutated for the lifetime
    /// of the returned slice.
    pub unsafe fn as_slice(&self, len: usize) -> &[P::T] {
        core::slice::from_raw_parts(self.pointer.as_const_ptr(), len)
    }
}
impl<P: Pointer> Drop for ManagedPointer<P> {
    // Releases the underlying AWS-LC object via its type-specific XXX_free.
    #[inline]
    fn drop(&mut self) {
        self.pointer.free();
    }
}
impl<'a, P: Pointer> From<&'a ManagedPointer<P>> for ConstPointer<'a, P::T> {
    // Borrows the managed pointer as a lifetime-bound const view; the borrow
    // keeps the owner alive for as long as the view exists.
    fn from(ptr: &'a ManagedPointer<P>) -> ConstPointer<'a, P::T> {
        ConstPointer {
            ptr: ptr.pointer.as_const_ptr(),
            _lifetime: PhantomData,
        }
    }
}
impl<P: Pointer> ManagedPointer<P> {
    /// Borrows this pointer as a lifetime-bound `ConstPointer`.
    #[inline]
    pub fn as_const(&self) -> ConstPointer<'_, P::T> {
        self.into()
    }
    /// Returns the raw const pointer without transferring ownership.
    #[inline]
    pub fn as_const_ptr(&self) -> *const P::T {
        self.pointer.as_const_ptr()
    }
    /// Projects into data owned by this pointer via `f`, yielding a
    /// `ConstPointer` that borrows `self` (so it cannot outlive the owner).
    /// Fails with `Err(())` if the projection is null.
    pub fn project_const_lifetime<'a, C>(
        &'a self,
        f: unsafe fn(&'a Self) -> *const C,
    ) -> Result<ConstPointer<'a, C>, ()> {
        let ptr = unsafe { f(self) };
        if ptr.is_null() {
            return Err(());
        }
        Ok(ConstPointer {
            ptr,
            _lifetime: PhantomData,
        })
    }
    /// Returns the raw mutable pointer without transferring ownership.
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut P::T {
        self.pointer.as_mut_ptr()
    }
}
impl<P: Pointer> DetachablePointer<P> {
    /// Returns the raw mutable pointer.
    ///
    /// Panics if the pointer was already detached — an internal invariant
    /// violation, since `detach` consumes `self`.
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut P::T {
        self.pointer.as_mut().unwrap().as_mut_ptr()
    }
}
/// An owning pointer whose ownership can be released ("detached") before
/// drop, e.g. after handing the raw pointer to an AWS-LC API that takes
/// ownership. `pointer` is `None` only after detachment or during drop.
#[derive(Debug)]
#[allow(clippy::module_name_repetitions)]
pub(crate) struct DetachablePointer<P: Pointer> {
    pointer: Option<P>,
}
impl<P: Pointer> DetachablePointer<P> {
    /// Takes ownership of `value`; returns `Err(())` if it converts to null.
    #[inline]
    pub fn new<T: IntoPointer<P>>(value: T) -> Result<Self, ()> {
        if let Some(pointer) = value.into_pointer() {
            Ok(Self {
                pointer: Some(pointer),
            })
        } else {
            Err(())
        }
    }
    /// Releases ownership of the raw pointer; the caller becomes responsible
    /// for freeing it. Consuming `self` prevents a double free on drop.
    #[inline]
    pub fn detach(mut self) -> P {
        self.pointer.take().unwrap()
    }
}
impl<P: Pointer> From<DetachablePointer<P>> for ManagedPointer<P> {
    /// Converts a detachable pointer into a plain managed pointer, moving
    /// ownership without freeing the underlying object.
    #[inline]
    fn from(mut dptr: DetachablePointer<P>) -> Self {
        // Safety: pointer is only None when DetachableLcPtr is detached or
        // dropped, neither of which can have happened for a live `dptr`.
        dptr.pointer
            .take()
            .map(|pointer| ManagedPointer { pointer })
            .unwrap_or_else(|| unreachable!())
    }
}
impl<P: Pointer> Drop for DetachablePointer<P> {
    // Frees the pointer only if ownership was never detached.
    #[inline]
    fn drop(&mut self) {
        if let Some(mut pointer) = self.pointer.take() {
            pointer.free();
        }
    }
}
/// A non-owning, lifetime-bound const pointer view. `'a` ties the view to
/// the owner it was projected from (or is `'static` for long-lived values
/// created via `new_static`).
#[derive(Debug)]
pub(crate) struct ConstPointer<'a, T> {
    ptr: *const T,
    _lifetime: PhantomData<&'a T>,
}
impl<T> ConstPointer<'static, T> {
    /// Wraps a pointer to a value with `'static` lifetime. Fails with
    /// `Err(())` on null.
    ///
    /// # Safety
    /// The caller must ensure `ptr` points to a value that remains valid and
    /// unmutated for the remainder of the program.
    pub unsafe fn new_static(ptr: *const T) -> Result<Self, ()> {
        if ptr.is_null() {
            return Err(());
        }
        Ok(ConstPointer {
            ptr,
            _lifetime: PhantomData,
        })
    }
}
impl<T> ConstPointer<'_, T> {
    /// Projects into data reachable from this pointer via `f`, yielding a
    /// `ConstPointer` that borrows `self`. Fails with `Err(())` if the
    /// projection is null.
    pub fn project_const_lifetime<'a, C>(
        &'a self,
        f: unsafe fn(&'a Self) -> *const C,
    ) -> Result<ConstPointer<'a, C>, ()> {
        let ptr = unsafe { f(self) };
        if ptr.is_null() {
            return Err(());
        }
        Ok(ConstPointer {
            ptr,
            _lifetime: PhantomData,
        })
    }
    /// Returns the underlying raw const pointer.
    pub fn as_const_ptr(&self) -> *const T {
        self.ptr
    }
}
/// Interface implemented by raw AWS-LC pointer types that know how to free
/// themselves and expose const/mut raw views.
pub(crate) trait Pointer {
    type T;
    fn free(&mut self);
    fn as_const_ptr(&self) -> *const Self::T;
    fn as_mut_ptr(&mut self) -> *mut Self::T;
}
/// Fallible conversion into a pointer: yields `None` for null inputs so
/// constructors can reject failed AWS-LC allocations.
pub(crate) trait IntoPointer<P> {
    fn into_pointer(self) -> Option<P>;
}
impl<T> IntoPointer<*mut T> for *mut T {
    #[inline]
    fn into_pointer(self) -> Option<*mut T> {
        // Null maps to None; any non-null pointer passes through untouched.
        (!self.is_null()).then_some(self)
    }
}
// Implements `Pointer` for `*mut $ty`, delegating cleanup to the AWS-LC
// `$free` function for that type.
macro_rules! create_pointer {
    ($ty:ty, $free:path) => {
        impl Pointer for *mut $ty {
            type T = $ty;
            #[inline]
            fn free(&mut self) {
                unsafe {
                    let ptr = *self;
                    $free(ptr.cast());
                }
            }
            #[inline]
            fn as_const_ptr(&self) -> *const Self::T {
                self.cast()
            }
            #[inline]
            fn as_mut_ptr(&mut self) -> *mut Self::T {
                *self
            }
        }
    };
}
// `OPENSSL_free` and the other `XXX_free` functions perform a zeroization of the memory when it's
// freed. This is different than functions of the same name in OpenSSL which generally do not zero
// memory.
// One `Pointer` impl per AWS-LC type managed by this module:
create_pointer!(u8, OPENSSL_free);
create_pointer!(EC_GROUP, EC_GROUP_free);
create_pointer!(EC_POINT, EC_POINT_free);
create_pointer!(EC_KEY, EC_KEY_free);
create_pointer!(ECDSA_SIG, ECDSA_SIG_free);
create_pointer!(BIGNUM, BN_free);
create_pointer!(EVP_PKEY, EVP_PKEY_free);
create_pointer!(EVP_PKEY_CTX, EVP_PKEY_CTX_free);
create_pointer!(RSA, RSA_free);
create_pointer!(EVP_AEAD_CTX, EVP_AEAD_CTX_free);
create_pointer!(EVP_CIPHER_CTX, EVP_CIPHER_CTX_free);
create_pointer!(CMAC_CTX, CMAC_CTX_free);
#[cfg(test)]
mod tests {
    use crate::aws_lc::BIGNUM;
    use crate::ptr::{DetachablePointer, ManagedPointer};
    // Exercises the Debug formatting of both pointer wrappers.
    #[test]
    fn test_debug() {
        let num = 100u64;
        // NOTE(review): relies on a TryFrom<u64> impl for DetachablePointer
        // defined elsewhere in the crate — presumably builds a BIGNUM from
        // the integer; confirm against the rest of ptr.rs.
        let detachable_ptr: DetachablePointer<*mut BIGNUM> =
            DetachablePointer::try_from(num).unwrap();
        let debug = format!("{detachable_ptr:?}");
        assert!(debug.contains("DetachablePointer { pointer: Some("));
        let lc_ptr = ManagedPointer::new(detachable_ptr.detach()).unwrap();
        let debug = format!("{lc_ptr:?}");
        assert!(debug.contains("ManagedPointer { pointer:"));
    }
}

273
vendor/aws-lc-rs/src/rand.rs vendored Normal file
View File

@@ -0,0 +1,273 @@
// Copyright 2015-2016 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! Cryptographic pseudo-random number generation.
//!
//! An application should create a single `SystemRandom` and then use it for
//! all randomness generation. See `SystemRandom`'s documentation for more
//! details.
//! # Example
//! ```
//! use aws_lc_rs::{rand, rand::SecureRandom};
//!
//! // Using `rand::fill`
//! let mut rand_bytes = [0u8; 32];
//! rand::fill(&mut rand_bytes).unwrap();
//!
//! // Using `SystemRandom`
//! let rng = rand::SystemRandom::new();
//! rng.fill(&mut rand_bytes).unwrap();
//!
//! // Using `rand::generate`
//! let random_array = rand::generate(&rng).unwrap();
//! let more_rand_bytes: [u8; 64] = random_array.expose();
//! ```
use crate::aws_lc::RAND_bytes;
use crate::error::Unspecified;
use crate::fips::indicator_check;
use core::fmt::Debug;
/// Re-exports of sealed traits for development testing.
///
/// This module is only available when the `dev-tests-only` feature is enabled.
/// It exposes the [`SecureRandom`](unsealed::SecureRandom) trait, allowing consumers
/// to provide their own implementations (e.g., a deterministic RNG) for testing purposes.
///
/// # Example
///
/// ```ignore
/// use aws_lc_rs::rand::{unsealed, SecureRandom};
/// use aws_lc_rs::error::Unspecified;
///
/// #[derive(Debug)]
/// struct DeterministicRandom(u8);
///
/// impl unsealed::SecureRandom for DeterministicRandom {
///     fn fill_impl(&self, dest: &mut [u8]) -> Result<(), Unspecified> {
///         for (i, byte) in dest.iter_mut().enumerate() {
///             *byte = self.0.wrapping_add(i as u8);
///         }
///         Ok(())
///     }
/// }
/// ```
// Gated on the `dev_tests_only` cfg (or docs.rs builds) so the sealed traits
// stay sealed in normal release builds.
#[cfg(any(dev_tests_only, aws_lc_rs_docsrs))]
#[cfg_attr(aws_lc_rs_docsrs, doc(cfg(feature = "dev-tests-only")))]
#[allow(unused_imports)]
pub mod unsealed {
    pub use super::sealed::*;
}
/// A secure random number generator.
pub trait SecureRandom: sealed::SecureRandom {
    /// Fills `dest` with random bytes.
    ///
    /// # Errors
    /// `error::Unspecified` if unable to fill `dest`.
    fn fill(&self, dest: &mut [u8]) -> Result<(), Unspecified>;
    /// Fills `dest` with random bytes.
    ///
    /// This method is only available when the `dev-tests-only` feature is enabled.
    ///
    /// # Errors
    /// `error::Unspecified` if unable to fill `dest`.
    // Takes `&mut self` so stateful test RNGs (e.g. counters) can implement it.
    #[cfg(any(test, dev_tests_only, aws_lc_rs_docsrs))]
    #[cfg_attr(aws_lc_rs_docsrs, doc(cfg(feature = "dev-tests-only")))]
    fn mut_fill(&mut self, dest: &mut [u8]) -> Result<(), Unspecified>;
}
// Blanket impl: anything implementing the sealed trait automatically gets
// the public trait; both entry points delegate to `fill_impl`.
impl<T> SecureRandom for T
where
    T: sealed::SecureRandom,
{
    #[inline]
    fn fill(&self, dest: &mut [u8]) -> Result<(), Unspecified> {
        self.fill_impl(dest)
    }
    #[inline]
    #[cfg(any(test, dev_tests_only, aws_lc_rs_docsrs))]
    fn mut_fill(&mut self, dest: &mut [u8]) -> Result<(), Unspecified> {
        self.fill_impl(dest)
    }
}
/// A random value constructed from a `SecureRandom` that hasn't been exposed
/// through any safe Rust interface.
///
/// Intentionally does not implement any traits other than `Sized`.
pub struct Random<T: RandomlyConstructable>(T);
impl<T: RandomlyConstructable> Random<T> {
    /// Expose the random value.
    // Consumes `self`, so the value can only be exposed once.
    #[inline]
    pub fn expose(self) -> T {
        self.0
    }
}
/// Generate the new random value using `rng`.
///
/// # Errors
/// `error::Unspecified` if unable to fill buffer.
#[inline]
pub fn generate<T: RandomlyConstructable>(
    rng: &dyn SecureRandom,
) -> Result<Random<T>, Unspecified> {
    // Start from the type's zeroed form, then overwrite every byte from `rng`.
    let mut r = T::zero();
    rng.fill(r.as_mut_bytes())?;
    Ok(Random(r))
}
pub(crate) mod sealed {
    use crate::error;
    /// A sealed trait for secure random number generation.
    pub trait SecureRandom: core::fmt::Debug {
        /// Fills `dest` with random bytes.
        ///
        /// # Errors
        /// Returns `error::Unspecified` if unable to fill `dest`.
        fn fill_impl(&self, dest: &mut [u8]) -> Result<(), error::Unspecified>;
    }
    /// A sealed trait for types that can be randomly constructed.
    pub trait RandomlyConstructable: Sized {
        /// Returns a zeroed instance of the type.
        fn zero() -> Self;
        /// Returns a mutable byte slice of the value.
        fn as_mut_bytes(&mut self) -> &mut [u8];
    }
    // Byte arrays of any const length are randomly constructable.
    impl<const T: usize> RandomlyConstructable for [u8; T] {
        #[inline]
        fn zero() -> Self {
            [0; T]
        }
        #[inline]
        fn as_mut_bytes(&mut self) -> &mut [u8] {
            &mut self[..]
        }
    }
}
/// A type that can be returned by `aws_lc_rs::rand::generate()`.
// Public alias over the sealed trait; the blanket impl keeps the trait
// sealed while letting callers name the bound.
pub trait RandomlyConstructable: sealed::RandomlyConstructable {}
impl<T> RandomlyConstructable for T where T: sealed::RandomlyConstructable {}
/// A secure random number generator where the random values come from the
/// underlying *AWS-LC* libcrypto.
///
/// A single `SystemRandom` may be shared across multiple threads safely.
//
// # FIPS
// Use this implementation for retrieving random bytes.
#[derive(Clone, Debug)]
pub struct SystemRandom(());
// Zero-sized: the shared instance carries no state of its own.
const SYSTEM_RANDOM: SystemRandom = SystemRandom(());
impl SystemRandom {
    /// Constructs a new `SystemRandom`.
    #[inline]
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }
}
impl Default for SystemRandom {
    fn default() -> Self {
        SYSTEM_RANDOM
    }
}
impl sealed::SecureRandom for SystemRandom {
    // Delegates to the module-level `fill`, which calls AWS-LC's RAND_bytes.
    #[inline]
    fn fill_impl(&self, dest: &mut [u8]) -> Result<(), Unspecified> {
        fill(dest)
    }
}
/// Fills `dest` with random bytes.
///
// # FIPS
// Use this for retrieving random bytes or [`SystemRandom`].
//
/// # Errors
/// `error::Unspecified` if unable to fill `dest`.
pub fn fill(dest: &mut [u8]) -> Result<(), Unspecified> {
    // RAND_bytes returns 1 on success; indicator_check! records the FIPS
    // service-indicator state around the call.
    if 1 != indicator_check!(unsafe { RAND_bytes(dest.as_mut_ptr(), dest.len()) }) {
        return Err(Unspecified);
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use crate::rand;
    use core::array::IntoIter;
    use crate::rand::{generate, SecureRandom, SystemRandom};
    #[test]
    fn test_secure_random_fill() {
        // Collect enough random values so that the assertions below should never fail again
        let mut random_array = [0u8; 1009];
        let rng = SystemRandom::new();
        rng.fill(&mut random_array).unwrap();
        let (mean, variance) = mean_variance(&mut random_array.into_iter());
        // Uniform bytes have expected mean 127.5; the wide band keeps the
        // test from flaking.
        assert!((106f64..150f64).contains(&mean), "Mean: {mean}");
        assert!(variance > 8f64);
        println!("Mean: {mean} Variance: {variance}");
    }
    #[test]
    fn test_rand_fill() {
        // Collect enough random values so that the assertions below should never fail again
        let mut random_array = [0u8; 1009];
        rand::fill(&mut random_array).unwrap();
        let (mean, variance) = mean_variance(&mut random_array.into_iter());
        assert!((106f64..150f64).contains(&mean), "Mean: {mean}");
        assert!(variance > 8f64);
        println!("Mean: {mean} Variance: {variance}");
    }
    #[test]
    fn test_randomly_constructable() {
        let rando = SystemRandom::new();
        let random_array = generate(&rando).unwrap();
        // Collect enough random values so that the assertions below should never fail again
        let random_array: [u8; 1009] = random_array.expose();
        let (mean, variance) = mean_variance(&mut random_array.into_iter());
        assert!((106f64..150f64).contains(&mean), "Mean: {mean}");
        assert!(variance > 8f64);
        println!("Mean: {mean} Variance: {variance}");
    }
    // Single-pass (Welford-style) running mean and variance. NOTE: despite
    // callers binding the second element as `variance`, this returns the
    // square root of the running variance — i.e. the standard deviation.
    fn mean_variance<T: Into<f64>, const N: usize>(iterable: &mut IntoIter<T, N>) -> (f64, f64) {
        let iter = iterable;
        let mean: Option<T> = iter.next();
        let mut mean = mean.unwrap().into();
        let mut var_squared = 0f64;
        let mut count = 1f64;
        for value in iter.by_ref() {
            count += 1f64;
            let value = value.into();
            let prev_mean = mean;
            mean = prev_mean + (value - prev_mean) / count;
            var_squared =
                var_squared + ((value - prev_mean) * (value - mean) - var_squared) / count;
        }
        (mean, var_squared.sqrt())
    }
}

144
vendor/aws-lc-rs/src/rsa.rs vendored Normal file
View File

@@ -0,0 +1,144 @@
// Copyright 2015-2016 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
//! RSA Signature and Encryption Support.
//!
//! # OAEP Encryption / Decryption
//!
//! ```rust
//! # use std::error::Error;
//! # fn main() -> Result<(), Box<dyn Error>> {
//! use aws_lc_rs::{
//! encoding::{AsDer, Pkcs8V1Der, PublicKeyX509Der},
//! rsa::{KeySize, OAEP_SHA256_MGF1SHA256, OaepPublicEncryptingKey, OaepPrivateDecryptingKey, PublicEncryptingKey, PrivateDecryptingKey}
//! };
//!
//! // Generate a RSA 2048-bit key.
//! let private_key = PrivateDecryptingKey::generate(KeySize::Rsa2048)?;
//!
//! // Serialize the RSA private key to DER encoded PKCS#8 format for later usage.
//! let private_key_der = AsDer::<Pkcs8V1Der>::as_der(&private_key)?;
//! let private_key_der_bytes = private_key_der.as_ref();
//!
//! // Load a RSA private key from DER encoded PKCS#8 document.
//! let private_key = PrivateDecryptingKey::from_pkcs8(private_key_der_bytes)?;
//!
//! // Retrieve the RSA public key
//! let public_key = private_key.public_key();
//!
//! // Serialize the RSA public key to DER encoded X.509 SubjectPublicKeyInfo for later usage.
//! let public_key_der = AsDer::<PublicKeyX509Der>::as_der(&public_key)?;
//! let public_key_der_bytes = public_key_der.as_ref();
//!
//! // Load a RSA public key from DER encoded X.509 SubjectPublicKeyInfo.
//! let public_key = PublicEncryptingKey::from_der(public_key_der_bytes)?;
//!
//! // Construct a RSA-OAEP public encrypting key
//! let public_key = OaepPublicEncryptingKey::new(public_key)?;
//!
//! // The maximum size plaintext can be determined by calling `OaepPublicEncryptingKey::max_plaintext_size`
//! let message = b"hello world";
//! let mut ciphertext = vec![0u8; public_key.ciphertext_size()]; // Output will be the size of the RSA key length in bytes rounded up.
//!
//! // Encrypt a message with the public key without the optional label provided.
//! let ciphertext = public_key.encrypt(&OAEP_SHA256_MGF1SHA256, message, &mut ciphertext, None)?;
//!
//! assert_ne!(message, ciphertext);
//!
//! // Construct a RSA-OAEP private decrypting key
//! let private_key = OaepPrivateDecryptingKey::new(private_key)?;
//!
//! // Decrypt a message with the private key.
//! let mut plaintext = vec![0u8; private_key.min_output_size()];
//! let plaintext = private_key.decrypt(&OAEP_SHA256_MGF1SHA256, ciphertext, &mut plaintext, None)?;
//!
//! assert_eq!(message, plaintext);
//!
//! # Ok(())
//! # }
//! ```
// *R* and *r* in Montgomery math refer to different things, so we always use
// `R` to refer to *R* to avoid confusion, even when that's against the normal
// naming conventions. Also the standard camelCase names are used for `KeyPair`
// components.
mod encoding;
mod encryption;
pub(crate) mod key;
pub(crate) mod signature;
pub use self::encryption::oaep::{
OaepAlgorithm, OaepPrivateDecryptingKey, OaepPublicEncryptingKey, OAEP_SHA1_MGF1SHA1,
OAEP_SHA256_MGF1SHA256, OAEP_SHA384_MGF1SHA384, OAEP_SHA512_MGF1SHA512,
};
pub use self::encryption::pkcs1::{Pkcs1PrivateDecryptingKey, Pkcs1PublicEncryptingKey};
pub use self::encryption::{EncryptionAlgorithmId, PrivateDecryptingKey, PublicEncryptingKey};
pub use self::key::{KeyPair, KeySize, PublicKey, PublicKeyComponents};
#[allow(clippy::module_name_repetitions)]
pub use self::signature::RsaParameters;
pub(crate) use self::signature::RsaVerificationAlgorithmId;
#[cfg(test)]
mod tests {
    #[cfg(feature = "fips")]
    mod fips;
    // Parses a fixed RSA PKCS#8 key and checks that the modulus we expose
    // matches the raw modulus bytes inside the original DER document.
    #[cfg(feature = "ring-io")]
    #[test]
    fn test_rsa() {
        use crate::signature::KeyPair;
        use crate::test::from_dirty_hex;
        // 2048-bit RSA private key, hex-encoded DER; `from_dirty_hex`
        // presumably strips the embedded whitespace — confirm its contract.
        let rsa_pkcs8_input: Vec<u8> = from_dirty_hex(
            r"308204bd020100300d06092a864886f70d0101010500048204a7308204a30201000282010100b9d7a
f84fa4184a5f22037ec8aff2db5f78bd8c21e714e579ae57c6398c4950f3a694b17bfccf488766159aec5bb7c2c4
3d59c798cbd45a09c9c86933f126879ee7eadcd404f61ecfc425197cab03946ba381a49ef3b4d0f60b17f8a747cd
e56a834a7f6008f35ffb2f60a54ceda1974ff2a9963aba7f80d4e2916a93d8c74bb1ba5f3b189a4e8f0377bd3e94
b5cc3f9c53cb8c8c7c0af394818755e968b7a76d9cada8da7af5fbe25da2a09737d5e4e4d7092aa16a0718d7322c
e8aca767015128d6d35775ea9cb8bb1ac6512e1b787d34015221be780a37b1d69bc3708bfd8832591be6095a768f
0fd3b3457927e6ae3641d55799a29a0a269cb4a693bc14b0203010001028201001c5fb7e69fa6dd2fd0f5e653f12
ce0b7c5a1ce6864e97bc2985dad4e2f86e4133d21d25b3fe774f658cca83aace9e11d8905d62c20b6cd28a680a77
357cfe1afac201f3d1532898afb40cce0560bedd2c49fc833bd98da3d1cd03cded0c637d4173e62de865b572d410
f9ba83324cd7a3573359428232f1628f6d104e9e6c5f380898b5570201cf11eb5f7e0c4933139c7e7fba67582287
ffb81b84fa81e9a2d9739815a25790c06ead7abcf286bd43c6e3d009d01f15fca3d720bbea48b0c8ccf8764f3c82
2e61159d8efcbff38c794f8afe040b45df14c976a91b1b6d886a55b8e68969bcb30c7197920d97d7721d78d954d8
9ffecbcc93c6ee82a86fe754102818100eba1cbe453f5cb2fb7eabc12d697267d25785a8f7b43cc2cb14555d3618
c63929b19839dcd4212397ecda8ad872f97ede6ac95ebda7322bbc9409bac2b24ae56ad62202800c670365ae2867
1195fe934978a5987bee2fcea06561b782630b066b0a35c3f559a281f0f729fc282ef8ebdbb065d60000223da6ed
b732fa32d82bb02818100c9e81e353315fd88eff53763ed7b3859f419a0a158f5155851ce0fe6e43188e44fb43dd
25bcdb7f3839fe84a5db88c6525e5bcbae513bae5ff54398106bd8ae4d241c082f8a64a9089531f7b57b09af5204
2efa097140702dda55a2141c174dd7a324761267728a6cc4ce386c034393d855ebe985c4e5f2aec2bd3f2e2123ab
1028180566889dd9c50798771397a68aa1ad9b970e136cc811676ac3901c51c741c48737dbf187de8c47eec68acc
05b8a4490c164230c0366a36c2c52fc075a56a3e7eecf3c39b091c0336c2b5e00913f0de5f62c5046ceb9d88188c
c740d34bd44839bd4d0c346527cea93a15596727d139e53c35eed25043bc4ac18950f237c02777b0281800f9dd98
049e44088efee6a8b5b19f5c0d765880c12c25a154bb6817a5d5a0b798544aea76f9c58c707fe3d4c4b3573fe7ad
0eb291580d22ae9f5ccc0d311a40590d1af1f3236427c2d72f57367d3ec185b9771cb5d041a8ab93409e59a9d68f
99c72f91c658a3fe5aed59f9f938c368530a4a45f4a7c7155f3906c4354030ef102818100c89e0ba805c970abd84
a70770d8fc57bfaa34748a58b77fcddaf0ca285db91953ef5728c1be7470da5540df6af56bb04c0f5ec500f83b08
057664cb1551e1e29c58d8b1e9d70e23ed57fdf9936c591a83c1dc954f6654d4a245b6d8676d045c2089ffce537d
234fc88e98d92afa92926c75b286e8fee70e273d762bbe63cd63b",
        );
        let key = super::KeyPair::from_pkcs8(&rsa_pkcs8_input).unwrap();
        let pk = key.public_key();
        let modulus_bytes = pk.modulus().big_endian_without_leading_zero();
        // Offsets 38..294 locate the 256-byte modulus within the DER document.
        assert_eq!(&rsa_pkcs8_input[38..294], modulus_bytes);
    }
    // Debug formatting of the RSA signature parameter statics.
    #[test]
    fn test_debug() {
        use crate::signature;
        assert_eq!(
            "{ RSA_PSS_SHA512 }",
            format!("{:?}", signature::RSA_PSS_SHA512)
        );
        assert_eq!(
            "{ RSA_PSS_2048_8192_SHA256 }",
            format!("{:?}", signature::RSA_PSS_2048_8192_SHA256)
        );
    }
}

104
vendor/aws-lc-rs/src/rsa/encoding.rs vendored Normal file
View File

@@ -0,0 +1,104 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
/// [RFC 8017](https://www.rfc-editor.org/rfc/rfc8017.html)
///
/// PKCS #1: RSA Cryptography Specifications Version 2.2
pub(in crate::rsa) mod rfc8017 {
    use crate::aws_lc::{
        EVP_PKEY_assign_RSA, EVP_PKEY_new, RSA_parse_private_key, RSA_public_key_from_bytes,
        RSA_public_key_to_bytes, EVP_PKEY,
    };
    use crate::cbs;
    use crate::error::{KeyRejected, Unspecified};
    use crate::ptr::{DetachableLcPtr, LcPtr};
    use std::ptr::null_mut;
    /// DER encode a RSA public key to `RSAPublicKey` structure.
    pub(in crate::rsa) fn encode_public_key_der(
        pubkey: &LcPtr<EVP_PKEY>,
    ) -> Result<Box<[u8]>, Unspecified> {
        // AWS-LC allocates the output buffer; we take ownership via LcPtr so
        // it is OPENSSL_free'd after we copy the bytes out.
        let mut pubkey_bytes = null_mut::<u8>();
        let mut outlen: usize = 0;
        if 1 != unsafe {
            RSA_public_key_to_bytes(
                &mut pubkey_bytes,
                &mut outlen,
                pubkey.as_const().get_rsa()?.as_const_ptr(),
            )
        } {
            return Err(Unspecified);
        }
        let pubkey_bytes = LcPtr::new(pubkey_bytes)?;
        // Safety relies on RSA_public_key_to_bytes having written `outlen`
        // valid bytes at the returned pointer.
        let pubkey_slice = unsafe { pubkey_bytes.as_slice(outlen) };
        let pubkey_vec = Vec::from(pubkey_slice);
        Ok(pubkey_vec.into_boxed_slice())
    }
    /// Decode a DER encoded `RSAPublicKey` structure.
    #[inline]
    pub(in crate::rsa) fn decode_public_key_der(
        public_key: &[u8],
    ) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
        let mut rsa = DetachableLcPtr::new(unsafe {
            RSA_public_key_from_bytes(public_key.as_ptr(), public_key.len())
        })?;
        let mut pkey = LcPtr::new(unsafe { EVP_PKEY_new() })?;
        if 1 != unsafe { EVP_PKEY_assign_RSA(pkey.as_mut_ptr(), rsa.as_mut_ptr()) } {
            return Err(KeyRejected::unspecified());
        }
        // assign_RSA transferred ownership of `rsa` to `pkey`; detach so we
        // do not free it a second time.
        rsa.detach();
        Ok(pkey)
    }
    /// Decodes a DER encoded `RSAPrivateKey` structure.
    #[inline]
    pub(in crate::rsa) fn decode_private_key_der(
        private_key: &[u8],
    ) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
        let mut cbs = cbs::build_CBS(private_key);
        let mut rsa = DetachableLcPtr::new(unsafe { RSA_parse_private_key(&mut cbs) })?;
        let mut pkey = LcPtr::new(unsafe { EVP_PKEY_new() })?;
        if 1 != unsafe { EVP_PKEY_assign_RSA(pkey.as_mut_ptr(), rsa.as_mut_ptr()) } {
            return Err(KeyRejected::unspecified());
        }
        // Ownership moved into `pkey`; detach to avoid a double free.
        rsa.detach();
        Ok(pkey)
    }
}
/// [RFC 5280](https://www.rfc-editor.org/rfc/rfc5280.html)
///
/// Encodings that use the `SubjectPublicKeyInfo` structure.
pub(in crate::rsa) mod rfc5280 {
    use crate::aws_lc::{EVP_PKEY, EVP_PKEY_RSA, EVP_PKEY_RSA_PSS};
    use crate::buffer::Buffer;
    use crate::encoding::PublicKeyX509Der;
    use crate::error::{KeyRejected, Unspecified};
    use crate::ptr::LcPtr;
    // Serializes an RSA EVP_PKEY as a DER SubjectPublicKeyInfo.
    pub(in crate::rsa) fn encode_public_key_der(
        key: &LcPtr<EVP_PKEY>,
    ) -> Result<PublicKeyX509Der<'static>, Unspecified> {
        let der = key.as_const().marshal_rfc5280_public_key()?;
        Ok(PublicKeyX509Der::from(Buffer::new(der)))
    }
    // Parses a DER SubjectPublicKeyInfo, accepting either the rsaEncryption
    // OID or, as a fallback, the RSASSA-PSS OID.
    pub(in crate::rsa) fn decode_public_key_der(
        value: &[u8],
    ) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
        LcPtr::<EVP_PKEY>::parse_rfc5280_public_key(value, EVP_PKEY_RSA).or(
            // Does anyone encode with this OID?
            LcPtr::<EVP_PKEY>::parse_rfc5280_public_key(value, EVP_PKEY_RSA_PSS),
        )
    }
}

208
vendor/aws-lc-rs/src/rsa/encryption.rs vendored Normal file
View File

@@ -0,0 +1,208 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
pub(super) mod oaep;
pub(super) mod pkcs1;
use super::key::{generate_rsa_key, is_rsa_key};
use super::{encoding, KeySize};
use crate::aws_lc::{EVP_PKEY, EVP_PKEY_RSA};
use crate::encoding::{AsDer, Pkcs8V1Der, PublicKeyX509Der};
use crate::error::{KeyRejected, Unspecified};
use crate::pkcs8::Version;
use crate::ptr::LcPtr;
use core::fmt::Debug;
/// RSA Encryption Algorithm Identifier
// `#[non_exhaustive]` lets more OAEP digest combinations be added without a
// breaking change.
#[allow(clippy::module_name_repetitions)]
#[derive(Debug, Clone, Copy, PartialEq)]
#[non_exhaustive]
pub enum EncryptionAlgorithmId {
    /// RSA-OAEP with SHA1 Hash and SHA1 MGF1
    OaepSha1Mgf1sha1,
    /// RSA-OAEP with SHA256 Hash and SHA256 MGF1
    OaepSha256Mgf1sha256,
    /// RSA-OAEP with SHA384 Hash and SHA384 MGF1
    OaepSha384Mgf1sha384,
    /// RSA-OAEP with SHA512 Hash and SHA512 MGF1
    OaepSha512Mgf1sha512,
}
/// An RSA private key used for decrypting ciphertext encrypted by a [`PublicEncryptingKey`].
pub struct PrivateDecryptingKey(LcPtr<EVP_PKEY>);
impl PrivateDecryptingKey {
    // Wraps an EVP_PKEY after checking it is an RSA key of supported size.
    fn new(evp_pkey: LcPtr<EVP_PKEY>) -> Result<Self, Unspecified> {
        Self::validate_key(&evp_pkey)?;
        Ok(Self(evp_pkey))
    }
    // Accepts only RSA keys between 2048 and 8192 bits inclusive.
    fn validate_key(key: &LcPtr<EVP_PKEY>) -> Result<(), Unspecified> {
        if !is_rsa_key(key) {
            return Err(Unspecified);
        }
        match key.as_const().key_size_bits() {
            2048..=8192 => Ok(()),
            _ => Err(Unspecified),
        }
    }
    /// Generate a new RSA private key pair for use with asymmetrical encryption.
    ///
    /// Supports the following key sizes:
    /// * `KeySize::Rsa2048`
    /// * `KeySize::Rsa3072`
    /// * `KeySize::Rsa4096`
    /// * `KeySize::Rsa8192`
    ///
    /// # Errors
    /// * `Unspecified` for any error that occurs during the generation of the RSA keypair.
    pub fn generate(size: KeySize) -> Result<Self, Unspecified> {
        let key = generate_rsa_key(size.bits())?;
        Self::new(key)
    }
    /// Generate a new RSA private key pair for use with asymmetrical encryption.
    ///
    /// Supports the following key sizes:
    /// * `KeySize::Rsa2048`
    /// * `KeySize::Rsa3072`
    /// * `KeySize::Rsa4096`
    /// * `KeySize::Rsa8192`
    ///
    /// ## Deprecated
    /// This is equivalent to `KeyPair::generate`.
    ///
    /// # Errors
    /// * `Unspecified` for any error that occurs during the generation of the RSA keypair.
    #[cfg(feature = "fips")]
    #[deprecated]
    pub fn generate_fips(size: KeySize) -> Result<Self, Unspecified> {
        Self::generate(size)
    }
    /// Construct a `PrivateDecryptingKey` from the provided PKCS#8 (v1) document.
    ///
    /// Supports RSA key sizes between 2048 and 8192 (inclusive).
    ///
    /// # Errors
    /// * `Unspecified` for any error that occurs during deserialization of this key from PKCS#8.
    pub fn from_pkcs8(pkcs8: &[u8]) -> Result<Self, KeyRejected> {
        let key = LcPtr::<EVP_PKEY>::parse_rfc5208_private_key(pkcs8, EVP_PKEY_RSA)?;
        Ok(Self::new(key)?)
    }
    /// Returns a boolean indicator if this RSA key is an approved FIPS 140-3 key.
    #[cfg(feature = "fips")]
    #[must_use]
    pub fn is_valid_fips_key(&self) -> bool {
        super::key::is_valid_fips_key(&self.0)
    }
    /// Returns the RSA signature size in bytes.
    #[must_use]
    pub fn key_size_bytes(&self) -> usize {
        self.0.as_const().signature_size_bytes()
    }
    /// Returns the RSA key size in bits.
    #[must_use]
    pub fn key_size_bits(&self) -> usize {
        self.0.as_const().key_size_bits()
    }
    /// Retrieves the `PublicEncryptingKey` corresponding with this `PrivateDecryptingKey`.
    // The expect is justified: any size valid for the private key is also
    // valid for the public key (both use the same 2048..=8192 check).
    #[must_use]
    #[allow(clippy::missing_panics_doc)]
    pub fn public_key(&self) -> PublicEncryptingKey {
        PublicEncryptingKey::new(self.0.clone()).expect(
            "PublicEncryptingKey key size to be supported by PrivateDecryptingKey key sizes",
        )
    }
}
impl Debug for PrivateDecryptingKey {
    // Deliberately reveals no key material — only the type name.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("PrivateDecryptingKey").finish()
    }
}
impl AsDer<Pkcs8V1Der<'static>> for PrivateDecryptingKey {
    /// Serializes this private key to a DER-encoded PKCS#8 (v1) document.
    fn as_der(&self) -> Result<Pkcs8V1Der<'static>, Unspecified> {
        Ok(Pkcs8V1Der::new(
            self.0.as_const().marshal_rfc5208_private_key(Version::V1)?,
        ))
    }
}
impl Clone for PrivateDecryptingKey {
    // Clones the LcPtr handle; no key validation is repeated.
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }
}
// Newtype over the AWS-LC `EVP_PKEY` handle; validated on construction via `Self::new`.
/// An RSA public key used for encrypting plaintext that is decrypted by a [`PrivateDecryptingKey`].
pub struct PublicEncryptingKey(LcPtr<EVP_PKEY>);
impl PublicEncryptingKey {
    /// Wraps an `EVP_PKEY` after verifying it is a supported RSA public key.
    pub(crate) fn new(evp_pkey: LcPtr<EVP_PKEY>) -> Result<Self, Unspecified> {
        Self::validate_key(&evp_pkey).map(|()| Self(evp_pkey))
    }

    /// Accepts only RSA keys whose modulus is between 2048 and 8192 bits (inclusive).
    fn validate_key(key: &LcPtr<EVP_PKEY>) -> Result<(), Unspecified> {
        if !is_rsa_key(key) {
            return Err(Unspecified);
        }
        let bits = key.as_const().key_size_bits();
        if (2048..=8192).contains(&bits) {
            Ok(())
        } else {
            Err(Unspecified)
        }
    }

    /// Construct a `PublicEncryptingKey` from X.509 `SubjectPublicKeyInfo` DER encoded bytes.
    ///
    /// # Errors
    /// * `Unspecified` for any error that occurs deserializing from bytes.
    pub fn from_der(value: &[u8]) -> Result<Self, KeyRejected> {
        let evp_pkey = encoding::rfc5280::decode_public_key_der(value)?;
        Ok(Self::new(evp_pkey)?)
    }

    /// Returns the RSA signature size in bytes.
    #[must_use]
    pub fn key_size_bytes(&self) -> usize {
        let pkey = self.0.as_const();
        pkey.signature_size_bytes()
    }

    /// Returns the RSA key size in bits.
    #[must_use]
    pub fn key_size_bits(&self) -> usize {
        let pkey = self.0.as_const();
        pkey.key_size_bits()
    }
}
impl Debug for PublicEncryptingKey {
    /// Emits only the type name; the key bytes are intentionally omitted.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut tuple = f.debug_tuple("PublicEncryptingKey");
        tuple.finish()
    }
}
impl Clone for PublicEncryptingKey {
    fn clone(&self) -> Self {
        // Clones the underlying `LcPtr<EVP_PKEY>` handle; presumably a cheap
        // reference-count bump rather than a key copy — see `LcPtr::clone`.
        Self(self.0.clone())
    }
}
impl AsDer<PublicKeyX509Der<'static>> for PublicEncryptingKey {
    /// Serialize this `PublicEncryptingKey` to a X.509 `SubjectPublicKeyInfo` structure as DER encoded bytes.
    ///
    /// # Errors
    /// * `Unspecified` for any error that occurs serializing to bytes.
    fn as_der(&self) -> Result<PublicKeyX509Der<'static>, Unspecified> {
        // RFC 5280 SPKI encoding of the wrapped EVP_PKEY.
        encoding::rfc5280::encode_public_key_der(&self.0)
    }
}

View File

@@ -0,0 +1,331 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![allow(clippy::module_name_repetitions)]
use super::{EncryptionAlgorithmId, PrivateDecryptingKey, PublicEncryptingKey};
use crate::aws_lc::{
EVP_PKEY_CTX_set0_rsa_oaep_label, EVP_PKEY_CTX_set_rsa_mgf1_md, EVP_PKEY_CTX_set_rsa_oaep_md,
EVP_PKEY_CTX_set_rsa_padding, EVP_PKEY_decrypt, EVP_PKEY_decrypt_init, EVP_PKEY_encrypt,
EVP_PKEY_encrypt_init, EVP_sha1, EVP_sha256, EVP_sha384, EVP_sha512, OPENSSL_malloc, EVP_MD,
EVP_PKEY_CTX, RSA_PKCS1_OAEP_PADDING,
};
use crate::error::Unspecified;
use crate::fips::indicator_check;
use crate::ptr::{DetachableLcPtr, LcPtr};
use core::fmt::Debug;
use core::mem::size_of_val;
use core::ptr::null_mut;
// Each constant pairs the OAEP encoding digest with a matching MGF1 digest;
// all supported variants use the same hash function for both roles.
/// RSA-OAEP with SHA1 Hash and SHA1 MGF1
pub const OAEP_SHA1_MGF1SHA1: OaepAlgorithm = OaepAlgorithm {
    id: EncryptionAlgorithmId::OaepSha1Mgf1sha1,
    oaep_hash_fn: EVP_sha1,
    mgf1_hash_fn: EVP_sha1,
};
/// RSA-OAEP with SHA256 Hash and SHA256 MGF1
pub const OAEP_SHA256_MGF1SHA256: OaepAlgorithm = OaepAlgorithm {
    id: EncryptionAlgorithmId::OaepSha256Mgf1sha256,
    oaep_hash_fn: EVP_sha256,
    mgf1_hash_fn: EVP_sha256,
};
/// RSA-OAEP with SHA384 Hash and SHA384 MGF1
pub const OAEP_SHA384_MGF1SHA384: OaepAlgorithm = OaepAlgorithm {
    id: EncryptionAlgorithmId::OaepSha384Mgf1sha384,
    oaep_hash_fn: EVP_sha384,
    mgf1_hash_fn: EVP_sha384,
};
/// RSA-OAEP with SHA512 Hash and SHA512 MGF1
pub const OAEP_SHA512_MGF1SHA512: OaepAlgorithm = OaepAlgorithm {
    id: EncryptionAlgorithmId::OaepSha512Mgf1sha512,
    oaep_hash_fn: EVP_sha512,
    mgf1_hash_fn: EVP_sha512,
};
// Function-pointer types matching AWS-LC's `EVP_sha*` accessor signatures.
type OaepHashFn = unsafe extern "C" fn() -> *const EVP_MD;
type Mgf1HashFn = unsafe extern "C" fn() -> *const EVP_MD;
/// An RSA-OAEP algorithm.
pub struct OaepAlgorithm {
    // Identifier reported through `Self::id`.
    id: EncryptionAlgorithmId,
    // Digest used for the OAEP encoding itself.
    oaep_hash_fn: OaepHashFn,
    // Digest used by the MGF1 mask-generation function.
    mgf1_hash_fn: Mgf1HashFn,
}
impl OaepAlgorithm {
    /// Returns the `EncryptionAlgorithmId` identifying this RSA-OAEP variant.
    #[must_use]
    pub fn id(&self) -> EncryptionAlgorithmId {
        self.id
    }

    /// Accessor for the MGF1 digest function.
    #[inline]
    fn mgf1_hash_fn(&self) -> Mgf1HashFn {
        self.mgf1_hash_fn
    }

    /// Accessor for the OAEP encoding digest function.
    #[inline]
    fn oaep_hash_fn(&self) -> OaepHashFn {
        self.oaep_hash_fn
    }
}
impl Debug for OaepAlgorithm {
    /// Delegates to the algorithm identifier's `Debug` output.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{:?}", self.id)
    }
}
/// An RSA-OAEP public key for encryption.
pub struct OaepPublicEncryptingKey {
    // Underlying validated RSA public key; OAEP parameters are supplied per call.
    public_key: PublicEncryptingKey,
}
impl OaepPublicEncryptingKey {
    /// Constructs an `OaepPublicEncryptingKey` from a `PublicEncryptingKey`.
    /// # Errors
    /// * `Unspecified`: Any error that occurs while attempting to construct an RSA-OAEP public key.
    pub fn new(public_key: PublicEncryptingKey) -> Result<Self, Unspecified> {
        // Infallible today; the Result return leaves room for future validation.
        Ok(Self { public_key })
    }
    /// Encrypts the contents in `plaintext` and writes the corresponding ciphertext to `ciphertext`.
    /// Returns the subslice of `ciphertext` containing the ciphertext output.
    ///
    /// # Max Plaintext Length
    /// The provided length of `plaintext` must be at most [`Self::max_plaintext_size`].
    ///
    /// # Sizing `output`
    /// For `OAEP_SHA1_MGF1SHA1`, `OAEP_SHA256_MGF1SHA256`, `OAEP_SHA384_MGF1SHA384`, `OAEP_SHA512_MGF1SHA512` The
    /// length of `output` must be greater than or equal to [`Self::ciphertext_size`].
    ///
    /// # Errors
    /// * `Unspecified` for any error that occurs while encrypting `plaintext`.
    pub fn encrypt<'ciphertext>(
        &self,
        algorithm: &'static OaepAlgorithm,
        plaintext: &[u8],
        ciphertext: &'ciphertext mut [u8],
        label: Option<&[u8]>,
    ) -> Result<&'ciphertext mut [u8], Unspecified> {
        // Fresh EVP_PKEY_CTX bound to the public key for this one operation.
        let mut pkey_ctx = self.public_key.0.create_EVP_PKEY_CTX()?;
        if 1 != unsafe { EVP_PKEY_encrypt_init(pkey_ctx.as_mut_ptr()) } {
            return Err(Unspecified);
        }
        // Apply OAEP padding, the two digests, and the optional label.
        configure_oaep_crypto_operation(
            &mut pkey_ctx,
            algorithm.oaep_hash_fn(),
            algorithm.mgf1_hash_fn(),
            label,
        )?;
        // In/out parameter: in = capacity of `ciphertext`, out = bytes written.
        let mut out_len = ciphertext.len();
        if 1 != indicator_check!(unsafe {
            EVP_PKEY_encrypt(
                pkey_ctx.as_mut_ptr(),
                ciphertext.as_mut_ptr(),
                &mut out_len,
                plaintext.as_ptr(),
                plaintext.len(),
            )
        }) {
            return Err(Unspecified);
        }
        Ok(&mut ciphertext[..out_len])
    }
    /// Returns the RSA key size in bytes.
    #[must_use]
    pub fn key_size_bytes(&self) -> usize {
        self.public_key.key_size_bytes()
    }
    /// Returns the RSA key size in bits.
    #[must_use]
    pub fn key_size_bits(&self) -> usize {
        self.public_key.key_size_bits()
    }
    /// Returns the max plaintext that could be decrypted using this key and with the provided algorithm.
    #[must_use]
    pub fn max_plaintext_size(&self, algorithm: &'static OaepAlgorithm) -> usize {
        #[allow(unreachable_patterns)]
        // Digest output lengths in bytes: SHA-1 = 20, SHA-256 = 32,
        // SHA-384 = 48, SHA-512 = 64.
        let hash_len: usize = match algorithm.id() {
            EncryptionAlgorithmId::OaepSha1Mgf1sha1 => 20,
            EncryptionAlgorithmId::OaepSha256Mgf1sha256 => 32,
            EncryptionAlgorithmId::OaepSha384Mgf1sha384 => 48,
            EncryptionAlgorithmId::OaepSha512Mgf1sha512 => 64,
            _ => unreachable!(),
        };
        // The RSA-OAEP algorithms we support use the hashing algorithm for the hash and mgf1 functions.
        // OAEP capacity formula: k - 2*hLen - 2 (RFC 8017 §7.1.1).
        self.key_size_bytes() - 2 * hash_len - 2
    }
    /// Returns the max ciphertext size that will be output by `Self::encrypt`.
    #[must_use]
    pub fn ciphertext_size(&self) -> usize {
        // OAEP ciphertext is always exactly one modulus in length.
        self.key_size_bytes()
    }
}
impl Debug for OaepPublicEncryptingKey {
    /// Opaque debug output; key contents are intentionally omitted.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut builder = f.debug_struct("OaepPublicEncryptingKey");
        builder.finish_non_exhaustive()
    }
}
/// An RSA-OAEP private key for decryption.
pub struct OaepPrivateDecryptingKey {
    // Underlying validated RSA private key; OAEP parameters are supplied per call.
    private_key: PrivateDecryptingKey,
}
impl OaepPrivateDecryptingKey {
    /// Constructs an `OaepPrivateDecryptingKey` from a `PrivateDecryptingKey`.
    /// # Errors
    /// * `Unspecified`: Any error that occurs while attempting to construct an RSA-OAEP private key.
    pub fn new(private_key: PrivateDecryptingKey) -> Result<Self, Unspecified> {
        // Infallible today; the Result return leaves room for future validation.
        Ok(Self { private_key })
    }
    /// Decrypts the contents in `ciphertext` and writes the corresponding plaintext to `plaintext`.
    /// Returns the subslice of `plaintext` containing the plaintext output.
    ///
    /// # Max Ciphertext Length
    /// The provided length of `ciphertext` must be [`Self::key_size_bytes`].
    ///
    /// # Sizing `plaintext`
    /// For `OAEP_SHA1_MGF1SHA1`, `OAEP_SHA256_MGF1SHA256`, `OAEP_SHA384_MGF1SHA384`, `OAEP_SHA512_MGF1SHA512`. The
    /// length of `plaintext` must be greater than or equal to [`Self::min_output_size`].
    ///
    /// # Errors
    /// * `Unspecified` for any error that occurs while decrypting `ciphertext`.
    pub fn decrypt<'plaintext>(
        &self,
        algorithm: &'static OaepAlgorithm,
        ciphertext: &[u8],
        plaintext: &'plaintext mut [u8],
        label: Option<&[u8]>,
    ) -> Result<&'plaintext mut [u8], Unspecified> {
        // Fresh EVP_PKEY_CTX bound to the private key for this one operation.
        let mut pkey_ctx = self.private_key.0.create_EVP_PKEY_CTX()?;
        if 1 != unsafe { EVP_PKEY_decrypt_init(pkey_ctx.as_mut_ptr()) } {
            return Err(Unspecified);
        }
        // Apply OAEP padding, the two digests, and the optional label; these
        // must match the parameters used at encryption time.
        configure_oaep_crypto_operation(
            &mut pkey_ctx,
            algorithm.oaep_hash_fn(),
            algorithm.mgf1_hash_fn(),
            label,
        )?;
        // In/out parameter: in = capacity of `plaintext`, out = bytes written.
        let mut out_len = plaintext.len();
        if 1 != indicator_check!(unsafe {
            EVP_PKEY_decrypt(
                pkey_ctx.as_mut_ptr(),
                plaintext.as_mut_ptr(),
                &mut out_len,
                ciphertext.as_ptr(),
                ciphertext.len(),
            )
        }) {
            return Err(Unspecified);
        }
        Ok(&mut plaintext[..out_len])
    }
    /// Returns the RSA key size in bytes.
    #[must_use]
    pub fn key_size_bytes(&self) -> usize {
        self.private_key.key_size_bytes()
    }
    /// Returns the RSA key size in bits.
    #[must_use]
    pub fn key_size_bits(&self) -> usize {
        self.private_key.key_size_bits()
    }
    /// Returns the minimum plaintext buffer size required for `Self::decrypt`.
    #[must_use]
    pub fn min_output_size(&self) -> usize {
        // Conservative bound: recovered plaintext never exceeds the modulus size.
        self.key_size_bytes()
    }
}
impl Debug for OaepPrivateDecryptingKey {
    /// Opaque debug output; private key material is never printed.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut builder = f.debug_struct("OaepPrivateDecryptingKey");
        builder.finish_non_exhaustive()
    }
}
// Configures an encrypt/decrypt context for RSA-OAEP: padding mode, OAEP
// digest, MGF1 digest, and (optionally) the OAEP label. Must be called after
// the corresponding `EVP_PKEY_{encrypt,decrypt}_init`.
fn configure_oaep_crypto_operation(
    evp_pkey_ctx: &mut LcPtr<EVP_PKEY_CTX>,
    oaep_hash_fn: OaepHashFn,
    mgf1_hash_fn: Mgf1HashFn,
    label: Option<&[u8]>,
) -> Result<(), Unspecified> {
    // Select OAEP padding first; the digest setters below require it.
    if 1 != unsafe {
        EVP_PKEY_CTX_set_rsa_padding(evp_pkey_ctx.as_mut_ptr(), RSA_PKCS1_OAEP_PADDING)
    } {
        return Err(Unspecified);
    }
    if 1 != unsafe { EVP_PKEY_CTX_set_rsa_oaep_md(evp_pkey_ctx.as_mut_ptr(), oaep_hash_fn()) } {
        return Err(Unspecified);
    }
    if 1 != unsafe { EVP_PKEY_CTX_set_rsa_mgf1_md(evp_pkey_ctx.as_mut_ptr(), mgf1_hash_fn()) } {
        return Err(Unspecified);
    }
    // `None` and `Some(&[])` are treated identically: no label.
    let label = label.unwrap_or(&[0u8; 0]);
    if label.is_empty() {
        // Safety: Don't pass zero-length slice pointers to C code :)
        if 1 != unsafe {
            EVP_PKEY_CTX_set0_rsa_oaep_label(evp_pkey_ctx.as_mut_ptr(), null_mut(), 0)
        } {
            return Err(Unspecified);
        }
        return Ok(());
    }
    // AWS-LC takes ownership of the label memory, and will call OPENSSL_free, so we are forced to copy it for now.
    let mut label_ptr =
        DetachableLcPtr::<u8>::new(unsafe { OPENSSL_malloc(size_of_val(label)) }.cast())?;
    {
        // memcpy the label data into the AWS-LC allocation
        let label_ptr =
            unsafe { core::slice::from_raw_parts_mut(label_ptr.as_mut_ptr(), label.len()) };
        label_ptr.copy_from_slice(label);
    }
    // The "set0" convention: on success the context owns the buffer.
    if 1 != unsafe {
        EVP_PKEY_CTX_set0_rsa_oaep_label(
            evp_pkey_ctx.as_mut_ptr(),
            label_ptr.as_mut_ptr(),
            label.len(),
        )
    } {
        return Err(Unspecified);
    }
    // AWS-LC owns the allocation now, so we detach it to avoid freeing it here when label_ptr goes out of scope.
    label_ptr.detach();
    Ok(())
}

View File

@@ -0,0 +1,191 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
#![allow(clippy::module_name_repetitions)]
use super::{PrivateDecryptingKey, PublicEncryptingKey};
use crate::aws_lc::{
EVP_PKEY_CTX_set_rsa_padding, EVP_PKEY_decrypt, EVP_PKEY_decrypt_init, EVP_PKEY_encrypt,
EVP_PKEY_encrypt_init, EVP_PKEY_CTX, RSA_PKCS1_PADDING,
};
use crate::error::Unspecified;
use crate::fips::indicator_check;
use crate::ptr::LcPtr;
use core::fmt::Debug;
/// RSA PKCS1-v1.5 public key for encryption.
pub struct Pkcs1PublicEncryptingKey {
    // Underlying validated RSA public key; PKCS#1 v1.5 padding is applied per call.
    public_key: PublicEncryptingKey,
}
impl Pkcs1PublicEncryptingKey {
    /// Constructs an `Pkcs1PublicEncryptingKey` from a `PublicEncryptingKey`.
    /// # Errors
    /// * `Unspecified`: Any error that occurs while attempting to construct an RSA PKCS1-v1.5 public key.
    pub fn new(public_key: PublicEncryptingKey) -> Result<Self, Unspecified> {
        // Infallible today; the Result return leaves room for future validation.
        Ok(Self { public_key })
    }
    /// Encrypts the contents in `plaintext` and writes the corresponding ciphertext to `ciphertext`.
    /// Returns the subslice of `ciphertext` containing the ciphertext output.
    ///
    /// # Max Plaintext Length
    /// The provided length of `plaintext` must be at most [`Self::max_plaintext_size`].
    ///
    /// # Sizing `output`
    /// The length of `output` must be greater than or equal to [`Self::ciphertext_size`].
    ///
    /// # Errors
    /// * `Unspecified` for any error that occurs while encrypting `plaintext`.
    pub fn encrypt<'ciphertext>(
        &self,
        plaintext: &[u8],
        ciphertext: &'ciphertext mut [u8],
    ) -> Result<&'ciphertext mut [u8], Unspecified> {
        // Fresh EVP_PKEY_CTX bound to the public key for this one operation.
        let mut pkey_ctx = self.public_key.0.create_EVP_PKEY_CTX()?;
        if 1 != unsafe { EVP_PKEY_encrypt_init(pkey_ctx.as_mut_ptr()) } {
            return Err(Unspecified);
        }
        configure_pkcs1_crypto_operation(&mut pkey_ctx)?;
        // In/out parameter: in = capacity of `ciphertext`, out = bytes written.
        let mut out_len = ciphertext.len();
        if 1 != indicator_check!(unsafe {
            EVP_PKEY_encrypt(
                pkey_ctx.as_mut_ptr(),
                ciphertext.as_mut_ptr(),
                &mut out_len,
                plaintext.as_ptr(),
                plaintext.len(),
            )
        }) {
            return Err(Unspecified);
        }
        Ok(&mut ciphertext[..out_len])
    }
    /// Returns the RSA key size in bytes.
    #[must_use]
    pub fn key_size_bytes(&self) -> usize {
        self.public_key.key_size_bytes()
    }
    /// Returns the RSA key size in bits.
    #[must_use]
    pub fn key_size_bits(&self) -> usize {
        self.public_key.key_size_bits()
    }
    /// Returns the max plaintext that could be encrypted using this key.
    #[must_use]
    pub fn max_plaintext_size(&self) -> usize {
        // PKCS#1 v1.5 encryption overhead: 11 bytes (0x00 || 0x02 || PS || 0x00).
        const RSA_PKCS1_PADDING_SIZE: usize = 11; // crypto/fipsmodule/rsa/internal.h
        self.key_size_bytes() - RSA_PKCS1_PADDING_SIZE
    }
    /// Returns the max ciphertext size that will be output by `Self::encrypt`.
    #[must_use]
    pub fn ciphertext_size(&self) -> usize {
        // Ciphertext is always exactly one modulus in length.
        self.key_size_bytes()
    }
}
impl Debug for Pkcs1PublicEncryptingKey {
    /// Opaque debug output; key contents are intentionally omitted.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut builder = f.debug_struct("Pkcs1PublicEncryptingKey");
        builder.finish_non_exhaustive()
    }
}
/// RSA PKCS1-v1.5 private key for decryption.
pub struct Pkcs1PrivateDecryptingKey {
    // Underlying validated RSA private key; PKCS#1 v1.5 padding is applied per call.
    private_key: PrivateDecryptingKey,
}
impl Pkcs1PrivateDecryptingKey {
    /// Constructs an `Pkcs1PrivateDecryptingKey` from a `PrivateDecryptingKey`.
    /// # Errors
    /// * `Unspecified`: Any error that occurs while attempting to construct an RSA PKCS1-v1.5 private key.
    pub fn new(private_key: PrivateDecryptingKey) -> Result<Self, Unspecified> {
        // Infallible today; the Result return leaves room for future validation.
        Ok(Self { private_key })
    }
    /// Decrypts the contents in `ciphertext` and writes the corresponding plaintext to `plaintext`.
    /// Returns the subslice of `plaintext` containing the plaintext output.
    ///
    /// # Max Ciphertext Length
    /// The provided length of `ciphertext` must be [`Self::key_size_bytes`].
    ///
    /// # Sizing `output`
    /// The length of `output` must be greater than or equal to [`Self::min_output_size`].
    ///
    /// # Errors
    /// * `Unspecified` for any error that occurs while decrypting `ciphertext`.
    pub fn decrypt<'plaintext>(
        &self,
        ciphertext: &[u8],
        plaintext: &'plaintext mut [u8],
    ) -> Result<&'plaintext mut [u8], Unspecified> {
        // Fresh EVP_PKEY_CTX bound to the private key for this one operation.
        let mut pkey_ctx = self.private_key.0.create_EVP_PKEY_CTX()?;
        if 1 != unsafe { EVP_PKEY_decrypt_init(pkey_ctx.as_mut_ptr()) } {
            return Err(Unspecified);
        }
        configure_pkcs1_crypto_operation(&mut pkey_ctx)?;
        // In/out parameter: in = capacity of `plaintext`, out = bytes written.
        let mut out_len = plaintext.len();
        if 1 != indicator_check!(unsafe {
            EVP_PKEY_decrypt(
                pkey_ctx.as_mut_ptr(),
                plaintext.as_mut_ptr(),
                &mut out_len,
                ciphertext.as_ptr(),
                ciphertext.len(),
            )
        }) {
            return Err(Unspecified);
        }
        Ok(&mut plaintext[..out_len])
    }
    /// Returns the RSA key size in bytes.
    #[must_use]
    pub fn key_size_bytes(&self) -> usize {
        self.private_key.key_size_bytes()
    }
    /// Returns the RSA key size in bits.
    #[must_use]
    pub fn key_size_bits(&self) -> usize {
        self.private_key.key_size_bits()
    }
    /// Returns the minimum plaintext buffer size required for `Self::decrypt`.
    #[must_use]
    pub fn min_output_size(&self) -> usize {
        // Conservative bound: recovered plaintext never exceeds the modulus size.
        self.key_size_bytes()
    }
}
impl Debug for Pkcs1PrivateDecryptingKey {
    /// Opaque debug output; private key material is never printed.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut builder = f.debug_struct("Pkcs1PrivateDecryptingKey");
        builder.finish_non_exhaustive()
    }
}
// Configures an encrypt/decrypt context for PKCS #1 v1.5; the scheme has no
// digest parameters, so only the padding mode is set.
fn configure_pkcs1_crypto_operation(
    evp_pkey_ctx: &mut LcPtr<EVP_PKEY_CTX>,
) -> Result<(), Unspecified> {
    match unsafe { EVP_PKEY_CTX_set_rsa_padding(evp_pkey_ctx.as_mut_ptr(), RSA_PKCS1_PADDING) } {
        1 => Ok(()),
        _ => Err(Unspecified),
    }
}

567
vendor/aws-lc-rs/src/rsa/key.rs vendored Normal file
View File

@@ -0,0 +1,567 @@
// Copyright 2015-2016 Brian Smith.
// SPDX-License-Identifier: ISC
// Modifications copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use super::signature::{RsaEncoding, RsaPadding};
use super::{encoding, RsaParameters};
use crate::aws_lc::{
EVP_PKEY_CTX_set_rsa_keygen_bits, EVP_PKEY_CTX_set_signature_md, EVP_PKEY_assign_RSA,
EVP_PKEY_new, RSA_new, RSA_set0_key, RSA_size, EVP_PKEY, EVP_PKEY_CTX, EVP_PKEY_RSA,
EVP_PKEY_RSA_PSS,
};
#[cfg(feature = "ring-io")]
use crate::aws_lc::{RSA_get0_e, RSA_get0_n};
use crate::encoding::{AsDer, Pkcs8V1Der, PublicKeyX509Der};
use crate::error::{KeyRejected, Unspecified};
#[cfg(feature = "ring-io")]
use crate::io;
use crate::ptr::{DetachableLcPtr, LcPtr};
use crate::rsa::PublicEncryptingKey;
use crate::sealed::Sealed;
use crate::{hex, rand};
#[cfg(feature = "fips")]
use aws_lc::RSA_check_fips;
use core::fmt::{self, Debug, Formatter};
use core::ptr::null_mut;
// TODO: Uncomment when MSRV >= 1.64
// use core::ffi::c_int;
use std::os::raw::c_int;
use crate::digest::{match_digest_type, Digest};
use crate::pkcs8::Version;
use crate::rsa::encoding::{rfc5280, rfc8017};
use crate::rsa::signature::configure_rsa_pkcs1_pss_padding;
#[cfg(feature = "ring-io")]
use untrusted::Input;
use zeroize::Zeroize;
/// RSA key-size.
// Marked non_exhaustive so additional sizes can be added without a breaking change.
#[allow(clippy::module_name_repetitions)]
#[non_exhaustive]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum KeySize {
    /// 2048-bit key
    Rsa2048,
    /// 3072-bit key
    Rsa3072,
    /// 4096-bit key
    Rsa4096,
    /// 8192-bit key
    Rsa8192,
}
#[allow(clippy::len_without_is_empty)]
impl KeySize {
    /// Returns the size of the key in bytes.
    ///
    /// Derived from `bits()`; every supported size is a whole number of bytes.
    #[inline]
    #[must_use]
    pub fn len(self) -> usize {
        // Cast is lossless: bits() only returns small positive values.
        (self.bits() as usize) / 8
    }
    /// Returns the key size in bits.
    #[inline]
    pub(super) fn bits(self) -> i32 {
        match self {
            Self::Rsa2048 => 2048,
            Self::Rsa3072 => 3072,
            Self::Rsa4096 => 4096,
            Self::Rsa8192 => 8192,
        }
    }
}
/// An RSA key pair, used for signing.
#[allow(clippy::module_name_repetitions)]
pub struct KeyPair {
    // https://github.com/aws/aws-lc/blob/ebaa07a207fee02bd68fe8d65f6b624afbf29394/include/openssl/evp.h#L295
    // An |EVP_PKEY| object represents a public or private RSA key. A given object may be
    // used concurrently on multiple threads by non-mutating functions, provided no
    // other thread is concurrently calling a mutating function. Unless otherwise
    // documented, functions which take a |const| pointer are non-mutating and
    // functions which take a non-|const| pointer are mutating.
    pub(super) evp_pkey: LcPtr<EVP_PKEY>,
    // Cached RFC 8017 encoding of the public half, computed once at construction.
    pub(super) serialized_public_key: PublicKey,
}
impl Sealed for KeyPair {}
// SAFETY(review): relies on the AWS-LC guarantee quoted on `KeyPair` that an
// EVP_PKEY may be used concurrently by non-mutating functions; confirm no
// mutating FFI call is reachable through a shared `&KeyPair`.
unsafe impl Send for KeyPair {}
unsafe impl Sync for KeyPair {}
impl KeyPair {
    /// Validates the key and caches the serialized public half.
    fn new(evp_pkey: LcPtr<EVP_PKEY>) -> Result<Self, KeyRejected> {
        KeyPair::validate_private_key(&evp_pkey)?;
        let serialized_public_key = PublicKey::new(&evp_pkey)?;
        Ok(KeyPair {
            evp_pkey,
            serialized_public_key,
        })
    }
    /// Generate a RSA `KeyPair` of the specified key-strength.
    ///
    /// Supports the following key sizes:
    /// * `KeySize::Rsa2048`
    /// * `KeySize::Rsa3072`
    /// * `KeySize::Rsa4096`
    /// * `KeySize::Rsa8192`
    ///
    /// # Errors
    /// * `Unspecified`: Any key generation failure.
    pub fn generate(size: KeySize) -> Result<Self, Unspecified> {
        let private_key = generate_rsa_key(size.bits())?;
        // `KeyRejected` from `new` is converted to `Unspecified` by `?`.
        Ok(Self::new(private_key)?)
    }
    /// Generate a RSA `KeyPair` of the specified key-strength.
    ///
    /// ## Deprecated
    /// This is equivalent to `KeyPair::generate`.
    ///
    /// # Errors
    /// * `Unspecified`: Any key generation failure.
    #[cfg(feature = "fips")]
    #[deprecated]
    pub fn generate_fips(size: KeySize) -> Result<Self, Unspecified> {
        // Kept for backward compatibility; simply delegates to `generate`.
        Self::generate(size)
    }
    /// Parses an unencrypted PKCS#8 DER encoded RSA private key.
    ///
    /// Keys can be generated using [`KeyPair::generate`].
    ///
    /// # *ring*-compatibility
    ///
    /// *aws-lc-rs* does not impose the same limitations that *ring* does for
    /// RSA keys. Thus signatures may be generated by keys that are not accepted
    /// by *ring*. In particular:
    /// * RSA private keys ranging between 2048-bit keys and 8192-bit keys are supported.
    /// * The public exponent does not have a required minimum size.
    ///
    /// # Errors
    /// `error::KeyRejected` if bytes do not encode an RSA private key or if the key is otherwise
    /// not acceptable.
    pub fn from_pkcs8(pkcs8: &[u8]) -> Result<Self, KeyRejected> {
        // Parse the RFC 5208 PrivateKeyInfo, requiring the RSA algorithm identifier.
        let key = LcPtr::<EVP_PKEY>::parse_rfc5208_private_key(pkcs8, EVP_PKEY_RSA)?;
        Self::new(key)
    }
    /// Parses a DER-encoded `RSAPrivateKey` structure (RFC 8017).
    ///
    /// # Errors
    /// `error:KeyRejected` on error.
    pub fn from_der(input: &[u8]) -> Result<Self, KeyRejected> {
        let key = encoding::rfc8017::decode_private_key_der(input)?;
        Self::new(key)
    }
    /// Returns a boolean indicator if this RSA key is an approved FIPS 140-3 key.
    #[cfg(feature = "fips")]
    #[must_use]
    pub fn is_valid_fips_key(&self) -> bool {
        is_valid_fips_key(&self.evp_pkey)
    }
    /// Rejects non-RSA keys and RSA keys outside the supported modulus range.
    fn validate_private_key(key: &LcPtr<EVP_PKEY>) -> Result<(), KeyRejected> {
        if !is_rsa_key(key) {
            return Err(KeyRejected::unspecified());
        }
        // Only moduli between 2048 and 8192 bits (inclusive) are accepted.
        match key.as_const().key_size_bits() {
            2048..=8192 => Ok(()),
            _ => Err(KeyRejected::unspecified()),
        }
    }
    /// Sign `msg`. `msg` is digested using the digest algorithm from
    /// `padding_alg` and the digest is then padded using the padding algorithm
    /// from `padding_alg`. The signature is written into `signature`;
    /// `signature`'s length must be exactly the length returned by
    /// `public_modulus_len()`.
    ///
    /// This function does *not* take a precomputed digest; instead, `sign`
    /// calculates the digest itself. See `sign_digest`.
    ///
    /// # *ring* Compatibility
    /// Our implementation ignores the `SecureRandom` parameter.
    // # FIPS
    // The following conditions must be met:
    // * RSA Key Sizes: 2048, 3072, 4096
    // * Digest Algorithms: SHA256, SHA384, SHA512
    //
    /// # Errors
    /// `error::Unspecified` on error.
    /// With "fips" feature enabled, errors if digest length is greater than `u32::MAX`.
    pub fn sign(
        &self,
        padding_alg: &'static dyn RsaEncoding,
        _rng: &dyn rand::SecureRandom,
        msg: &[u8],
        signature: &mut [u8],
    ) -> Result<(), Unspecified> {
        let encoding = padding_alg.encoding();
        // PSS requires extra context configuration; PKCS#1 v1.5 does not.
        let padding_fn = if let RsaPadding::RSA_PKCS1_PSS_PADDING = encoding.padding() {
            Some(configure_rsa_pkcs1_pss_padding)
        } else {
            None
        };
        let sig_bytes = self
            .evp_pkey
            .sign(msg, Some(encoding.digest_algorithm()), padding_fn)?;
        // `copy_from_slice` panics if `signature.len() != sig_bytes.len()`,
        // enforcing the documented exact-length requirement on `signature`.
        signature.copy_from_slice(&sig_bytes);
        Ok(())
    }
    /// The `digest` is padded using the padding algorithm
    /// from `padding_alg`. The signature is written into `signature`;
    /// `signature`'s length must be exactly the length returned by
    /// `public_modulus_len()`.
    ///
    /// # *ring* Compatibility
    /// Our implementation ignores the `SecureRandom` parameter.
    //
    // # FIPS
    // Not allowed
    //
    /// # Errors
    /// `error::Unspecified` on error.
    /// With "fips" feature enabled, errors if digest length is greater than `u32::MAX`.
    pub fn sign_digest(
        &self,
        padding_alg: &'static dyn RsaEncoding,
        digest: &Digest,
        signature: &mut [u8],
    ) -> Result<(), Unspecified> {
        let encoding = padding_alg.encoding();
        // The precomputed digest must have been produced by the same digest
        // algorithm the padding scheme expects.
        if encoding.digest_algorithm() != digest.algorithm() {
            return Err(Unspecified);
        }
        // Configure the context with the digest type and, for PSS, the padding
        // parameters; PKCS#1 v1.5 needs only the digest type.
        let padding_fn = Some({
            |pctx: *mut EVP_PKEY_CTX| {
                let evp_md = match_digest_type(&digest.algorithm().id);
                if 1 != unsafe { EVP_PKEY_CTX_set_signature_md(pctx, evp_md.as_const_ptr()) } {
                    return Err(());
                }
                if let RsaPadding::RSA_PKCS1_PSS_PADDING = encoding.padding() {
                    configure_rsa_pkcs1_pss_padding(pctx)
                } else {
                    Ok(())
                }
            }
        });
        let sig_bytes = self.evp_pkey.sign_digest(digest, padding_fn)?;
        // Panics if `signature.len() != sig_bytes.len()` (exact-length contract).
        signature.copy_from_slice(&sig_bytes);
        Ok(())
    }
    /// Returns the length in bytes of the key pair's public modulus.
    ///
    /// A signature has the same length as the public modulus.
    #[must_use]
    pub fn public_modulus_len(&self) -> usize {
        // This was already validated to be an RSA key so this can't fail
        match self.evp_pkey.as_const().get_rsa() {
            Ok(rsa) => {
                // https://github.com/awslabs/aws-lc/blob/main/include/openssl/rsa.h#L99
                unsafe { RSA_size(rsa.as_const_ptr()) as usize }
            }
            Err(_) => unreachable!(),
        }
    }
}
impl Debug for KeyPair {
    /// Prints only the (public) serialized key; private material is never shown.
    ///
    /// Writes directly into the formatter with `write!` instead of building an
    /// intermediate `String` via `format!` + `write_str`, producing identical
    /// output without a heap allocation per call.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        write!(
            f,
            "RsaKeyPair {{ public_key: {:?} }}",
            self.serialized_public_key
        )
    }
}
impl crate::signature::KeyPair for KeyPair {
    type PublicKey = PublicKey;
    /// Returns the cached, serialized public half of the key pair.
    fn public_key(&self) -> &Self::PublicKey {
        &self.serialized_public_key
    }
}
impl AsDer<Pkcs8V1Der<'static>> for KeyPair {
    /// Serializes the private key as an unencrypted PKCS#8 v1 (RFC 5208) document.
    fn as_der(&self) -> Result<Pkcs8V1Der<'static>, Unspecified> {
        Ok(Pkcs8V1Der::new(
            self.evp_pkey
                .as_const()
                .marshal_rfc5208_private_key(Version::V1)?,
        ))
    }
}
/// A serialized RSA public key.
#[derive(Clone)]
#[allow(clippy::module_name_repetitions)]
pub struct PublicKey {
    // RFC 8017 `RSAPublicKey` DER encoding.
    key: Box<[u8]>,
    // Big-endian public modulus (n), kept for ring-compatible accessors.
    #[cfg(feature = "ring-io")]
    modulus: Box<[u8]>,
    // Big-endian public exponent (e), kept for ring-compatible accessors.
    #[cfg(feature = "ring-io")]
    exponent: Box<[u8]>,
}
impl Drop for PublicKey {
    /// Zeroizes the cached encodings on drop.
    ///
    /// NOTE(review): this is public-key material, so the zeroization is
    /// presumably defense-in-depth/consistency rather than secrecy — confirm.
    fn drop(&mut self) {
        self.key.zeroize();
        #[cfg(feature = "ring-io")]
        self.modulus.zeroize();
        #[cfg(feature = "ring-io")]
        self.exponent.zeroize();
    }
}
impl PublicKey {
    /// Serializes `evp_pkey` into the RFC 8017 `RSAPublicKey` encoding and,
    /// with the "ring-io" feature, also captures the raw modulus and exponent.
    pub(super) fn new(evp_pkey: &LcPtr<EVP_PKEY>) -> Result<Self, KeyRejected> {
        let key = encoding::rfc8017::encode_public_key_der(evp_pkey)?;
        #[cfg(feature = "ring-io")]
        {
            let evp_pkey = evp_pkey.as_const();
            let pubkey = evp_pkey.get_rsa()?;
            // Capture n and e as big-endian byte strings for the ring-style accessors.
            let modulus = pubkey
                .project_const_lifetime(unsafe { |pubkey| RSA_get0_n(pubkey.as_const_ptr()) })?;
            let modulus = modulus.to_be_bytes().into_boxed_slice();
            let exponent = pubkey
                .project_const_lifetime(unsafe { |pubkey| RSA_get0_e(pubkey.as_const_ptr()) })?;
            let exponent = exponent.to_be_bytes().into_boxed_slice();
            Ok(PublicKey {
                key,
                modulus,
                exponent,
            })
        }
        #[cfg(not(feature = "ring-io"))]
        Ok(PublicKey { key })
    }
    /// Parses an RSA public key from either RFC8017 or RFC5280
    /// # Errors
    /// `KeyRejected` if the encoding is not for a valid RSA key.
    pub fn from_der(input: &[u8]) -> Result<Self, KeyRejected> {
        // These both invoke `RSA_check_key`:
        // https://github.com/aws/aws-lc/blob/4368aaa6975ba41bd76d3bb12fac54c4680247fb/crypto/rsa_extra/rsa_asn1.c#L105-L109
        //
        // `or_else` (rather than `or`) keeps the RFC 5280 fallback lazy: the
        // second parse only runs when the RFC 8017 parse fails, instead of
        // being evaluated unconditionally.
        PublicKey::new(
            &rfc8017::decode_public_key_der(input)
                .or_else(|_| rfc5280::decode_public_key_der(input))?,
        )
    }
}
/// Parses an RSA public key from either the RFC 8017 `RSAPublicKey` or the
/// RFC 5280 `SubjectPublicKeyInfo` DER encoding.
///
/// # Errors
/// `KeyRejected` if neither encoding yields a valid RSA key.
pub(crate) fn parse_rsa_public_key(input: &[u8]) -> Result<LcPtr<EVP_PKEY>, KeyRejected> {
    // `or_else` keeps the fallback parse lazy: it only runs when the first
    // parse fails (`Result::or` would evaluate its argument eagerly and parse
    // the input twice even on success).
    rfc8017::decode_public_key_der(input).or_else(|_| rfc5280::decode_public_key_der(input))
}
impl Debug for PublicKey {
    /// Prints the DER-encoded key as hex.
    ///
    /// Writes directly into the formatter with `write!` instead of building an
    /// intermediate `String` via `format!` + `write_str`, producing identical
    /// output without the extra heap allocation.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        write!(f, "RsaPublicKey(\"{}\")", hex::encode(self.key.as_ref()))
    }
}
impl AsRef<[u8]> for PublicKey {
    /// DER encode a RSA public key to (RFC 8017) `RSAPublicKey` structure.
    fn as_ref(&self) -> &[u8] {
        // Returns the encoding cached at construction; no re-serialization.
        self.key.as_ref()
    }
}
impl AsDer<PublicKeyX509Der<'static>> for PublicKey {
    /// Re-encodes the cached RFC 8017 bytes as an RFC 5280 `SubjectPublicKeyInfo`.
    fn as_der(&self) -> Result<PublicKeyX509Der<'static>, Unspecified> {
        // TODO: refactor
        // Round-trips through an EVP_PKEY because only the RFC 8017 encoding is cached.
        let evp_pkey = rfc8017::decode_public_key_der(self.as_ref())?;
        rfc5280::encode_public_key_der(&evp_pkey)
    }
}
#[cfg(feature = "ring-io")]
impl PublicKey {
/// The public modulus (n).
#[must_use]
pub fn modulus(&self) -> io::Positive<'_> {
io::Positive::new_non_empty_without_leading_zeros(Input::from(self.modulus.as_ref()))
}
/// The public exponent (e).
#[must_use]
pub fn exponent(&self) -> io::Positive<'_> {
io::Positive::new_non_empty_without_leading_zeros(Input::from(self.exponent.as_ref()))
}
/// Returns the length in bytes of the public modulus.
#[must_use]
pub fn modulus_len(&self) -> usize {
self.modulus.len()
}
}
/// Low-level API for RSA public keys.
///
/// When the public key is in DER-encoded PKCS#1 ASN.1 format, it is
/// recommended to use `aws_lc_rs::signature::verify()` with
/// `aws_lc_rs::signature::RSA_PKCS1_*`, because `aws_lc_rs::signature::verify()`
/// will handle the parsing in that case. Otherwise, this function can be used
/// to pass in the raw bytes for the public key components as
/// `untrusted::Input` arguments.
// Generic over the byte storage so callers can use `Vec<u8>`, `&[u8]`, etc.
#[allow(clippy::module_name_repetitions)]
#[derive(Clone)]
pub struct PublicKeyComponents<B>
where
    B: AsRef<[u8]> + Debug,
{
    /// The public modulus, encoded in big-endian bytes without leading zeros.
    pub n: B,
    /// The public exponent, encoded in big-endian bytes without leading zeros.
    pub e: B,
}
impl<B: AsRef<[u8]> + Debug> Debug for PublicKeyComponents<B> {
    /// Renders both public components; nothing here is secret.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("RsaPublicKeyComponents");
        builder.field("n", &self.n);
        builder.field("e", &self.e);
        builder.finish()
    }
}
impl<B: Copy + AsRef<[u8]> + Debug> Copy for PublicKeyComponents<B> {}
impl<B> PublicKeyComponents<B>
where
    B: AsRef<[u8]> + Debug,
{
    /// Builds an `EVP_PKEY` from the raw `n`/`e` components.
    #[inline]
    fn build_rsa(&self) -> Result<LcPtr<EVP_PKEY>, ()> {
        let n_bytes = self.n.as_ref();
        // Components must be non-empty and big-endian without leading zeros.
        if n_bytes.is_empty() || n_bytes[0] == 0u8 {
            return Err(());
        }
        let mut n_bn = DetachableLcPtr::try_from(n_bytes)?;
        let e_bytes = self.e.as_ref();
        if e_bytes.is_empty() || e_bytes[0] == 0u8 {
            return Err(());
        }
        let mut e_bn = DetachableLcPtr::try_from(e_bytes)?;
        let mut rsa = DetachableLcPtr::new(unsafe { RSA_new() })?;
        // The private exponent (d) is null: this is a public key only.
        if 1 != unsafe {
            RSA_set0_key(
                rsa.as_mut_ptr(),
                n_bn.as_mut_ptr(),
                e_bn.as_mut_ptr(),
                null_mut(),
            )
        } {
            return Err(());
        }
        // RSA_set0_key took ownership of n/e on success; detach our wrappers
        // so they are not freed twice.
        n_bn.detach();
        e_bn.detach();
        let mut pkey = LcPtr::new(unsafe { EVP_PKEY_new() })?;
        if 1 != unsafe { EVP_PKEY_assign_RSA(pkey.as_mut_ptr(), rsa.as_mut_ptr()) } {
            return Err(());
        }
        // Likewise, the EVP_PKEY now owns the RSA structure.
        rsa.detach();
        Ok(pkey)
    }
    /// Verifies that `signature` is a valid signature of `message` using `self`
    /// as the public key. `params` determine what algorithm parameters
    /// (padding, digest algorithm, key length range, etc.) are used in the
    /// verification.
    ///
    /// # Errors
    /// `error::Unspecified` if `message` was not verified.
    pub fn verify(
        &self,
        params: &RsaParameters,
        message: &[u8],
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        // `()` errors from build_rsa are converted to `Unspecified` by `?`.
        let rsa = self.build_rsa()?;
        super::signature::verify_rsa_signature(
            params.digest_algorithm(),
            params.padding(),
            &rsa,
            message,
            signature,
            params.bit_size_range(),
        )
    }
}
#[cfg(feature = "ring-io")]
impl From<&PublicKey> for PublicKeyComponents<Vec<u8>> {
fn from(public_key: &PublicKey) -> Self {
PublicKeyComponents {
n: public_key.modulus.to_vec(),
e: public_key.exponent.to_vec(),
}
}
}
// NOTE(review): implementing `TryFrom<PublicKeyComponents<B>> for
// PublicEncryptingKey` would be the more idiomatic direction (it yields
// `TryInto` for free), but changing it now would alter the public trait surface.
impl<B> TryInto<PublicEncryptingKey> for PublicKeyComponents<B>
where
    B: AsRef<[u8]> + Debug,
{
    type Error = Unspecified;
    /// Try to build a `PublicEncryptingKey` from the public key components.
    ///
    /// # Errors
    /// `error::Unspecified` if the key failed to verify.
    fn try_into(self) -> Result<PublicEncryptingKey, Self::Error> {
        // Rebuild an EVP_PKEY from n/e, then apply encryption-key validation.
        let rsa = self.build_rsa()?;
        PublicEncryptingKey::new(rsa)
    }
}
// Generates a fresh RSA EVP_PKEY with a modulus of `size` bits.
pub(super) fn generate_rsa_key(size: c_int) -> Result<LcPtr<EVP_PKEY>, Unspecified> {
    // Keygen-context callback that fixes the modulus length.
    let params_fn = |ctx| match unsafe { EVP_PKEY_CTX_set_rsa_keygen_bits(ctx, size) } {
        1 => Ok(()),
        _ => Err(()),
    };
    LcPtr::<EVP_PKEY>::generate(EVP_PKEY_RSA, Some(params_fn))
}
#[cfg(feature = "fips")]
#[must_use]
pub(super) fn is_valid_fips_key(key: &LcPtr<EVP_PKEY>) -> bool {
// This should always be an RSA key and must-never panic.
let evp_pkey = key.as_const();
let rsa_key = evp_pkey.get_rsa().expect("RSA EVP_PKEY");
1 == unsafe { RSA_check_fips((rsa_key.as_const_ptr()).cast_mut()) }
}
// Returns true when the EVP_PKEY's type is RSA; both the plain RSA and the
// RSA-PSS key types count.
pub(super) fn is_rsa_key(key: &LcPtr<EVP_PKEY>) -> bool {
    [EVP_PKEY_RSA, EVP_PKEY_RSA_PSS].contains(&key.as_const().id())
}

321
vendor/aws-lc-rs/src/rsa/signature.rs vendored Normal file
View File

@@ -0,0 +1,321 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR ISC
use std::fmt::{self, Debug, Formatter};
use std::ops::RangeInclusive;
use crate::aws_lc::{
EVP_PKEY_CTX_set_rsa_padding, EVP_PKEY_CTX_set_rsa_pss_saltlen, EVP_PKEY_CTX_set_signature_md,
RSA_bits, EVP_PKEY, EVP_PKEY_CTX, RSA_PKCS1_PSS_PADDING, RSA_PSS_SALTLEN_DIGEST,
};
use crate::digest::{self, match_digest_type, Digest};
use crate::error::Unspecified;
use crate::ptr::LcPtr;
use crate::rsa::key::parse_rsa_public_key;
use crate::sealed::Sealed;
use crate::signature::{ParsedPublicKey, ParsedVerificationAlgorithm, VerificationAlgorithm};
use super::encoding;
#[cfg(feature = "ring-sig-verify")]
use untrusted::Input;
/// RSA padding schemes supported for signing and verification.
#[allow(non_camel_case_types)]
#[allow(clippy::module_name_repetitions)]
#[derive(Debug)]
pub enum RsaPadding {
    /// PKCS #1 v1.5 signature padding.
    RSA_PKCS1_PADDING,
    /// PKCS #1 PSS signature padding.
    RSA_PKCS1_PSS_PADDING,
}
/// Parameters for RSA verification.
pub struct RsaParameters(
    /// Digest algorithm the signature is computed over.
    &'static digest::Algorithm,
    /// Padding scheme (PKCS #1 v1.5 or PSS).
    &'static RsaPadding,
    /// Allowed public-modulus size range, in bits (inclusive).
    RangeInclusive<u32>,
    /// Algorithm identifier, used only for `Debug` output.
    &'static RsaVerificationAlgorithmId,
);
impl RsaParameters {
    /// Digest algorithm used when verifying signatures under these parameters.
    #[inline]
    pub(crate) fn digest_algorithm(&self) -> &'static digest::Algorithm {
        self.0
    }
    /// Padding scheme used when verifying signatures under these parameters.
    #[inline]
    pub(crate) fn padding(&self) -> &'static RsaPadding {
        self.1
    }
    /// Inclusive range of acceptable public-modulus sizes, in bits.
    #[inline]
    pub(crate) fn bit_size_range(&self) -> &RangeInclusive<u32> {
        &self.2
    }
}
impl ParsedVerificationAlgorithm for RsaParameters {
    /// Verifies `signature` over `msg` with an already-parsed public key,
    /// hashing `msg` with this algorithm's digest.
    fn parsed_verify_sig(
        &self,
        public_key: &ParsedPublicKey,
        msg: &[u8],
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        verify_rsa_signature(
            self.digest_algorithm(),
            self.padding(),
            public_key.key(),
            msg,
            signature,
            self.bit_size_range(),
        )
    }

    /// Verifies `signature` over a precomputed `digest` with an
    /// already-parsed public key.
    fn parsed_verify_digest_sig(
        &self,
        public_key: &ParsedPublicKey,
        digest: &Digest,
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        verify_rsa_digest_signature(
            self.padding(),
            public_key.key(),
            digest,
            signature,
            self.bit_size_range(),
        )
    }
}
impl VerificationAlgorithm for RsaParameters {
    /// `ring`-compatible verification entry point; unwraps the `untrusted`
    /// inputs and delegates to [`Self::verify_sig`].
    #[cfg(feature = "ring-sig-verify")]
    fn verify(
        &self,
        public_key: Input<'_>,
        msg: Input<'_>,
        signature: Input<'_>,
    ) -> Result<(), Unspecified> {
        self.verify_sig(
            public_key.as_slice_less_safe(),
            msg.as_slice_less_safe(),
            signature.as_slice_less_safe(),
        )
    }
    /// Parses `public_key` (DER) and verifies `signature` over `msg`.
    fn verify_sig(
        &self,
        public_key: &[u8],
        msg: &[u8],
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        let evp_pkey = parse_rsa_public_key(public_key)?;
        verify_rsa_signature(
            self.digest_algorithm(),
            self.padding(),
            &evp_pkey,
            msg,
            signature,
            self.bit_size_range(),
        )
    }
    /// Parses `public_key` (DER) and verifies `signature` over an
    /// externally computed `digest`.
    fn verify_digest_sig(
        &self,
        public_key: &[u8],
        digest: &Digest,
        signature: &[u8],
    ) -> Result<(), Unspecified> {
        // The caller-supplied digest must have been produced with this
        // algorithm's hash; a mismatch is rejected outright.
        if self.digest_algorithm() != digest.algorithm() {
            return Err(Unspecified);
        }
        let evp_pkey = parse_rsa_public_key(public_key)?;
        verify_rsa_digest_signature(
            self.padding(),
            &evp_pkey,
            digest,
            signature,
            self.bit_size_range(),
        )
    }
}
impl Sealed for RsaParameters {}
impl Debug for RsaParameters {
    /// Formats as `{ <algorithm id> }`, e.g. `{ RSA_PKCS1_2048_8192_SHA256 }`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // write! formats directly into the Formatter, avoiding the
        // intermediate String that `format!` + `write_str` allocated.
        write!(f, "{{ {:?} }}", self.3)
    }
}
impl RsaParameters {
    /// Constructs verification parameters from a digest, padding scheme,
    /// allowed modulus-size range (bits), and a debug identifier.
    pub(crate) const fn new(
        digest_alg: &'static digest::Algorithm,
        padding: &'static RsaPadding,
        range: RangeInclusive<u32>,
        verification_alg: &'static RsaVerificationAlgorithmId,
    ) -> Self {
        Self(digest_alg, padding, range, verification_alg)
    }
    /// Parses a DER-encoded `RSAPublicKey` structure (RFC 8017) to determine its size in bits.
    ///
    /// # Errors
    /// `error::Unspecified` on parse error.
    pub fn public_modulus_len(public_key: &[u8]) -> Result<u32, Unspecified> {
        let rsa = encoding::rfc8017::decode_public_key_der(public_key)?;
        // RSA_bits reports the size of the public modulus in bits.
        Ok(unsafe { RSA_bits(rsa.as_const().get_rsa()?.as_const_ptr()) })
    }
    /// Minimum modulus length in bits.
    #[must_use]
    pub fn min_modulus_len(&self) -> u32 {
        *self.2.start()
    }
    /// Maximum modulus length in bits.
    #[must_use]
    pub fn max_modulus_len(&self) -> u32 {
        *self.2.end()
    }
}
/// Identifiers for the RSA verification algorithms exposed by this crate.
/// Names encode padding, allowed modulus-size range (bits), and digest;
/// used for `Debug` output on [`RsaParameters`].
#[derive(Debug)]
#[allow(non_camel_case_types)]
pub(crate) enum RsaVerificationAlgorithmId {
    RSA_PKCS1_1024_8192_SHA1_FOR_LEGACY_USE_ONLY,
    RSA_PKCS1_1024_8192_SHA256_FOR_LEGACY_USE_ONLY,
    RSA_PKCS1_1024_8192_SHA512_FOR_LEGACY_USE_ONLY,
    RSA_PKCS1_2048_8192_SHA1_FOR_LEGACY_USE_ONLY,
    RSA_PKCS1_2048_8192_SHA256,
    RSA_PKCS1_2048_8192_SHA384,
    RSA_PKCS1_2048_8192_SHA512,
    RSA_PKCS1_3072_8192_SHA384,
    RSA_PSS_2048_8192_SHA256,
    RSA_PSS_2048_8192_SHA384,
    RSA_PSS_2048_8192_SHA512,
}
/// Identifiers for the RSA signing algorithms exposed by this crate.
/// Names encode padding scheme and digest; used for `Debug` output on
/// [`RsaSignatureEncoding`].
#[derive(Debug)]
#[allow(non_camel_case_types)]
pub(crate) enum RsaSigningAlgorithmId {
    RSA_PSS_SHA256,
    RSA_PSS_SHA384,
    RSA_PSS_SHA512,
    RSA_PKCS1_SHA256,
    RSA_PKCS1_SHA384,
    RSA_PKCS1_SHA512,
}
#[allow(clippy::module_name_repetitions)]
/// Encoding type for an RSA signature
pub struct RsaSignatureEncoding(
    /// Digest algorithm the message is hashed with before signing.
    &'static digest::Algorithm,
    /// Padding scheme applied to the signature.
    &'static RsaPadding,
    /// Algorithm identifier, used only for `Debug` output.
    &'static RsaSigningAlgorithmId,
);
impl RsaSignatureEncoding {
    /// Constructs a signature encoding from a digest, padding scheme, and
    /// a debug identifier.
    pub(crate) const fn new(
        digest_alg: &'static digest::Algorithm,
        padding: &'static RsaPadding,
        sig_alg: &'static RsaSigningAlgorithmId,
    ) -> Self {
        Self(digest_alg, padding, sig_alg)
    }
    /// Digest algorithm used when signing under this encoding.
    #[inline]
    pub(super) fn digest_algorithm(&self) -> &'static digest::Algorithm {
        self.0
    }
    /// Padding scheme used when signing under this encoding.
    #[inline]
    pub(super) fn padding(&self) -> &'static RsaPadding {
        self.1
    }
}
impl Sealed for RsaSignatureEncoding {}
/// An RSA signature encoding as described in [RFC 3447 Section 8].
///
/// [RFC 3447 Section 8]: https://tools.ietf.org/html/rfc3447#section-8
#[allow(clippy::module_name_repetitions)]
pub trait RsaEncoding: 'static + Sync + Sealed + Debug {
    /// The signature encoding.
    fn encoding(&'static self) -> &'static RsaSignatureEncoding;
}
impl RsaEncoding for RsaSignatureEncoding {
    /// An `RsaSignatureEncoding` is its own encoding.
    fn encoding(&'static self) -> &'static RsaSignatureEncoding {
        self
    }
}
impl Debug for RsaSignatureEncoding {
    /// Formats as `{ <algorithm id> }`, e.g. `{ RSA_PKCS1_SHA256 }`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // write! formats directly into the Formatter, avoiding the
        // intermediate String that `format!` + `write_str` allocated.
        // `fmt::Result` is the conventional alias for `Result<(), fmt::Error>`.
        write!(f, "{{ {:?} }}", self.2)
    }
}
/// Configures an `EVP_PKEY_CTX` for RSA-PSS: sets PSS padding and a salt
/// length equal to the digest length. Each AWS-LC call returns 1 on success.
#[inline]
pub(crate) fn configure_rsa_pkcs1_pss_padding(pctx: *mut EVP_PKEY_CTX) -> Result<(), ()> {
    if 1 != unsafe { EVP_PKEY_CTX_set_rsa_padding(pctx, RSA_PKCS1_PSS_PADDING) } {
        return Err(());
    }
    // RSA_PSS_SALTLEN_DIGEST: salt length matches the signature digest length.
    if 1 != unsafe { EVP_PKEY_CTX_set_rsa_pss_saltlen(pctx, RSA_PSS_SALTLEN_DIGEST) } {
        return Err(());
    }
    Ok(())
}
/// Verifies an RSA `signature` over `msg`, first checking that the key's
/// modulus size falls inside `allowed_bit_size`.
#[inline]
pub(crate) fn verify_rsa_signature(
    algorithm: &'static digest::Algorithm,
    padding: &'static RsaPadding,
    public_key: &LcPtr<EVP_PKEY>,
    msg: &[u8],
    signature: &[u8],
    allowed_bit_size: &RangeInclusive<u32>,
) -> Result<(), Unspecified> {
    // Reject keys whose modulus length is outside the algorithm's range.
    let key_bits: u32 = public_key.as_const().key_size_bits().try_into()?;
    if !allowed_bit_size.contains(&key_bits) {
        return Err(Unspecified);
    }
    // PSS needs extra EVP_PKEY_CTX configuration; PKCS #1 v1.5 is the default.
    let padding_fn = match padding {
        RsaPadding::RSA_PKCS1_PSS_PADDING => Some(configure_rsa_pkcs1_pss_padding),
        RsaPadding::RSA_PKCS1_PADDING => None,
    };
    public_key.verify(msg, Some(algorithm), padding_fn, signature)
}
/// Verifies an RSA `signature` over a precomputed `digest`, first checking
/// that the key's modulus size falls inside `allowed_bit_size`.
#[inline]
pub(crate) fn verify_rsa_digest_signature(
    padding: &'static RsaPadding,
    public_key: &LcPtr<EVP_PKEY>,
    digest: &Digest,
    signature: &[u8],
    allowed_bit_size: &RangeInclusive<u32>,
) -> Result<(), Unspecified> {
    // Reject keys whose modulus length is outside the algorithm's range.
    let key_bits: u32 = public_key.as_const().key_size_bits().try_into()?;
    if !allowed_bit_size.contains(&key_bits) {
        return Err(Unspecified);
    }
    // Pin the signature digest on the context, then apply the PSS
    // configuration when that padding scheme is selected.
    let configure_ctx = |ctx: *mut EVP_PKEY_CTX| {
        let evp_md = match_digest_type(&digest.algorithm().id);
        if 1 != unsafe { EVP_PKEY_CTX_set_signature_md(ctx, evp_md.as_const_ptr()) } {
            return Err(());
        }
        match padding {
            RsaPadding::RSA_PKCS1_PSS_PADDING => configure_rsa_pkcs1_pss_padding(ctx),
            RsaPadding::RSA_PKCS1_PADDING => Ok(()),
        }
    };
    public_key.verify_digest_sig(digest, Some(configure_ctx), signature)
}

Some files were not shown because too many files have changed in this diff Show More