chore: checkpoint before Python removal

This commit is contained in:
2026-03-26 22:33:59 +00:00
parent 683cec9307
commit e568ddf82a
29972 changed files with 11269302 additions and 2 deletions

View File

@@ -0,0 +1 @@
{"files":{".cargo_vcs_info.json":"2eb5a4f27dbf31cf63a63c70a2b7d8799914be648f56eb5f16c600a86bcb9c26",".editorconfig":"ab01b958c6114e846eeb3ab8f23bd5cb9f4523c6230968bc02a8aab5ca3ae3a7",".travis.yml":"ec8a89fd77f378ae44b052e699457bb049f2e65340e37143407417b8040b9bf1","Cargo.lock":"21610c43af26728947615128c38e407843f350ed88097aa01f6208bee9325100","Cargo.toml":"e8591a61b0fca0a73e34485db425e0fba0130a1ef31365e4b46ff16a7cdcb37a","Cargo.toml.orig":"277a596242963d5fbef08a7d967f97e3bbd44bccdc050a94f896fbb7883040f9","LICENSE":"a2010f343487d3f7618affe54f789f5487602331c0a8d03f49e9a7c547cf0499","README.md":"44668b11e891dfdb78974e86ec42eefd45d44499715a3d83bd4a2200b892bc7d","benches/kangaroo.rs":"60e3eb7249dfd3985d49dabc726d0613cfaf9f3daf3b1d3ccbf47697733cb651","benches/keccak.rs":"7885e55621f95430638fbeb44ff4d347ba39e273a85547b3fbd42a11b1f45f12","build.rs":"c3e63c57ab0d28633f522c4a056c3230e9399fda6e511954da81ebfd00b216b2","examples/sha3.rs":"4502cd021e73fcfcb366f69dd6e808222a82148e5a5fe4b6bc2f7716ddcbb709","src/cshake.rs":"55428e5dd7acce4b5b8f3942ef193238381c1372fe0d5574a147cf24e1d7062b","src/k12.rs":"e1890350a70e51abf3e11b671cc1f32ec50e26ee0275166b71a0b087446731d9","src/keccak.rs":"e56adff13f8a55c2acfd1d6eaffdec8387870d85d4bffda96e9501da925b2af5","src/keccakf.rs":"318bddc0302a16a02b7bf85c02f4f02a29792c3c9581aba6946ccaa5c2034416","src/keccakp.rs":"90aae4a198381a8775b49309008b0ab2a1d892b8a5aac7cefa5e08a0c3923716","src/kmac.rs":"489805ef15caec95395fa0f4e31e51b40968ba43be155b24301a7f786ad06965","src/lib.rs":"c14112176bb359d24486f141a36370fe3a894c4dc7cdb952718a7509c93e424a","src/parallel_hash.rs":"2c91c75c8497de422dab69cfce762d3188a1ff1af604df20987058615ea18c23","src/sha3.rs":"e72aab9e536438114049f590a47c6df6250f0fffb6664dbcb85a87a151fb7e65","src/shake.rs":"dce5e46f5fda03abd42faecaa2b6d5ccc0abfd53aff59a67d63dec828fc02e2b","src/tuple_hash.rs":"2d5946a1095e9d046c6f74419c611d692cc74cfe354221d1ae48972828cd8609","tests/cshake.rs":"39193ab67320a3104bb73b4a3e83f1c516a845fdbed1ddfe714dcf6972a1c
a68","tests/kangaroo.rs":"3a8c426f84317daecc6f82577a44cb74fd34311f29b5fa8db9b01817fc8ae262","tests/keccak.rs":"3577c04cc74e1c037d8cb1c03445ad6a9cf831d124451a97b2e66121b9140447","tests/kmac.rs":"1e4028cea536ebc3a52feecb99016f3389e6523c5f1ee5251476bc6d4de4e1ef","tests/parallel_hash.rs":"e067e3e9a2e28a191cfca5e507cfb18680fd35c89932aa7f1f744169bb23217c","tests/sha3.rs":"553c9b44954cd577fe5ca305ee340f3dd189c8f43c3cd949a223d9181ce4203c","tests/shake.rs":"fd1db640f3da13ec3e5dae1f37314f02cc147f7e874ea1a8f80370f2c7080a3a","tests/tuple_hash.rs":"36c80c3c6f5aa5acf22e2eed5bee8bc2c47d33412ae6c7418be4e6b6d9fd0a6a"},"package":"2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"}

View File

@@ -0,0 +1,5 @@
{
"git": {
"sha1": "f034411460d0012925052017c1658b3ce9cda532"
}
}

11
vendor/tiny-keccak/.editorconfig vendored Normal file
View File

@@ -0,0 +1,11 @@
root = true
[*]
indent_style=space
indent_size=4
tab_width=4
end_of_line=lf
charset=utf-8
trim_trailing_whitespace=true
max_line_length=100
insert_final_newline=true

52
vendor/tiny-keccak/.travis.yml vendored Normal file
View File

@@ -0,0 +1,52 @@
language: rust
services: docker
sudo: required
branches:
only:
- master
matrix:
fast_finish: false
include:
- env: TARGET=x86_64-unknown-linux-gnu FEATURES=--all-features
rust: stable
- env: TARGET=i686-unknown-linux-gnu FEATURES=--all-features
rust: stable
- env: TARGET=powerpc-unknown-linux-gnu FEATURES=--all-features
rust: stable
- env: TARGET=powerpc64-unknown-linux-gnu FEATURES=--all-features
rust: stable
- env: TARGET=x86_64-unknown-linux-gnu FEATURES=--all-features
rust: nightly
- env: TARGET=x86_64-unknown-linux-gnu FEATURES="--features cshake"
rust: stable
- env: TARGET=x86_64-unknown-linux-gnu FEATURES="--features fips202"
rust: stable
- env: TARGET=x86_64-unknown-linux-gnu FEATURES="--features k12"
rust: stable
- env: TARGET=x86_64-unknown-linux-gnu FEATURES="--features keccak"
rust: stable
- env: TARGET=x86_64-unknown-linux-gnu FEATURES="--features kmac"
rust: stable
- env: TARGET=x86_64-unknown-linux-gnu FEATURES="--features sha3"
rust: stable
- env: TARGET=x86_64-unknown-linux-gnu FEATURES="--features shake"
rust: stable
- env: TARGET=x86_64-unknown-linux-gnu FEATURES="--features sp800"
rust: stable
- env: TARGET=x86_64-unknown-linux-gnu FEATURES="--features tuple_hash"
rust: stable
- env: TARGET=x86_64-unknown-linux-gnu FEATURES="--features parallel_hash"
rust: stable
install:
- cargo install cross --force
- source ~/.cargo/env || true
script:
- cross test $FEATURES --verbose --all --release --target $TARGET
cache: cargo

16
vendor/tiny-keccak/Cargo.lock generated vendored Normal file
View File

@@ -0,0 +1,16 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "crunchy"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "tiny-keccak"
version = "2.0.2"
dependencies = [
"crunchy 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[metadata]
"checksum crunchy 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"

93
vendor/tiny-keccak/Cargo.toml vendored Normal file
View File

@@ -0,0 +1,93 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
edition = "2018"
name = "tiny-keccak"
version = "2.0.2"
authors = ["debris <marek.kotewicz@gmail.com>"]
description = "An implementation of Keccak derived functions."
homepage = "https://github.com/debris/tiny-keccak"
keywords = ["hash", "sha3", "keccak", "crypto", "kangarootwelve"]
categories = ["cryptography", "no-std"]
license = "CC0-1.0"
[package.metadata.docs.rs]
all-features = true
[profile.dev]
opt-level = 3
debug = false
[profile.test]
opt-level = 3
debug = false
[[example]]
name = "sha3"
required-features = ["sha3"]
[[test]]
name = "keccak"
required-features = ["keccak"]
[[test]]
name = "cshake"
required-features = ["cshake"]
[[test]]
name = "tuple_hash"
required-features = ["tuple_hash"]
[[test]]
name = "kangaroo"
required-features = ["k12"]
[[test]]
name = "sha3"
required-features = ["sha3"]
[[test]]
name = "shake"
required-features = ["shake"]
[[test]]
name = "kmac"
required-features = ["kmac"]
[[test]]
name = "parallel_hash"
required-features = ["parallel_hash"]
[[bench]]
name = "keccak"
required-features = ["keccak"]
[[bench]]
name = "kangaroo"
required-features = ["k12"]
[dependencies.crunchy]
version = "0.2.2"
[features]
cshake = []
default = []
fips202 = ["keccak", "shake", "sha3"]
k12 = []
keccak = []
kmac = ["cshake"]
parallel_hash = ["cshake"]
sha3 = []
shake = []
sp800 = ["cshake", "kmac", "tuple_hash"]
tuple_hash = ["cshake"]
[badges.travis-ci]
branch = "master"
repository = "debris/tiny-keccak"

121
vendor/tiny-keccak/LICENSE vendored Normal file
View File

@@ -0,0 +1,121 @@
Creative Commons Legal Code
CC0 1.0 Universal
CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
HEREUNDER.
Statement of Purpose
The laws of most jurisdictions throughout the world automatically confer
exclusive Copyright and Related Rights (defined below) upon the creator
and subsequent owner(s) (each and all, an "owner") of an original work of
authorship and/or a database (each, a "Work").
Certain owners wish to permanently relinquish those rights to a Work for
the purpose of contributing to a commons of creative, cultural and
scientific works ("Commons") that the public can reliably and without fear
of later claims of infringement build upon, modify, incorporate in other
works, reuse and redistribute as freely as possible in any form whatsoever
and for any purposes, including without limitation commercial purposes.
These owners may contribute to the Commons to promote the ideal of a free
culture and the further production of creative, cultural and scientific
works, or to gain reputation or greater distribution for their Work in
part through the use and efforts of others.
For these and/or other purposes and motivations, and without any
expectation of additional consideration or compensation, the person
associating CC0 with a Work (the "Affirmer"), to the extent that he or she
is an owner of Copyright and Related Rights in the Work, voluntarily
elects to apply CC0 to the Work and publicly distribute the Work under its
terms, with knowledge of his or her Copyright and Related Rights in the
Work and the meaning and intended legal effect of CC0 on those rights.
1. Copyright and Related Rights. A Work made available under CC0 may be
protected by copyright and related or neighboring rights ("Copyright and
Related Rights"). Copyright and Related Rights include, but are not
limited to, the following:
i. the right to reproduce, adapt, distribute, perform, display,
communicate, and translate a Work;
ii. moral rights retained by the original author(s) and/or performer(s);
iii. publicity and privacy rights pertaining to a person's image or
likeness depicted in a Work;
iv. rights protecting against unfair competition in regards to a Work,
subject to the limitations in paragraph 4(a), below;
v. rights protecting the extraction, dissemination, use and reuse of data
in a Work;
vi. database rights (such as those arising under Directive 96/9/EC of the
European Parliament and of the Council of 11 March 1996 on the legal
protection of databases, and under any national implementation
thereof, including any amended or successor version of such
directive); and
vii. other similar, equivalent or corresponding rights throughout the
world based on applicable law or treaty, and any national
implementations thereof.
2. Waiver. To the greatest extent permitted by, but not in contravention
of, applicable law, Affirmer hereby overtly, fully, permanently,
irrevocably and unconditionally waives, abandons, and surrenders all of
Affirmer's Copyright and Related Rights and associated claims and causes
of action, whether now known or unknown (including existing as well as
future claims and causes of action), in the Work (i) in all territories
worldwide, (ii) for the maximum duration provided by applicable law or
treaty (including future time extensions), (iii) in any current or future
medium and for any number of copies, and (iv) for any purpose whatsoever,
including without limitation commercial, advertising or promotional
purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
member of the public at large and to the detriment of Affirmer's heirs and
successors, fully intending that such Waiver shall not be subject to
revocation, rescission, cancellation, termination, or any other legal or
equitable action to disrupt the quiet enjoyment of the Work by the public
as contemplated by Affirmer's express Statement of Purpose.
3. Public License Fallback. Should any part of the Waiver for any reason
be judged legally invalid or ineffective under applicable law, then the
Waiver shall be preserved to the maximum extent permitted taking into
account Affirmer's express Statement of Purpose. In addition, to the
extent the Waiver is so judged Affirmer hereby grants to each affected
person a royalty-free, non transferable, non sublicensable, non exclusive,
irrevocable and unconditional license to exercise Affirmer's Copyright and
Related Rights in the Work (i) in all territories worldwide, (ii) for the
maximum duration provided by applicable law or treaty (including future
time extensions), (iii) in any current or future medium and for any number
of copies, and (iv) for any purpose whatsoever, including without
limitation commercial, advertising or promotional purposes (the
"License"). The License shall be deemed effective as of the date CC0 was
applied by Affirmer to the Work. Should any part of the License for any
reason be judged legally invalid or ineffective under applicable law, such
partial invalidity or ineffectiveness shall not invalidate the remainder
of the License, and in such case Affirmer hereby affirms that he or she
will not (i) exercise any of his or her remaining Copyright and Related
Rights in the Work or (ii) assert any associated claims and causes of
action with respect to the Work, in either case contrary to Affirmer's
express Statement of Purpose.
4. Limitations and Disclaimers.
a. No trademark or patent rights held by Affirmer are waived, abandoned,
surrendered, licensed or otherwise affected by this document.
b. Affirmer offers the Work as-is and makes no representations or
warranties of any kind concerning the Work, express, implied,
statutory or otherwise, including without limitation warranties of
title, merchantability, fitness for a particular purpose, non
infringement, or the absence of latent or other defects, accuracy, or
the present or absence of errors, whether or not discoverable, all to
the greatest extent permissible under applicable law.
c. Affirmer disclaims responsibility for clearing rights of other persons
that may apply to the Work or any use thereof, including without
limitation any person's Copyright and Related Rights in the Work.
Further, Affirmer disclaims responsibility for obtaining any necessary
consents, permissions or other rights required for any use of the
Work.
d. Affirmer understands and acknowledges that Creative Commons is not a
party to this document and has no duty or obligation with respect to
this CC0 or use of the Work.

70
vendor/tiny-keccak/README.md vendored Normal file
View File

@@ -0,0 +1,70 @@
# tiny-keccak
An implementation of Keccak derived functions specified in [`FIPS-202`], [`SP800-185`] and [`KangarooTwelve`].
[![Build Status][travis-image]][travis-url]
[travis-image]: https://travis-ci.org/debris/tiny-keccak.svg?branch=master
[travis-url]: https://travis-ci.org/debris/tiny-keccak
[`FIPS-202`]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
[`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
[`KangarooTwelve`]: https://eprint.iacr.org/2016/770.pdf
[`Documentation`](https://docs.rs/tiny-keccak)
The `Keccak-f[1600]` permutation is fully unrolled; it's nearly as fast
as the Keccak team's optimized permutation.
## Usage
In your `Cargo.toml`, specify which features (hash functions) you intend to use.
Available options are: `cshake`, `fips202`, `k12`, `keccak`, `kmac`, `parallel_hash`, `sha3`,
`shake`, `sp800`, `tuple_hash`.
```toml
[dependencies]
tiny-keccak = { version = "2.0", features = ["sha3"] }
```
## Example
```rust
use tiny_keccak::Sha3;
fn main() {
let mut sha3 = Sha3::v256();
let mut output = [0u8; 32];
let expected = b"\
\x64\x4b\xcc\x7e\x56\x43\x73\x04\x09\x99\xaa\xc8\x9e\x76\x22\xf3\
\xca\x71\xfb\xa1\xd9\x72\xfd\x94\xa3\x1c\x3b\xfb\xf2\x4e\x39\x38\
";
sha3.update(b"hello");
sha3.update(b" ");
sha3.update(b"world");
sha3.finalize(&mut output);
assert_eq!(expected, &output);
}
```
## Benchmarks
Benchmarked with [rust-crypto](https://github.com/RustCrypto) sha3 on:
```
MacBook Pro (Retina, 15-inch, Mid 2015)
2,5 GHz Intel Core i7
16 GB 1600 MHz DDR3
Intel Iris Pro 1536 MB
```
Benchmark code is available [here](https://github.com/debris/tiny-keccak/blob/master/comparison/benches/sha3.rs)
```
running 4 tests
test rust_crypto_sha3_256_input_32_bytes ... bench: 677 ns/iter (+/- 113) = 47 MB/s
test rust_crypto_sha3_256_input_4096_bytes ... bench: 17,619 ns/iter (+/- 4,174) = 232 MB/s
test tiny_keccak_sha3_256_input_32_bytes ... bench: 569 ns/iter (+/- 204) = 56 MB/s
test tiny_keccak_sha3_256_input_4096_bytes ... bench: 17,185 ns/iter (+/- 4,575) = 238 MB/s
```

19
vendor/tiny-keccak/benches/kangaroo.rs vendored Normal file
View File

@@ -0,0 +1,19 @@
#![feature(test)]
extern crate test;
use test::Bencher;
use tiny_keccak::{KangarooTwelve, Hasher};

/// Measures one-shot KangarooTwelve hashing of a short 32-byte message
/// (construction + absorb + finalize per iteration).
#[bench]
fn bench_k12(bencher: &mut Bencher) {
    let input = [0u8; 32];
    bencher.bytes = input.len() as u64;
    bencher.iter(|| {
        let mut digest = [0u8; 32];
        let mut hasher = KangarooTwelve::new(&[]);
        hasher.update(&input);
        hasher.finalize(&mut digest);
    });
}

43
vendor/tiny-keccak/benches/keccak.rs vendored Normal file
View File

@@ -0,0 +1,43 @@
#![feature(test)]
extern crate test;
use test::Bencher;
use tiny_keccak::{keccakf, Keccak, Hasher};

/// Keccak-256 throughput on a 4 KiB message.
#[bench]
fn bench_keccak_256_input_4096_bytes(bencher: &mut Bencher) {
    let input = [254u8; 4096];
    bencher.bytes = input.len() as u64;
    bencher.iter(|| {
        let mut digest: [u8; 32] = [0; 32];
        let mut hasher = Keccak::v256();
        hasher.update(&input);
        hasher.finalize(&mut digest);
    });
}

/// Raw keccak-f[1600] permutation speed over the 25-lane (200-byte) state.
#[bench]
fn keccakf_u64(bencher: &mut Bencher) {
    const WORDS: usize = 25;
    bencher.bytes = (WORDS * 8) as u64;
    bencher.iter(|| {
        let mut state = [0u64; WORDS];
        keccakf(&mut state);
    });
}

/// Keccak-256 latency on a short 32-byte message (dominated by setup + padding).
#[bench]
fn bench_keccak256(bencher: &mut Bencher) {
    let input = [0u8; 32];
    bencher.bytes = input.len() as u64;
    bencher.iter(|| {
        let mut digest: [u8; 32] = [0; 32];
        let mut hasher = Keccak::v256();
        hasher.update(&input);
        hasher.finalize(&mut digest);
    });
}

22
vendor/tiny-keccak/build.rs vendored Normal file
View File

@@ -0,0 +1,22 @@
// Build-time guard: every hash function in this crate sits behind a Cargo
// feature, so a build with no features enabled would produce an empty crate.
// Fail early with an actionable message instead of compiling nothing.
#[cfg(not(any(
    feature = "keccak",
    feature = "shake",
    feature = "sha3",
    feature = "cshake",
    feature = "kmac",
    feature = "tuple_hash",
    feature = "parallel_hash",
    feature = "k12",
    feature = "fips202",
    feature = "sp800"
)))]
compile_error!(
    "You need to specify at least one hash function you intend to use. \
    Available options:\n\
    keccak, shake, sha3, cshake, kmac, tuple_hash, parallel_hash, k12, fips202, sp800\n\
    e.g.\n\
    tiny-keccak = { version = \"2.0.0\", features = [\"sha3\"] }"
);

// No code generation is performed; the build script exists only so the
// feature check above runs at build time.
fn main() {
}

17
vendor/tiny-keccak/examples/sha3.rs vendored Normal file
View File

@@ -0,0 +1,17 @@
use tiny_keccak::{Hasher, Sha3};

/// Hashes "hello world" with SHA3-256, feeding the message in three pieces,
/// and checks the digest against the expected vector.
fn main() {
    // SHA3-256("hello world").
    let expected = b"\
\x64\x4b\xcc\x7e\x56\x43\x73\x04\x09\x99\xaa\xc8\x9e\x76\x22\xf3\
\xca\x71\xfb\xa1\xd9\x72\xfd\x94\xa3\x1c\x3b\xfb\xf2\x4e\x39\x38\
";
    let mut hasher = Sha3::v256();
    hasher.update(b"hello");
    hasher.update(b" ");
    hasher.update(b"world");
    let mut digest = [0; 32];
    hasher.finalize(&mut digest);
    assert_eq!(expected, &digest);
}

77
vendor/tiny-keccak/src/cshake.rs vendored Normal file
View File

@@ -0,0 +1,77 @@
//! The `cSHAKE` extendable-output functions defined in [`SP800-185`].
//!
//! [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
use crate::{bits_to_rate, keccakf::KeccakF, left_encode, Hasher, KeccakState, Xof};
/// The `cSHAKE` extendable-output functions defined in [`SP800-185`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["cshake"] }
/// ```
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
#[derive(Clone)]
pub struct CShake {
    // Underlying keccak-f[1600] sponge; the cSHAKE/SHAKE padding delimiter
    // is chosen at construction time (see `CShake::new`).
    state: KeccakState<KeccakF>,
}
impl CShake {
    // cSHAKE padding delimiter from SP800-185.
    const DELIM: u8 = 0x04;

    /// Creates new [`CShake`] hasher with a security level of 128 bits.
    ///
    /// [`CShake`]: struct.CShake.html
    pub fn v128(name: &[u8], custom_string: &[u8]) -> CShake {
        CShake::new(name, custom_string, 128)
    }

    /// Creates new [`CShake`] hasher with a security level of 256 bits.
    ///
    /// [`CShake`]: struct.CShake.html
    pub fn v256(name: &[u8], custom_string: &[u8]) -> CShake {
        CShake::new(name, custom_string, 256)
    }

    /// Shared constructor: absorbs the SP800-185
    /// `bytepad(encode_string(N) || encode_string(S), rate)` prefix,
    /// unless both `name` and `custom_string` are empty.
    pub(crate) fn new(name: &[u8], custom_string: &[u8], bits: usize) -> CShake {
        let rate = bits_to_rate(bits);
        // if there is no name and no customization string
        // cSHAKE is SHAKE
        if name.is_empty() && custom_string.is_empty() {
            // 0x1f is SHAKE's padding delimiter, replacing cSHAKE's 0x04.
            let state = KeccakState::new(rate, 0x1f);
            return CShake { state };
        }
        let mut state = KeccakState::new(rate, Self::DELIM);
        // bytepad's leading left_encode(w) where w is the rate in bytes.
        state.update(left_encode(rate).value());
        // encode_string(N): bit-length prefix, then the name bytes.
        state.update(left_encode(name.len() * 8).value());
        state.update(name);
        // encode_string(S): bit-length prefix, then the customization bytes.
        state.update(left_encode(custom_string.len() * 8).value());
        state.update(custom_string);
        // Complete the bytepad: advance the sponge to a rate-block boundary.
        state.fill_block();
        CShake { state }
    }

    // Advances the underlying sponge to a block boundary; used by KMAC to
    // bytepad its key block as well.
    pub(crate) fn fill_block(&mut self) {
        self.state.fill_block();
    }
}
impl Hasher for CShake {
    /// Absorbs additional input; may be called any number of times.
    fn update(&mut self, input: &[u8]) {
        self.state.update(input);
    }

    /// Pads the state and writes `output.len()` digest bytes into `output`.
    fn finalize(self, output: &mut [u8]) {
        self.state.finalize(output);
    }
}
impl Xof for CShake {
    /// Squeezes further output bytes; repeated calls extend the output stream.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output);
    }
}

160
vendor/tiny-keccak/src/k12.rs vendored Normal file
View File

@@ -0,0 +1,160 @@
//! The `KangarooTwelve` hash function defined [`here`].
//!
//! [`here`]: https://eprint.iacr.org/2016/770.pdf
use crate::{bits_to_rate, keccakp::KeccakP, EncodedLen, Hasher, IntoXof, KeccakState, Xof};
// K12's `length_encode`: the minimal big-endian byte representation of `len`
// followed by a single byte giving how many bytes that representation used.
// NOTE(review): assumes `EncodedLen::value()` yields `buffer[offset..]`
// (declared elsewhere in this crate) — confirm against `EncodedLen`.
fn encode_len(len: usize) -> EncodedLen {
    // Big-endian view of the length; leading zero bytes are significant-free.
    let len_view = (len as u64).to_be_bytes();
    // Index of the first non-zero byte; 8 when len == 0 (empty representation).
    let offset = len_view.iter().position(|i| *i != 0).unwrap_or(8);
    let mut buffer = [0u8; 9];
    buffer[..8].copy_from_slice(&len_view);
    // Trailing byte: the count of significant bytes in the representation.
    buffer[8] = 8 - offset as u8;
    EncodedLen { offset, buffer }
}
/// The `KangarooTwelve` hash function defined [`here`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["k12"] }
/// ```
///
/// [`here`]: https://eprint.iacr.org/2016/770.pdf
#[derive(Clone)]
pub struct KangarooTwelve<T> {
    // Trunk ("final node") sponge: absorbs the first chunk directly, and the
    // 32-byte chaining values of every later chunk.
    state: KeccakState<KeccakP>,
    // Sponge for the leaf chunk currently being absorbed (delimiter 0x0b).
    current_chunk: KeccakState<KeccakP>,
    // Customization string; consumed by `into_xof`, so `Some` until then.
    custom_string: Option<T>,
    // Bytes absorbed into the current chunk so far (0..=8192).
    written: usize,
    // Number of completed 8192-byte chunks absorbed so far.
    chunks: usize,
}
impl<T> KangarooTwelve<T> {
    // Leaf chunk size fixed by the KangarooTwelve specification: 8 KiB.
    const MAX_CHUNK_SIZE: usize = 8192;

    /// Creates new [`KangarooTwelve`] hasher with a security level of 128 bits.
    ///
    /// [`KangarooTwelve`]: struct.KangarooTwelve.html
    pub fn new(custom_string: T) -> Self {
        let rate = bits_to_rate(128);
        KangarooTwelve {
            // Trunk delimiter is a placeholder (0); `into_xof` sets the real
            // one (0x07 single-node, 0x06 tree mode) once the mode is known.
            state: KeccakState::new(rate, 0),
            // Leaf chunks always use the 0x0b delimiter.
            current_chunk: KeccakState::new(rate, 0x0b),
            custom_string: Some(custom_string),
            written: 0,
            chunks: 0,
        }
    }
}
impl<T: AsRef<[u8]>> Hasher for KangarooTwelve<T> {
    /// Absorbs `input`, splitting it into 8192-byte chunks per the K12 tree
    /// hashing scheme: the first chunk goes into the trunk state directly;
    /// each later chunk is hashed into a 32-byte chaining value that is
    /// absorbed into the trunk when the chunk completes.
    fn update(&mut self, input: &[u8]) {
        let mut to_absorb = input;
        if self.chunks == 0 {
            // Still inside the first chunk: absorb straight into the trunk.
            let todo = core::cmp::min(Self::MAX_CHUNK_SIZE - self.written, to_absorb.len());
            self.state.update(&to_absorb[..todo]);
            self.written += todo;
            to_absorb = &to_absorb[todo..];
            if to_absorb.len() > 0 && self.written == Self::MAX_CHUNK_SIZE {
                // First chunk is full and more input follows: switch to tree
                // mode by appending the fixed 8-byte marker 0x03,0,...,0.
                self.state.update(&[0x03, 0, 0, 0, 0, 0, 0, 0]);
                self.written = 0;
                self.chunks += 1;
            }
        }
        while to_absorb.len() > 0 {
            if self.written == Self::MAX_CHUNK_SIZE {
                // The current leaf is full: finalize its 32-byte chaining
                // value into the trunk, then reset the leaf sponge.
                let mut chunk_hash = [0u8; 32];
                let current_chunk = self.current_chunk.clone();
                self.current_chunk.reset();
                current_chunk.finalize(&mut chunk_hash);
                self.state.update(&chunk_hash);
                self.written = 0;
                self.chunks += 1;
            }
            let todo = core::cmp::min(Self::MAX_CHUNK_SIZE - self.written, to_absorb.len());
            self.current_chunk.update(&to_absorb[..todo]);
            self.written += todo;
            to_absorb = &to_absorb[todo..];
        }
    }

    /// Convenience finalize: equivalent to `into_xof()` plus one `squeeze`.
    fn finalize(self, output: &mut [u8]) {
        let mut xof = self.into_xof();
        xof.squeeze(output);
    }
}
/// The `KangarooTwelve` extendable-output function defined [`here`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["k12"] }
/// ```
///
/// # Example
///
/// ```
/// # use tiny_keccak::{KangarooTwelve, Xof, IntoXof, Hasher};
/// let input = b"hello world";
/// let mut output = [0u8; 64];
/// let mut hasher = KangarooTwelve::new(b"");
/// hasher.update(input);
/// let mut xof = hasher.into_xof();
/// xof.squeeze(&mut output[..32]);
/// xof.squeeze(&mut output[32..]);
/// ```
///
/// ---
///
/// [`KangarooTwelveXof`] can be created only by using [`KangarooTwelve::IntoXof`] interface.
///
/// [`here`]: https://eprint.iacr.org/2016/770.pdf
/// [`KangarooTwelveXof`]: struct.KangarooTwelveXof.html
/// [`KangarooTwelve::IntoXof`]: struct.KangarooTwelve.html#impl-IntoXof
#[derive(Clone)]
pub struct KangarooTwelveXof {
    // Fully-absorbed trunk sponge, ready for squeezing; delimiter already set
    // by `IntoXof::into_xof` (0x07 single-node, 0x06 tree mode).
    state: KeccakState<KeccakP>,
}
impl<T: AsRef<[u8]>> IntoXof for KangarooTwelve<T> {
    type Xof = KangarooTwelveXof;

    /// Finishes absorption: appends the customization string and its length
    /// encoding, then closes either the single-node or tree-mode form and
    /// returns the squeezable XOF state.
    fn into_xof(mut self) -> KangarooTwelveXof {
        let custom_string = self
            .custom_string
            .take()
            .expect("KangarooTwelve cannot be initialized without custom_string; qed");
        let encoded_len = encode_len(custom_string.as_ref().len());
        // K12 input is `M || S || length_encode(|S|)`; the customization
        // string is absorbed exactly like message data.
        self.update(custom_string.as_ref());
        self.update(encoded_len.value());
        if self.chunks == 0 {
            // Everything fit in one chunk: single-node K12, delimiter 0x07.
            self.state.delim = 0x07;
        } else {
            // Tree mode: flush the final leaf's chaining value, then append
            // the chunk-count encoding and the 0xFFFF terminator; the trunk
            // closes with delimiter 0x06.
            let encoded_chunks = encode_len(self.chunks);
            let mut tmp_chunk = [0u8; 32];
            self.current_chunk.finalize(&mut tmp_chunk);
            self.state.update(&tmp_chunk);
            self.state.update(encoded_chunks.value());
            self.state.update(&[0xff, 0xff]);
            self.state.delim = 0x06;
        }
        KangarooTwelveXof { state: self.state }
    }
}
impl Xof for KangarooTwelveXof {
    /// Squeezes further output bytes; repeated calls extend the output stream.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output);
    }
}

93
vendor/tiny-keccak/src/keccak.rs vendored Normal file
View File

@@ -0,0 +1,93 @@
//! The `Keccak` hash functions.
use super::{bits_to_rate, keccakf::KeccakF, Hasher, KeccakState};
/// The `Keccak` hash functions defined in [`Keccak SHA3 submission`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["keccak"] }
/// ```
///
/// [`Keccak SHA3 submission`]: https://keccak.team/files/Keccak-submission-3.pdf
#[derive(Clone)]
pub struct Keccak {
    // keccak-f[1600] sponge state configured with the original Keccak
    // padding delimiter (0x01).
    state: KeccakState<KeccakF>,
}
impl Keccak {
    // Original Keccak submission's padding delimiter byte.
    const DELIM: u8 = 0x01;

    /// Creates new [`Keccak`] hasher with a security level of 224 bits.
    ///
    /// [`Keccak`]: struct.Keccak.html
    pub fn v224() -> Keccak {
        Self::new(224)
    }

    /// Creates new [`Keccak`] hasher with a security level of 256 bits.
    ///
    /// [`Keccak`]: struct.Keccak.html
    pub fn v256() -> Keccak {
        Self::new(256)
    }

    /// Creates new [`Keccak`] hasher with a security level of 384 bits.
    ///
    /// [`Keccak`]: struct.Keccak.html
    pub fn v384() -> Keccak {
        Self::new(384)
    }

    /// Creates new [`Keccak`] hasher with a security level of 512 bits.
    ///
    /// [`Keccak`]: struct.Keccak.html
    pub fn v512() -> Keccak {
        Self::new(512)
    }

    /// Shared constructor; the sponge rate is derived from the security level.
    fn new(bits: usize) -> Keccak {
        let rate = bits_to_rate(bits);
        Keccak { state: KeccakState::new(rate, Self::DELIM) }
    }
}
impl Hasher for Keccak {
    /// Absorb additional input. Can be called multiple times.
    ///
    /// # Example
    ///
    /// ```
    /// # use tiny_keccak::{Hasher, Keccak};
    /// #
    /// # fn main() {
    /// # let mut keccak = Keccak::v256();
    /// keccak.update(b"hello");
    /// keccak.update(b" world");
    /// # }
    /// ```
    fn update(&mut self, input: &[u8]) {
        self.state.update(input);
    }

    /// Pad and squeeze the state to the output.
    ///
    /// Writes `output.len()` digest bytes; consumes the hasher.
    ///
    /// # Example
    ///
    /// ```
    /// # use tiny_keccak::{Hasher, Keccak};
    /// #
    /// # fn main() {
    /// # let keccak = Keccak::v256();
    /// # let mut output = [0u8; 32];
    /// keccak.finalize(&mut output);
    /// # }
    /// #
    /// ```
    fn finalize(self, output: &mut [u8]) {
        self.state.finalize(output);
    }
}

40
vendor/tiny-keccak/src/keccakf.rs vendored Normal file
View File

@@ -0,0 +1,40 @@
use crate::{Buffer, Permutation};

// keccak-f[1600] applies 24 rounds.
const ROUNDS: usize = 24;

// Round constants XORed into lane (0,0) by the iota step, one per round.
const RC: [u64; ROUNDS] = [
    1u64,
    0x8082u64,
    0x800000000000808au64,
    0x8000000080008000u64,
    0x808bu64,
    0x80000001u64,
    0x8000000080008081u64,
    0x8000000000008009u64,
    0x8au64,
    0x88u64,
    0x80008009u64,
    0x8000000au64,
    0x8000808bu64,
    0x800000000000008bu64,
    0x8000000000008089u64,
    0x8000000000008003u64,
    0x8000000000008002u64,
    0x8000000000000080u64,
    0x800au64,
    0x800000008000000au64,
    0x8000000080008081u64,
    0x8000000000008080u64,
    0x80000001u64,
    0x8000000080008008u64,
];

// Expands to `pub fn keccakf(a: &mut [u64; 25])`, the unrolled permutation
// (see the `keccak_function!` macro in lib.rs).
keccak_function!("`keccak-f[1600, 24]`", keccakf, ROUNDS, RC);

// Marker type plugging the 24-round permutation into `KeccakState`.
pub struct KeccakF;

impl Permutation for KeccakF {
    fn execute(buffer: &mut Buffer) {
        keccakf(buffer.words());
    }
}

28
vendor/tiny-keccak/src/keccakp.rs vendored Normal file
View File

@@ -0,0 +1,28 @@
use crate::{Buffer, Permutation};

// keccak-p[1600, 12]: the reduced 12-round permutation used by KangarooTwelve.
const ROUNDS: usize = 12;

// Round constants for the iota step — the final 12 constants of the
// 24-round keccak-f[1600] schedule (matches the tail of RC in keccakf.rs).
const RC: [u64; ROUNDS] = [
    0x000000008000808b,
    0x800000000000008b,
    0x8000000000008089,
    0x8000000000008003,
    0x8000000000008002,
    0x8000000000000080,
    0x000000000000800a,
    0x800000008000000a,
    0x8000000080008081,
    0x8000000000008080,
    0x0000000080000001,
    0x8000000080008008,
];

// Expands to `pub fn keccakp(a: &mut [u64; 25])`, the unrolled permutation
// (see the `keccak_function!` macro in lib.rs).
keccak_function!("`keccak-p[1600, 12]`", keccakp, ROUNDS, RC);

// Marker type plugging the 12-round permutation into `KeccakState`.
pub struct KeccakP;

impl Permutation for KeccakP {
    fn execute(buffer: &mut Buffer) {
        keccakp(buffer.words());
    }
}

114
vendor/tiny-keccak/src/kmac.rs vendored Normal file
View File

@@ -0,0 +1,114 @@
use crate::{bits_to_rate, left_encode, right_encode, CShake, Hasher, IntoXof, Xof};
/// The `KMAC` pseudo-random functions defined in [`SP800-185`].
///
/// The KECCAK Message Authentication Code (`KMAC`) algorithm is a `PRF` and keyed hash function based
/// on KECCAK. It provides variable-length output, and unlike [`SHAKE`] and [`cSHAKE`], altering the
/// requested output length generates a new, unrelated output. KMAC has two variants, [`KMAC128`] and
/// [`KMAC256`], built from [`cSHAKE128`] and [`cSHAKE256`], respectively. The two variants differ somewhat in
/// their technical security properties.
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["kmac"] }
/// ```
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
/// [`KMAC128`]: struct.Kmac.html#method.v128
/// [`KMAC256`]: struct.Kmac.html#method.v256
/// [`SHAKE`]: struct.Shake.html
/// [`cSHAKE`]: struct.CShake.html
/// [`cSHAKE128`]: struct.CShake.html#method.v128
/// [`cSHAKE256`]: struct.CShake.html#method.v256
#[derive(Clone)]
pub struct Kmac {
    // cSHAKE instance constructed with function name "KMAC" and the key
    // already absorbed as a bytepadded block (see `Kmac::new`).
    state: CShake,
}
impl Kmac {
    /// Creates new [`Kmac`] hasher with a security level of 128 bits.
    ///
    /// [`Kmac`]: struct.Kmac.html
    pub fn v128(key: &[u8], custom_string: &[u8]) -> Kmac {
        Kmac::new(key, custom_string, 128)
    }

    /// Creates new [`Kmac`] hasher with a security level of 256 bits.
    ///
    /// [`Kmac`]: struct.Kmac.html
    pub fn v256(key: &[u8], custom_string: &[u8]) -> Kmac {
        Kmac::new(key, custom_string, 256)
    }

    /// Shared constructor: builds cSHAKE with function name "KMAC", then
    /// absorbs `bytepad(encode_string(K), rate)` per SP800-185.
    fn new(key: &[u8], custom_string: &[u8], bits: usize) -> Kmac {
        let rate = bits_to_rate(bits);
        let mut state = CShake::new(b"KMAC", custom_string, bits);
        // bytepad's leading left_encode(w), then encode_string(key).
        state.update(left_encode(rate).value());
        state.update(left_encode(key.len() * 8).value());
        state.update(key);
        // Zero-pad the key block to a full rate block.
        state.fill_block();
        Kmac { state }
    }
}
impl Hasher for Kmac {
    /// Absorbs additional message input; may be called any number of times.
    fn update(&mut self, input: &[u8]) {
        self.state.update(input)
    }

    /// Fixed-length KMAC: appends `right_encode(L)` — the requested output
    /// length in bits — before padding, so different output lengths yield
    /// unrelated MACs (SP800-185).
    fn finalize(mut self, output: &mut [u8]) {
        self.state.update(right_encode(output.len() * 8).value());
        self.state.finalize(output)
    }
}
/// The `KMACXOF` extendable-output functions defined in [`SP800-185`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["kmac"] }
/// ```
///
/// # Example
///
/// ```
/// # use tiny_keccak::{Kmac, Xof, IntoXof, Hasher};
/// let input = b"hello world";
/// let mut output = [0u8; 64];
/// let mut kmac = Kmac::v256(b"", b"");
/// kmac.update(input);
/// let mut xof = kmac.into_xof();
/// xof.squeeze(&mut output[..32]);
/// xof.squeeze(&mut output[32..]);
/// ```
///
/// ---
///
/// [`KmacXof`] can be created only by using [`Kmac::IntoXof`] interface.
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
/// [`KmacXof`]: struct.KmacXof.html
/// [`Kmac::IntoXof`]: struct.Kmac.html#impl-IntoXof
#[derive(Clone)]
pub struct KmacXof {
    // Fully-keyed cSHAKE state with `right_encode(0)` already absorbed,
    // marking the arbitrary-length (XOF) variant of KMAC.
    state: CShake,
}
impl IntoXof for Kmac {
    type Xof = KmacXof;

    /// Converts to KMACXOF: absorbs `right_encode(0)` (SP800-185 uses output
    /// length 0 to signal the arbitrary-length variant) and hands over the state.
    fn into_xof(mut self) -> Self::Xof {
        self.state.update(right_encode(0).value());
        KmacXof { state: self.state }
    }
}
impl Xof for KmacXof {
    /// Squeezes further output bytes; repeated calls extend the output stream.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output)
    }
}

501
vendor/tiny-keccak/src/lib.rs vendored Normal file
View File

@@ -0,0 +1,501 @@
//! Keccak derived functions specified in [`FIPS-202`], [`SP800-185`] and [`KangarooTwelve`].
//!
//! # Example
//!
//! ```
//! # use tiny_keccak::Hasher;
//! #
//! # fn foo<H: Hasher>(mut hasher: H) {
//! let input_a = b"hello world";
//! let input_b = b"!";
//! let mut output = [0u8; 32];
//! hasher.update(input_a);
//! hasher.update(input_b);
//! hasher.finalize(&mut output);
//! # }
//! ```
//!
//! # Credits
//!
//! - [`coruus/keccak-tiny`] for C implementation of keccak function
//! - [`@quininer`] for `no-std` support and rust implementation [`SP800-185`]
//! - [`mimoo/GoKangarooTwelve`] for GO implementation of `KangarooTwelve`
//! - [`@Vurich`] for optimizations
//! - [`@oleganza`] for adding support for half-duplex use
//!
//! # License
//!
//! [`CC0`]. Attribution kindly requested. Blame taken too,
//! but not liability.
//!
//! [`FIPS-202`]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
//! [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
//! [`KangarooTwelve`]: https://eprint.iacr.org/2016/770.pdf
//! [`coruus/keccak-tiny`]: https://github.com/coruus/keccak-tiny
//! [`mimoo/GoKangarooTwelve`]: https://github.com/mimoo/GoKangarooTwelve
//! [`@quininer`]: https://github.com/quininer
//! [`@Vurich`]: https://github.com/Vurich
//! [`@oleganza`]: https://github.com/oleganza
//! [`CC0`]: https://github.com/debris/tiny-keccak/blob/master/LICENSE
#![no_std]
#![deny(missing_docs)]
/// Rotation offsets for the Keccak "rho" step, in lane-visit order.
const RHO: [u32; 24] = [
    1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44,
];

/// Lane indices visited by the "pi" step cycle (lane 0 is fixed and omitted).
const PI: [usize; 24] = [
    10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1,
];

/// Number of 64-bit lanes in the 1600-bit Keccak state (5 × 5).
const WORDS: usize = 25;
/// Generates a Keccak-family permutation over the 25-lane state.
///
/// `$rounds` is the round count and `$rc` the matching round-constant
/// table; the crate instantiates this for both keccak-f (24 rounds) and
/// keccak-p (used by KangarooTwelve).
macro_rules! keccak_function {
    ($doc: expr, $name: ident, $rounds: expr, $rc: expr) => {
        #[doc = $doc]
        #[allow(unused_assignments)]
        #[allow(non_upper_case_globals)]
        pub fn $name(a: &mut [u64; $crate::WORDS]) {
            use crunchy::unroll;

            for i in 0..$rounds {
                let mut array: [u64; 5] = [0; 5];

                // Theta: accumulate the XOR parity of each column.
                unroll! {
                    for x in 0..5 {
                        unroll! {
                            for y_count in 0..5 {
                                let y = y_count * 5;
                                array[x] ^= a[x + y];
                            }
                        }
                    }
                }

                // Theta (cont.): fold neighbouring column parities into every lane.
                unroll! {
                    for x in 0..5 {
                        unroll! {
                            for y_count in 0..5 {
                                let y = y_count * 5;
                                a[y + x] ^= array[(x + 4) % 5] ^ array[(x + 1) % 5].rotate_left(1);
                            }
                        }
                    }
                }

                // Rho and pi: rotate each lane and move it to its pi position,
                // walking the 24-lane cycle; `last` carries the displaced lane.
                let mut last = a[1];
                unroll! {
                    for x in 0..24 {
                        array[0] = a[$crate::PI[x]];
                        a[$crate::PI[x]] = last.rotate_left($crate::RHO[x]);
                        last = array[0];
                    }
                }

                // Chi: row-wise nonlinear mix of each lane with the next two lanes.
                unroll! {
                    for y_step in 0..5 {
                        let y = y_step * 5;

                        unroll! {
                            for x in 0..5 {
                                array[x] = a[y + x];
                            }
                        }

                        unroll! {
                            for x in 0..5 {
                                a[y + x] = array[x] ^ ((!array[(x + 1) % 5]) & (array[(x + 2) % 5]));
                            }
                        }
                    }
                };

                // Iota: inject the round constant into lane (0, 0).
                a[0] ^= $rc[i];
            }
        }
    }
}
#[cfg(feature = "k12")]
mod keccakp;
#[cfg(feature = "k12")]
pub use keccakp::keccakp;
#[cfg(any(
feature = "keccak",
feature = "shake",
feature = "sha3",
feature = "cshake",
feature = "kmac",
feature = "tuple_hash",
feature = "parallel_hash"
))]
mod keccakf;
#[cfg(any(
feature = "keccak",
feature = "shake",
feature = "sha3",
feature = "cshake",
feature = "kmac",
feature = "tuple_hash",
feature = "parallel_hash"
))]
pub use keccakf::keccakf;
#[cfg(feature = "k12")]
mod k12;
#[cfg(feature = "k12")]
pub use k12::{KangarooTwelve, KangarooTwelveXof};
#[cfg(feature = "keccak")]
mod keccak;
#[cfg(feature = "keccak")]
pub use keccak::Keccak;
#[cfg(feature = "shake")]
mod shake;
#[cfg(feature = "shake")]
pub use shake::Shake;
#[cfg(feature = "sha3")]
mod sha3;
#[cfg(feature = "sha3")]
pub use sha3::Sha3;
#[cfg(feature = "cshake")]
mod cshake;
#[cfg(feature = "cshake")]
pub use cshake::CShake;
#[cfg(feature = "kmac")]
mod kmac;
#[cfg(feature = "kmac")]
pub use kmac::{Kmac, KmacXof};
#[cfg(feature = "tuple_hash")]
mod tuple_hash;
#[cfg(feature = "tuple_hash")]
pub use tuple_hash::{TupleHash, TupleHashXof};
#[cfg(feature = "parallel_hash")]
mod parallel_hash;
#[cfg(feature = "parallel_hash")]
pub use parallel_hash::{ParallelHash, ParallelHashXof};
/// A trait for hashing an arbitrary stream of bytes.
///
/// # Example
///
/// ```
/// # use tiny_keccak::Hasher;
/// #
/// # fn foo<H: Hasher>(mut hasher: H) {
/// let input_a = b"hello world";
/// let input_b = b"!";
/// let mut output = [0u8; 32];
/// hasher.update(input_a);
/// hasher.update(input_b);
/// hasher.finalize(&mut output);
/// # }
/// ```
pub trait Hasher {
    /// Absorb additional input. Can be called multiple times.
    fn update(&mut self, input: &[u8]);

    /// Pad and squeeze the state to the output. Consumes the hasher, so
    /// it can be called at most once.
    fn finalize(self, output: &mut [u8]);
}
/// A trait used to convert [`Hasher`] into it's [`Xof`] counterpart.
///
/// # Example
///
/// ```
/// # use tiny_keccak::IntoXof;
/// #
/// # fn foo<H: IntoXof>(hasher: H) {
/// let xof = hasher.into_xof();
/// # }
/// ```
///
/// [`Hasher`]: trait.Hasher.html
/// [`Xof`]: trait.Xof.html
pub trait IntoXof {
    /// A type implementing [`Xof`], eXtendable-output function interface.
    ///
    /// [`Xof`]: trait.Xof.html
    type Xof: Xof;

    /// A method used to convert type into [`Xof`]. Consumes the hasher;
    /// implementations typically absorb their final XOF framing here.
    ///
    /// [`Xof`]: trait.Xof.html
    fn into_xof(self) -> Self::Xof;
}
/// Extendable-output function (`XOF`) is a function on bit strings in which the output can be
/// extended to any desired length.
///
/// # Example
///
/// ```
/// # use tiny_keccak::Xof;
/// #
/// # fn foo<X: Xof>(mut xof: X) {
/// let mut output = [0u8; 64];
/// xof.squeeze(&mut output[0..32]);
/// xof.squeeze(&mut output[32..]);
/// # }
/// ```
pub trait Xof {
    /// A method used to retrieve another part of hash function output;
    /// successive calls continue where the previous squeeze stopped.
    fn squeeze(&mut self, output: &mut [u8]);
}
/// Result of `left_encode`/`right_encode`: a window into a 9-byte scratch
/// buffer holding an SP 800-185 length encoding.
struct EncodedLen {
    // Index of the first meaningful byte within `buffer`.
    offset: usize,
    buffer: [u8; 9],
}

impl EncodedLen {
    /// The encoded bytes, with unused leading bytes stripped.
    fn value(&self) -> &[u8] {
        &self.buffer[self.offset..]
    }
}
/// `left_encode(len)` from SP 800-185: the minimal big-endian byte string
/// for `len`, preceded by a one-byte count of those value bytes.
fn left_encode(len: usize) -> EncodedLen {
    let mut buffer = [0u8; 9];
    // Bytes 1..9 hold the value big-endian; byte 0 is scratch for the count.
    buffer[1..].copy_from_slice(&(len as u64).to_be_bytes());
    // First non-zero byte, or 8 so that zero encodes as [1, 0].
    let offset = buffer.iter().position(|i| *i != 0).unwrap_or(8);
    // The count of value bytes goes immediately before the value.
    buffer[offset - 1] = 9 - offset as u8;
    EncodedLen {
        offset: offset - 1,
        buffer,
    }
}
/// `right_encode(len)` from SP 800-185: the minimal big-endian byte string
/// for `len`, followed by a one-byte count of those value bytes.
fn right_encode(len: usize) -> EncodedLen {
    let mut buffer = [0u8; 9];
    // Bytes 0..8 hold the value big-endian; byte 8 receives the count.
    buffer[..8].copy_from_slice(&(len as u64).to_be_bytes());
    // First non-zero byte, or 7 so that zero encodes as [0, 1].
    let offset = buffer.iter().position(|i| *i != 0).unwrap_or(7);
    buffer[8] = 8 - offset as u8;
    EncodedLen { offset, buffer }
}
/// The 1600-bit Keccak state stored as 25 `u64` lanes.
#[derive(Default, Clone)]
struct Buffer([u64; WORDS]);

impl Buffer {
    /// Raw lane access for the permutation.
    fn words(&mut self) -> &mut [u64; WORDS] {
        &mut self.0
    }

    /// Applies `f` to `len` state bytes starting at byte `offset`.
    ///
    /// On little-endian targets the lanes already have the byte order the
    /// spec requires, so the state can be viewed directly as bytes.
    #[cfg(target_endian = "little")]
    #[inline]
    fn execute<F: FnOnce(&mut [u8])>(&mut self, offset: usize, len: usize, f: F) {
        // SAFETY: `[u64; WORDS]` and `[u8; WORDS * 8]` have identical size,
        // and `u8` has alignment 1, so reinterpreting the exclusively
        // borrowed state as bytes is sound for the duration of the borrow.
        let buffer: &mut [u8; WORDS * 8] = unsafe { core::mem::transmute(&mut self.0) };
        f(&mut buffer[offset..][..len]);
    }

    /// Applies `f` to `len` state bytes starting at byte `offset`.
    ///
    /// Big-endian targets byte-swap the touched lanes into spec order
    /// around the call, then swap them back.
    #[cfg(target_endian = "big")]
    #[inline]
    fn execute<F: FnOnce(&mut [u8])>(&mut self, offset: usize, len: usize, f: F) {
        fn swap_endianess(buffer: &mut [u64]) {
            for item in buffer {
                *item = item.swap_bytes();
            }
        }

        let start = offset / 8;
        let end = (offset + len + 7) / 8;
        swap_endianess(&mut self.0[start..end]);
        // SAFETY: same size/alignment argument as the little-endian path.
        let buffer: &mut [u8; WORDS * 8] = unsafe { core::mem::transmute(&mut self.0) };
        f(&mut buffer[offset..][..len]);
        swap_endianess(&mut self.0[start..end]);
    }

    /// Copies `len` state bytes at `offset` into the front of `dst` (squeeze).
    fn setout(&mut self, dst: &mut [u8], offset: usize, len: usize) {
        self.execute(offset, len, |buffer| dst[..len].copy_from_slice(buffer));
    }

    /// XORs `len` bytes of `src` into the state at `offset` (absorb).
    fn xorin(&mut self, src: &[u8], offset: usize, len: usize) {
        self.execute(offset, len, |dst| {
            assert!(dst.len() <= src.len());
            let len = dst.len();
            let mut dst_ptr = dst.as_mut_ptr();
            let mut src_ptr = src.as_ptr();
            for _ in 0..len {
                // SAFETY: both pointers advance in lockstep for `len` steps
                // and stay in bounds; the assert above guarantees `src` is
                // at least as long as `dst`.
                unsafe {
                    *dst_ptr ^= *src_ptr;
                    src_ptr = src_ptr.offset(1);
                    dst_ptr = dst_ptr.offset(1);
                }
            }
        });
    }

    /// XORs the domain-separation byte at `offset` and the final `0x80`
    /// bit at the end of the rate block (the pad10*1 rule).
    fn pad(&mut self, offset: usize, delim: u8, rate: usize) {
        self.execute(offset, 1, |buff| buff[0] ^= delim);
        self.execute(rate - 1, 1, |buff| buff[0] ^= 0x80);
    }
}
/// Abstraction over the concrete permutation (keccak-f or keccak-p) so the
/// sponge state can be generic over it.
trait Permutation {
    fn execute(a: &mut Buffer);
}
/// Current sponge direction; `KeccakState::update`/`squeeze` pad or permute
/// when the direction changes (half-duplex use).
#[derive(Clone, Copy)]
enum Mode {
    Absorbing,
    Squeezing,
}
/// Generic Keccak sponge parameterized by its permutation `P`.
struct KeccakState<P> {
    buffer: Buffer,
    // Byte position within the current rate block.
    offset: usize,
    // Rate in bytes: how much is absorbed/squeezed per permutation call.
    rate: usize,
    // Domain-separation byte XORed into the state when padding.
    delim: u8,
    mode: Mode,
    permutation: core::marker::PhantomData<P>,
}
// Manual impl: `#[derive(Clone)]` would add a spurious `P: Clone` bound,
// but `P` is only a marker held in `PhantomData`.
impl<P> Clone for KeccakState<P> {
    fn clone(&self) -> Self {
        KeccakState {
            buffer: self.buffer.clone(),
            offset: self.offset,
            rate: self.rate,
            delim: self.delim,
            mode: self.mode,
            permutation: core::marker::PhantomData,
        }
    }
}
impl<P: Permutation> KeccakState<P> {
    /// Creates an absorbing sponge with the given rate (bytes) and delimiter.
    fn new(rate: usize, delim: u8) -> Self {
        assert!(rate != 0, "rate cannot be equal 0");
        KeccakState {
            buffer: Buffer::default(),
            offset: 0,
            rate,
            delim,
            mode: Mode::Absorbing,
            permutation: core::marker::PhantomData,
        }
    }

    /// Runs the permutation over the whole state.
    fn keccak(&mut self) {
        P::execute(&mut self.buffer);
    }

    /// Absorbs `input`, permuting each time a rate block fills up.
    fn update(&mut self, input: &[u8]) {
        // Half-duplex support: if we were squeezing, permute once so new
        // input does not overlap bytes already handed out.
        if let Mode::Squeezing = self.mode {
            self.mode = Mode::Absorbing;
            self.fill_block();
        }

        // First foldp: consume whole rate blocks, starting mid-block if a
        // previous update left a partial block behind.
        let mut ip = 0;
        let mut l = input.len();
        let mut rate = self.rate - self.offset;
        let mut offset = self.offset;
        while l >= rate {
            self.buffer.xorin(&input[ip..], offset, rate);
            self.keccak();
            ip += rate;
            l -= rate;
            rate = self.rate;
            offset = 0;
        }

        // Absorb the remaining partial block without permuting yet.
        self.buffer.xorin(&input[ip..], offset, l);
        self.offset = offset + l;
    }

    /// Applies pad10*1 with this sponge's delimiter at the current offset.
    fn pad(&mut self) {
        self.buffer.pad(self.offset, self.delim, self.rate);
    }

    /// Squeezes output bytes, permuting whenever a rate block is exhausted.
    fn squeeze(&mut self, output: &mut [u8]) {
        // First squeeze after absorbing: pad and permute once.
        if let Mode::Absorbing = self.mode {
            self.mode = Mode::Squeezing;
            self.pad();
            self.fill_block();
        }

        // Second foldp: emit whole rate blocks, then the partial remainder.
        let mut op = 0;
        let mut l = output.len();
        let mut rate = self.rate - self.offset;
        let mut offset = self.offset;
        while l >= rate {
            self.buffer.setout(&mut output[op..], offset, rate);
            self.keccak();
            op += rate;
            l -= rate;
            rate = self.rate;
            offset = 0;
        }

        self.buffer.setout(&mut output[op..], offset, l);
        self.offset = offset + l;
    }

    /// One-shot finalization: pads (via `squeeze`) and fills `output`.
    fn finalize(mut self, output: &mut [u8]) {
        self.squeeze(output);
    }

    /// Permutes and rewinds the cursor to the start of the next block.
    fn fill_block(&mut self) {
        self.keccak();
        self.offset = 0;
    }

    /// Clears the state back to a fresh absorbing sponge (rate/delim kept).
    fn reset(&mut self) {
        self.buffer = Buffer::default();
        self.offset = 0;
        self.mode = Mode::Absorbing;
    }
}
/// Converts a security level in bits to the sponge rate in bytes: the
/// 200-byte (1600-bit) state minus the capacity, which is `2 * bits` bits,
/// i.e. `bits / 4` bytes.
fn bits_to_rate(bits: usize) -> usize {
    let capacity_bytes = bits / 4;
    200 - capacity_bytes
}
#[cfg(test)]
mod tests {
    use crate::{left_encode, right_encode};

    // Vectors follow SP 800-185: minimal big-endian value bytes with the
    // byte count on the left (left_encode) or right (right_encode).
    #[test]
    fn test_left_encode() {
        assert_eq!(left_encode(0).value(), &[1, 0]);
        assert_eq!(left_encode(128).value(), &[1, 128]);
        assert_eq!(left_encode(65536).value(), &[3, 1, 0, 0]);
        assert_eq!(left_encode(4096).value(), &[2, 16, 0]);
        assert_eq!(left_encode(54321).value(), &[2, 212, 49]);
    }

    #[test]
    fn test_right_encode() {
        assert_eq!(right_encode(0).value(), &[0, 1]);
        assert_eq!(right_encode(128).value(), &[128, 1]);
        assert_eq!(right_encode(65536).value(), &[1, 0, 0, 3]);
        assert_eq!(right_encode(4096).value(), &[16, 0, 2]);
        assert_eq!(right_encode(54321).value(), &[212, 49, 2]);
    }
}

206
vendor/tiny-keccak/src/parallel_hash.rs vendored Normal file
View File

@@ -0,0 +1,206 @@
use crate::{left_encode, right_encode, CShake, Hasher, IntoXof, Xof};
/// A partially absorbed leaf block: its cSHAKE state plus how many of the
/// `block_size` bytes it has received so far.
#[derive(Clone)]
struct UnfinishedState {
    state: CShake,
    absorbed: usize,
}
// NOTE(review): the name looks like a typo of "Suboutput"; kept as-is
// because sibling code in this module refers to it.
/// Fixed-capacity holder for one leaf digest (32 or 64 bytes).
struct Suboutout {
    state: [u8; 64],
    size: usize,
}

impl Suboutout {
    /// Digest buffer sized for the given security level in bits.
    fn security(bits: usize) -> Suboutout {
        Suboutout {
            state: [0u8; 64],
            // 128 => 32, 256 => 64
            size: bits / 4,
        }
    }

    /// The used portion of the buffer.
    #[inline]
    fn as_bytes(&self) -> &[u8] {
        &self.state[..self.size]
    }

    /// Mutable view for writing the leaf digest into.
    #[inline]
    fn as_bytes_mut(&mut self) -> &mut [u8] {
        &mut self.state[..self.size]
    }
}
/// The `ParallelHash` hash functions defined in [`SP800-185`].
///
/// The purpose of `ParallelHash` is to support the efficient hashing of very long strings, by
/// taking advantage of the parallelism available in modern processors. `ParallelHash` supports the
/// [`128-bit`] and [`256-bit`] security strengths, and also provides variable-length output.
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["parallel_hash"] }
/// ```
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
/// [`128-bit`]: struct.ParallelHash.html#method.v128
/// [`256-bit`]: struct.ParallelHash.html#method.v256
#[derive(Clone)]
pub struct ParallelHash {
    // Outer ("root") cSHAKE state that absorbs the leaf digests.
    state: CShake,
    // Leaf block size B in bytes.
    block_size: usize,
    // Security level (128 or 256); also determines the leaf digest length.
    bits: usize,
    // Number of completed leaf blocks absorbed into `state`.
    blocks: usize,
    // Leaf currently being filled, if the last update ended mid-block.
    unfinished: Option<UnfinishedState>,
}
impl ParallelHash {
    /// Creates new [`ParallelHash`] hasher with a security level of 128 bits.
    ///
    /// [`ParallelHash`]: struct.ParallelHash.html
    pub fn v128(custom_string: &[u8], block_size: usize) -> ParallelHash {
        ParallelHash::new(custom_string, block_size, 128)
    }

    /// Creates new [`ParallelHash`] hasher with a security level of 256 bits.
    ///
    /// [`ParallelHash`]: struct.ParallelHash.html
    pub fn v256(custom_string: &[u8], block_size: usize) -> ParallelHash {
        ParallelHash::new(custom_string, block_size, 256)
    }

    /// Shared constructor: a cSHAKE root with function name `"ParallelHash"`
    /// that first absorbs `left_encode(block_size)`, per SP 800-185.
    fn new(custom_string: &[u8], block_size: usize, bits: usize) -> ParallelHash {
        let mut state = CShake::new(b"ParallelHash", custom_string, bits);
        state.update(left_encode(block_size).value());
        ParallelHash {
            state,
            block_size,
            bits,
            blocks: 0,
            unfinished: None,
        }
    }
}
impl Hasher for ParallelHash {
    /// Splits `input` into `block_size` leaves; each completed leaf is
    /// hashed with a plain cSHAKE and its digest absorbed into the root.
    fn update(&mut self, mut input: &[u8]) {
        // Try to complete a leaf left over from the previous update.
        if let Some(mut unfinished) = self.unfinished.take() {
            let to_absorb = self.block_size - unfinished.absorbed;
            if input.len() >= to_absorb {
                unfinished.state.update(&input[..to_absorb]);
                input = &input[to_absorb..];
                let mut suboutput = Suboutout::security(self.bits);
                unfinished.state.finalize(suboutput.as_bytes_mut());
                self.state.update(suboutput.as_bytes());
                self.blocks += 1;
            } else {
                // Still not a full leaf: stash it and wait for more input.
                unfinished.state.update(input);
                unfinished.absorbed += input.len();
                self.unfinished = Some(unfinished);
                return;
            }
        }

        let bits = self.bits;
        // Split the rest into whole leaves plus a trailing partial leaf.
        let input_blocks_end = input.len() / self.block_size * self.block_size;
        let input_blocks = &input[..input_blocks_end];
        let input_end = &input[input_blocks_end..];

        let parts = input_blocks.chunks(self.block_size).map(|chunk| {
            let mut state = CShake::new(b"", b"", bits);
            state.update(chunk);
            let mut suboutput = Suboutout::security(bits);
            state.finalize(suboutput.as_bytes_mut());
            suboutput
        });
        for part in parts {
            self.state.update(part.as_bytes());
            self.blocks += 1;
        }

        // Save the trailing partial leaf for later updates / finalize.
        if !input_end.is_empty() {
            assert!(self.unfinished.is_none());
            let mut state = CShake::new(b"", b"", bits);
            state.update(input_end);
            self.unfinished = Some(UnfinishedState {
                state,
                absorbed: input_end.len(),
            });
        }
    }

    /// Flushes any partial leaf, then appends `right_encode(blocks)` and
    /// `right_encode(L)` (output bits) before squeezing, per SP 800-185.
    fn finalize(mut self, output: &mut [u8]) {
        if let Some(unfinished) = self.unfinished.take() {
            let mut suboutput = Suboutout::security(self.bits);
            unfinished.state.finalize(suboutput.as_bytes_mut());
            self.state.update(suboutput.as_bytes());
            self.blocks += 1;
        }

        self.state.update(right_encode(self.blocks).value());
        self.state.update(right_encode(output.len() * 8).value());
        self.state.finalize(output);
    }
}
/// The `ParallelHashXOF` extendable-output functions defined in [`SP800-185`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["parallel_hash"] }
/// ```
///
/// # Example
///
/// ```
/// # use tiny_keccak::{ParallelHash, Xof, IntoXof, Hasher};
/// let input = b"hello world";
/// let mut output = [0u8; 64];
/// let mut hasher = ParallelHash::v256(b"", 8);
/// hasher.update(input);
/// let mut xof = hasher.into_xof();
/// xof.squeeze(&mut output[..32]);
/// xof.squeeze(&mut output[32..]);
/// ```
///
/// ---
///
/// [`ParallelHashXof`] can be created only by using [`ParallelHash::IntoXof`] interface.
///
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
/// [`ParallelHashXof`]: struct.ParallelHashXof.html
/// [`ParallelHash::IntoXof`]: struct.ParallelHash.html#impl-IntoXof
#[derive(Clone)]
pub struct ParallelHashXof {
    // Root cSHAKE state with the XOF framing (`right_encode(0)`) absorbed.
    state: CShake,
}
impl IntoXof for ParallelHash {
    type Xof = ParallelHashXof;

    /// Converts to `ParallelHashXOF`: flushes any partial leaf, then absorbs
    /// `right_encode(blocks)` followed by `right_encode(0)` — the zero
    /// output length selects the arbitrary-length variant per SP 800-185.
    fn into_xof(mut self) -> Self::Xof {
        if let Some(unfinished) = self.unfinished.take() {
            let mut suboutput = Suboutout::security(self.bits);
            unfinished.state.finalize(suboutput.as_bytes_mut());
            self.state.update(suboutput.as_bytes());
            self.blocks += 1;
        }

        self.state.update(right_encode(self.blocks).value());
        self.state.update(right_encode(0).value());
        ParallelHashXof { state: self.state }
    }
}
impl Xof for ParallelHashXof {
    /// Squeezes further output bytes; may be called repeatedly.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output);
    }
}

83
vendor/tiny-keccak/src/sha3.rs vendored Normal file
View File

@@ -0,0 +1,83 @@
use crate::{bits_to_rate, keccakf::KeccakF, Hasher, KeccakState};
/// The `SHA3` hash functions defined in [`FIPS-202`].
///
/// [`FIPS-202`]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["sha3"] }
/// ```
///
/// # Example
///
/// ```
/// # use tiny_keccak::{Hasher, Sha3};
/// #
/// # fn main() {
/// let input = b"hello world";
/// let mut output = [0; 32];
/// let expected = b"\
/// \x64\x4b\xcc\x7e\x56\x43\x73\x04\x09\x99\xaa\xc8\x9e\x76\x22\xf3\
/// \xca\x71\xfb\xa1\xd9\x72\xfd\x94\xa3\x1c\x3b\xfb\xf2\x4e\x39\x38\
/// ";
/// let mut sha3 = Sha3::v256();
/// sha3.update(input);
/// sha3.finalize(&mut output);
/// assert_eq!(expected, &output);
/// # }
/// ```
#[derive(Clone)]
pub struct Sha3 {
    // Keccak sponge parameterized with the SHA-3 rate and delimiter.
    state: KeccakState<KeccakF>,
}
impl Sha3 {
    // FIPS-202 domain-separation/padding byte for the SHA-3 functions.
    const DELIM: u8 = 0x06;

    /// Creates new [`Sha3`] hasher with a security level of 224 bits.
    ///
    /// [`Sha3`]: struct.Sha3.html
    pub fn v224() -> Sha3 {
        Sha3::new(224)
    }

    /// Creates new [`Sha3`] hasher with a security level of 256 bits.
    ///
    /// [`Sha3`]: struct.Sha3.html
    pub fn v256() -> Sha3 {
        Sha3::new(256)
    }

    /// Creates new [`Sha3`] hasher with a security level of 384 bits.
    ///
    /// [`Sha3`]: struct.Sha3.html
    pub fn v384() -> Sha3 {
        Sha3::new(384)
    }

    /// Creates new [`Sha3`] hasher with a security level of 512 bits.
    ///
    /// [`Sha3`]: struct.Sha3.html
    pub fn v512() -> Sha3 {
        Sha3::new(512)
    }

    /// Shared constructor: the rate is derived from the security level.
    fn new(bits: usize) -> Sha3 {
        Sha3 {
            state: KeccakState::new(bits_to_rate(bits), Self::DELIM),
        }
    }
}
impl Hasher for Sha3 {
    /// Absorbs input bytes; may be called multiple times.
    fn update(&mut self, input: &[u8]) {
        self.state.update(input);
    }

    /// Pads and squeezes the fixed-length digest into `output`.
    fn finalize(self, output: &mut [u8]) {
        self.state.finalize(output);
    }
}

56
vendor/tiny-keccak/src/shake.rs vendored Normal file
View File

@@ -0,0 +1,56 @@
use crate::{bits_to_rate, keccakf::KeccakF, Hasher, KeccakState, Xof};
/// The `SHAKE` extendable-output functions defined in [`FIPS-202`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["shake"] }
/// ```
///
/// [`FIPS-202`]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
#[derive(Clone)]
pub struct Shake {
    // Keccak sponge parameterized with the SHAKE rate and delimiter.
    state: KeccakState<KeccakF>,
}
impl Shake {
    // FIPS-202 domain-separation/padding byte for the SHAKE functions.
    const DELIM: u8 = 0x1f;

    /// Creates new [`Shake`] hasher with a security level of 128 bits.
    ///
    /// [`Shake`]: struct.Shake.html
    pub fn v128() -> Shake {
        Shake::new(128)
    }

    /// Creates new [`Shake`] hasher with a security level of 256 bits.
    ///
    /// [`Shake`]: struct.Shake.html
    pub fn v256() -> Shake {
        Shake::new(256)
    }

    /// Shared constructor; crate-visible so sibling modules can construct
    /// a `Shake` directly.
    pub(crate) fn new(bits: usize) -> Shake {
        Shake {
            state: KeccakState::new(bits_to_rate(bits), Self::DELIM),
        }
    }
}
impl Hasher for Shake {
    /// Absorbs input bytes; may be called multiple times.
    fn update(&mut self, input: &[u8]) {
        self.state.update(input);
    }

    /// Pads and squeezes `output.len()` bytes of output.
    fn finalize(self, output: &mut [u8]) {
        self.state.finalize(output);
    }
}
impl Xof for Shake {
    /// Squeezes further output bytes; may be called repeatedly.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output)
    }
}

106
vendor/tiny-keccak/src/tuple_hash.rs vendored Normal file
View File

@@ -0,0 +1,106 @@
use crate::{left_encode, right_encode, CShake, Hasher, IntoXof, Xof};
/// The `TupleHash` hash functions defined in [`SP800-185`].
///
/// `TupleHash` is designed to provide a generic, misuse-resistant way to combine a sequence of
/// strings for hashing such that, for example, a `TupleHash` computed on the tuple (`"abc"` ,`"d"`) will
/// produce a different hash value than a `TupleHash` computed on the tuple (`"ab"`,`"cd"`), even though
/// all the remaining input parameters are kept the same, and the two resulting concatenated
/// strings, without string encoding, are identical.
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["tuple_hash"] }
/// ```
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
#[derive(Clone)]
pub struct TupleHash {
    // cSHAKE state with function name "TupleHash".
    state: CShake,
}
impl TupleHash {
    /// Creates new [`TupleHash`] hasher with a security level of 128 bits.
    ///
    /// [`TupleHash`]: struct.TupleHash.html
    pub fn v128(custom_string: &[u8]) -> TupleHash {
        TupleHash::new(custom_string, 128)
    }

    /// Creates new [`TupleHash`] hasher with a security level of 256 bits.
    ///
    /// [`TupleHash`]: struct.TupleHash.html
    pub fn v256(custom_string: &[u8]) -> TupleHash {
        TupleHash::new(custom_string, 256)
    }

    /// Shared constructor: a cSHAKE state with function name `"TupleHash"`.
    fn new(custom_string: &[u8], bits: usize) -> TupleHash {
        TupleHash {
            state: CShake::new(b"TupleHash", custom_string, bits),
        }
    }
}
impl Hasher for TupleHash {
    /// Absorbs one tuple element as `encode_string(input)` — length-prefixed
    /// per SP 800-185 — so element boundaries stay unambiguous.
    fn update(&mut self, input: &[u8]) {
        self.state.update(left_encode(input.len() * 8).value());
        self.state.update(input)
    }

    /// Appends `right_encode(L)` (output bits) and squeezes the digest.
    fn finalize(mut self, output: &mut [u8]) {
        self.state.update(right_encode(output.len() * 8).value());
        self.state.finalize(output)
    }
}
/// The `TupleHashXOF` extendable-output functions defined in [`SP800-185`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["tuple_hash"] }
/// ```
///
/// # Example
///
/// ```
/// # use tiny_keccak::{TupleHash, Xof, IntoXof, Hasher};
/// let input = b"hello world";
/// let mut output = [0u8; 64];
/// let mut hasher = TupleHash::v256(b"");
/// hasher.update(input);
/// let mut xof = hasher.into_xof();
/// xof.squeeze(&mut output[..32]);
/// xof.squeeze(&mut output[32..]);
/// ```
///
/// ---
///
/// [`TupleHashXof`] can be created only by using [`TupleHash::IntoXof`] interface.
///
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
/// [`TupleHashXof`]: struct.TupleHashXof.html
/// [`TupleHash::IntoXof`]: struct.TupleHash.html#impl-IntoXof
#[derive(Clone)]
pub struct TupleHashXof {
    // cSHAKE state with the XOF framing (`right_encode(0)`) absorbed.
    state: CShake,
}
impl IntoXof for TupleHash {
    type Xof = TupleHashXof;

    /// Converts to `TupleHashXOF`: `right_encode(0)` selects the
    /// arbitrary-length output variant per SP 800-185.
    fn into_xof(mut self) -> TupleHashXof {
        self.state.update(right_encode(0).value());
        TupleHashXof { state: self.state }
    }
}
impl Xof for TupleHashXof {
    /// Squeezes further output bytes; may be called repeatedly.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output)
    }
}

Binary file not shown.

View File

@@ -0,0 +1 @@
{"name":"tiny-keccak","vers":"2.0.2","deps":[{"name":"crunchy","req":"^0.2.2","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false}],"features":{"cshake":[],"default":[],"fips202":["keccak","shake","sha3"],"k12":[],"keccak":[],"kmac":["cshake"],"parallel_hash":["cshake"],"sha3":[],"shake":[],"sp800":["cshake","kmac","tuple_hash"],"tuple_hash":["cshake"]},"features2":null,"cksum":"18a90900a1e5cb8d3cfa9d09f5d2a71ddaa3d65ed8bf86c2bb638cef96d82845","yanked":null,"links":null,"rust_version":null,"v":2}

116
vendor/tiny-keccak/tests/cshake.rs vendored Normal file
View File

@@ -0,0 +1,116 @@
use tiny_keccak::{CShake, Hasher, Xof};
// cSHAKE128: 4-byte message, custom string "Email Signature", 32-byte digest.
#[test]
fn test_cshake128_one() {
    let input = b"\x00\x01\x02\x03";
    let mut output = [0u8; 32];
    let name = b"";
    let custom = b"Email Signature";
    let expected = b"\
\xC1\xC3\x69\x25\xB6\x40\x9A\x04\xF1\xB5\x04\xFC\xBC\xA9\xD8\x2B\
\x40\x17\x27\x7C\xB5\xED\x2B\x20\x65\xFC\x1D\x38\x14\xD5\xAA\xF5\
";
    let mut cshake = CShake::v128(name, custom);
    cshake.update(input);
    cshake.finalize(&mut output);
    assert_eq!(expected, &output);
}

// cSHAKE128: 200-byte message (bytes 0x00..=0xC7), same custom string.
#[test]
fn test_cshake128_two() {
    let input = b"\
\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C\x0D\x0E\x0F\
\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1A\x1B\x1C\x1D\x1E\x1F\
\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2A\x2B\x2C\x2D\x2E\x2F\
\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3A\x3B\x3C\x3D\x3E\x3F\
\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4A\x4B\x4C\x4D\x4E\x4F\
\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5A\x5B\x5C\x5D\x5E\x5F\
\x60\x61\x62\x63\x64\x65\x66\x67\x68\x69\x6A\x6B\x6C\x6D\x6E\x6F\
\x70\x71\x72\x73\x74\x75\x76\x77\x78\x79\x7A\x7B\x7C\x7D\x7E\x7F\
\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8A\x8B\x8C\x8D\x8E\x8F\
\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9A\x9B\x9C\x9D\x9E\x9F\
\xA0\xA1\xA2\xA3\xA4\xA5\xA6\xA7\xA8\xA9\xAA\xAB\xAC\xAD\xAE\xAF\
\xB0\xB1\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA\xBB\xBC\xBD\xBE\xBF\
\xC0\xC1\xC2\xC3\xC4\xC5\xC6\xC7\
";
    let mut output = [0u8; 32];
    let name = b"";
    let custom = b"Email Signature";
    let expected = b"\
\xC5\x22\x1D\x50\xE4\xF8\x22\xD9\x6A\x2E\x88\x81\xA9\x61\x42\x0F\
\x29\x4B\x7B\x24\xFE\x3D\x20\x94\xBA\xED\x2C\x65\x24\xCC\x16\x6B\
";
    let mut cshake = CShake::v128(name, custom);
    cshake.update(input);
    cshake.finalize(&mut output);
    assert_eq!(expected, &output);
}

// cSHAKE256: 4-byte message, 64-byte digest.
#[test]
fn test_cshake256_one() {
    let input = b"\x00\x01\x02\x03";
    let mut output = [0u8; 64];
    let name = b"";
    let custom = b"Email Signature";
    let expected = b"\
\xD0\x08\x82\x8E\x2B\x80\xAC\x9D\x22\x18\xFF\xEE\x1D\x07\x0C\x48\
\xB8\xE4\xC8\x7B\xFF\x32\xC9\x69\x9D\x5B\x68\x96\xEE\xE0\xED\xD1\
\x64\x02\x0E\x2B\xE0\x56\x08\x58\xD9\xC0\x0C\x03\x7E\x34\xA9\x69\
\x37\xC5\x61\xA7\x4C\x41\x2B\xB4\xC7\x46\x46\x95\x27\x28\x1C\x8C\
";
    let mut cshake = CShake::v256(name, custom);
    cshake.update(input);
    cshake.finalize(&mut output);
    // Compared as slices (64-byte arrays).
    assert_eq!(expected as &[u8], &output as &[u8]);
}

// cSHAKE256: 200-byte message (bytes 0x00..=0xC7), 64-byte digest.
#[test]
fn test_cshake256_two() {
    let input = b"\
\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C\x0D\x0E\x0F\
\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1A\x1B\x1C\x1D\x1E\x1F\
\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2A\x2B\x2C\x2D\x2E\x2F\
\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3A\x3B\x3C\x3D\x3E\x3F\
\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4A\x4B\x4C\x4D\x4E\x4F\
\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5A\x5B\x5C\x5D\x5E\x5F\
\x60\x61\x62\x63\x64\x65\x66\x67\x68\x69\x6A\x6B\x6C\x6D\x6E\x6F\
\x70\x71\x72\x73\x74\x75\x76\x77\x78\x79\x7A\x7B\x7C\x7D\x7E\x7F\
\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8A\x8B\x8C\x8D\x8E\x8F\
\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9A\x9B\x9C\x9D\x9E\x9F\
\xA0\xA1\xA2\xA3\xA4\xA5\xA6\xA7\xA8\xA9\xAA\xAB\xAC\xAD\xAE\xAF\
\xB0\xB1\xB2\xB3\xB4\xB5\xB6\xB7\xB8\xB9\xBA\xBB\xBC\xBD\xBE\xBF\
\xC0\xC1\xC2\xC3\xC4\xC5\xC6\xC7\
";
    let mut output = [0u8; 64];
    let name = b"";
    let custom = b"Email Signature";
    let expected = b"\
\x07\xDC\x27\xB1\x1E\x51\xFB\xAC\x75\xBC\x7B\x3C\x1D\x98\x3E\x8B\
\x4B\x85\xFB\x1D\xEF\xAF\x21\x89\x12\xAC\x86\x43\x02\x73\x09\x17\
\x27\xF4\x2B\x17\xED\x1D\xF6\x3E\x8E\xC1\x18\xF0\x4B\x23\x63\x3C\
\x1D\xFB\x15\x74\xC8\xFB\x55\xCB\x45\xDA\x8E\x25\xAF\xB0\x92\xBB\
";
    let mut cshake = CShake::v256(name, custom);
    cshake.update(input);
    cshake.finalize(&mut output);
    assert_eq!(expected as &[u8], &output as &[u8]);
}

// Exercises the empty-name/empty-custom fallback path and repeated
// squeezing: the expected value is the 16th 32-byte block of the stream.
#[test]
fn test_cshake_as_shake() {
    let mut shake = CShake::v128(&[], &[]);
    let mut output = [0; 32];
    let expected = b"\
\x43\xE4\x1B\x45\xA6\x53\xF2\xA5\xC4\x49\x2C\x1A\xDD\x54\x45\x12\
\xDD\xA2\x52\x98\x33\x46\x2B\x71\xA4\x1A\x45\xBE\x97\x29\x0B\x6F\
";
    for _ in 0..16 {
        shake.squeeze(&mut output);
    }
    assert_eq!(expected, &output);
}

86
vendor/tiny-keccak/tests/kangaroo.rs vendored Normal file
View File

@@ -0,0 +1,86 @@
use tiny_keccak::{Hasher, KangarooTwelve};
/// Builds the repeating byte pattern 0, 1, …, 250, 0, 1, … of length `len`,
/// as used by the KangarooTwelve test vectors.
fn pattern(len: usize) -> Vec<u8> {
    let mut bytes = Vec::with_capacity(len);
    for j in 0..len {
        bytes.push((j % 251) as u8);
    }
    bytes
}
/// Shared driver: hashes `message` with the given customization string into
/// an `output_len`-byte digest and checks its *tail* against `expected`.
fn test_kangaroo_twelve<A: AsRef<[u8]>, B: AsRef<[u8]>>(
    custom_string: A,
    message: B,
    output_len: usize,
    expected: &[u8],
) {
    let mut kangaroo = KangarooTwelve::new(custom_string.as_ref());
    kangaroo.update(message.as_ref());
    let mut res = vec![0; output_len];
    kangaroo.finalize(&mut res);
    // Only the last `expected.len()` bytes are compared, so long outputs can
    // be validated without embedding the whole vector.
    assert_eq!(&res[output_len - expected.len()..], expected);
}

// Empty input, empty customization, 32-byte output.
#[test]
fn empty_kangaroo_twelve() {
    let expected = b"\
\x1a\xc2\xd4\x50\xfc\x3b\x42\x05\xd1\x9d\xa7\xbf\xca\x1b\x37\x51\
\x3c\x08\x03\x57\x7a\xc7\x16\x7f\x06\xfe\x2c\xe1\xf0\xef\x39\xe5\
";
    test_kangaroo_twelve("", "", 32, expected);
}

// Empty input, 10032-byte output: checks the last 32 bytes of a long squeeze.
#[test]
fn kangaroo_twelve_long() {
    let expected = b"\
\xe8\xdc\x56\x36\x42\xf7\x22\x8c\x84\x68\x4c\x89\x84\x05\xd3\xa8\
\x34\x79\x91\x58\xc0\x79\xb1\x28\x80\x27\x7a\x1d\x28\xe2\xff\x6d\
";
    test_kangaroo_twelve("", "", 10032, expected);
}

// 1-byte patterned message.
#[test]
fn kangaroo_twelve_with_message() {
    let expected = b"\
\x2b\xda\x92\x45\x0e\x8b\x14\x7f\x8a\x7c\xb6\x29\xe7\x84\xa0\x58\
\xef\xca\x7c\xf7\xd8\x21\x8e\x02\xd3\x45\xdf\xaa\x65\x24\x4a\x1f\
";
    test_kangaroo_twelve("", pattern(1), 32, expected);
}

// 17-byte patterned message.
#[test]
fn kangaroo_twelve_with_message2() {
    let expected = b"\
\x6b\xf7\x5f\xa2\x23\x91\x98\xdb\x47\x72\xe3\x64\x78\xf8\xe1\x9b\
\x0f\x37\x12\x05\xf6\xa9\xa9\x3a\x27\x3f\x51\xdf\x37\x12\x28\x88\
";
    test_kangaroo_twelve("", pattern(17), 32, expected);
}

// 1-byte patterned customization string, empty message.
#[test]
fn kangaroo_twelve_with_custom_string() {
    let expected = b"\
\xfa\xb6\x58\xdb\x63\xe9\x4a\x24\x61\x88\xbf\x7a\xf6\x9a\x13\x30\
\x45\xf4\x6e\xe9\x84\xc5\x6e\x3c\x33\x28\xca\xaf\x1a\xa1\xa5\x83\
";
    test_kangaroo_twelve(pattern(1), "", 32, expected);
}

// 41-byte patterned customization string with a 1-byte message.
#[test]
fn kangaroo_twelve_with_custom_string_and_message() {
    let expected = b"\
\xd8\x48\xc5\x06\x8c\xed\x73\x6f\x44\x62\x15\x9b\x98\x67\xfd\x4c\
\x20\xb8\x08\xac\xc3\xd5\xbc\x48\xe0\xb0\x6b\xa0\xa3\x76\x2e\xc4\
";
    test_kangaroo_twelve(pattern(41), &[0xff], 32, expected);
}

// 68921-byte patterned customization string with a 7-byte message.
#[test]
fn kangaroo_twelve_with_custom_string_and_message2() {
    let expected = b"\
\x75\xd2\xf8\x6a\x2e\x64\x45\x66\x72\x6b\x4f\xbc\xfc\x56\x57\xb9\
\xdb\xcf\x07\x0c\x7b\x0d\xca\x06\x45\x0a\xb2\x91\xd7\x44\x3b\xcf\
";
    test_kangaroo_twelve(
        pattern(68921),
        &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff],
        32,
        expected,
    );
}

30
vendor/tiny-keccak/tests/keccak.rs vendored Normal file
View File

@@ -0,0 +1,30 @@
use tiny_keccak::{Hasher, Keccak};
// Keccak-256 of the empty message.
#[test]
fn empty_keccak() {
    let keccak = Keccak::v256();
    let mut output = [0; 32];
    let expected = b"\
\xc5\xd2\x46\x01\x86\xf7\x23\x3c\x92\x7e\x7d\xb2\xdc\xc7\x03\xc0\
\xe5\x00\xb6\x53\xca\x82\x27\x3b\x7b\xfa\xd8\x04\x5d\x85\xa4\x70\
";
    keccak.finalize(&mut output);
    assert_eq!(expected, &output);
}

// Keccak-256 of the bytes [1, 2, 3, 4, 5]; one buffer doubles as input and output.
#[test]
fn string_keccak_256() {
    let mut keccak = Keccak::v256();
    let mut in_and_out: [u8; 32] = [0; 32];
    for i in 1..6 {
        in_and_out[i as usize - 1] = i
    }
    let expected = b"\
\x7d\x87\xc5\xea\x75\xf7\x37\x8b\xb7\x01\xe4\x04\xc5\x06\x39\x16\
\x1a\xf3\xef\xf6\x62\x93\xe9\xf3\x75\xb5\xf1\x7e\xb5\x04\x76\xf4\
";
    keccak.update(&in_and_out[0..5]);
    keccak.finalize(&mut in_and_out);
    assert_eq!(expected, &in_and_out);
}

333
vendor/tiny-keccak/tests/kmac.rs vendored Normal file
View File

@@ -0,0 +1,333 @@
use tiny_keccak::{Hasher, IntoXof, Kmac, Xof};
#[test]
fn test_kmac128_one() {
let key = b"\
\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4A\x4B\x4C\x4D\x4E\x4F\
\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5A\x5B\x5C\x5D\x5E\x5F\
";
let data = b"\x00\x01\x02\x03";
let custom = b"";
let expected = b"\
\xE5\x78\x0B\x0D\x3E\xA6\xF7\xD3\xA4\x29\xC5\x70\x6A\xA4\x3A\x00\
\xFA\xDB\xD7\xD4\x96\x28\x83\x9E\x31\x87\x24\x3F\x45\x6E\xE1\x4E\
";
let mut output = [0u8; 32];
let mut kmac = Kmac::v128(key, custom);
kmac.update(data);
kmac.finalize(&mut output);
assert_eq!(expected, &output);
}
#[test]
fn test_kmac128_two() {
let key = b"\
\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4A\x4B\x4C\x4D\x4E\x4F\
\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5A\x5B\x5C\x5D\x5E\x5F\
";
let data = b"\x00\x01\x02\x03";
let custom = b"My Tagged Application";
let expected = b"\
\x3B\x1F\xBA\x96\x3C\xD8\xB0\xB5\x9E\x8C\x1A\x6D\x71\x88\x8B\x71\
\x43\x65\x1A\xF8\xBA\x0A\x70\x70\xC0\x97\x9E\x28\x11\x32\x4A\xA5\
";
let mut output = [0u8; 32];
let mut kmac = Kmac::v128(key, custom);
kmac.update(data);
kmac.finalize(&mut output);
assert_eq!(expected, &output);
}
#[test]
fn test_kmac128_three() {
    // KMAC128 sample vector: 200 sequential bytes (0x00..=0xC7) as the
    // message, with a customization string.
    let key: Vec<u8> = (0x40u8..=0x5F).collect();
    let data: Vec<u8> = (0x00u8..=0xC7).collect();
    let expected = b"\
        \x1F\x5B\x4E\x6C\xCA\x02\x20\x9E\x0D\xCB\x5C\xA6\x35\xB8\x9A\x15\
        \xE2\x71\xEC\xC7\x60\x07\x1D\xFD\x80\x5F\xAA\x38\xF9\x72\x92\x30\
    ";
    let mut mac = [0u8; 32];
    let mut hasher = Kmac::v128(&key, b"My Tagged Application");
    hasher.update(&data);
    hasher.finalize(&mut mac);
    assert_eq!(expected, &mac);
}
#[test]
fn test_kmac256_one() {
    // KMAC256 sample vector: 4-byte message, customization string set,
    // 64-byte (512-bit) output.
    let key: Vec<u8> = (0x40u8..=0x5F).collect();
    let message = [0x00u8, 0x01, 0x02, 0x03];
    let expected = b"\
        \x20\xC5\x70\xC3\x13\x46\xF7\x03\xC9\xAC\x36\xC6\x1C\x03\xCB\x64\
        \xC3\x97\x0D\x0C\xFC\x78\x7E\x9B\x79\x59\x9D\x27\x3A\x68\xD2\xF7\
        \xF6\x9D\x4C\xC3\xDE\x9D\x10\x4A\x35\x16\x89\xF2\x7C\xF6\xF5\x95\
        \x1F\x01\x03\xF3\x3F\x4F\x24\x87\x10\x24\xD9\xC2\x77\x73\xA8\xDD\
    ";
    let mut mac = [0u8; 64];
    let mut hasher = Kmac::v256(&key, b"My Tagged Application");
    hasher.update(&message);
    hasher.finalize(&mut mac);
    assert_eq!(&expected[..], &mac[..]);
}
#[test]
fn test_kmac256_two() {
    // KMAC256 sample vector: 200 sequential bytes, empty customization
    // string, 64-byte output.
    let key: Vec<u8> = (0x40u8..=0x5F).collect();
    let data: Vec<u8> = (0x00u8..=0xC7).collect();
    let expected = b"\
        \x75\x35\x8C\xF3\x9E\x41\x49\x4E\x94\x97\x07\x92\x7C\xEE\x0A\xF2\
        \x0A\x3F\xF5\x53\x90\x4C\x86\xB0\x8F\x21\xCC\x41\x4B\xCF\xD6\x91\
        \x58\x9D\x27\xCF\x5E\x15\x36\x9C\xBB\xFF\x8B\x9A\x4C\x2E\xB1\x78\
        \x00\x85\x5D\x02\x35\xFF\x63\x5D\xA8\x25\x33\xEC\x6B\x75\x9B\x69\
    ";
    let mut mac = [0u8; 64];
    let mut hasher = Kmac::v256(&key, b"");
    hasher.update(&data);
    hasher.finalize(&mut mac);
    assert_eq!(&expected[..], &mac[..]);
}
#[test]
fn test_kmac256_three() {
    // KMAC256 sample vector: 200 sequential bytes with a customization
    // string, 64-byte output.
    let key: Vec<u8> = (0x40u8..=0x5F).collect();
    let data: Vec<u8> = (0x00u8..=0xC7).collect();
    let expected = b"\
        \xB5\x86\x18\xF7\x1F\x92\xE1\xD5\x6C\x1B\x8C\x55\xDD\xD7\xCD\x18\
        \x8B\x97\xB4\xCA\x4D\x99\x83\x1E\xB2\x69\x9A\x83\x7D\xA2\xE4\xD9\
        \x70\xFB\xAC\xFD\xE5\x00\x33\xAE\xA5\x85\xF1\xA2\x70\x85\x10\xC3\
        \x2D\x07\x88\x08\x01\xBD\x18\x28\x98\xFE\x47\x68\x76\xFC\x89\x65\
    ";
    let mut mac = [0u8; 64];
    let mut hasher = Kmac::v256(&key, b"My Tagged Application");
    hasher.update(&data);
    hasher.finalize(&mut mac);
    assert_eq!(&expected[..], &mac[..]);
}
#[test]
fn test_kmac128_xof_one() {
    // KMACXOF128 sample vector: 4-byte message, empty customization
    // string; output is squeezed via the XOF interface.
    let key: Vec<u8> = (0x40u8..=0x5F).collect();
    let message = [0x00u8, 0x01, 0x02, 0x03];
    let expected = b"\
        \xCD\x83\x74\x0B\xBD\x92\xCC\xC8\xCF\x03\x2B\x14\x81\xA0\xF4\x46\
        \x0E\x7C\xA9\xDD\x12\xB0\x8A\x0C\x40\x31\x17\x8B\xAC\xD6\xEC\x35\
    ";
    let mut out = [0u8; 32];
    let mut hasher = Kmac::v128(&key, b"");
    hasher.update(&message);
    let mut xof = hasher.into_xof();
    xof.squeeze(&mut out);
    assert_eq!(expected, &out);
}
#[test]
fn test_kmac128_xof_two() {
    // KMACXOF128 sample vector: 4-byte message with a customization
    // string.
    let key: Vec<u8> = (0x40u8..=0x5F).collect();
    let message = [0x00u8, 0x01, 0x02, 0x03];
    let expected = b"\
        \x31\xA4\x45\x27\xB4\xED\x9F\x5C\x61\x01\xD1\x1D\xE6\xD2\x6F\x06\
        \x20\xAA\x5C\x34\x1D\xEF\x41\x29\x96\x57\xFE\x9D\xF1\xA3\xB1\x6C\
    ";
    let mut out = [0u8; 32];
    let mut hasher = Kmac::v128(&key, b"My Tagged Application");
    hasher.update(&message);
    let mut xof = hasher.into_xof();
    xof.squeeze(&mut out);
    assert_eq!(expected, &out);
}
#[test]
fn test_kmac128_xof_three() {
    // KMACXOF128 sample vector: 200 sequential bytes with a
    // customization string.
    let key: Vec<u8> = (0x40u8..=0x5F).collect();
    let data: Vec<u8> = (0x00u8..=0xC7).collect();
    let expected = b"\
        \x47\x02\x6C\x7C\xD7\x93\x08\x4A\xA0\x28\x3C\x25\x3E\xF6\x58\x49\
        \x0C\x0D\xB6\x14\x38\xB8\x32\x6F\xE9\xBD\xDF\x28\x1B\x83\xAE\x0F\
    ";
    let mut out = [0u8; 32];
    let mut hasher = Kmac::v128(&key, b"My Tagged Application");
    hasher.update(&data);
    let mut xof = hasher.into_xof();
    xof.squeeze(&mut out);
    assert_eq!(expected, &out);
}
#[test]
fn test_kmac256_xof_one() {
    // KMACXOF256 sample vector: 4-byte message with a customization
    // string, 64 bytes squeezed.
    let key: Vec<u8> = (0x40u8..=0x5F).collect();
    let message = [0x00u8, 0x01, 0x02, 0x03];
    let expected = b"\
        \x17\x55\x13\x3F\x15\x34\x75\x2A\xAD\x07\x48\xF2\xC7\x06\xFB\x5C\
        \x78\x45\x12\xCA\xB8\x35\xCD\x15\x67\x6B\x16\xC0\xC6\x64\x7F\xA9\
        \x6F\xAA\x7A\xF6\x34\xA0\xBF\x8F\xF6\xDF\x39\x37\x4F\xA0\x0F\xAD\
        \x9A\x39\xE3\x22\xA7\xC9\x20\x65\xA6\x4E\xB1\xFB\x08\x01\xEB\x2B\
    ";
    let mut out = [0u8; 64];
    let mut hasher = Kmac::v256(&key, b"My Tagged Application");
    hasher.update(&message);
    let mut xof = hasher.into_xof();
    xof.squeeze(&mut out);
    assert_eq!(&expected[..], &out[..]);
}
#[test]
fn test_kmac256_xof_two() {
    // KMACXOF256 sample vector: 200 sequential bytes, empty
    // customization string, 64 bytes squeezed.
    let key: Vec<u8> = (0x40u8..=0x5F).collect();
    let data: Vec<u8> = (0x00u8..=0xC7).collect();
    let expected = b"\
        \xFF\x7B\x17\x1F\x1E\x8A\x2B\x24\x68\x3E\xED\x37\x83\x0E\xE7\x97\
        \x53\x8B\xA8\xDC\x56\x3F\x6D\xA1\xE6\x67\x39\x1A\x75\xED\xC0\x2C\
        \xA6\x33\x07\x9F\x81\xCE\x12\xA2\x5F\x45\x61\x5E\xC8\x99\x72\x03\
        \x1D\x18\x33\x73\x31\xD2\x4C\xEB\x8F\x8C\xA8\xE6\xA1\x9F\xD9\x8B\
    ";
    let mut out = [0u8; 64];
    let mut hasher = Kmac::v256(&key, b"");
    hasher.update(&data);
    let mut xof = hasher.into_xof();
    xof.squeeze(&mut out);
    assert_eq!(&expected[..], &out[..]);
}
#[test]
fn test_kmac256_xof_three() {
    // KMACXOF256 sample vector: 200 sequential bytes with a
    // customization string, 64 bytes squeezed.
    let key: Vec<u8> = (0x40u8..=0x5F).collect();
    let data: Vec<u8> = (0x00u8..=0xC7).collect();
    let expected = b"\
        \xD5\xBE\x73\x1C\x95\x4E\xD7\x73\x28\x46\xBB\x59\xDB\xE3\xA8\xE3\
        \x0F\x83\xE7\x7A\x4B\xFF\x44\x59\xF2\xF1\xC2\xB4\xEC\xEB\xB8\xCE\
        \x67\xBA\x01\xC6\x2E\x8A\xB8\x57\x8D\x2D\x49\x9B\xD1\xBB\x27\x67\
        \x68\x78\x11\x90\x02\x0A\x30\x6A\x97\xDE\x28\x1D\xCC\x30\x30\x5D\
    ";
    let mut out = [0u8; 64];
    let mut hasher = Kmac::v256(&key, b"My Tagged Application");
    hasher.update(&data);
    let mut xof = hasher.into_xof();
    xof.squeeze(&mut out);
    assert_eq!(&expected[..], &out[..]);
}

View File

@@ -0,0 +1,123 @@
use tiny_keccak::{Hasher, ParallelHash};
#[test]
fn test_parallel_hash128_one() {
    // ParallelHash128 sample vector: 24-byte input, block size 8, empty
    // customization string.
    let input = b"\
        \x00\x01\x02\x03\x04\x05\x06\x07\x10\x11\x12\x13\
        \x14\x15\x16\x17\x20\x21\x22\x23\x24\x25\x26\x27\
    ";
    let expected = b"\
        \xBA\x8D\xC1\xD1\xD9\x79\x33\x1D\x3F\x81\x36\x03\xC6\x7F\x72\x60\
        \x9A\xB5\xE4\x4B\x94\xA0\xB8\xF9\xAF\x46\x51\x44\x54\xA2\xB4\xF5\
    ";
    let mut digest = [0u8; 32];
    let mut hasher = ParallelHash::v128(b"", 8);
    hasher.update(input);
    hasher.finalize(&mut digest);
    assert_eq!(expected, &digest);
}
#[test]
fn test_parallel_hash128_two() {
    // ParallelHash128 sample vector: same 24-byte input, block size 8,
    // with the customization string "Parallel Data".
    let input = b"\
        \x00\x01\x02\x03\x04\x05\x06\x07\x10\x11\x12\x13\
        \x14\x15\x16\x17\x20\x21\x22\x23\x24\x25\x26\x27\
    ";
    let expected = b"\
        \xFC\x48\x4D\xCB\x3F\x84\xDC\xEE\xDC\x35\x34\x38\x15\x1B\xEE\x58\
        \x15\x7D\x6E\xFE\xD0\x44\x5A\x81\xF1\x65\xE4\x95\x79\x5B\x72\x06\
    ";
    let mut digest = [0u8; 32];
    let mut hasher = ParallelHash::v128(b"Parallel Data", 8);
    hasher.update(input);
    hasher.finalize(&mut digest);
    assert_eq!(expected, &digest);
}
#[test]
fn test_parallel_hash128_three() {
    // Same vector as test_parallel_hash128_one, but fed in two chunks
    // split mid-block to exercise incremental updating.
    let input = b"\
        \x00\x01\x02\x03\x04\x05\x06\x07\x10\x11\x12\x13\
        \x14\x15\x16\x17\x20\x21\x22\x23\x24\x25\x26\x27\
    ";
    let expected = b"\
        \xBA\x8D\xC1\xD1\xD9\x79\x33\x1D\x3F\x81\x36\x03\xC6\x7F\x72\x60\
        \x9A\xB5\xE4\x4B\x94\xA0\xB8\xF9\xAF\x46\x51\x44\x54\xA2\xB4\xF5\
    ";
    let mut digest = [0u8; 32];
    let mut hasher = ParallelHash::v128(b"", 8);
    let (head, tail) = input.split_at(13);
    hasher.update(head);
    hasher.update(tail);
    hasher.finalize(&mut digest);
    assert_eq!(expected, &digest);
}
#[test]
fn test_parallel_hash256_one() {
    // ParallelHash256 sample vector: 24-byte input, block size 8, empty
    // customization string, 64-byte output.
    let input = b"\
        \x00\x01\x02\x03\x04\x05\x06\x07\x10\x11\x12\x13\
        \x14\x15\x16\x17\x20\x21\x22\x23\x24\x25\x26\x27\
    ";
    let expected = b"\
        \xBC\x1E\xF1\x24\xDA\x34\x49\x5E\x94\x8E\xAD\x20\x7D\xD9\x84\x22\
        \x35\xDA\x43\x2D\x2B\xBC\x54\xB4\xC1\x10\xE6\x4C\x45\x11\x05\x53\
        \x1B\x7F\x2A\x3E\x0C\xE0\x55\xC0\x28\x05\xE7\xC2\xDE\x1F\xB7\x46\
        \xAF\x97\xA1\xDD\x01\xF4\x3B\x82\x4E\x31\xB8\x76\x12\x41\x04\x29\
    ";
    let mut digest = [0u8; 64];
    let mut hasher = ParallelHash::v256(b"", 8);
    hasher.update(input);
    hasher.finalize(&mut digest);
    assert_eq!(&expected[..], &digest[..]);
}
#[test]
fn test_parallel_hash256_two() {
    // ParallelHash256 sample vector: 24-byte input, block size 8, with
    // the customization string "Parallel Data".
    let input = b"\
        \x00\x01\x02\x03\x04\x05\x06\x07\x10\x11\x12\x13\
        \x14\x15\x16\x17\x20\x21\x22\x23\x24\x25\x26\x27\
    ";
    let expected = b"\
        \xCD\xF1\x52\x89\xB5\x4F\x62\x12\xB4\xBC\x27\x05\x28\xB4\x95\x26\
        \x00\x6D\xD9\xB5\x4E\x2B\x6A\xDD\x1E\xF6\x90\x0D\xDA\x39\x63\xBB\
        \x33\xA7\x24\x91\xF2\x36\x96\x9C\xA8\xAF\xAE\xA2\x9C\x68\x2D\x47\
        \xA3\x93\xC0\x65\xB3\x8E\x29\xFA\xE6\x51\xA2\x09\x1C\x83\x31\x10\
    ";
    let mut digest = [0u8; 64];
    let mut hasher = ParallelHash::v256(b"Parallel Data", 8);
    hasher.update(input);
    hasher.finalize(&mut digest);
    assert_eq!(&expected[..], &digest[..]);
}
#[test]
fn test_parallel_hash256_three() {
    // Same vector as test_parallel_hash256_one, but fed in two chunks
    // split mid-block to exercise incremental updating.
    let input = b"\
        \x00\x01\x02\x03\x04\x05\x06\x07\x10\x11\x12\x13\
        \x14\x15\x16\x17\x20\x21\x22\x23\x24\x25\x26\x27\
    ";
    let expected = b"\
        \xBC\x1E\xF1\x24\xDA\x34\x49\x5E\x94\x8E\xAD\x20\x7D\xD9\x84\x22\
        \x35\xDA\x43\x2D\x2B\xBC\x54\xB4\xC1\x10\xE6\x4C\x45\x11\x05\x53\
        \x1B\x7F\x2A\x3E\x0C\xE0\x55\xC0\x28\x05\xE7\xC2\xDE\x1F\xB7\x46\
        \xAF\x97\xA1\xDD\x01\xF4\x3B\x82\x4E\x31\xB8\x76\x12\x41\x04\x29\
    ";
    let mut digest = [0u8; 64];
    let mut hasher = ParallelHash::v256(b"", 8);
    let (head, tail) = input.split_at(13);
    hasher.update(head);
    hasher.update(tail);
    hasher.finalize(&mut digest);
    assert_eq!(&expected[..], &digest[..]);
}

94
vendor/tiny-keccak/tests/sha3.rs vendored Normal file
View File

@@ -0,0 +1,94 @@
use tiny_keccak::{Hasher, Sha3};
#[test]
fn empty_sha3_256() {
    // SHA3-256 of the empty message (well-known FIPS 202 digest).
    let hasher = Sha3::v256();
    let expected = b"\
        \xa7\xff\xc6\xf8\xbf\x1e\xd7\x66\x51\xc1\x47\x56\xa0\x61\xd6\x62\
        \xf5\x80\xff\x4d\xe4\x3b\x49\xfa\x82\xd8\x0a\x4b\x80\xf8\x43\x4a\
    ";
    let mut digest = [0u8; 32];
    hasher.finalize(&mut digest);
    assert_eq!(expected, &digest);
}
#[test]
fn hello_sha3_256() {
    // SHA3-256 of "hello" fed in a single update call.
    let expected = b"\
        \x33\x38\xbe\x69\x4f\x50\xc5\xf3\x38\x81\x49\x86\xcd\xf0\x68\x64\
        \x53\xa8\x88\xb8\x4f\x42\x4d\x79\x2a\xf4\xb9\x20\x23\x98\xf3\x92\
    ";
    let mut digest = [0u8; 32];
    let mut hasher = Sha3::v256();
    hasher.update(b"hello");
    hasher.finalize(&mut digest);
    assert_eq!(expected, &digest);
}
#[test]
fn hello_sha3_256_parts() {
    // SHA3-256 of "hello" fed as two chunks ("hell" + "o") must match
    // the single-shot digest.
    let expected = b"\
        \x33\x38\xbe\x69\x4f\x50\xc5\xf3\x38\x81\x49\x86\xcd\xf0\x68\x64\
        \x53\xa8\x88\xb8\x4f\x42\x4d\x79\x2a\xf4\xb9\x20\x23\x98\xf3\x92\
    ";
    let mut digest = [0u8; 32];
    let mut hasher = Sha3::v256();
    hasher.update(b"hell");
    hasher.update(b"o");
    hasher.finalize(&mut digest);
    assert_eq!(expected, &digest);
}
#[test]
fn hello_sha3_256_parts5() {
    // SHA3-256 of "hello" fed one byte at a time must match the
    // single-shot digest.
    let expected = b"\
        \x33\x38\xbe\x69\x4f\x50\xc5\xf3\x38\x81\x49\x86\xcd\xf0\x68\x64\
        \x53\xa8\x88\xb8\x4f\x42\x4d\x79\x2a\xf4\xb9\x20\x23\x98\xf3\x92\
    ";
    let mut digest = [0u8; 32];
    let mut hasher = Sha3::v256();
    for byte in b"hello" {
        hasher.update(core::slice::from_ref(byte));
    }
    hasher.finalize(&mut digest);
    assert_eq!(expected, &digest);
}
#[test]
fn long_string_sha3_512() {
    // SHA3-512 of a multi-block (445-byte) ASCII message, single update.
    let input = b"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.";
    let expected = b"\
        \xf3\x2a\x94\x23\x55\x13\x51\xdf\x0a\x07\xc0\xb8\xc2\x0e\xb9\x72\
        \x36\x7c\x39\x8d\x61\x06\x60\x38\xe1\x69\x86\x44\x8e\xbf\xbc\x3d\
        \x15\xed\xe0\xed\x36\x93\xe3\x90\x5e\x9a\x8c\x60\x1d\x9d\x00\x2a\
        \x06\x85\x3b\x97\x97\xef\x9a\xb1\x0c\xbd\xe1\x00\x9c\x7d\x0f\x09\
    ";
    let mut digest = [0u8; 64];
    let mut hasher = Sha3::v512();
    hasher.update(input);
    hasher.finalize(&mut digest);
    assert_eq!(&expected[..], &digest[..]);
}
#[test]
fn long_string_sha3_512_parts() {
    // SHA3-512 of the same long message fed in two chunks must match
    // the single-shot digest from long_string_sha3_512.
    let part_a = b"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ";
    let part_b = b"ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.";
    let expected = b"\
        \xf3\x2a\x94\x23\x55\x13\x51\xdf\x0a\x07\xc0\xb8\xc2\x0e\xb9\x72\
        \x36\x7c\x39\x8d\x61\x06\x60\x38\xe1\x69\x86\x44\x8e\xbf\xbc\x3d\
        \x15\xed\xe0\xed\x36\x93\xe3\x90\x5e\x9a\x8c\x60\x1d\x9d\x00\x2a\
        \x06\x85\x3b\x97\x97\xef\x9a\xb1\x0c\xbd\xe1\x00\x9c\x7d\x0f\x09\
    ";
    let mut digest = [0u8; 64];
    let mut hasher = Sha3::v512();
    hasher.update(part_a);
    hasher.update(part_b);
    hasher.finalize(&mut digest);
    assert_eq!(&expected[..], &digest[..]);
}

37
vendor/tiny-keccak/tests/shake.rs vendored Normal file
View File

@@ -0,0 +1,37 @@
use tiny_keccak::{Hasher, Shake, Xof};
#[test]
fn shake_xof_one() {
    // SHAKE128 with no input: squeeze the same 32-byte buffer 16 times
    // and check the bytes produced by the final squeeze.
    let expected = b"\
        \x43\xE4\x1B\x45\xA6\x53\xF2\xA5\xC4\x49\x2C\x1A\xDD\x54\x45\x12\
        \xDD\xA2\x52\x98\x33\x46\x2B\x71\xA4\x1A\x45\xBE\x97\x29\x0B\x6F\
    ";
    let mut shake = Shake::v128();
    let mut buf = [0u8; 32];
    for _ in 0..16 {
        shake.squeeze(&mut buf);
    }
    assert_eq!(expected, &buf);
}
#[test]
fn shake_xof_two() {
    // SHAKE128 over 200 bytes of 0xA3 (fed as ten 20-byte updates),
    // then 16 overlapping squeezes; the final squeeze is checked.
    let expected = b"\
        \x44\xC9\xFB\x35\x9F\xD5\x6A\xC0\xA9\xA7\x5A\x74\x3C\xFF\x68\x62\
        \xF1\x7D\x72\x59\xAB\x07\x52\x16\xC0\x69\x95\x11\x64\x3B\x64\x39\
    ";
    let mut shake = Shake::v128();
    for _ in 0..10 {
        shake.update(&[0xa3; 20]);
    }
    let mut buf = [0u8; 32];
    for _ in 0..16 {
        shake.squeeze(&mut buf);
    }
    assert_eq!(expected, &buf);
}

113
vendor/tiny-keccak/tests/tuple_hash.rs vendored Normal file
View File

@@ -0,0 +1,113 @@
use tiny_keccak::{Hasher, TupleHash};
#[test]
fn test_tuple_hash128_one() {
let te3 = b"\x00\x01\x02";
let te6 = b"\x10\x11\x12\x13\x14\x15";
let s0 = b"";
let expected = b"\
\xC5\xD8\x78\x6C\x1A\xFB\x9B\x82\x11\x1A\xB3\x4B\x65\xB2\xC0\x04\
\x8F\xA6\x4E\x6D\x48\xE2\x63\x26\x4C\xE1\x70\x7D\x3F\xFC\x8E\xD1\
";
let mut output = [0u8; 32];
let mut hasher = TupleHash::v128(s0);
hasher.update(te3);
hasher.update(te6);
hasher.finalize(&mut output);
assert_eq!(expected, &output);
}
#[test]
fn test_tuple_hash128_two() {
    // TupleHash128 sample vector: two tuple elements with the
    // customization string "My Tuple App".
    let elem_a = b"\x00\x01\x02";
    let elem_b = b"\x10\x11\x12\x13\x14\x15";
    let expected = b"\
        \x75\xCD\xB2\x0F\xF4\xDB\x11\x54\xE8\x41\xD7\x58\xE2\x41\x60\xC5\
        \x4B\xAE\x86\xEB\x8C\x13\xE7\xF5\xF4\x0E\xB3\x55\x88\xE9\x6D\xFB\
    ";
    let mut digest = [0u8; 32];
    let mut hasher = TupleHash::v128(b"My Tuple App");
    hasher.update(elem_a);
    hasher.update(elem_b);
    hasher.finalize(&mut digest);
    assert_eq!(expected, &digest);
}
#[test]
fn test_tuple_hash128_three() {
    // TupleHash128 sample vector: three tuple elements with the
    // customization string "My Tuple App".
    let elem_a = b"\x00\x01\x02";
    let elem_b = b"\x10\x11\x12\x13\x14\x15";
    let elem_c = b"\x20\x21\x22\x23\x24\x25\x26\x27\x28";
    let expected = b"\
        \xE6\x0F\x20\x2C\x89\xA2\x63\x1E\xDA\x8D\x4C\x58\x8C\xA5\xFD\x07\
        \xF3\x9E\x51\x51\x99\x8D\xEC\xCF\x97\x3A\xDB\x38\x04\xBB\x6E\x84\
    ";
    let mut digest = [0u8; 32];
    let mut hasher = TupleHash::v128(b"My Tuple App");
    hasher.update(elem_a);
    hasher.update(elem_b);
    hasher.update(elem_c);
    hasher.finalize(&mut digest);
    assert_eq!(expected, &digest);
}
#[test]
fn test_tuple_hash256() {
    // TupleHash256 sample vector: two tuple elements, empty
    // customization string, 64-byte output.
    let elem_a = b"\x00\x01\x02";
    let elem_b = b"\x10\x11\x12\x13\x14\x15";
    let expected = b"\
        \xCF\xB7\x05\x8C\xAC\xA5\xE6\x68\xF8\x1A\x12\xA2\x0A\x21\x95\xCE\
        \x97\xA9\x25\xF1\xDB\xA3\xE7\x44\x9A\x56\xF8\x22\x01\xEC\x60\x73\
        \x11\xAC\x26\x96\xB1\xAB\x5E\xA2\x35\x2D\xF1\x42\x3B\xDE\x7B\xD4\
        \xBB\x78\xC9\xAE\xD1\xA8\x53\xC7\x86\x72\xF9\xEB\x23\xBB\xE1\x94\
    ";
    let mut digest = [0u8; 64];
    let mut hasher = TupleHash::v256(b"");
    hasher.update(elem_a);
    hasher.update(elem_b);
    hasher.finalize(&mut digest);
    assert_eq!(&expected[..], &digest[..]);
}
#[test]
fn test_tuple_hash256_two() {
    // TupleHash256 sample vector: two tuple elements with the
    // customization string "My Tuple App".
    let elem_a = b"\x00\x01\x02";
    let elem_b = b"\x10\x11\x12\x13\x14\x15";
    let expected = b"\
        \x14\x7C\x21\x91\xD5\xED\x7E\xFD\x98\xDB\xD9\x6D\x7A\xB5\xA1\x16\
        \x92\x57\x6F\x5F\xE2\xA5\x06\x5F\x3E\x33\xDE\x6B\xBA\x9F\x3A\xA1\
        \xC4\xE9\xA0\x68\xA2\x89\xC6\x1C\x95\xAA\xB3\x0A\xEE\x1E\x41\x0B\
        \x0B\x60\x7D\xE3\x62\x0E\x24\xA4\xE3\xBF\x98\x52\xA1\xD4\x36\x7E\
    ";
    let mut digest = [0u8; 64];
    let mut hasher = TupleHash::v256(b"My Tuple App");
    hasher.update(elem_a);
    hasher.update(elem_b);
    hasher.finalize(&mut digest);
    assert_eq!(&expected[..], &digest[..]);
}
#[test]
fn test_tuple_hash256_three() {
    // TupleHash256 sample vector: three tuple elements with the
    // customization string "My Tuple App".
    let elem_a = b"\x00\x01\x02";
    let elem_b = b"\x10\x11\x12\x13\x14\x15";
    let elem_c = b"\x20\x21\x22\x23\x24\x25\x26\x27\x28";
    let expected = b"\
        \x45\x00\x0B\xE6\x3F\x9B\x6B\xFD\x89\xF5\x47\x17\x67\x0F\x69\xA9\
        \xBC\x76\x35\x91\xA4\xF0\x5C\x50\xD6\x88\x91\xA7\x44\xBC\xC6\xE7\
        \xD6\xD5\xB5\xE8\x2C\x01\x8D\xA9\x99\xED\x35\xB0\xBB\x49\xC9\x67\
        \x8E\x52\x6A\xBD\x8E\x85\xC1\x3E\xD2\x54\x02\x1D\xB9\xE7\x90\xCE\
    ";
    let mut digest = [0u8; 64];
    let mut hasher = TupleHash::v256(b"My Tuple App");
    hasher.update(elem_a);
    hasher.update(elem_b);
    hasher.update(elem_c);
    hasher.finalize(&mut digest);
    assert_eq!(&expected[..], &digest[..]);
}