chore: checkpoint before Python removal

This commit is contained in:
2026-03-26 22:33:59 +00:00
parent 683cec9307
commit e568ddf82a
29972 changed files with 11269302 additions and 2 deletions

77
vendor/tiny-keccak/src/cshake.rs vendored Normal file
View File

@@ -0,0 +1,77 @@
//! The `cSHAKE` extendable-output functions defined in [`SP800-185`].
//!
//! [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
use crate::{bits_to_rate, keccakf::KeccakF, left_encode, Hasher, KeccakState, Xof};
/// The `cSHAKE` extendable-output functions defined in [`SP800-185`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["cshake"] }
/// ```
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
#[derive(Clone)]
pub struct CShake {
    // Keccak sponge driven by the keccak-f[1600] permutation.
    state: KeccakState<KeccakF>,
}
impl CShake {
    /// Padding delimiter byte used for cSHAKE domain separation.
    const DELIM: u8 = 0x04;

    /// Creates new [`CShake`] hasher with a security level of 128 bits.
    ///
    /// [`CShake`]: struct.CShake.html
    pub fn v128(name: &[u8], custom_string: &[u8]) -> CShake {
        Self::new(name, custom_string, 128)
    }

    /// Creates new [`CShake`] hasher with a security level of 256 bits.
    ///
    /// [`CShake`]: struct.CShake.html
    pub fn v256(name: &[u8], custom_string: &[u8]) -> CShake {
        Self::new(name, custom_string, 256)
    }

    pub(crate) fn new(name: &[u8], custom_string: &[u8], bits: usize) -> CShake {
        let rate = bits_to_rate(bits);
        // SP800-185: when both the function name N and the customization
        // string S are empty, cSHAKE is defined to be plain SHAKE, so fall
        // back to the SHAKE delimiter (0x1f).
        if name.is_empty() && custom_string.is_empty() {
            return CShake {
                state: KeccakState::new(rate, 0x1f),
            };
        }
        // Absorb bytepad(encode_string(N) || encode_string(S), rate):
        // left_encode(rate) first, then each string prefixed by its bit length.
        let mut state = KeccakState::new(rate, Self::DELIM);
        state.update(left_encode(rate).value());
        for string in &[name, custom_string] {
            state.update(left_encode(string.len() * 8).value());
            state.update(string);
        }
        // Zero-pad the prefix to a full rate block before message data starts.
        state.fill_block();
        CShake { state }
    }

    // Runs the permutation and rewinds to the start of the rate area;
    // used by KMAC to close its key block.
    pub(crate) fn fill_block(&mut self) {
        self.state.fill_block();
    }
}
impl Hasher for CShake {
    // Absorbs message bytes into the underlying sponge.
    fn update(&mut self, input: &[u8]) {
        self.state.update(input);
    }
    // Pads with the cSHAKE delimiter and squeezes the requested output.
    fn finalize(self, output: &mut [u8]) {
        self.state.finalize(output);
    }
}
impl Xof for CShake {
    // Squeezes further output bytes; repeated calls continue the stream.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output);
    }
}

160
vendor/tiny-keccak/src/k12.rs vendored Normal file
View File

@@ -0,0 +1,160 @@
//! The `KangarooTwelve` hash function defined [`here`].
//!
//! [`here`]: https://eprint.iacr.org/2016/770.pdf
use crate::{bits_to_rate, keccakp::KeccakP, EncodedLen, Hasher, IntoXof, KeccakState, Xof};
// KangarooTwelve's length_encode: the value in big-endian with leading zero
// bytes stripped, followed by one byte holding the number of value bytes
// (so 0 encodes as the single byte 0x00).
fn encode_len(len: usize) -> EncodedLen {
    let bytes = (len as u64).to_be_bytes();
    // Count the leading zero bytes; 8 when len == 0.
    let offset = bytes.iter().take_while(|byte| **byte == 0).count();
    let mut buffer = [0u8; 9];
    buffer[..8].copy_from_slice(&bytes);
    buffer[8] = (8 - offset) as u8;
    EncodedLen { offset, buffer }
}
/// The `KangarooTwelve` hash function defined [`here`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["k12"] }
/// ```
///
/// [`here`]: https://eprint.iacr.org/2016/770.pdf
#[derive(Clone)]
pub struct KangarooTwelve<T> {
    // Trunk (final node) state; its delimiter is set later in `into_xof`.
    state: KeccakState<KeccakP>,
    // State of the leaf chunk currently being absorbed.
    current_chunk: KeccakState<KeccakP>,
    // Customization string, consumed (taken) by `into_xof`.
    custom_string: Option<T>,
    // Bytes absorbed into the current chunk so far.
    written: usize,
    // Number of completed chunks after the first one.
    chunks: usize,
}
impl<T> KangarooTwelve<T> {
    /// Chunk (leaf) size in bytes: the input is split into 8192-byte pieces
    /// that are hashed independently and chained into the trunk.
    const MAX_CHUNK_SIZE: usize = 8192;

    /// Creates new [`KangarooTwelve`] hasher with a security level of 128 bits.
    ///
    /// [`KangarooTwelve`]: struct.KangarooTwelve.html
    pub fn new(custom_string: T) -> Self {
        let rate = bits_to_rate(128);
        // The trunk delimiter starts as 0 and is patched to 0x07 or 0x06 in
        // `into_xof` once it is known whether chaining was used; leaf chunks
        // always use the 0x0b delimiter.
        let trunk = KeccakState::new(rate, 0);
        let leaf = KeccakState::new(rate, 0x0b);
        KangarooTwelve {
            state: trunk,
            current_chunk: leaf,
            custom_string: Some(custom_string),
            written: 0,
            chunks: 0,
        }
    }
}
impl<T: AsRef<[u8]>> Hasher for KangarooTwelve<T> {
    // Absorbs input, splitting it into MAX_CHUNK_SIZE leaves and chaining
    // each finished leaf's 32-byte hash into the trunk state.
    fn update(&mut self, input: &[u8]) {
        let mut to_absorb = input;
        // The first chunk is absorbed directly into the trunk state.
        if self.chunks == 0 {
            let todo = core::cmp::min(Self::MAX_CHUNK_SIZE - self.written, to_absorb.len());
            self.state.update(&to_absorb[..todo]);
            self.written += todo;
            to_absorb = &to_absorb[todo..];
            if to_absorb.len() > 0 && self.written == Self::MAX_CHUNK_SIZE {
                // More input follows a full first chunk: switch to tree mode
                // by appending the 8-byte chaining marker 0x03 || 0^7.
                self.state.update(&[0x03, 0, 0, 0, 0, 0, 0, 0]);
                self.written = 0;
                self.chunks += 1;
            }
        }
        // Subsequent chunks go through `current_chunk`; a full leaf is
        // finalized to a 32-byte chaining value absorbed by the trunk.
        while to_absorb.len() > 0 {
            if self.written == Self::MAX_CHUNK_SIZE {
                let mut chunk_hash = [0u8; 32];
                // Clone before finalizing because `finalize` consumes the
                // state; the live leaf is reset for the next chunk.
                let current_chunk = self.current_chunk.clone();
                self.current_chunk.reset();
                current_chunk.finalize(&mut chunk_hash);
                self.state.update(&chunk_hash);
                self.written = 0;
                self.chunks += 1;
            }
            let todo = core::cmp::min(Self::MAX_CHUNK_SIZE - self.written, to_absorb.len());
            self.current_chunk.update(&to_absorb[..todo]);
            self.written += todo;
            to_absorb = &to_absorb[todo..];
        }
    }
    // Delegates to the XOF conversion, which appends the customization
    // string and closes the tree, then squeezes once.
    fn finalize(self, output: &mut [u8]) {
        let mut xof = self.into_xof();
        xof.squeeze(output);
    }
}
/// The `KangarooTwelve` extendable-output function defined [`here`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["k12"] }
/// ```
///
/// # Example
///
/// ```
/// # use tiny_keccak::{KangarooTwelve, Xof, IntoXof, Hasher};
/// let input = b"hello world";
/// let mut output = [0u8; 64];
/// let mut hasher = KangarooTwelve::new(b"");
/// hasher.update(input);
/// let mut xof = hasher.into_xof();
/// xof.squeeze(&mut output[..32]);
/// xof.squeeze(&mut output[32..]);
/// ```
///
/// ---
///
/// [`KangarooTwelveXof`] can be created only by using [`KangarooTwelve::IntoXof`] interface.
///
/// [`here`]: https://eprint.iacr.org/2016/770.pdf
/// [`KangarooTwelveXof`]: struct.KangarooTwelveXof.html
/// [`KangarooTwelve::IntoXof`]: struct.KangarooTwelve.html#impl-IntoXof
#[derive(Clone)]
pub struct KangarooTwelveXof {
    // Fully closed trunk state, ready for squeezing only.
    state: KeccakState<KeccakP>,
}
impl<T: AsRef<[u8]>> IntoXof for KangarooTwelve<T> {
    type Xof = KangarooTwelveXof;
    // Appends the customization string and closes the hash tree, producing
    // a squeeze-only state with the correct final-node delimiter.
    fn into_xof(mut self) -> KangarooTwelveXof {
        let custom_string = self
            .custom_string
            .take()
            .expect("KangarooTwelve cannot be initialized without custom_string; qed");
        let encoded_len = encode_len(custom_string.as_ref().len());
        // The customization string and its length_encode are absorbed as
        // ordinary message bytes (they may themselves span chunks).
        self.update(custom_string.as_ref());
        self.update(encoded_len.value());
        if self.chunks == 0 {
            // Everything fit into the first chunk: single-node case.
            self.state.delim = 0x07;
        } else {
            // Tree case: close the last leaf, then append the number of
            // chaining values and the 0xffff terminator.
            let encoded_chunks = encode_len(self.chunks);
            let mut tmp_chunk = [0u8; 32];
            self.current_chunk.finalize(&mut tmp_chunk);
            self.state.update(&tmp_chunk);
            self.state.update(encoded_chunks.value());
            self.state.update(&[0xff, 0xff]);
            self.state.delim = 0x06;
        }
        KangarooTwelveXof { state: self.state }
    }
}
impl Xof for KangarooTwelveXof {
    // Squeezes further output bytes; repeated calls continue the stream.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output);
    }
}

93
vendor/tiny-keccak/src/keccak.rs vendored Normal file
View File

@@ -0,0 +1,93 @@
//! The `Keccak` hash functions.
use super::{bits_to_rate, keccakf::KeccakF, Hasher, KeccakState};
/// The `Keccak` hash functions defined in [`Keccak SHA3 submission`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["keccak"] }
/// ```
///
/// [`Keccak SHA3 submission`]: https://keccak.team/files/Keccak-submission-3.pdf
#[derive(Clone)]
pub struct Keccak {
    // Keccak sponge driven by the keccak-f[1600] permutation.
    state: KeccakState<KeccakF>,
}
impl Keccak {
    /// Padding delimiter of pre-standardization Keccak (differs from the
    /// 0x06 delimiter FIPS-202 assigned to SHA-3).
    const DELIM: u8 = 0x01;

    /// Creates new [`Keccak`] hasher with a security level of 224 bits.
    ///
    /// [`Keccak`]: struct.Keccak.html
    pub fn v224() -> Keccak {
        Self::new(224)
    }

    /// Creates new [`Keccak`] hasher with a security level of 256 bits.
    ///
    /// [`Keccak`]: struct.Keccak.html
    pub fn v256() -> Keccak {
        Self::new(256)
    }

    /// Creates new [`Keccak`] hasher with a security level of 384 bits.
    ///
    /// [`Keccak`]: struct.Keccak.html
    pub fn v384() -> Keccak {
        Self::new(384)
    }

    /// Creates new [`Keccak`] hasher with a security level of 512 bits.
    ///
    /// [`Keccak`]: struct.Keccak.html
    pub fn v512() -> Keccak {
        Self::new(512)
    }

    // Shared constructor: derives the sponge rate from the security level.
    fn new(bits: usize) -> Keccak {
        let state = KeccakState::new(bits_to_rate(bits), Self::DELIM);
        Keccak { state }
    }
}
impl Hasher for Keccak {
    /// Absorb additional input. Can be called multiple times.
    ///
    /// # Example
    ///
    /// ```
    /// # use tiny_keccak::{Hasher, Keccak};
    /// #
    /// # fn main() {
    /// # let mut keccak = Keccak::v256();
    /// keccak.update(b"hello");
    /// keccak.update(b" world");
    /// # }
    /// ```
    fn update(&mut self, input: &[u8]) {
        self.state.update(input);
    }
    /// Pad and squeeze the state to the output.
    ///
    /// # Example
    ///
    /// ```
    /// # use tiny_keccak::{Hasher, Keccak};
    /// #
    /// # fn main() {
    /// # let keccak = Keccak::v256();
    /// # let mut output = [0u8; 32];
    /// keccak.finalize(&mut output);
    /// # }
    /// #
    /// ```
    fn finalize(self, output: &mut [u8]) {
        self.state.finalize(output);
    }
}

40
vendor/tiny-keccak/src/keccakf.rs vendored Normal file
View File

@@ -0,0 +1,40 @@
use crate::{Buffer, Permutation};
/// Number of rounds of the full keccak-f[1600] permutation (FIPS-202).
const ROUNDS: usize = 24;

/// Iota round constants: `RC[i]` is XOR-ed into lane (0, 0) in round `i`.
const RC: [u64; ROUNDS] = [
    1u64,
    0x8082u64,
    0x800000000000808au64,
    0x8000000080008000u64,
    0x808bu64,
    0x80000001u64,
    0x8000000080008081u64,
    0x8000000000008009u64,
    0x8au64,
    0x88u64,
    0x80008009u64,
    0x8000000au64,
    0x8000808bu64,
    0x800000000000008bu64,
    0x8000000000008089u64,
    0x8000000000008003u64,
    0x8000000000008002u64,
    0x8000000000000080u64,
    0x800au64,
    0x800000008000000au64,
    0x8000000080008081u64,
    0x8000000000008080u64,
    0x80000001u64,
    0x8000000080008008u64,
];
// Expands (via the shared macro defined in lib.rs) into `keccakf`, the
// 24-round keccak-f[1600] permutation used by all FIPS-202/SP800-185 hashes.
keccak_function!("`keccak-f[1600, 24]`", keccakf, ROUNDS, RC);

// Zero-sized marker type selecting keccak-f[1600, 24] as the sponge permutation.
pub struct KeccakF;

impl Permutation for KeccakF {
    fn execute(buffer: &mut Buffer) {
        keccakf(buffer.words());
    }
}

28
vendor/tiny-keccak/src/keccakp.rs vendored Normal file
View File

@@ -0,0 +1,28 @@
use crate::{Buffer, Permutation};
/// Number of rounds of the reduced keccak-p[1600, 12] permutation
/// used by KangarooTwelve.
const ROUNDS: usize = 12;

/// Iota round constants: the last 12 entries of the 24-round keccak-f[1600]
/// schedule, XOR-ed into lane (0, 0) each round.
const RC: [u64; ROUNDS] = [
    0x000000008000808b,
    0x800000000000008b,
    0x8000000000008089,
    0x8000000000008003,
    0x8000000000008002,
    0x8000000000000080,
    0x000000000000800a,
    0x800000008000000a,
    0x8000000080008081,
    0x8000000000008080,
    0x0000000080000001,
    0x8000000080008008,
];
// Expands (via the shared macro defined in lib.rs) into `keccakp`, the
// 12-round keccak-p[1600, 12] permutation.
keccak_function!("`keccak-p[1600, 12]`", keccakp, ROUNDS, RC);

// Zero-sized marker type selecting keccak-p[1600, 12] as the sponge permutation.
pub struct KeccakP;

impl Permutation for KeccakP {
    fn execute(buffer: &mut Buffer) {
        keccakp(buffer.words());
    }
}

114
vendor/tiny-keccak/src/kmac.rs vendored Normal file
View File

@@ -0,0 +1,114 @@
use crate::{bits_to_rate, left_encode, right_encode, CShake, Hasher, IntoXof, Xof};
/// The `KMAC` pseudo-random functions defined in [`SP800-185`].
///
/// The KECCAK Message Authentication Code (`KMAC`) algorithm is a `PRF` and keyed hash function based
/// on KECCAK. It provides variable-length output, and unlike [`SHAKE`] and [`cSHAKE`], altering the
/// requested output length generates a new, unrelated output. KMAC has two variants, [`KMAC128`] and
/// [`KMAC256`], built from [`cSHAKE128`] and [`cSHAKE256`], respectively. The two variants differ somewhat in
/// their technical security properties.
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["kmac"] }
/// ```
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
/// [`KMAC128`]: struct.Kmac.html#method.v128
/// [`KMAC256`]: struct.Kmac.html#method.v256
/// [`SHAKE`]: struct.Shake.html
/// [`cSHAKE`]: struct.CShake.html
/// [`cSHAKE128`]: struct.CShake.html#method.v128
/// [`cSHAKE256`]: struct.CShake.html#method.v256
#[derive(Clone)]
pub struct Kmac {
    // cSHAKE instance with function name "KMAC" and the key already absorbed.
    state: CShake,
}
impl Kmac {
    /// Creates new [`Kmac`] hasher with a security level of 128 bits.
    ///
    /// [`Kmac`]: struct.Kmac.html
    pub fn v128(key: &[u8], custom_string: &[u8]) -> Kmac {
        Self::new(key, custom_string, 128)
    }

    /// Creates new [`Kmac`] hasher with a security level of 256 bits.
    ///
    /// [`Kmac`]: struct.Kmac.html
    pub fn v256(key: &[u8], custom_string: &[u8]) -> Kmac {
        Self::new(key, custom_string, 256)
    }

    // Builds cSHAKE with function name "KMAC", then absorbs
    // bytepad(encode_string(K), rate) before any message data (SP800-185).
    fn new(key: &[u8], custom_string: &[u8], bits: usize) -> Kmac {
        let mut state = CShake::new(b"KMAC", custom_string, bits);
        let rate = bits_to_rate(bits);
        state.update(left_encode(rate).value());
        state.update(left_encode(key.len() * 8).value());
        state.update(key);
        // Zero-pad the key block to a full rate block.
        state.fill_block();
        Kmac { state }
    }
}
impl Hasher for Kmac {
    // Absorbs message bytes.
    fn update(&mut self, input: &[u8]) {
        self.state.update(input)
    }
    // Appends right_encode(L), the requested output bit length, as required
    // by the KMAC definition, then finalizes the inner cSHAKE.
    fn finalize(mut self, output: &mut [u8]) {
        self.state.update(right_encode(output.len() * 8).value());
        self.state.finalize(output)
    }
}
/// The `KMACXOF` extendable-output functions defined in [`SP800-185`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["kmac"] }
/// ```
///
/// # Example
///
/// ```
/// # use tiny_keccak::{Kmac, Xof, IntoXof, Hasher};
/// let input = b"hello world";
/// let mut output = [0u8; 64];
/// let mut kmac = Kmac::v256(b"", b"");
/// kmac.update(input);
/// let mut xof = kmac.into_xof();
/// xof.squeeze(&mut output[..32]);
/// xof.squeeze(&mut output[32..]);
/// ```
///
/// ---
///
/// [`KmacXof`] can be created only by using [`Kmac::IntoXof`] interface.
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
/// [`KmacXof`]: struct.KmacXof.html
/// [`Kmac::IntoXof`]: struct.Kmac.html#impl-IntoXof
#[derive(Clone)]
pub struct KmacXof {
    // Inner cSHAKE state, closed for input and ready for squeezing.
    state: CShake,
}
impl IntoXof for Kmac {
    type Xof = KmacXof;
    fn into_xof(mut self) -> Self::Xof {
        // KMACXOF appends right_encode(0) to signal arbitrary-length output
        // instead of a fixed output bit length.
        self.state.update(right_encode(0).value());
        KmacXof { state: self.state }
    }
}
impl Xof for KmacXof {
    // Squeezes further output bytes; repeated calls continue the stream.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output)
    }
}

501
vendor/tiny-keccak/src/lib.rs vendored Normal file
View File

@@ -0,0 +1,501 @@
//! Keccak derived functions specified in [`FIPS-202`], [`SP800-185`] and [`KangarooTwelve`].
//!
//! # Example
//!
//! ```
//! # use tiny_keccak::Hasher;
//! #
//! # fn foo<H: Hasher>(mut hasher: H) {
//! let input_a = b"hello world";
//! let input_b = b"!";
//! let mut output = [0u8; 32];
//! hasher.update(input_a);
//! hasher.update(input_b);
//! hasher.finalize(&mut output);
//! # }
//! ```
//!
//! # Credits
//!
//! - [`coruus/keccak-tiny`] for C implementation of keccak function
//! - [`@quininer`] for `no-std` support and rust implementation [`SP800-185`]
//! - [`mimoo/GoKangarooTwelve`] for GO implementation of `KangarooTwelve`
//! - [`@Vurich`] for optimizations
//! - [`@oleganza`] for adding support for half-duplex use
//!
//! # License
//!
//! [`CC0`]. Attribution kindly requested. Blame taken too,
//! but not liability.
//!
//! [`FIPS-202`]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
//! [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
//! [`KangarooTwelve`]: https://eprint.iacr.org/2016/770.pdf
//! [`coruus/keccak-tiny`]: https://github.com/coruus/keccak-tiny
//! [`mimoo/GoKangarooTwelve`]: https://github.com/mimoo/GoKangarooTwelve
//! [`@quininer`]: https://github.com/quininer
//! [`@Vurich`]: https://github.com/Vurich
//! [`@oleganza`]: https://github.com/oleganza
//! [`CC0`]: https://github.com/debris/tiny-keccak/blob/master/LICENSE
#![no_std]
#![deny(missing_docs)]
/// Per-lane rotation offsets for the Rho step, listed in the order the
/// Pi step visits the lanes.
const RHO: [u32; 24] = [
    1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44,
];

/// Lane index permutation for the Pi step.
const PI: [usize; 24] = [
    10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1,
];

/// Number of 64-bit lanes in the 1600-bit keccak state (5 x 5).
const WORDS: usize = 25;
// Generates a keccak permutation function over the 25-lane state.
// `$doc` is the rustdoc string, `$name` the function name, `$rounds` the
// round count and `$rc` the matching Iota round-constant table; used to
// instantiate both keccak-f[1600, 24] and keccak-p[1600, 12].
macro_rules! keccak_function {
    ($doc: expr, $name: ident, $rounds: expr, $rc: expr) => {
        #[doc = $doc]
        #[allow(unused_assignments)]
        #[allow(non_upper_case_globals)]
        pub fn $name(a: &mut [u64; $crate::WORDS]) {
            use crunchy::unroll;
            for i in 0..$rounds {
                // Scratch lanes reused by the Theta, Rho/Pi and Chi steps.
                let mut array: [u64; 5] = [0; 5];
                // Theta: column parities...
                unroll! {
                    for x in 0..5 {
                        unroll! {
                            for y_count in 0..5 {
                                let y = y_count * 5;
                                array[x] ^= a[x + y];
                            }
                        }
                    }
                }
                // ...XOR-ed back into every lane of the neighboring columns.
                unroll! {
                    for x in 0..5 {
                        unroll! {
                            for y_count in 0..5 {
                                let y = y_count * 5;
                                a[y + x] ^= array[(x + 4) % 5] ^ array[(x + 1) % 5].rotate_left(1);
                            }
                        }
                    }
                }
                // Rho and pi: rotate each lane and move it to its PI position.
                let mut last = a[1];
                unroll! {
                    for x in 0..24 {
                        array[0] = a[$crate::PI[x]];
                        a[$crate::PI[x]] = last.rotate_left($crate::RHO[x]);
                        last = array[0];
                    }
                }
                // Chi: non-linear row mixing, a[x] ^= !a[x+1] & a[x+2] per row.
                unroll! {
                    for y_step in 0..5 {
                        let y = y_step * 5;
                        unroll! {
                            for x in 0..5 {
                                array[x] = a[y + x];
                            }
                        }
                        unroll! {
                            for x in 0..5 {
                                a[y + x] = array[x] ^ ((!array[(x + 1) % 5]) & (array[(x + 2) % 5]));
                            }
                        }
                    }
                };
                // Iota: XOR the round constant into lane (0, 0).
                a[0] ^= $rc[i];
            }
        }
    }
}
#[cfg(feature = "k12")]
mod keccakp;
#[cfg(feature = "k12")]
pub use keccakp::keccakp;
#[cfg(any(
feature = "keccak",
feature = "shake",
feature = "sha3",
feature = "cshake",
feature = "kmac",
feature = "tuple_hash",
feature = "parallel_hash"
))]
mod keccakf;
#[cfg(any(
feature = "keccak",
feature = "shake",
feature = "sha3",
feature = "cshake",
feature = "kmac",
feature = "tuple_hash",
feature = "parallel_hash"
))]
pub use keccakf::keccakf;
#[cfg(feature = "k12")]
mod k12;
#[cfg(feature = "k12")]
pub use k12::{KangarooTwelve, KangarooTwelveXof};
#[cfg(feature = "keccak")]
mod keccak;
#[cfg(feature = "keccak")]
pub use keccak::Keccak;
#[cfg(feature = "shake")]
mod shake;
#[cfg(feature = "shake")]
pub use shake::Shake;
#[cfg(feature = "sha3")]
mod sha3;
#[cfg(feature = "sha3")]
pub use sha3::Sha3;
#[cfg(feature = "cshake")]
mod cshake;
#[cfg(feature = "cshake")]
pub use cshake::CShake;
#[cfg(feature = "kmac")]
mod kmac;
#[cfg(feature = "kmac")]
pub use kmac::{Kmac, KmacXof};
#[cfg(feature = "tuple_hash")]
mod tuple_hash;
#[cfg(feature = "tuple_hash")]
pub use tuple_hash::{TupleHash, TupleHashXof};
#[cfg(feature = "parallel_hash")]
mod parallel_hash;
#[cfg(feature = "parallel_hash")]
pub use parallel_hash::{ParallelHash, ParallelHashXof};
/// A trait for hashing an arbitrary stream of bytes.
///
/// # Example
///
/// ```
/// # use tiny_keccak::Hasher;
/// #
/// # fn foo<H: Hasher>(mut hasher: H) {
/// let input_a = b"hello world";
/// let input_b = b"!";
/// let mut output = [0u8; 32];
/// hasher.update(input_a);
/// hasher.update(input_b);
/// hasher.finalize(&mut output);
/// # }
/// ```
pub trait Hasher {
    /// Absorb additional input. Can be called multiple times.
    fn update(&mut self, input: &[u8]);
    /// Pad and squeeze the state to the output. Consumes the hasher;
    /// `output` is filled entirely, whatever its length.
    fn finalize(self, output: &mut [u8]);
}
/// A trait used to convert [`Hasher`] into its [`Xof`] counterpart.
///
/// # Example
///
/// ```
/// # use tiny_keccak::IntoXof;
/// #
/// # fn foo<H: IntoXof>(hasher: H) {
/// let xof = hasher.into_xof();
/// # }
/// ```
///
/// [`Hasher`]: trait.Hasher.html
/// [`Xof`]: trait.Xof.html
pub trait IntoXof {
    /// A type implementing [`Xof`], eXtendable-output function interface.
    ///
    /// [`Xof`]: trait.Xof.html
    type Xof: Xof;
    /// A method used to convert type into [`Xof`].
    ///
    /// [`Xof`]: trait.Xof.html
    fn into_xof(self) -> Self::Xof;
}
/// Extendable-output function (`XOF`) is a function on bit strings in which the output can be
/// extended to any desired length.
///
/// # Example
///
/// ```
/// # use tiny_keccak::Xof;
/// #
/// # fn foo<X: Xof>(mut xof: X) {
/// let mut output = [0u8; 64];
/// xof.squeeze(&mut output[0..32]);
/// xof.squeeze(&mut output[32..]);
/// # }
/// ```
pub trait Xof {
    /// A method used to retrieve another part of hash function output.
    /// Successive calls continue the same output stream.
    fn squeeze(&mut self, output: &mut [u8]);
}
// A left/right-encoded length held in a fixed 9-byte scratch buffer.
struct EncodedLen {
    // Index of the first meaningful byte inside `buffer`.
    offset: usize,
    buffer: [u8; 9],
}
impl EncodedLen {
    // Returns the encoded bytes (everything from `offset` to the end).
    fn value(&self) -> &[u8] {
        &self.buffer[self.offset..]
    }
}
// SP800-185 left_encode: one byte holding the count of value bytes, followed
// by the value in big-endian with leading zero bytes stripped (a single zero
// value byte is kept when `len == 0`).
fn left_encode(len: usize) -> EncodedLen {
    let mut buffer = [0u8; 9];
    buffer[1..].copy_from_slice(&(len as u64).to_be_bytes());
    // Find the first non-zero byte; default to index 8 so that zero still
    // produces one value byte.
    let mut offset = 8;
    for (index, byte) in buffer.iter().enumerate() {
        if *byte != 0 {
            offset = index;
            break;
        }
    }
    // Write the byte count immediately before the value bytes.
    buffer[offset - 1] = (9 - offset) as u8;
    EncodedLen {
        offset: offset - 1,
        buffer,
    }
}
// SP800-185 right_encode: the value in big-endian with leading zero bytes
// stripped, followed by one byte holding the count of value bytes (a single
// zero value byte is kept when `len == 0`).
fn right_encode(len: usize) -> EncodedLen {
    let mut buffer = [0u8; 9];
    buffer[..8].copy_from_slice(&(len as u64).to_be_bytes());
    // Defaulting to 7 keeps exactly one zero value byte for `len == 0`.
    let offset = match buffer[..8].iter().position(|byte| *byte != 0) {
        Some(index) => index,
        None => 7,
    };
    buffer[8] = (8 - offset) as u8;
    EncodedLen { offset, buffer }
}
// The 1600-bit keccak state stored as 25 u64 lanes.
#[derive(Default, Clone)]
struct Buffer([u64; WORDS]);
impl Buffer {
    // Mutable access to the raw lanes, for the permutation.
    fn words(&mut self) -> &mut [u64; WORDS] {
        &mut self.0
    }
    // Little-endian: the lane array can be viewed directly as state bytes.
    #[cfg(target_endian = "little")]
    #[inline]
    fn execute<F: FnOnce(&mut [u8])>(&mut self, offset: usize, len: usize, f: F) {
        // SAFETY: reinterprets `&mut [u64; 25]` as `&mut [u8; 200]`; sizes
        // match and on little-endian the byte order equals the sponge's
        // byte-level view of the state.
        let buffer: &mut [u8; WORDS * 8] = unsafe { core::mem::transmute(&mut self.0) };
        f(&mut buffer[offset..][..len]);
    }
    // Big-endian: byte-swap the touched lanes to little-endian around the
    // byte-level operation, then swap them back.
    #[cfg(target_endian = "big")]
    #[inline]
    fn execute<F: FnOnce(&mut [u8])>(&mut self, offset: usize, len: usize, f: F) {
        fn swap_endianess(buffer: &mut [u64]) {
            for item in buffer {
                *item = item.swap_bytes();
            }
        }
        let start = offset / 8;
        let end = (offset + len + 7) / 8;
        swap_endianess(&mut self.0[start..end]);
        // SAFETY: same size reinterpretation as the little-endian variant;
        // the affected lanes were just converted to little-endian order.
        let buffer: &mut [u8; WORDS * 8] = unsafe { core::mem::transmute(&mut self.0) };
        f(&mut buffer[offset..][..len]);
        swap_endianess(&mut self.0[start..end]);
    }
    // Copies `len` state bytes starting at `offset` into the front of `dst`
    // (the squeeze direction).
    fn setout(&mut self, dst: &mut [u8], offset: usize, len: usize) {
        self.execute(offset, len, |buffer| dst[..len].copy_from_slice(buffer));
    }
    // XORs `len` bytes of `src` into the state at `offset` (the absorb direction).
    fn xorin(&mut self, src: &[u8], offset: usize, len: usize) {
        self.execute(offset, len, |dst| {
            assert!(dst.len() <= src.len());
            let len = dst.len();
            let mut dst_ptr = dst.as_mut_ptr();
            let mut src_ptr = src.as_ptr();
            for _ in 0..len {
                // SAFETY: both pointers advance in lockstep for `len` steps;
                // `dst` is exactly `len` bytes and the assert above proves
                // `src` holds at least `len` bytes.
                unsafe {
                    *dst_ptr ^= *src_ptr;
                    src_ptr = src_ptr.offset(1);
                    dst_ptr = dst_ptr.offset(1);
                }
            }
        });
    }
    // Multi-rate padding: XOR the delimiter at the current position and the
    // final 0x80 bit into the last byte of the rate area.
    fn pad(&mut self, offset: usize, delim: u8, rate: usize) {
        self.execute(offset, 1, |buff| buff[0] ^= delim);
        self.execute(rate - 1, 1, |buff| buff[0] ^= 0x80);
    }
}
// Abstraction over the permutation applied to the sponge state
// (keccak-f[1600, 24] or keccak-p[1600, 12]).
trait Permutation {
    fn execute(a: &mut Buffer);
}
// Current sponge direction; tracked so that absorbing and squeezing can be
// interleaved (half-duplex use).
#[derive(Clone, Copy)]
enum Mode {
    Absorbing,
    Squeezing,
}
// Generic sponge construction parameterized by the permutation `P`.
struct KeccakState<P> {
    buffer: Buffer,
    // Current byte position inside the rate area.
    offset: usize,
    // Rate (block size) in bytes.
    rate: usize,
    // Padding delimiter byte providing domain separation between variants.
    delim: u8,
    mode: Mode,
    permutation: core::marker::PhantomData<P>,
}
// Implemented by hand so `KeccakState<P>` is `Clone` without requiring
// `P: Clone` (a derive would add that bound to the marker type).
impl<P> Clone for KeccakState<P> {
    fn clone(&self) -> Self {
        KeccakState {
            buffer: self.buffer.clone(),
            offset: self.offset,
            rate: self.rate,
            delim: self.delim,
            mode: self.mode,
            permutation: core::marker::PhantomData,
        }
    }
}
impl<P: Permutation> KeccakState<P> {
    // Creates a zeroed absorbing state with the given rate and delimiter.
    fn new(rate: usize, delim: u8) -> Self {
        assert!(rate != 0, "rate cannot be equal 0");
        KeccakState {
            buffer: Buffer::default(),
            offset: 0,
            rate,
            delim,
            mode: Mode::Absorbing,
            permutation: core::marker::PhantomData,
        }
    }
    // Applies the permutation to the whole state.
    fn keccak(&mut self) {
        P::execute(&mut self.buffer);
    }
    // Absorbs `input`, permuting every time the rate area fills up.
    fn update(&mut self, input: &[u8]) {
        // Half-duplex: switching back from squeezing permutes once so
        // absorbing restarts on a fresh block.
        if let Mode::Squeezing = self.mode {
            self.mode = Mode::Absorbing;
            self.fill_block();
        }
        //first foldp
        let mut ip = 0;
        let mut l = input.len();
        // The first block may be partially filled from a previous update.
        let mut rate = self.rate - self.offset;
        let mut offset = self.offset;
        while l >= rate {
            self.buffer.xorin(&input[ip..], offset, rate);
            self.keccak();
            ip += rate;
            l -= rate;
            rate = self.rate;
            offset = 0;
        }
        // Absorb the trailing partial block without permuting.
        self.buffer.xorin(&input[ip..], offset, l);
        self.offset = offset + l;
    }
    // Applies the multi-rate padding at the current absorbing position.
    fn pad(&mut self) {
        self.buffer.pad(self.offset, self.delim, self.rate);
    }
    // Squeezes output, permuting every time the rate area is exhausted.
    fn squeeze(&mut self, output: &mut [u8]) {
        // First squeeze after absorbing: pad and permute once.
        if let Mode::Absorbing = self.mode {
            self.mode = Mode::Squeezing;
            self.pad();
            self.fill_block();
        }
        // second foldp
        let mut op = 0;
        let mut l = output.len();
        let mut rate = self.rate - self.offset;
        let mut offset = self.offset;
        while l >= rate {
            self.buffer.setout(&mut output[op..], offset, rate);
            self.keccak();
            op += rate;
            l -= rate;
            rate = self.rate;
            offset = 0;
        }
        self.buffer.setout(&mut output[op..], offset, l);
        self.offset = offset + l;
    }
    // Pads and squeezes in one shot, consuming the state.
    fn finalize(mut self, output: &mut [u8]) {
        self.squeeze(output);
    }
    // Permutes and rewinds the position to the start of the rate area.
    fn fill_block(&mut self) {
        self.keccak();
        self.offset = 0;
    }
    // Returns the state to its pristine all-zero absorbing condition.
    fn reset(&mut self) {
        self.buffer = Buffer::default();
        self.offset = 0;
        self.mode = Mode::Absorbing;
    }
}
// Converts a security level in bits to the sponge rate in bytes: the
// 200-byte state minus the capacity (capacity = 2 * bits bits = bits / 4 bytes).
fn bits_to_rate(bits: usize) -> usize {
    let capacity_bytes = bits / 4;
    200 - capacity_bytes
}
// Unit tests for the SP800-185 length-encoding helpers.
#[cfg(test)]
mod tests {
    use crate::{left_encode, right_encode};
    #[test]
    fn test_left_encode() {
        assert_eq!(left_encode(0).value(), &[1, 0]);
        assert_eq!(left_encode(128).value(), &[1, 128]);
        // Boundary values around the one-byte/two-byte transition.
        assert_eq!(left_encode(255).value(), &[1, 255]);
        assert_eq!(left_encode(256).value(), &[2, 1, 0]);
        assert_eq!(left_encode(65536).value(), &[3, 1, 0, 0]);
        assert_eq!(left_encode(4096).value(), &[2, 16, 0]);
        assert_eq!(left_encode(54321).value(), &[2, 212, 49]);
    }
    #[test]
    fn test_right_encode() {
        assert_eq!(right_encode(0).value(), &[0, 1]);
        assert_eq!(right_encode(128).value(), &[128, 1]);
        // Boundary values around the one-byte/two-byte transition.
        assert_eq!(right_encode(255).value(), &[255, 1]);
        assert_eq!(right_encode(256).value(), &[1, 0, 2]);
        assert_eq!(right_encode(65536).value(), &[1, 0, 0, 3]);
        assert_eq!(right_encode(4096).value(), &[16, 0, 2]);
        assert_eq!(right_encode(54321).value(), &[212, 49, 2]);
    }
}

206
vendor/tiny-keccak/src/parallel_hash.rs vendored Normal file
View File

@@ -0,0 +1,206 @@
use crate::{left_encode, right_encode, CShake, Hasher, IntoXof, Xof};
// A leaf cSHAKE instance that has absorbed only part of a block; carried
// across `update` calls until the block completes.
#[derive(Clone)]
struct UnfinishedState {
    state: CShake,
    // Bytes absorbed into this block so far (always < block_size).
    absorbed: usize,
}
// Fixed-capacity buffer for one chaining value ("sub-output") of ParallelHash.
struct Suboutout {
    state: [u8; 64],
    // Number of bytes actually used, determined by the security level.
    size: usize,
}
impl Suboutout {
    fn security(bits: usize) -> Suboutout {
        Suboutout {
            state: [0u8; 64],
            // 128 => 32, 256 => 64
            size: bits / 4,
        }
    }
    #[inline]
    fn as_bytes(&self) -> &[u8] {
        &self.state[..self.size]
    }
    #[inline]
    fn as_bytes_mut(&mut self) -> &mut [u8] {
        &mut self.state[..self.size]
    }
}
/// The `ParallelHash` hash functions defined in [`SP800-185`].
///
/// The purpose of `ParallelHash` is to support the efficient hashing of very long strings, by
/// taking advantage of the parallelism available in modern processors. `ParallelHash` supports the
/// [`128-bit`] and [`256-bit`] security strengths, and also provides variable-length output.
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["parallel_hash"] }
/// ```
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
/// [`128-bit`]: struct.ParallelHash.html#method.v128
/// [`256-bit`]: struct.ParallelHash.html#method.v256
#[derive(Clone)]
pub struct ParallelHash {
    // Outer cSHAKE instance that absorbs the per-block chaining values.
    state: CShake,
    // Bytes hashed per inner block.
    block_size: usize,
    // Security level in bits (sizes the chaining values).
    bits: usize,
    // Number of blocks absorbed so far; right_encode-d at finalization.
    blocks: usize,
    // Trailing partial block awaiting more input.
    unfinished: Option<UnfinishedState>,
}
impl ParallelHash {
    /// Creates new [`ParallelHash`] hasher with a security level of 128 bits.
    ///
    /// [`ParallelHash`]: struct.ParallelHash.html
    pub fn v128(custom_string: &[u8], block_size: usize) -> ParallelHash {
        Self::new(custom_string, block_size, 128)
    }

    /// Creates new [`ParallelHash`] hasher with a security level of 256 bits.
    ///
    /// [`ParallelHash`]: struct.ParallelHash.html
    pub fn v256(custom_string: &[u8], block_size: usize) -> ParallelHash {
        Self::new(custom_string, block_size, 256)
    }

    // Outer cSHAKE with function name "ParallelHash"; per SP800-185 the
    // block size is left_encode-d and absorbed before any message data.
    fn new(custom_string: &[u8], block_size: usize, bits: usize) -> ParallelHash {
        let mut outer = CShake::new(b"ParallelHash", custom_string, bits);
        outer.update(left_encode(block_size).value());
        ParallelHash {
            state: outer,
            block_size,
            bits,
            blocks: 0,
            unfinished: None,
        }
    }
}
impl Hasher for ParallelHash {
    // Splits the input into `block_size` pieces, hashes each with an
    // independent cSHAKE, and absorbs the chaining values into the outer state.
    fn update(&mut self, mut input: &[u8]) {
        // First, complete a partial block left over from a previous call.
        if let Some(mut unfinished) = self.unfinished.take() {
            let to_absorb = self.block_size - unfinished.absorbed;
            if input.len() >= to_absorb {
                unfinished.state.update(&input[..to_absorb]);
                input = &input[to_absorb..];
                let mut suboutput = Suboutout::security(self.bits);
                unfinished.state.finalize(suboutput.as_bytes_mut());
                self.state.update(suboutput.as_bytes());
                self.blocks += 1;
            } else {
                // Still not a full block: stash it again and wait for more.
                unfinished.state.update(input);
                unfinished.absorbed += input.len();
                self.unfinished = Some(unfinished);
                return;
            }
        }
        let bits = self.bits;
        // Process all complete blocks in this call.
        let input_blocks_end = input.len() / self.block_size * self.block_size;
        let input_blocks = &input[..input_blocks_end];
        let input_end = &input[input_blocks_end..];
        let parts = input_blocks.chunks(self.block_size).map(|chunk| {
            // Inner hash: cSHAKE with empty name and customization string.
            let mut state = CShake::new(b"", b"", bits);
            state.update(chunk);
            let mut suboutput = Suboutout::security(bits);
            state.finalize(suboutput.as_bytes_mut());
            suboutput
        });
        for part in parts {
            self.state.update(part.as_bytes());
            self.blocks += 1;
        }
        // Keep any trailing partial block for the next update/finalize.
        if !input_end.is_empty() {
            assert!(self.unfinished.is_none());
            let mut state = CShake::new(b"", b"", bits);
            state.update(input_end);
            self.unfinished = Some(UnfinishedState {
                state,
                absorbed: input_end.len(),
            });
        }
    }
    fn finalize(mut self, output: &mut [u8]) {
        // Flush the trailing partial block, if any.
        if let Some(unfinished) = self.unfinished.take() {
            let mut suboutput = Suboutout::security(self.bits);
            unfinished.state.finalize(suboutput.as_bytes_mut());
            self.state.update(suboutput.as_bytes());
            self.blocks += 1;
        }
        // Close the encoding with right_encode(n) || right_encode(L).
        self.state.update(right_encode(self.blocks).value());
        self.state.update(right_encode(output.len() * 8).value());
        self.state.finalize(output);
    }
}
/// The `ParallelHashXOF` extendable-output functions defined in [`SP800-185`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["parallel_hash"] }
/// ```
///
/// # Example
///
/// ```
/// # use tiny_keccak::{ParallelHash, Xof, IntoXof, Hasher};
/// let input = b"hello world";
/// let mut output = [0u8; 64];
/// let mut hasher = ParallelHash::v256(b"", 8);
/// hasher.update(input);
/// let mut xof = hasher.into_xof();
/// xof.squeeze(&mut output[..32]);
/// xof.squeeze(&mut output[32..]);
/// ```
///
/// ---
///
/// [`ParallelHashXof`] can be created only by using [`ParallelHash::IntoXof`] interface.
///
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
/// [`ParallelHashXof`]: struct.ParallelHashXof.html
/// [`ParallelHash::IntoXof`]: struct.ParallelHash.html#impl-IntoXof
#[derive(Clone)]
pub struct ParallelHashXof {
    // Outer cSHAKE state, closed for input and ready for squeezing.
    state: CShake,
}
impl IntoXof for ParallelHash {
    type Xof = ParallelHashXof;
    fn into_xof(mut self) -> Self::Xof {
        // Flush the trailing partial block, if any (same as `finalize`).
        if let Some(unfinished) = self.unfinished.take() {
            let mut suboutput = Suboutout::security(self.bits);
            unfinished.state.finalize(suboutput.as_bytes_mut());
            self.state.update(suboutput.as_bytes());
            self.blocks += 1;
        }
        // right_encode(n), then right_encode(0) to mark XOF (arbitrary-length) output.
        self.state.update(right_encode(self.blocks).value());
        self.state.update(right_encode(0).value());
        ParallelHashXof { state: self.state }
    }
}
impl Xof for ParallelHashXof {
    // Squeezes further output bytes; repeated calls continue the stream.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output);
    }
}

83
vendor/tiny-keccak/src/sha3.rs vendored Normal file
View File

@@ -0,0 +1,83 @@
use crate::{bits_to_rate, keccakf::KeccakF, Hasher, KeccakState};
/// The `SHA3` hash functions defined in [`FIPS-202`].
///
/// [`FIPS-202`]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["sha3"] }
/// ```
///
/// # Example
///
/// ```
/// # use tiny_keccak::{Hasher, Sha3};
/// #
/// # fn main() {
/// let input = b"hello world";
/// let mut output = [0; 32];
/// let expected = b"\
///     \x64\x4b\xcc\x7e\x56\x43\x73\x04\x09\x99\xaa\xc8\x9e\x76\x22\xf3\
///     \xca\x71\xfb\xa1\xd9\x72\xfd\x94\xa3\x1c\x3b\xfb\xf2\x4e\x39\x38\
/// ";
/// let mut sha3 = Sha3::v256();
/// sha3.update(input);
/// sha3.finalize(&mut output);
/// assert_eq!(expected, &output);
/// # }
/// ```
#[derive(Clone)]
pub struct Sha3 {
    // Keccak sponge driven by the keccak-f[1600] permutation.
    state: KeccakState<KeccakF>,
}
impl Sha3 {
    /// SHA-3 padding delimiter from FIPS-202 (the `01` suffix plus the
    /// first bit of the multi-rate padding).
    const DELIM: u8 = 0x06;

    /// Creates new [`Sha3`] hasher with a security level of 224 bits.
    ///
    /// [`Sha3`]: struct.Sha3.html
    pub fn v224() -> Sha3 {
        Self::new(224)
    }

    /// Creates new [`Sha3`] hasher with a security level of 256 bits.
    ///
    /// [`Sha3`]: struct.Sha3.html
    pub fn v256() -> Sha3 {
        Self::new(256)
    }

    /// Creates new [`Sha3`] hasher with a security level of 384 bits.
    ///
    /// [`Sha3`]: struct.Sha3.html
    pub fn v384() -> Sha3 {
        Self::new(384)
    }

    /// Creates new [`Sha3`] hasher with a security level of 512 bits.
    ///
    /// [`Sha3`]: struct.Sha3.html
    pub fn v512() -> Sha3 {
        Self::new(512)
    }

    // Shared constructor: derives the sponge rate from the security level.
    fn new(bits: usize) -> Sha3 {
        let state = KeccakState::new(bits_to_rate(bits), Self::DELIM);
        Sha3 { state }
    }
}
impl Hasher for Sha3 {
    // Absorbs message bytes into the sponge.
    fn update(&mut self, input: &[u8]) {
        self.state.update(input);
    }
    // Pads with the SHA-3 delimiter and squeezes the digest.
    fn finalize(self, output: &mut [u8]) {
        self.state.finalize(output);
    }
}

56
vendor/tiny-keccak/src/shake.rs vendored Normal file
View File

@@ -0,0 +1,56 @@
use crate::{bits_to_rate, keccakf::KeccakF, Hasher, KeccakState, Xof};
/// The `SHAKE` extendable-output functions defined in [`FIPS-202`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["shake"] }
/// ```
///
/// [`FIPS-202`]: https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
#[derive(Clone)]
pub struct Shake {
    // Keccak sponge driven by the keccak-f[1600] permutation.
    state: KeccakState<KeccakF>,
}
impl Shake {
    /// SHAKE padding delimiter from FIPS-202 (the `1111` suffix plus the
    /// first bit of the multi-rate padding).
    const DELIM: u8 = 0x1f;

    /// Creates new [`Shake`] hasher with a security level of 128 bits.
    ///
    /// [`Shake`]: struct.Shake.html
    pub fn v128() -> Shake {
        Self::new(128)
    }

    /// Creates new [`Shake`] hasher with a security level of 256 bits.
    ///
    /// [`Shake`]: struct.Shake.html
    pub fn v256() -> Shake {
        Self::new(256)
    }

    // Shared constructor: derives the sponge rate from the security level.
    pub(crate) fn new(bits: usize) -> Shake {
        let state = KeccakState::new(bits_to_rate(bits), Self::DELIM);
        Shake { state }
    }
}
impl Hasher for Shake {
    // Absorbs message bytes into the sponge.
    fn update(&mut self, input: &[u8]) {
        self.state.update(input);
    }
    // Pads with the SHAKE delimiter and squeezes the requested output length.
    fn finalize(self, output: &mut [u8]) {
        self.state.finalize(output);
    }
}
impl Xof for Shake {
    // Squeezes further output bytes; repeated calls continue the stream.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output)
    }
}

106
vendor/tiny-keccak/src/tuple_hash.rs vendored Normal file
View File

@@ -0,0 +1,106 @@
use crate::{left_encode, right_encode, CShake, Hasher, IntoXof, Xof};
/// The `TupleHash` hash functions defined in [`SP800-185`].
///
/// `TupleHash` is designed to provide a generic, misuse-resistant way to combine a sequence of
/// strings for hashing such that, for example, a `TupleHash` computed on the tuple (`"abc"` ,`"d"`) will
/// produce a different hash value than a `TupleHash` computed on the tuple (`"ab"`,`"cd"`), even though
/// all the remaining input parameters are kept the same, and the two resulting concatenated
/// strings, without string encoding, are identical.
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["tuple_hash"] }
/// ```
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
#[derive(Clone)]
pub struct TupleHash {
    // cSHAKE instance with function name "TupleHash".
    state: CShake,
}
impl TupleHash {
    /// Creates new [`TupleHash`] hasher with a security level of 128 bits.
    ///
    /// [`TupleHash`]: struct.TupleHash.html
    pub fn v128(custom_string: &[u8]) -> TupleHash {
        Self::new(custom_string, 128)
    }

    /// Creates new [`TupleHash`] hasher with a security level of 256 bits.
    ///
    /// [`TupleHash`]: struct.TupleHash.html
    pub fn v256(custom_string: &[u8]) -> TupleHash {
        Self::new(custom_string, 256)
    }

    // Shared constructor: cSHAKE with function name "TupleHash" (SP800-185).
    fn new(custom_string: &[u8], bits: usize) -> TupleHash {
        let state = CShake::new(b"TupleHash", custom_string, bits);
        TupleHash { state }
    }
}
impl Hasher for TupleHash {
    // Each call absorbs one tuple element as encode_string(input): the
    // bit-length prefix is what makes element boundaries unambiguous.
    fn update(&mut self, input: &[u8]) {
        self.state.update(left_encode(input.len() * 8).value());
        self.state.update(input)
    }
    // Appends right_encode(L), the requested output bit length, then
    // finalizes the inner cSHAKE.
    fn finalize(mut self, output: &mut [u8]) {
        self.state.update(right_encode(output.len() * 8).value());
        self.state.finalize(output)
    }
}
/// The `TupleHashXOF` extendable-output functions defined in [`SP800-185`].
///
/// # Usage
///
/// ```toml
/// [dependencies]
/// tiny-keccak = { version = "2.0.0", features = ["tuple_hash"] }
/// ```
///
/// # Example
///
/// ```
/// # use tiny_keccak::{TupleHash, Xof, IntoXof, Hasher};
/// let input = b"hello world";
/// let mut output = [0u8; 64];
/// let mut hasher = TupleHash::v256(b"");
/// hasher.update(input);
/// let mut xof = hasher.into_xof();
/// xof.squeeze(&mut output[..32]);
/// xof.squeeze(&mut output[32..]);
/// ```
///
/// ---
///
/// [`TupleHashXof`] can be created only by using [`TupleHash::IntoXof`] interface.
///
///
/// [`SP800-185`]: https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf
/// [`TupleHashXof`]: struct.TupleHashXof.html
/// [`TupleHash::IntoXof`]: struct.TupleHash.html#impl-IntoXof
#[derive(Clone)]
pub struct TupleHashXof {
    // Inner cSHAKE state, closed for input and ready for squeezing.
    state: CShake,
}
impl IntoXof for TupleHash {
    type Xof = TupleHashXof;
    fn into_xof(mut self) -> TupleHashXof {
        // right_encode(0) marks arbitrary-length (XOF) output per SP800-185.
        self.state.update(right_encode(0).value());
        TupleHashXof { state: self.state }
    }
}
impl Xof for TupleHashXof {
    // Squeezes further output bytes; repeated calls continue the stream.
    fn squeeze(&mut self, output: &mut [u8]) {
        self.state.squeeze(output)
    }
}