chore: checkpoint before Python removal

This commit is contained in:
2026-03-26 22:33:59 +00:00
parent 683cec9307
commit e568ddf82a
29972 changed files with 11269302 additions and 2 deletions

98
vendor/litemap/tests/rkyv.rs vendored Normal file
View File

@@ -0,0 +1,98 @@
// This file is part of ICU4X. For terms of use, please see the file
// called LICENSE at the top level of the ICU4X source tree
// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
use litemap::LiteMap;
use rkyv::archived_root;
use rkyv::check_archived_root;
use rkyv::ser::serializers::AllocSerializer;
use rkyv::ser::Serializer;
use rkyv::util::AlignedBytes;
use rkyv::util::AlignedVec;
use rkyv::Deserialize;
use rkyv::Infallible;
/// Fixture data: (language code, language name) pairs, listed pre-sorted by
/// key so the collected `LiteMap` and the golden `RKYV` buffer below agree.
const DATA: [(&str, &str); 11] = [
    ("ar", "Arabic"),
    ("bn", "Bangla"),
    ("ccp", "Chakma"),
    ("en", "English"),
    ("es", "Spanish"),
    ("fr", "French"),
    ("ja", "Japanese"),
    ("ru", "Russian"),
    ("sr", "Serbian"),
    ("th", "Thai"),
    ("tr", "Turkish"),
];
/// Golden rkyv-archived bytes for `DATA` serialized as a `Vec<(String, String)>`
/// (see `generate()`), used to detect serialization-format drift. The archived
/// layout is endianness-dependent, so a little- and a big-endian variant are
/// selected via `cfg!(target_endian = ...)`. `AlignedBytes` keeps the buffer
/// suitably aligned for zero-copy access.
const RKYV: AlignedBytes<192> = AlignedBytes(if cfg!(target_endian = "little") {
    [
        74, 97, 112, 97, 110, 101, 115, 101, 97, 114, 0, 0, 0, 0, 0, 2, 65, 114, 97, 98, 105, 99,
        0, 6, 98, 110, 0, 0, 0, 0, 0, 2, 66, 97, 110, 103, 108, 97, 0, 6, 99, 99, 112, 0, 0, 0, 0,
        3, 67, 104, 97, 107, 109, 97, 0, 6, 101, 110, 0, 0, 0, 0, 0, 2, 69, 110, 103, 108, 105,
        115, 104, 7, 101, 115, 0, 0, 0, 0, 0, 2, 83, 112, 97, 110, 105, 115, 104, 7, 102, 114, 0,
        0, 0, 0, 0, 2, 70, 114, 101, 110, 99, 104, 0, 6, 106, 97, 0, 0, 0, 0, 0, 2, 8, 0, 0, 0,
        144, 255, 255, 255, 114, 117, 0, 0, 0, 0, 0, 2, 82, 117, 115, 115, 105, 97, 110, 7, 115,
        114, 0, 0, 0, 0, 0, 2, 83, 101, 114, 98, 105, 97, 110, 7, 116, 104, 0, 0, 0, 0, 0, 2, 84,
        104, 97, 105, 0, 0, 0, 4, 116, 114, 0, 0, 0, 0, 0, 2, 84, 117, 114, 107, 105, 115, 104, 7,
        80, 255, 255, 255, 11, 0, 0, 0,
    ]
} else {
    [
        74, 97, 112, 97, 110, 101, 115, 101, 97, 114, 0, 0, 0, 0, 0, 2, 65, 114, 97, 98, 105, 99,
        0, 6, 98, 110, 0, 0, 0, 0, 0, 2, 66, 97, 110, 103, 108, 97, 0, 6, 99, 99, 112, 0, 0, 0, 0,
        3, 67, 104, 97, 107, 109, 97, 0, 6, 101, 110, 0, 0, 0, 0, 0, 2, 69, 110, 103, 108, 105,
        115, 104, 7, 101, 115, 0, 0, 0, 0, 0, 2, 83, 112, 97, 110, 105, 115, 104, 7, 102, 114, 0,
        0, 0, 0, 0, 2, 70, 114, 101, 110, 99, 104, 0, 6, 106, 97, 0, 0, 0, 0, 0, 2, 0, 0, 0, 8,
        144, 255, 255, 255, 114, 117, 0, 0, 0, 0, 0, 2, 82, 117, 115, 115, 105, 97, 110, 7, 115,
        114, 0, 0, 0, 0, 0, 2, 83, 101, 114, 98, 105, 97, 110, 7, 116, 104, 0, 0, 0, 0, 0, 2, 84,
        104, 97, 105, 0, 0, 0, 4, 116, 114, 0, 0, 0, 0, 0, 2, 84, 117, 114, 107, 105, 115, 104, 7,
        255, 255, 255, 80, 0, 0, 0, 11,
    ]
});
/// The map shape under test: string keys and string values.
type LiteMapOfStrings = LiteMap<String, String>;
/// The plain tuple-vec form that is actually archived by rkyv.
type TupleVecOfStrings = Vec<(String, String)>;
/// Builds a `LiteMap` from the `DATA` fixture and archives its tuple-vec
/// form with rkyv, returning the serialized (aligned) byte buffer.
fn generate() -> AlignedVec {
    let map: LiteMapOfStrings = DATA
        .iter()
        .map(|&(key, value)| (key.to_owned(), value.to_owned()))
        .collect();
    // Archive the underlying sorted tuple vec rather than the map itself.
    let mut ser = AllocSerializer::<4096>::default();
    ser.serialize_value(&map.into_tuple_vec())
        .expect("failed to archive test");
    ser.into_serializer().into_inner()
}
/// Freshly serialized bytes must match the golden `RKYV` buffer exactly.
#[test]
fn rkyv_serialize() {
    let bytes = generate();
    assert_eq!(RKYV.0, bytes.as_slice());
}
/// Zero-copy access to the golden buffer without validation.
#[test]
fn rkyv_archive() {
    // SAFETY: `RKYV` is a trusted in-repo fixture produced by `generate()` for
    // exactly this type, so skipping validation with `archived_root` is sound.
    let archived = unsafe { archived_root::<TupleVecOfStrings>(&RKYV.0) };
    // First entry is ("ar", "Arabic") — DATA is sorted by key.
    let s = archived[0].1.as_str();
    assert_eq!(s, "Arabic");
}
/// Same zero-copy access as `rkyv_archive`, but through the validating
/// (safe) entry point.
#[test]
fn rkyv_checked_archive() {
    let tuples = check_archived_root::<TupleVecOfStrings>(&RKYV.0).unwrap();
    assert_eq!(tuples[0].1.as_str(), "Arabic");
}
/// Round-trips the golden buffer back into an owned `LiteMap`.
#[test]
fn rkyv_deserialize() {
    // SAFETY: `RKYV` is a trusted in-repo fixture produced by `generate()` for
    // exactly this type, so skipping validation with `archived_root` is sound.
    let archived = unsafe { archived_root::<TupleVecOfStrings>(&RKYV.0) };
    let deserialized = archived.deserialize(&mut Infallible).unwrap();
    // Safe because we are deserializing a buffer from a trusted source
    // (the tuple vec was archived from an already-sorted map, so the sorted
    // invariant required by `from_sorted_store_unchecked` holds).
    let deserialized: LiteMapOfStrings = LiteMap::from_sorted_store_unchecked(deserialized);
    assert_eq!(deserialized.get("tr").map(String::as_str), Some("Turkish"));
}

22
vendor/litemap/tests/serde.rs vendored Normal file
View File

@@ -0,0 +1,22 @@
// This file is part of ICU4X. For terms of use, please see the file
// called LICENSE at the top level of the ICU4X source tree
// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
use litemap::LiteMap;
/// JSON round-trip: keys inserted out of order serialize sorted, and the
/// deserialized map compares equal to the original.
#[test]
fn test_ser() {
    let mut map = LiteMap::new_vec();
    for (key, value) in [(1, "jat"), (4, "sei"), (3, "saam"), (2, "ji")] {
        map.insert(key, value);
    }
    let json_string = serde_json::to_string(&map).unwrap();
    // LiteMap keeps entries sorted by key regardless of insertion order.
    assert_eq!(json_string, r#"{"1":"jat","2":"ji","3":"saam","4":"sei"}"#);
    let new_map = serde_json::from_str(&json_string).unwrap();
    assert_eq!(map, new_map);
}

158
vendor/litemap/tests/store.rs vendored Normal file
View File

@@ -0,0 +1,158 @@
// This file is part of ICU4X. For terms of use, please see the file
// called LICENSE at the top level of the ICU4X source tree
// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
use litemap::store::*;
use litemap::testing::check_store_full;
use std::cmp::Ordering;
/// A Vec wrapper that leverages the default function impls from `Store`
/// (only required trait methods are implemented below; the optional ones are
/// deliberately left to the trait-provided defaults so this file tests them).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct VecWithDefaults<T>(Vec<T>);
/// Function-pointer type used as the iterator adaptor in `StoreIterable`.
type MapF<K, V> = fn(&(K, V)) -> (&K, &V);
/// Projects a stored `(K, V)` pair into a borrowed `(&K, &V)` pair.
#[inline]
fn map_f<K, V>(input: &(K, V)) -> (&K, &V) {
    let (key, value) = input;
    (key, value)
}
/// Function-pointer type used as the iterator adaptor in `StoreIterableMut`.
type MapFMut<K, V> = fn(&mut (K, V)) -> (&K, &mut V);
/// Projects a stored `(K, V)` pair into `(&K, &mut V)`: the key stays
/// read-only while the value may be edited in place.
#[inline]
fn map_f_mut<K, V>(input: &mut (K, V)) -> (&K, &mut V) {
    let (key, value) = input;
    (&*key, value)
}
// The empty store can be a compile-time constant because `Vec::new` is const.
impl<K, V> StoreConstEmpty<K, V> for VecWithDefaults<(K, V)> {
    const EMPTY: VecWithDefaults<(K, V)> = VecWithDefaults(Vec::new());
}
// Required read-only accessors; optional methods fall back to trait defaults,
// which is exactly what this test type exists to exercise.
impl<K, V> Store<K, V> for VecWithDefaults<(K, V)> {
    #[inline]
    fn lm_len(&self) -> usize {
        self.0.len()
    }
    // lm_is_empty intentionally left as the trait default
    #[inline]
    fn lm_get(&self, index: usize) -> Option<(&K, &V)> {
        self.0.get(index).map(map_f)
    }
    // lm_last intentionally left as the trait default
    #[inline]
    fn lm_binary_search_by<F>(&self, mut cmp: F) -> Result<usize, usize>
    where
        F: FnMut(&K) -> Ordering,
    {
        self.0.binary_search_by(|entry| cmp(&entry.0))
    }
}
impl<K: Ord, V> StoreFromIterable<K, V> for VecWithDefaults<(K, V)> {
    /// Builds a sorted store by delegating to the inner `Vec`'s impl.
    fn lm_sort_from_iter<I: IntoIterator<Item = (K, V)>>(iter: I) -> Self {
        Self(Vec::lm_sort_from_iter(iter))
    }
}
impl<K, V> StoreMut<K, V> for VecWithDefaults<(K, V)> {
#[inline]
fn lm_with_capacity(capacity: usize) -> Self {
Self(Vec::with_capacity(capacity))
}
#[inline]
fn lm_reserve(&mut self, additional: usize) {
self.0.reserve(additional)
}
#[inline]
fn lm_get_mut(&mut self, index: usize) -> Option<(&K, &mut V)> {
self.0.as_mut_slice().get_mut(index).map(map_f_mut)
}
#[inline]
fn lm_push(&mut self, key: K, value: V) {
self.0.push((key, value))
}
#[inline]
fn lm_insert(&mut self, index: usize, key: K, value: V) {
self.0.insert(index, (key, value))
}
#[inline]
fn lm_remove(&mut self, index: usize) -> (K, V) {
self.0.remove(index)
}
#[inline]
fn lm_clear(&mut self) {
self.0.clear()
}
// leave lm_retain as default
}
// Borrowing iteration: a slice iterator mapped through the `map_f` projection.
impl<'a, K: 'a, V: 'a> StoreIterable<'a, K, V> for VecWithDefaults<(K, V)> {
    type KeyValueIter = core::iter::Map<core::slice::Iter<'a, (K, V)>, MapF<K, V>>;
    #[inline]
    fn lm_iter(&'a self) -> Self::KeyValueIter {
        self.0.iter().map(map_f)
    }
}
// Mutable iteration: values are editable in place, keys stay read-only.
impl<'a, K: 'a, V: 'a> StoreIterableMut<'a, K, V> for VecWithDefaults<(K, V)> {
    type KeyValueIterMut = core::iter::Map<core::slice::IterMut<'a, (K, V)>, MapFMut<K, V>>;
    #[inline]
    fn lm_iter_mut(&'a mut self) -> Self::KeyValueIterMut {
        self.0.iter_mut().map(map_f_mut)
    }
}
// Consuming iteration: hand back the inner `Vec`'s owning iterator.
impl<K, V> StoreIntoIterator<K, V> for VecWithDefaults<(K, V)> {
    type KeyValueIntoIter = std::vec::IntoIter<(K, V)>;
    #[inline]
    fn lm_into_iter(self) -> Self::KeyValueIntoIter {
        self.0.into_iter()
    }
    // lm_extend_end intentionally left as the trait default
    // lm_extend_start intentionally left as the trait default
}
// Standard `FromIterator`, collecting straight into the wrapped `Vec`.
impl<A> std::iter::FromIterator<A> for VecWithDefaults<A> {
    fn from_iter<I: IntoIterator<Item = A>>(iter: I) -> Self {
        Self(iter.into_iter().collect())
    }
}
// Marker trait; its requirement is satisfied by the `FromIterator` impl.
impl<K, V> StoreFromIterator<K, V> for VecWithDefaults<(K, V)> {}
// Bulk mutation is delegated to the inner `Vec`'s own `StoreBulkMut` impl
// rather than re-implemented here.
impl<K: Ord, V> StoreBulkMut<K, V> for VecWithDefaults<(K, V)> {
    // Keeps only the entries for which `predicate` returns true.
    fn lm_retain<F>(&mut self, predicate: F)
    where
        F: FnMut(&K, &V) -> bool,
    {
        self.0.lm_retain(predicate)
    }
    // NOTE(review): presumably merges `other` into the store while keeping it
    // sorted (hence the `K: Ord` bound) — confirm against litemap's docs.
    fn lm_extend<I>(&mut self, other: I)
    where
        I: IntoIterator<Item = (K, V)>,
    {
        self.0.lm_extend(other)
    }
}
/// Runs litemap's full store conformance suite against the wrapper, which in
/// particular exercises the trait-default methods left unoverridden above.
#[test]
fn test_default_impl() {
    check_store_full::<VecWithDefaults<(u32, u64)>>();
}